1

使用项目探戈工作室内导航应用this repository。它将起始点和结束点设置为适配器时更新所有有效的姿势并找到路径。它使用A *算法找出两点之间的最短路径,因为它已经为给定的坐标构建了一个Quadtree,它是从有效姿态中提取的。它适用于服务启动项目探戈:增强现实导航与ADF不工作

当我加载ADF并更新姿势相对于ADF时,它不给我路径或给出零大小的路径。即使我曾尝试从有效姿势收集所有坐标,构建了Quadtree并试图找出路径。它返回0大小的路径,其中的路径是Vector2对象的集合

Activity类

public class SoSPathActivity extends AppCompatActivity implements Tango.OnTangoUpdateListener { 

    // frame pairs for adf based ar pose tracking 
    public static final TangoCoordinateFramePair SOS_T_DEVICE_FRAME_PAIR = 
      new TangoCoordinateFramePair(
        TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE, 
        TangoPoseData.COORDINATE_FRAME_DEVICE); 
    public static final TangoCoordinateFramePair DEVICE_T_PREVIOUS_FRAME_PAIR = 
      new TangoCoordinateFramePair(
        TangoPoseData.COORDINATE_FRAME_PREVIOUS_DEVICE_POSE, 
        TangoPoseData.COORDINATE_FRAME_DEVICE); 

    // This changes the Camera Texture and Intrinsics 
    protected static final int ACTIVE_CAMERA_INTRINSICS = TangoCameraIntrinsics.TANGO_CAMERA_COLOR; 
    protected static final int INVALID_TEXTURE_ID = -1; 
    private static final String TAG = SoSPathActivity.class.getSimpleName(); 
    protected AtomicBoolean tangoIsConnected = new AtomicBoolean(false); 
    protected AtomicBoolean tangoFrameIsAvailable = new AtomicBoolean(false); 

    protected Tango tango; 
    protected TangoUx tangoUx; 
    protected TangoCameraIntrinsics intrinsics; 
    protected DeviceExtrinsics extrinsics; 

    protected int connectedTextureId; 
    protected double rgbFrameTimestamp; 
    protected double cameraPoseTimestamp; 

    protected SoSPathRenderer renderer; 


    RajawaliSurfaceView mainSurfaceView; 
    Toolbar toolbar; 
    TangoUxLayout uxLayout; 
    MapView mapView; 
    private TangoPointCloudManager mPointCloudManager; 

    private static DeviceExtrinsics setupExtrinsics(Tango tango) { 
     // Create camera to IMU transform. 
     TangoCoordinateFramePair framePair = new TangoCoordinateFramePair(); 
     framePair.baseFrame = TangoPoseData.COORDINATE_FRAME_IMU; 
     framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR; 
     TangoPoseData imuToRgbPose = tango.getPoseAtTime(0.0, framePair); 

     // Create device to IMU transform. 
     framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_DEVICE; 
     TangoPoseData imuToDevicePose = tango.getPoseAtTime(0.0, framePair); 

     // Create depth camera to IMU transform. 
     framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_DEPTH; 
     TangoPoseData imuToDepthPose = tango.getPoseAtTime(0.0, framePair); 

     return new DeviceExtrinsics(imuToDevicePose, imuToRgbPose, imuToDepthPose); 
    } 

    @Override 
    protected void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     tango = new Tango(this); 
     tangoUx = new TangoUx(this); 
     renderer = new SoSPathRenderer(this); 

     setContentView(R.layout.main_layout); 

     mainSurfaceView = (RajawaliSurfaceView)findViewById(R.id.gl_main_surface_view); 
     toolbar = (Toolbar)findViewById(R.id.toolbar); 
     uxLayout = (TangoUxLayout)findViewById(R.id.tango_ux_layout); 
     mapView = (MapView)findViewById(R.id.map_view); 

     setSupportActionBar(toolbar); 
     tangoUx.setLayout(uxLayout); 
     renderer.renderVirtualObjects(true); 
     mainSurfaceView.setSurfaceRenderer(renderer); 
     mainSurfaceView.setZOrderOnTop(false); 
     mapView.setFloorPlanData(renderer.getFloorPlanData()); 

     mPointCloudManager = new TangoPointCloudManager(); 
    } 

    @Override 
    protected void onResume() { 
     super.onResume(); 
     synchronized (this) { 
      if (tangoIsConnected.compareAndSet(false, true)) { 
       try { 
        connectTango(); 
        connectRenderer(); 
       } catch (TangoOutOfDateException e) { 
        message(R.string.exception_out_of_date); 
       } 
      } 
     } 
    } 

    @Override 
    protected void onPause() { 
     super.onPause(); 
     synchronized (this) { 
      if (tangoIsConnected.compareAndSet(true, false)) { 
       renderer.getCurrentScene().clearFrameCallbacks(); 
       tango.disconnectCamera(ACTIVE_CAMERA_INTRINSICS); 
       connectedTextureId = INVALID_TEXTURE_ID; 
       tango.disconnect(); 
       tangoUx.stop(); 
      } 
     } 
    } 

    @Override 
    public boolean onCreateOptionsMenu(Menu menu) { 
     getMenuInflater().inflate(R.menu.main_menu, menu); 
     return super.onCreateOptionsMenu(menu); 
    } 

    @Override 
    public boolean onOptionsItemSelected(MenuItem item) { 
     switch (item.getItemId()) { 
      case R.id.set_start_point: 
       renderer.setStartPoint(getCurrentPose(), extrinsics); 
       break; 
      case R.id.set_end_point: 
       renderer.setEndPoint(getCurrentPose(), extrinsics 
       ); 
       break; 
     } 
     return super.onOptionsItemSelected(item); 
    } 

    @Override 
    public void onFrameAvailable(int cameraId) { 
     if (cameraId == ACTIVE_CAMERA_INTRINSICS) { 
      tangoFrameIsAvailable.set(true); 
      mainSurfaceView.requestRender(); 
     } 
    } 

    @Override 
    public void onTangoEvent(TangoEvent event) { 
     if (tangoUx != null) { 
      tangoUx.updateTangoEvent(event); 
     } 
    } 

    @Override 
    public void onPoseAvailable(TangoPoseData pose) { 
     if (tangoUx != null) { 
      tangoUx.updatePoseStatus(pose.statusCode); 
     } 
    } 

    @Override 
    public void onXyzIjAvailable(TangoXyzIjData xyzIj) { 
     if (tangoUx != null) { 
      tangoUx.updateXyzCount(xyzIj.xyzCount); 
     } 
    } 

    private void message(final int message_resource) { 
     Toast.makeText(this, message_resource, Toast.LENGTH_SHORT).show(); 
    } 

    protected void setupCameraProperties(Tango tango) { 
     extrinsics = setupExtrinsics(tango); 
     intrinsics = tango.getCameraIntrinsics(ACTIVE_CAMERA_INTRINSICS); 
    } 


    protected void connectTango() { 
     TangoUx.StartParams params = new TangoUx.StartParams(); 
     tangoUx.start(params); 
     TangoConfig config = tango.getConfig(TangoConfig.CONFIG_TYPE_DEFAULT); 
     config.putBoolean(TangoConfig.KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION, true); 
     config.putBoolean(TangoConfig.KEY_BOOLEAN_COLORCAMERA, true); 
     tango.connect(config); 
     ArrayList<TangoCoordinateFramePair> framePairs = new ArrayList<>(); 
     framePairs.add(SOS_T_DEVICE_FRAME_PAIR); 
     framePairs.add(DEVICE_T_PREVIOUS_FRAME_PAIR); 
     tango.connectListener(framePairs, this); 
     setupCameraProperties(tango); 
    } 


    public TangoPoseData getCurrentPose() { 
     return tango.getPoseAtTime(rgbFrameTimestamp, SOS_T_DEVICE_FRAME_PAIR); 
    } 

    int position = 0; 
    protected void connectRenderer() { 
     renderer.getCurrentScene().registerFrameCallback(new ScenePreFrameCallbackAdapter() { 
      @Override 
      public void onPreFrame(long sceneTime, double deltaTime) { 
       synchronized (SoSPathActivity.this) { 
        if (!tangoIsConnected.get()) { 
         return; 
        } 
        if (!renderer.isSceneCameraConfigured()) { 
         renderer.setProjectionMatrix(intrinsics); 
        } 
        if (connectedTextureId != renderer.getTextureId()) { 
         tango.connectTextureId(ACTIVE_CAMERA_INTRINSICS, renderer.getTextureId()); 
         connectedTextureId = renderer.getTextureId(); 
        } 
        if (tangoFrameIsAvailable.compareAndSet(true, false)) { 
         rgbFrameTimestamp = tango.updateTexture(ACTIVE_CAMERA_INTRINSICS); 
        } 
        if (rgbFrameTimestamp > cameraPoseTimestamp) { 
         TangoPoseData currentPose = getCurrentPose(); 
         if (currentPose != null && currentPose.statusCode == TangoPoseData.POSE_VALID) { 
          renderer.updateRenderCameraPose(currentPose, extrinsics, position); 
          cameraPoseTimestamp = currentPose.timestamp; 
          position++; 
         } 
        } 
       } 
      } 
     }); 
    } 
} 

,这是渲染器类

public class SoSPathRenderer extends TangoRajawaliRenderer { 

    public static final int QUAD_TREE_START = -60; 
    public static final int QUAD_TREE_RANGE = 120; 
    private static final String TAG = SoSPathRenderer.class.getSimpleName(); 
    private final QuadTree data; 
    // Rajawali texture used to render the Tango color camera 
    private ATexture mTangoCameraTexture; 
    // Keeps track of whether the scene camera has been configured 
    private boolean mSceneCameraConfigured; 

    private FloorPlan floorPlan; 
    private Pose startPoint; 
    private Pose endPoint; 
    private List<Cube> pathCubes = new ArrayList<>(); 
    private boolean fillPath = false; 
    private Material blue; 
    private boolean renderVirtualObjects; 

    private Vector3 startingPoint; 
    private Vector3 endingPoint; 

    public SoSPathRenderer(Context context) { 
     super(context); 
     data = new QuadTree(new Vector2(QUAD_TREE_START, QUAD_TREE_START), QUAD_TREE_RANGE, 8); 
    } 

    @Override 
    protected void initScene() { 
     // Create a quad covering the whole background and assign a texture to it where the 
     // Tango color camera contents will be rendered. 
     ScreenQuad backgroundQuad = new ScreenQuad(); 
     Material tangoCameraMaterial = new Material(); 
     tangoCameraMaterial.setColorInfluence(0); 
     // We need to use Rajawali's {@code StreamingTexture} since it sets up the texture 
     // for GL_TEXTURE_EXTERNAL_OES rendering 
     mTangoCameraTexture = 
       new StreamingTexture("camera", (StreamingTexture.ISurfaceListener) null); 
     try { 
      tangoCameraMaterial.addTexture(mTangoCameraTexture); 
      backgroundQuad.setMaterial(tangoCameraMaterial); 
     } catch (ATexture.TextureException e) { 
      Log.e(TAG, "Exception creating texture for RGB camera contents", e); 
     } 
     getCurrentScene().addChildAt(backgroundQuad, 0); 

     // Add a directional light in an arbitrary direction. 
     DirectionalLight light = new DirectionalLight(1, 0.2, -1); 
     light.setColor(1, 1, 1); 
     light.setPower(0.8f); 
     light.setPosition(3, 2, 4); 
     getCurrentScene().addLight(light); 

     blue = new Material(); 
     blue.setColor(Color.BLUE); 

     floorPlan = new FloorPlan(data); 
     getCurrentScene().addChild(floorPlan); 
     floorPlan.setVisible(renderVirtualObjects); 

    } 

    /** 
    * Update the scene camera based on the provided pose in Tango start of service frame. 
    * The device pose should match the pose of the device at the time the last rendered RGB 
    * frame, which can be retrieved with this.getTimestamp(); 
    * NOTE: This must be called from the OpenGL render thread - it is not thread safe. 
    */ 
    public void updateRenderCameraPose(TangoPoseData devicePose, DeviceExtrinsics extrinsics, int position) { 
     Pose cameraPose = ScenePoseCalculator.toOpenGlCameraPose(devicePose, extrinsics); 
     getCurrentCamera().setRotation(cameraPose.getOrientation()); 
     getCurrentCamera().setPosition(cameraPose.getPosition()); 

     Vector3 vector3 = cameraPose.getPosition(); 
     floorPlan.setTrajectoryPosition(cameraPose.getPosition()); 
     Log.d(TAG, "P: " + cameraPose.toString()); 

     /*if(position<getLatestPathPoints().size()) { 
      Log.d(TAG, "XXX Adding ADF Pose position into FloorPlan (x,y,z): " + getLatestPathPoints().get(position).x + ", " 
        + getLatestPathPoints().get(position).y + ", " + getLatestPathPoints().get(position).z); 
      floorPlan.setTrajectoryPosition(getLatestPathPoints().get(position)); 
     }*/ 
    } 

    /** 
    * It returns the ID currently assigned to the texture where the Tango color camera contents 
    * should be rendered. 
    * NOTE: This must be called from the OpenGL render thread - it is not thread safe. 
    */ 
    public int getTextureId() { 
     return mTangoCameraTexture == null ? -1 : mTangoCameraTexture.getTextureId(); 
    } 

    /** 
    * We need to override this method to mark the camera for re-configuration (set proper 
    * projection matrix) since it will be reset by Rajawali on surface changes. 
    */ 
    @Override 
    public void onRenderSurfaceSizeChanged(GL10 gl, int width, int height) { 
     super.onRenderSurfaceSizeChanged(gl, width, height); 
     mSceneCameraConfigured = false; 
    } 

    public boolean isSceneCameraConfigured() { 
     return mSceneCameraConfigured; 
    } 

    /** 
    * Sets the projection matrix for the scene camera to match the parameters of the color camera, 
    * provided by the {@code TangoCameraIntrinsics}. 
    */ 
    public void setProjectionMatrix(TangoCameraIntrinsics intrinsics) { 
     Matrix4 projectionMatrix = ScenePoseCalculator.calculateProjectionMatrix(
       intrinsics.width, intrinsics.height, 
       intrinsics.fx, intrinsics.fy, intrinsics.cx, intrinsics.cy); 
     getCurrentCamera().setProjectionMatrix(projectionMatrix); 
    } 

    @Override 
    public void onOffsetsChanged(float xOffset, float yOffset, 
           float xOffsetStep, float yOffsetStep, 
           int xPixelOffset, int yPixelOffset) { 
    } 

    @Override 
    public void onTouchEvent(MotionEvent event) { 

    } 

    @Override 
    protected void onRender(long ellapsedRealtime, double deltaTime) { 
     super.onRender(ellapsedRealtime, deltaTime); 

     // add routing cubes to scene graph if available 
     if (fillPath) { 
      for (Cube pathCube : pathCubes) { 
       getCurrentScene().removeChild(pathCube); 
      } 
      pathCubes.clear(); 
      PathFinder finder = new PathFinder(floorPlan.getData()); 
      try { 
       List<Vector2> path = finder.findPathBetween(startPoint.getPosition(), endPoint.getPosition()); 
       //List<Vector2> path = finder.findPathBetween(startingPoint, endingPoint); 
       Log.d(TAG, "XXX Pathpoints: " + path.size()); 
       for (Vector2 vector2 : path) { 
        Cube cube = new Cube(0.2f); 
        cube.setMaterial(blue); 
        cube.setPosition(new Vector3(vector2.getX(), -1.2, vector2.getY())); 
        getCurrentScene().addChild(cube); 
        pathCubes.add(cube); 
       } 
      } catch (Exception e) { 
       Log.e(TAG, "onRender: " + e.getMessage(), e); 
      } finally { 
       fillPath = false; 
      } 
     } 
    } 

    public void setStartPoint(TangoPoseData currentPose, DeviceExtrinsics extrinsics) { 
     startPoint = ScenePoseCalculator.toOpenGlCameraPose(currentPose, extrinsics); 
     floorPlan.addPoint(startPoint.getPosition()); 
     if (startPoint != null && endPoint != null) { 
      fillPath = true; 
     } 
     /*startingPoint = getLatestPathPoints().get(0); 
     floorPlan.addPoint(startingPoint); 
     if (startingPoint != null && endingPoint != null) { 
      fillPath = true; 
     }*/ 
    } 

    public void setEndPoint(TangoPoseData currentPose, DeviceExtrinsics extrinsics) { 
     endPoint = ScenePoseCalculator.toOpenGlCameraPose(currentPose, extrinsics); 
     floorPlan.addPoint(endPoint.getPosition()); 
     if (startPoint != null && endPoint != null) { 
      fillPath = true; 
     } 
     /*endingPoint = getLatestPathPoints().get(getLatestPathPoints().size()-10); 
     floorPlan.addPoint(endingPoint); 
     if (startingPoint != null && endingPoint != null) { 
      fillPath = true; 
     }*/ 
    } 

    public QuadTree getFloorPlanData() { 
     return data; 
    } 

    public void renderVirtualObjects(boolean renderObjects) { 
     renderVirtualObjects = renderObjects; 
     if (this.floorPlan != null) 
      this.floorPlan.setVisible(renderObjects); 
    } 

} 

我不知道,我是缺少在这里在加载ADF后获取路径。如果有人有这方面的经验,请更新我。

回答

0

对不起回答,但我没有足够的声望写评论。你能告诉我你的代码/项目在Github或其他?它也必须与学习模式一起工作。我可以想象,加载adf后平板电脑不会重新定位,所以姿势数据无效。

编辑: 检查,如果你的框架对是这样的:

/** Record Device to Area Description as the main frame pair to be used for device pose queries. */ 
private static final TangoCoordinateFramePair FRAME_PAIR = new TangoCoordinateFramePair(
     TangoPoseData.COORDINATE_FRAME_AREA_DESCRIPTION, 
     TangoPoseData.COORDINATE_FRAME_DEVICE); 

并检查是否有区域上的学习模式,并正确装入自动输稿器:

config.putBoolean(TangoConfig.KEY_BOOLEAN_LEARNINGMODE, true); //learning mode on 
config.putString(TangoConfig.KEY_STRING_AREADESCRIPTION, mLoadedADFPair.getUuid()); //load adf 

然后检查如果您以类似于以下的方式要求您的姿势数据:

TangoPoseData lastFramePose = mTango.getPoseAtTime(mRgbTimestampGlThread, 
         FRAME_PAIR); 
if (lastFramePose.statusCode == TangoPoseData.POSE_VALID) { 

     // Device is re-located!    

     // Update the camera pose from the renderer 
     mRenderer.updateRenderCameraPose(lastFramePose); 
     mCameraPoseTimestamp = lastFramePose.timestamp; 
} else { 
     Log.w(TAG, "Can't get device pose at time: " + mRgbTimestampGlThread); 
} 

在有可用的姿势数据之前,可能需要3-5分钟。走动,不要放弃。

+0

加载ADF后,它被识别...并且我正在更新姿势(设备和ADF)。添加代码的问题.. – ABI

+0

看看这个问题:http://stackoverflow.com/questions/37363225/check-if-area-is-localized-in-loaded-adf/37374225#37374225 – Konsti