Reputation: 1556
Working on an indoor navigation app using Project Tango by referring to this repository. It updates all the valid poses and finds a path when the starting and ending points are set in the adapter. It uses the A* algorithm to find the shortest path between two points, having already constructed a Quadtree for the coordinates extracted from valid poses. It works well with respect to Start Of Service.
It does not give me a path, or gives a zero-sized path, when I load an ADF and update the poses with respect to the ADF. I have even tried collecting all the coordinates from valid poses, constructing the Quadtree, and then finding the path. It still returns a 0-sized path, where the path is a collection of Vector2 objects.
Activity class
/**
 * Activity that drives Tango pose tracking and AR rendering for indoor navigation.
 * It connects to the Tango service, streams the color camera into a Rajawali
 * surface, and forwards valid device poses to {@link SoSPathRenderer} so the
 * floor plan / QuadTree can be built and a path rendered.
 */
public class SoSPathActivity extends AppCompatActivity implements Tango.OnTangoUpdateListener {
// frame pairs for adf based ar pose tracking
// NOTE(review): despite the comment above, this pair's base frame is
// START_OF_SERVICE, not AREA_DESCRIPTION. When an ADF is loaded, pose queries
// should use COORDINATE_FRAME_AREA_DESCRIPTION as the base frame; querying
// against SOS after loading an ADF is the likely reason the ADF path comes
// back empty — TODO confirm against the Tango docs.
public static final TangoCoordinateFramePair SOS_T_DEVICE_FRAME_PAIR =
new TangoCoordinateFramePair(
TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
TangoPoseData.COORDINATE_FRAME_DEVICE);
// Relative pose of the device with respect to its previous pose (used for
// incremental motion updates from the pose listener).
public static final TangoCoordinateFramePair DEVICE_T_PREVIOUS_FRAME_PAIR =
new TangoCoordinateFramePair(
TangoPoseData.COORDINATE_FRAME_PREVIOUS_DEVICE_POSE,
TangoPoseData.COORDINATE_FRAME_DEVICE);
// This changes the Camera Texture and Intrinsics
protected static final int ACTIVE_CAMERA_INTRINSICS = TangoCameraIntrinsics.TANGO_CAMERA_COLOR;
// Sentinel meaning no GL texture is currently bound to the Tango camera.
protected static final int INVALID_TEXTURE_ID = -1;
private static final String TAG = SoSPathActivity.class.getSimpleName();
// Guards against double connect/disconnect across onResume/onPause.
protected AtomicBoolean tangoIsConnected = new AtomicBoolean(false);
// Set from the camera callback thread; consumed on the GL thread.
protected AtomicBoolean tangoFrameIsAvailable = new AtomicBoolean(false);
protected Tango tango;
protected TangoUx tangoUx;
protected TangoCameraIntrinsics intrinsics;
protected DeviceExtrinsics extrinsics;
// GL texture id currently connected to the Tango color camera stream.
protected int connectedTextureId;
// Timestamp of the most recently rendered RGB frame (seconds, Tango clock).
protected double rgbFrameTimestamp;
// Timestamp of the last pose applied to the scene camera.
protected double cameraPoseTimestamp;
protected SoSPathRenderer renderer;
RajawaliSurfaceView mainSurfaceView;
Toolbar toolbar;
TangoUxLayout uxLayout;
MapView mapView;
private TangoPointCloudManager mPointCloudManager;
/**
 * Queries the IMU-relative transforms for the color camera, device, and depth
 * camera at time 0.0 (i.e. the fixed extrinsic calibration) and bundles them
 * into a {@link DeviceExtrinsics}. Must be called after {@code tango.connect}.
 */
private static DeviceExtrinsics setupExtrinsics(Tango tango) {
// Create camera to IMU transform.
TangoCoordinateFramePair framePair = new TangoCoordinateFramePair();
framePair.baseFrame = TangoPoseData.COORDINATE_FRAME_IMU;
framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR;
TangoPoseData imuToRgbPose = tango.getPoseAtTime(0.0, framePair);
// Create device to IMU transform.
framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_DEVICE;
TangoPoseData imuToDevicePose = tango.getPoseAtTime(0.0, framePair);
// Create depth camera to IMU transform.
framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_DEPTH;
TangoPoseData imuToDepthPose = tango.getPoseAtTime(0.0, framePair);
return new DeviceExtrinsics(imuToDevicePose, imuToRgbPose, imuToDepthPose);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Tango/renderer objects are created here; the actual service connection
// is deferred to onResume.
tango = new Tango(this);
tangoUx = new TangoUx(this);
renderer = new SoSPathRenderer(this);
setContentView(R.layout.main_layout);
mainSurfaceView = (RajawaliSurfaceView)findViewById(R.id.gl_main_surface_view);
toolbar = (Toolbar)findViewById(R.id.toolbar);
uxLayout = (TangoUxLayout)findViewById(R.id.tango_ux_layout);
mapView = (MapView)findViewById(R.id.map_view);
setSupportActionBar(toolbar);
tangoUx.setLayout(uxLayout);
renderer.renderVirtualObjects(true);
mainSurfaceView.setSurfaceRenderer(renderer);
mainSurfaceView.setZOrderOnTop(false);
// Share the renderer's QuadTree with the 2D map overlay.
mapView.setFloorPlanData(renderer.getFloorPlanData());
mPointCloudManager = new TangoPointCloudManager();
}
@Override
protected void onResume() {
super.onResume();
// Synchronized with the GL-thread frame callback in connectRenderer().
synchronized (this) {
if (tangoIsConnected.compareAndSet(false, true)) {
try {
connectTango();
connectRenderer();
} catch (TangoOutOfDateException e) {
message(R.string.exception_out_of_date);
}
}
}
}
@Override
protected void onPause() {
super.onPause();
synchronized (this) {
if (tangoIsConnected.compareAndSet(true, false)) {
// Tear-down order matters: stop render callbacks before disconnecting
// the camera and the service.
renderer.getCurrentScene().clearFrameCallbacks();
tango.disconnectCamera(ACTIVE_CAMERA_INTRINSICS);
connectedTextureId = INVALID_TEXTURE_ID;
tango.disconnect();
tangoUx.stop();
}
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main_menu, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Start/end points for path-finding are taken from the device's current pose.
switch (item.getItemId()) {
case R.id.set_start_point:
renderer.setStartPoint(getCurrentPose(), extrinsics);
break;
case R.id.set_end_point:
renderer.setEndPoint(getCurrentPose(), extrinsics
);
break;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onFrameAvailable(int cameraId) {
// Called on a Tango callback thread; just flag the frame and wake the GL loop.
if (cameraId == ACTIVE_CAMERA_INTRINSICS) {
tangoFrameIsAvailable.set(true);
mainSurfaceView.requestRender();
}
}
@Override
public void onTangoEvent(TangoEvent event) {
if (tangoUx != null) {
tangoUx.updateTangoEvent(event);
}
}
@Override
public void onPoseAvailable(TangoPoseData pose) {
// Only feeds the UX overlay; the scene camera pose is pulled in onPreFrame.
if (tangoUx != null) {
tangoUx.updatePoseStatus(pose.statusCode);
}
}
@Override
public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
if (tangoUx != null) {
tangoUx.updateXyzCount(xyzIj.xyzCount);
}
}
// Shows a short toast for the given string resource id.
private void message(final int message_resource) {
Toast.makeText(this, message_resource, Toast.LENGTH_SHORT).show();
}
// Caches the fixed extrinsics and the color-camera intrinsics after connect.
protected void setupCameraProperties(Tango tango) {
extrinsics = setupExtrinsics(tango);
intrinsics = tango.getCameraIntrinsics(ACTIVE_CAMERA_INTRINSICS);
}
/**
 * Starts TangoUx and connects to the Tango service with low-latency IMU
 * integration and the color camera enabled.
 * NOTE(review): no KEY_BOOLEAN_LEARNINGMODE or KEY_STRING_AREADESCRIPTION is
 * set here, so this config never actually loads an ADF — presumably the ADF
 * variant of this activity adds those keys; verify that it does, otherwise
 * AREA_DESCRIPTION-based pose queries will never become valid.
 */
protected void connectTango() {
TangoUx.StartParams params = new TangoUx.StartParams();
tangoUx.start(params);
TangoConfig config = tango.getConfig(TangoConfig.CONFIG_TYPE_DEFAULT);
config.putBoolean(TangoConfig.KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION, true);
config.putBoolean(TangoConfig.KEY_BOOLEAN_COLORCAMERA, true);
tango.connect(config);
ArrayList<TangoCoordinateFramePair> framePairs = new ArrayList<>();
framePairs.add(SOS_T_DEVICE_FRAME_PAIR);
framePairs.add(DEVICE_T_PREVIOUS_FRAME_PAIR);
tango.connectListener(framePairs, this);
setupCameraProperties(tango);
}
/**
 * Returns the device pose at the time of the last rendered RGB frame.
 * NOTE(review): always queries against START_OF_SERVICE. After loading an
 * ADF, this should query a frame pair based on
 * COORDINATE_FRAME_AREA_DESCRIPTION instead (and callers should expect
 * POSE_INVALID until the device relocalizes) — TODO confirm.
 */
public TangoPoseData getCurrentPose() {
return tango.getPoseAtTime(rgbFrameTimestamp, SOS_T_DEVICE_FRAME_PAIR);
}
// Monotonic counter of valid poses handed to the renderer (see onPreFrame).
int position = 0;
/**
 * Registers a pre-frame callback on the Rajawali scene that, per rendered
 * frame: (re)configures the projection, (re)binds the camera texture, pulls
 * the latest RGB frame timestamp, and pushes the matching valid pose into
 * the renderer. Runs on the GL thread, synchronized against onResume/onPause.
 */
protected void connectRenderer() {
renderer.getCurrentScene().registerFrameCallback(new ScenePreFrameCallbackAdapter() {
@Override
public void onPreFrame(long sceneTime, double deltaTime) {
synchronized (SoSPathActivity.this) {
if (!tangoIsConnected.get()) {
return;
}
if (!renderer.isSceneCameraConfigured()) {
renderer.setProjectionMatrix(intrinsics);
}
// Rebind if Rajawali recreated the GL texture (e.g. surface change).
if (connectedTextureId != renderer.getTextureId()) {
tango.connectTextureId(ACTIVE_CAMERA_INTRINSICS, renderer.getTextureId());
connectedTextureId = renderer.getTextureId();
}
if (tangoFrameIsAvailable.compareAndSet(true, false)) {
rgbFrameTimestamp = tango.updateTexture(ACTIVE_CAMERA_INTRINSICS);
}
// Only update the scene camera when a newer RGB frame has arrived.
if (rgbFrameTimestamp > cameraPoseTimestamp) {
TangoPoseData currentPose = getCurrentPose();
if (currentPose != null && currentPose.statusCode == TangoPoseData.POSE_VALID) {
renderer.updateRenderCameraPose(currentPose, extrinsics, position);
cameraPoseTimestamp = currentPose.timestamp;
position++;
}
}
}
}
});
}
}
And this is the Renderer class:
/**
 * Rajawali renderer that draws the Tango color camera as the background,
 * accumulates device trajectory points into a QuadTree-backed floor plan,
 * and, once a start and end point are set, runs A* path-finding and renders
 * the resulting path as blue cubes.
 */
public class SoSPathRenderer extends TangoRajawaliRenderer {
// QuadTree covers the square [-60, 60] x [-60, 60] with depth 8.
public static final int QUAD_TREE_START = -60;
public static final int QUAD_TREE_RANGE = 120;
private static final String TAG = SoSPathRenderer.class.getSimpleName();
// Occupancy data for path-finding; filled via the floor plan as poses arrive.
private final QuadTree data;
// Rajawali texture used to render the Tango color camera
private ATexture mTangoCameraTexture;
// Keeps track of whether the scene camera has been configured
private boolean mSceneCameraConfigured;
private FloorPlan floorPlan;
// Path endpoints in OpenGL camera space (set from the options menu).
private Pose startPoint;
private Pose endPoint;
// Cubes currently visualizing the computed path; replaced on each search.
private List<Cube> pathCubes = new ArrayList<>();
// One-shot flag: when true, onRender recomputes and redraws the path.
private boolean fillPath = false;
private Material blue;
private boolean renderVirtualObjects;
// Alternative endpoints for the (commented-out) ADF-recorded-poses variant.
private Vector3 startingPoint;
private Vector3 endingPoint;
public SoSPathRenderer(Context context) {
super(context);
data = new QuadTree(new Vector2(QUAD_TREE_START, QUAD_TREE_START), QUAD_TREE_RANGE, 8);
}
@Override
protected void initScene() {
// Create a quad covering the whole background and assign a texture to it where the
// Tango color camera contents will be rendered.
ScreenQuad backgroundQuad = new ScreenQuad();
Material tangoCameraMaterial = new Material();
tangoCameraMaterial.setColorInfluence(0);
// We need to use Rajawali's {@code StreamingTexture} since it sets up the texture
// for GL_TEXTURE_EXTERNAL_OES rendering
mTangoCameraTexture =
new StreamingTexture("camera", (StreamingTexture.ISurfaceListener) null);
try {
tangoCameraMaterial.addTexture(mTangoCameraTexture);
backgroundQuad.setMaterial(tangoCameraMaterial);
} catch (ATexture.TextureException e) {
Log.e(TAG, "Exception creating texture for RGB camera contents", e);
}
getCurrentScene().addChildAt(backgroundQuad, 0);
// Add a directional light in an arbitrary direction.
DirectionalLight light = new DirectionalLight(1, 0.2, -1);
light.setColor(1, 1, 1);
light.setPower(0.8f);
light.setPosition(3, 2, 4);
getCurrentScene().addLight(light);
blue = new Material();
blue.setColor(Color.BLUE);
floorPlan = new FloorPlan(data);
getCurrentScene().addChild(floorPlan);
floorPlan.setVisible(renderVirtualObjects);
}
/**
 * Update the scene camera based on the provided pose in Tango start of service frame.
 * The device pose should match the pose of the device at the time the last rendered RGB
 * frame, which can be retrieved with this.getTimestamp();
 * NOTE: This must be called from the OpenGL render thread - it is not thread safe.
 * NOTE(review): this is also where the QuadTree gets populated (via
 * setTrajectoryPosition). If ADF-based poses never reach this method — e.g.
 * because the device has not relocalized — the tree stays empty and any
 * later path search returns a zero-sized path; TODO confirm.
 */
public void updateRenderCameraPose(TangoPoseData devicePose, DeviceExtrinsics extrinsics, int position) {
Pose cameraPose = ScenePoseCalculator.toOpenGlCameraPose(devicePose, extrinsics);
getCurrentCamera().setRotation(cameraPose.getOrientation());
getCurrentCamera().setPosition(cameraPose.getPosition());
Vector3 vector3 = cameraPose.getPosition();
// Feed the trajectory point into the floor plan (and thus the QuadTree).
floorPlan.setTrajectoryPosition(cameraPose.getPosition());
Log.d(TAG, "P: " + cameraPose.toString());
/*if(position<getLatestPathPoints().size()) {
Log.d(TAG, "XXX Adding ADF Pose position into FloorPlan (x,y,z): " + getLatestPathPoints().get(position).x + ", "
+ getLatestPathPoints().get(position).y + ", " + getLatestPathPoints().get(position).z);
floorPlan.setTrajectoryPosition(getLatestPathPoints().get(position));
}*/
}
/**
 * It returns the ID currently assigned to the texture where the Tango color camera contents
 * should be rendered.
 * NOTE: This must be called from the OpenGL render thread - it is not thread safe.
 */
public int getTextureId() {
return mTangoCameraTexture == null ? -1 : mTangoCameraTexture.getTextureId();
}
/**
 * We need to override this method to mark the camera for re-configuration (set proper
 * projection matrix) since it will be reset by Rajawali on surface changes.
 */
@Override
public void onRenderSurfaceSizeChanged(GL10 gl, int width, int height) {
super.onRenderSurfaceSizeChanged(gl, width, height);
mSceneCameraConfigured = false;
}
public boolean isSceneCameraConfigured() {
return mSceneCameraConfigured;
}
/**
 * Sets the projection matrix for the scene camera to match the parameters of the color camera,
 * provided by the {@code TangoCameraIntrinsics}.
 */
public void setProjectionMatrix(TangoCameraIntrinsics intrinsics) {
Matrix4 projectionMatrix = ScenePoseCalculator.calculateProjectionMatrix(
intrinsics.width, intrinsics.height,
intrinsics.fx, intrinsics.fy, intrinsics.cx, intrinsics.cy);
getCurrentCamera().setProjectionMatrix(projectionMatrix);
}
@Override
public void onOffsetsChanged(float xOffset, float yOffset,
float xOffsetStep, float yOffsetStep,
int xPixelOffset, int yPixelOffset) {
}
@Override
public void onTouchEvent(MotionEvent event) {
}
/**
 * Per-frame hook (GL thread). When {@code fillPath} is set, removes the old
 * path cubes, runs A* over the QuadTree between the stored start/end points,
 * and adds one cube per returned waypoint. A logged size of 0 means the
 * search found no connected route in the tree.
 */
@Override
protected void onRender(long ellapsedRealtime, double deltaTime) {
super.onRender(ellapsedRealtime, deltaTime);
// add routing cubes to scene graph if available
if (fillPath) {
for (Cube pathCube : pathCubes) {
getCurrentScene().removeChild(pathCube);
}
pathCubes.clear();
PathFinder finder = new PathFinder(floorPlan.getData());
try {
List<Vector2> path = finder.findPathBetween(startPoint.getPosition(), endPoint.getPosition());
//List<Vector2> path = finder.findPathBetween(startingPoint, endingPoint);
Log.d(TAG, "XXX Pathpoints: " + path.size());
for (Vector2 vector2 : path) {
Cube cube = new Cube(0.2f);
cube.setMaterial(blue);
// Path is 2D (x, z); cubes are drawn at a fixed height of -1.2.
cube.setPosition(new Vector3(vector2.getX(), -1.2, vector2.getY()));
getCurrentScene().addChild(cube);
pathCubes.add(cube);
}
} catch (Exception e) {
Log.e(TAG, "onRender: " + e.getMessage(), e);
} finally {
// One-shot: clear the flag even if the search failed.
fillPath = false;
}
}
}
/**
 * Records the path start point from the current device pose and marks the
 * path for recomputation once both endpoints exist.
 * NOTE(review): the startPoint != null check below is always true here,
 * since startPoint was just assigned.
 */
public void setStartPoint(TangoPoseData currentPose, DeviceExtrinsics extrinsics) {
startPoint = ScenePoseCalculator.toOpenGlCameraPose(currentPose, extrinsics);
floorPlan.addPoint(startPoint.getPosition());
if (startPoint != null && endPoint != null) {
fillPath = true;
}
/*startingPoint = getLatestPathPoints().get(0);
floorPlan.addPoint(startingPoint);
if (startingPoint != null && endingPoint != null) {
fillPath = true;
}*/
}
/**
 * Records the path end point from the current device pose; mirrors
 * {@link #setStartPoint}.
 */
public void setEndPoint(TangoPoseData currentPose, DeviceExtrinsics extrinsics) {
endPoint = ScenePoseCalculator.toOpenGlCameraPose(currentPose, extrinsics);
floorPlan.addPoint(endPoint.getPosition());
if (startPoint != null && endPoint != null) {
fillPath = true;
}
/*endingPoint = getLatestPathPoints().get(getLatestPathPoints().size()-10);
floorPlan.addPoint(endingPoint);
if (startingPoint != null && endingPoint != null) {
fillPath = true;
}*/
}
// Exposes the QuadTree so the 2D MapView can share the same floor plan data.
public QuadTree getFloorPlanData() {
return data;
}
// Toggles visibility of the floor plan overlay (safe to call before initScene).
public void renderVirtualObjects(boolean renderObjects) {
renderVirtualObjects = renderObjects;
if (this.floorPlan != null)
this.floorPlan.setVisible(renderObjects);
}
}
I am not sure what I am missing here to get the path after loading an ADF. Please let me know if anyone has experience with this.
Upvotes: 1
Views: 377
Reputation: 83
Sorry for answering, but I don't have enough reputation to write a comment. Can you please show me your code / project on GitHub or somewhere else? It has to work with learning mode as well. I can imagine that the tablet isn't relocalized after loading an ADF, and so the pose data isn't valid.
EDIT: Check if your frame pair is like the following:
/** Record Device to Area Description as the main frame pair to be used for device pose queries. */
private static final TangoCoordinateFramePair FRAME_PAIR = new TangoCoordinateFramePair(
TangoPoseData.COORDINATE_FRAME_AREA_DESCRIPTION,
TangoPoseData.COORDINATE_FRAME_DEVICE);
And check if you have area learning mode on and loaded the adf correctly:
config.putBoolean(TangoConfig.KEY_BOOLEAN_LEARNINGMODE, true); //learning mode on
config.putString(TangoConfig.KEY_STRING_AREADESCRIPTION, mLoadedADFPair.getUuid()); //load adf
Then check if you ask for your pose data in a way similar to this:
TangoPoseData lastFramePose = mTango.getPoseAtTime(mRgbTimestampGlThread,
FRAME_PAIR);
if (lastFramePose.statusCode == TangoPoseData.POSE_VALID) {
// Device is re-located!
// Update the camera pose from the renderer
mRenderer.updateRenderCameraPose(lastFramePose);
mCameraPoseTimestamp = lastFramePose.timestamp;
} else {
Log.w(TAG, "Can't get device pose at time: " + mRgbTimestampGlThread);
}
Before there is a valid pose data available it may take up to 3-5 minutes. Walk around and don't give up.
Upvotes: 0