Skip to content
This repository has been archived by the owner on Mar 2, 2018. It is now read-only.

Commit

Permalink
Merge pull request #47 from googlesamples/release-zeno
Browse files Browse the repository at this point in the history
release zeno
  • Loading branch information
jguomoto committed Oct 29, 2015
2 parents f89352e + a412348 commit 7e7c91b
Show file tree
Hide file tree
Showing 16 changed files with 482 additions and 132 deletions.
1 change: 1 addition & 0 deletions AreaLearningJava/app/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ if (project.hasProperty("Tango.catkin_devel_prefix")) {
}

dependencies {
compile fileTree(dir: external_lib_prefix + '/jar', include: ['**/*.jar'])
compile(name: 'TangoUtils', ext: 'aar')
compile 'org.rajawali3d:rajawali:1.0.294-SNAPSHOT@aar'
}
Expand Down
3 changes: 2 additions & 1 deletion AugmentedRealitySample/app/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,8 @@ if (project.hasProperty("Tango.catkin_devel_prefix")) {
}

dependencies {
compile(name: 'TangoUtils', ext: 'aar')
compile (name: 'TangoUtils', ext: 'aar')
compile (name: 'tango_support_java_lib', ext: 'aar')
compile 'org.rajawali3d:rajawali:1.0.294-SNAPSHOT@aar'
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,13 @@
package com.projecttango.experiments.augmentedrealitysample;

import java.util.ArrayList;
import java.util.concurrent.atomic.AtomicBoolean;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.Toast;

import com.google.atap.tangoservice.Tango;
Expand All @@ -32,11 +34,15 @@
import com.google.atap.tangoservice.TangoPoseData;
import com.google.atap.tangoservice.TangoXyzIjData;
import com.projecttango.rajawali.ar.TangoRajawaliView;
import com.projecttango.tangosupport.TangoSupport;

/**
* An example showing how to build a very simple augmented reality application in Java.
* It uses Rajawali to do the rendering through the utility classes
* <code>TangoRajawaliRenderer</code> and <code>TangoRajawaliView</code> from TangoUtils.
* It also uses the TangoSupportLibrary to do plane fitting using the PointCloud data. Whenever the
* user clicks on the camera display, plane detection will be done on the surface closest to the
* click location and a 3D object will be placed in the scene anchored in that location.
* <p/>
* TangoRajawaliView is used in the same way as the TangoCameraPreview: we first need to initialize
* the TangoRajawaliView class with the activity's context and connect to the camera we want by using
Expand All @@ -49,12 +55,15 @@
* The implementation of the 3D world is done by subclassing the Renderer, just like any other
* Rajawali application.
* <p/>
* Note that it is important to include the KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION configuration
* parameter in order to achieve best results synchronizing the Rajawali virtual world with
* the RGB camera.
*/
public class AugmentedRealityActivity extends Activity {
public class AugmentedRealityActivity extends Activity implements View.OnTouchListener {
private static final String TAG = "AugmentedRealityActiv";
private TangoRajawaliView mGLView;
private AugmentedRealityRenderer mRenderer;
private PointCloudManager mPointCloudManager;
private Tango mTango;
private boolean mIsConnected;
private boolean mIsPermissionGranted;
Expand All @@ -65,6 +74,7 @@ protected void onCreate(Bundle savedInstanceState) {
mGLView = new TangoRajawaliView(this);
mRenderer = new AugmentedRealityRenderer(this);
mGLView.setSurfaceRenderer(mRenderer);
mGLView.setOnTouchListener(this);
mTango = new Tango(this);
startActivityForResult(
Tango.getRequestPermissionIntent(Tango.PERMISSIONTYPE_MOTION_TRACKING),
Expand All @@ -91,17 +101,17 @@ protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// Augmented reality view and renderer
private void startAugmentedreality() {
if (!mIsConnected) {
mIsConnected = true;
// Connect to color camera
mGLView.connectToTangoCamera(mTango,
TangoCameraIntrinsics.TANGO_CAMERA_COLOR);
mGLView.connectToTangoCamera(mTango, TangoCameraIntrinsics.TANGO_CAMERA_COLOR);

// Use default configuration for Tango Service, plus low latency IMU integration.
TangoConfig config = mTango.getConfig(TangoConfig.CONFIG_TYPE_DEFAULT);
// NOTE: low latency integration is necessary to achieve a precise alignment of
// virtual objects with the RBG image and produce a good AR effect.
config.putBoolean(TangoConfig.KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION, true);
config.putBoolean(TangoConfig.KEY_BOOLEAN_DEPTH, true);
mTango.connect(config);
mIsConnected = true;

// No need to add any coordinate frame pairs since we are not using
// pose data. So just initialize.
Expand All @@ -123,17 +133,55 @@ public void onFrameAvailable(int cameraId) {

@Override
public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
// We are not using OnPoseAvailable for this app
// Get the device pose at the time the point cloud was acquired
TangoCoordinateFramePair framePair = new TangoCoordinateFramePair(
TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
TangoPoseData.COORDINATE_FRAME_DEVICE);
TangoPoseData cloudPose = mTango.getPoseAtTime(xyzIj.timestamp, framePair);

// Save the cloud and point data for later use
mPointCloudManager.updateXyzIjData(xyzIj, cloudPose);
}

@Override
public void onTangoEvent(TangoEvent event) {
// We are not using OnPoseAvailable for this app
}
});

// Get extrinsics from device for use in transforms
// This needs to be done after connecting Tango and listeners
setupExtrinsics();

// Set-up point cloud plane fitting library helper class
mPointCloudManager = new PointCloudManager(mTango.getCameraIntrinsics(
TangoCameraIntrinsics.TANGO_CAMERA_COLOR));

}
}

/**
* Calculates and stores the fixed transformations between the device and the various sensors
* to be used later for transformations between frames.
*/
private void setupExtrinsics() {
// Create Camera to IMU Transform
TangoCoordinateFramePair framePair = new TangoCoordinateFramePair();
framePair.baseFrame = TangoPoseData.COORDINATE_FRAME_IMU;
framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR;
TangoPoseData imuTrgbPose = mTango.getPoseAtTime(0.0, framePair);

// Create Device to IMU Transform
framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_DEVICE;
TangoPoseData imuTdevicePose = mTango.getPoseAtTime(0.0, framePair);

// Create Depth camera to IMU Transform
framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_DEPTH;
TangoPoseData imuTdepthPose = mTango.getPoseAtTime(0.0, framePair);

mRenderer.setupExtrinsics(imuTdevicePose, imuTrgbPose, imuTdepthPose);
}


@Override
protected void onPause() {
Expand All @@ -152,4 +200,38 @@ protected void onResume() {
startAugmentedreality();
}
}

@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
if (motionEvent.getAction() == MotionEvent.ACTION_UP) {
// Calculate click location in u,v (0;1) coordinates
float u = motionEvent.getX() / view.getWidth();
float v = motionEvent.getY() / view.getHeight();

try {
doFitPlane(u, v);
} catch (Throwable t) {
Log.e(TAG, "Exception measuring nomral", t);
}
}
return true;
}

/**
* Use the TangoSupport library with point cloud data to calculate the plane of
* the world feature pointed at the location the camera is looking at and update the
* renderer to show a 3D object in that location.
*/
private void doFitPlane(float u, float v) {
// Get the current device pose
TangoCoordinateFramePair framePair = new TangoCoordinateFramePair(
TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
TangoPoseData.COORDINATE_FRAME_DEVICE);
TangoPoseData devicePose = mTango.getPoseAtTime(0.0, framePair);

// Perform plane fitting with the latest available point cloud data
TangoSupport.IntersectionPointPlaneModelPair planeModel =
mPointCloudManager.fitPlane(u, v, devicePose, mRenderer.getPoseCalculator());
mRenderer.updateObjectPose(planeModel.intersectionPoint, planeModel.planeModel, devicePose);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,19 @@
import android.content.Context;
import android.view.MotionEvent;

import com.google.atap.tangoservice.TangoPoseData;
import com.projecttango.rajawali.Pose;
import com.projecttango.rajawali.ScenePoseCalcuator;
import com.projecttango.rajawali.ar.TangoRajawaliRenderer;

import org.rajawali3d.Object3D;
import org.rajawali3d.lights.DirectionalLight;
import org.rajawali3d.materials.Material;
import org.rajawali3d.materials.methods.DiffuseMethod;
import org.rajawali3d.primitives.NPrism;
import org.rajawali3d.primitives.Sphere;
import org.rajawali3d.materials.textures.ATexture;
import org.rajawali3d.materials.textures.Texture;
import org.rajawali3d.math.vector.Vector3;
import org.rajawali3d.primitives.Cube;

/**
* Very simple example augmented reality renderer which displays two objects in a fixed position
Expand All @@ -39,6 +44,13 @@
*/
public class AugmentedRealityRenderer extends TangoRajawaliRenderer {

private static final float CUBE_SIDE_LENGTH = 0.5f;

private Pose mPlanePose;
private boolean mPlanePoseUpdated = false;

private Object3D mObject;

public AugmentedRealityRenderer(Context context) {
super(context);
}
Expand All @@ -55,23 +67,61 @@ protected void initScene() {
light.setPosition(3, 2, 4);
getCurrentScene().addLight(light);

// Set-up a material: green with application of the light
// Set-up a material: green with application of the light and instructions
Material material = new Material();
material.setColor(0xff009900);
try {
Texture t = new Texture("instructions", R.drawable.instructions);
material.addTexture(t);
} catch (ATexture.TextureException e) {
e.printStackTrace();
}
material.setColorInfluence(0.1f);
material.enableLighting(true);
material.setDiffuseMethod(new DiffuseMethod.Lambert());

// Build a pyramid and place it roughly in front and a bit to the right
Object3D object1 = new NPrism(4, 0f, 0.2f, 0.2f);
object1.setMaterial(material);
object1.setPosition(-0.25, 0, -1);
getCurrentScene().addChild(object1);

// Build a sphere and place it roughly in front and a bit to the left
object1 = new Sphere(0.1f, 24, 24);
object1.setMaterial(material);
object1.setPosition(0.25, 0, -1);
getCurrentScene().addChild(object1);
// Build a Cube and place it initially in the origin
mObject = new Cube(CUBE_SIDE_LENGTH);
mObject.setMaterial(material);
mObject.setPosition(0, 0, -3);
mObject.setRotation(Vector3.Axis.Z, 180);
getCurrentScene().addChild(mObject);
}

@Override
protected void onRender(long ellapsedRealtime, double deltaTime) {
super.onRender(ellapsedRealtime, deltaTime);

synchronized (this) {
if (mPlanePoseUpdated == true) {
mPlanePoseUpdated = false;
// Place the 3D object in the location of the detected plane
mObject.setPosition(mPlanePose.getPosition());
mObject.setOrientation(mPlanePose.getOrientation());
// Move it forward by half of the size of the cube to make it flush with the plane
// surface
mObject.moveForward(CUBE_SIDE_LENGTH / 2.0f);
}
}
}

/**
* Update the 3D object based on the provided measurement point, normal (in depth frame) and
* device pose at the time of measurement.
*/
public synchronized void updateObjectPose(double[] point, double[] normal,
TangoPoseData devicePose) {
mPlanePose = mScenePoseCalcuator.planeFitToOpenGLPose(point, normal, devicePose);
mPlanePoseUpdated = true;
}

/**
* Provide access to scene calculator helper class to perform necessary transformations.
* NOTE: This won't be necessary once transformation functions are available through the
* support library
*/
public ScenePoseCalcuator getPoseCalculator() {
return mScenePoseCalcuator;
}

@Override
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
/*
* Copyright 2014 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.projecttango.experiments.augmentedrealitysample;

import com.google.atap.tangoservice.TangoCameraIntrinsics;
import com.google.atap.tangoservice.TangoPoseData;
import com.google.atap.tangoservice.TangoXyzIjData;
import com.projecttango.rajawali.ScenePoseCalcuator;
import com.projecttango.tangosupport.TangoSupport;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

/**
* This helper class keeps a copy of the point cloud data received in callbacks for use with the
* plane fitting function.
* It is implemented to be thread safe so that the caller (the Activity) doesn't need to worry
* about locking between the Tango callback and UI threads.
*/
public class PointCloudManager {
private static final String TAG = "PointCloudManager";

private final TangoCameraIntrinsics mTangoCameraIntrinsics;
private final TangoXyzIjData mXyzIjData;
private TangoPoseData mDevicePoseAtCloudTime;

public PointCloudManager(TangoCameraIntrinsics intrinsics) {
mXyzIjData = new TangoXyzIjData();
mTangoCameraIntrinsics = intrinsics;
}

/**
* Update the current cloud data with the provided xyzIjData from a Tango callback.
*
* @param from The point cloud data
* @param xyzIjPose The device pose with respect to start of service at the time
* the point cloud was acquired
*/
public synchronized void updateXyzIjData(TangoXyzIjData from, TangoPoseData xyzIjPose) {
mDevicePoseAtCloudTime = xyzIjPose;

if (mXyzIjData.xyz == null || mXyzIjData.xyz.capacity() < from.xyzCount * 3) {
mXyzIjData.xyz = ByteBuffer.allocateDirect(from.xyzCount * 3 * 4)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
} else {
mXyzIjData.xyz.rewind();
}

mXyzIjData.xyzCount = from.xyzCount;
mXyzIjData.timestamp = from.timestamp;

from.xyz.rewind();
mXyzIjData.xyz.put(from.xyz);
mXyzIjData.xyz.rewind();
from.xyz.rewind();
}

/**
* Calculate the plane that best fits the current point cloud at the provided u,v coordinates
* in the 2D projection of the point cloud data (i.e.: point cloud image).
*
* @param u u (horizontal) component of the click location
* @param v v (vertical) component of the click location
* @param devicePoseAtClickTime Device pose at the time this operation is requested
* @param poseCalcuator ScenePoseCalculator helper instance to calculate transforms
* @return The point and plane model, in depth sensor frame
*/
public synchronized TangoSupport.IntersectionPointPlaneModelPair fitPlane(float u, float v,
TangoPoseData devicePoseAtClickTime, ScenePoseCalcuator poseCalcuator) {

// We need to calculate the transform between the color camera at the time the user clicked
// and the depth camera at the time the depth cloud was acquired.
// This operation is currently implemented in the provided ScenePoseCalculator helper
// class. In the future, the support library will provide a method for this calculation.
TangoPoseData colorCameraTDepthCameraWithTime
= poseCalcuator.calculateColorCameraTDepthWithTime(devicePoseAtClickTime, mDevicePoseAtCloudTime);

return TangoSupport.fitPlaneModelNearClick(mXyzIjData, mTangoCameraIntrinsics,
colorCameraTDepthCameraWithTime, u, v);
}
}
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading

0 comments on commit 7e7c91b

Please sign in to comment.