/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.cts.verifier.sensors.sixdof.Renderer;

import com.android.cts.verifier.R;
import com.android.cts.verifier.sensors.sixdof.Renderer.RenderUtils.ModelMatrixCalculator;
import com.android.cts.verifier.sensors.sixdof.Renderer.RenderUtils.ObjImporter;
import com.android.cts.verifier.sensors.sixdof.Renderer.Renderable.ConeRenderable;
import com.android.cts.verifier.sensors.sixdof.Renderer.Renderable.Light;
import com.android.cts.verifier.sensors.sixdof.Renderer.Renderable.RingRenderable;
import com.android.cts.verifier.sensors.sixdof.Utils.MathsUtils;
import com.android.cts.verifier.sensors.sixdof.Utils.Path.PathUtilityClasses.Ring;
import com.android.cts.verifier.sensors.sixdof.Utils.PoseProvider.Intrinsics;
import com.android.cts.verifier.sensors.sixdof.Utils.PoseProvider.PoseData;

import static com.android.cts.verifier.sensors.sixdof.Utils.MathsUtils.MATRIX_4X4;

import android.content.Context;
import android.media.MediaPlayer;
import android.opengl.GLES20;
import android.opengl.Matrix;

import java.util.ArrayList;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

/**
 * Renderer for the robustness test.
 */
public class ComplexMovementRenderer extends BaseRenderer {
    private static final String TAG = "ComplexMovementRenderer";
    private static final float[] DEFAULT_LIGHT_POSITION = new float[]{
            0.0f, 3.0f, 0.0f};
    private static final Object RING_LOCK = new Object();
    private ModelMatrixCalculator mCameraModelMatrixCalculator;
    private ConeRenderable mCone;
    private Light mLight;
    private float[] mPoseViewMatrix = new float[MATRIX_4X4];
    private float[] mAugmentedRealityProjectMatrix = new float[MATRIX_4X4];

    protected boolean mIsCameraConfigured = false;

    protected double mCameraPoseTimestamp = 0;
    private PoseData mLastFramePose;

    private Context mContext;

    private int mWaypointCount = 0;
    private MediaPlayer mMediaPlayer;
    private ArrayList<Ring> mRings;

    public ComplexMovementRenderer(Context context, ArrayList<Ring> rings) {
        super(context);
        mCameraModelMatrixCalculator = new ModelMatrixCalculator(mOpenGlRotation);
        mContext = context;
        mMediaPlayer = MediaPlayer.create(context, R.raw.ring_sound);
        mRings = rings;
    }

    @Override
    public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
        super.onSurfaceCreated(glUnused, config);
        mCone = new ConeRenderable(mOpenGlRotation, mOpenGlUpVector);
        mLight = new Light(DEFAULT_LIGHT_POSITION, 2.0f);
        setUpExtrinsics();

        ObjImporter.ObjectData ringData = ObjImporter.parse(mContext.getResources(), R.raw.ring_obj);

        for (Ring ring : mRings) {
            final float[] position =
                    MathsUtils.convertToOpenGlCoordinates(ring.getLocation(), mOpenGlRotation);
            final float[] rotation =
                    MathsUtils.convertToOpenGlCoordinates(ring.getRingRotation(), mOpenGlRotation);
            RingRenderable ringRenderable = new RingRenderable(position, rotation, mOpenGlUpVector);
            ringRenderable.initialise(ringData);
            ring.setRingRenderable(ringRenderable);
        }

        ObjImporter.ObjectData coneData = ObjImporter.parse(mContext.getResources(), R.raw.cone_obj);
        mCone.initialise(coneData);
    }

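    /**
     * Selects the orthogonal view and projection matrices so the background camera preview can be
     * drawn full screen behind the augmented reality content.
     */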
    @Override
    protected void doPreRenderingSetup() {
        // Set up drawing of the background camera preview (orthogonal).
        mViewMatrix = mOrthogonalViewMatrix;
        mProjectionMatrix = mOrthogonalProjectionMatrix;
    }

    @Override
    protected void doTestSpecificRendering() {
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT);
        if (mPoseProvider != null) {
            // Update the texture with the latest camera frame.
            updateCameraTexture();

            // We delay the camera set-up until now because if we do it earlier (i.e., when the
            // camera is connected to the renderer) the PoseProvider service may still not have the
            // necessary intrinsic and extrinsic transformation information available.
            if (!mIsCameraConfigured) {
                configureCamera();
            }

            // Calculate the device pose at the camera frame update time.
            mLastFramePose = mPoseProvider.getLatestPoseData();
            // Update the renderer's camera pose.
            updateRenderCameraPose(mLastFramePose);
            // Update the model-view matrix with the new pose data.
            updatePoseViewMatrix();
            // Update the light with the new translation.
            mLight.updateLightPosition(MathsUtils.convertToOpenGlCoordinates(
                    mLastFramePose.getTranslationAsFloats(), mOpenGlRotation));
            mCameraPoseTimestamp = mLastFramePose.timestamp;
        }

        // Render objects with the latest pose information available.
        renderAugmentedRealityObjects();
    }

    private void renderAugmentedRealityObjects() {
        // Set up the projection matrix to match the camera intrinsics.
        mProjectionMatrix = mAugmentedRealityProjectMatrix;
        // Set up the view matrix to match the current device position.
        mViewMatrix = mPoseViewMatrix;

        mDrawParameters.update(mViewMatrix, mProjectionMatrix, mLight);
        for (Ring ring : mRings) {
            // Only draw rings that are on the current path (the path number matches the number of
            // waypoints placed so far) and that have not been entered yet.
            if (ring.getPathNumber() == mWaypointCount && !ring.isEntered()) {
                ring.getRingRenderable().draw(mDrawParameters);
            }
        }
        // Clear the depth buffer so the cone does not clip with the rings.
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT);

        // Point the cone at the first un-entered ring on the current path.
        boolean lookingAt = false;
        for (Ring ring : mRings) {
            if (!ring.isEntered() && !lookingAt && ring.getPathNumber() == mWaypointCount) {
                // The ring has not been entered, the cone has not been set to look at anything
                // yet, and we are on the correct lap for this ring.
                mCone.updateModelMatrix(mLastFramePose.getTranslationAsFloats(),
                        mLastFramePose.getRotationAsFloats(), ring.getLocation());
                lookingAt = true;
            }
        }

        if (lookingAt) {
            // Only draw the cone if it has something to look at.
            mCone.draw(mDrawParameters);
        }
    }

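    /**
     * Builds the augmented reality projection matrix from the camera intrinsics reported by the
     * PoseProvider. Called lazily from doTestSpecificRendering(), because the intrinsics may not
     * be available yet when the PoseProvider is first connected to the renderer.
     */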
    protected void configureCamera() {
        // This should never happen, but it never hurts to double-check.
        if (mPoseProvider == null) {
            return;
        }

        Intrinsics intrinsics = mPoseProvider.getIntrinsics();

        mAugmentedRealityProjectMatrix = calculateProjectionMatrix(
                intrinsics.getWidth(), intrinsics.getHeight(),
                intrinsics.getFocalLengthInPixelsX(), intrinsics.getFocalLengthInPixelsY());
        mIsCameraConfigured = true;
    }

    /**
     * Called when a waypoint is placed in the last test. Used to show and hide rings.
     *
     * @param waypointCount Number of waypoints placed.
     */
    public void onWaypointPlaced(int waypointCount) {
        mWaypointCount = waypointCount;
    }

    /**
     * Called when a ring has been entered. Marks the ring's sound as played and plays the sound;
     * entered rings are no longer drawn.
     *
     * @param ring Ring that has just been entered.
     */
    public void onRingEntered(Ring ring) {
        synchronized (RING_LOCK) {
            ring.setSoundPlayed(true);
        }
        mMediaPlayer.start();
    }

    /**
     * Set up the extrinsics of the device.
     */
    private void setUpExtrinsics() {
    }

    /**
     * Update the scene camera based on the provided pose. The device pose should match the pose
     * of the device at the time the last RGB frame was rendered.
     */
    public void updateRenderCameraPose(PoseData devicePose) {
        mCameraModelMatrixCalculator.updateModelMatrix(devicePose.getTranslationAsFloats(),
                devicePose.getRotationAsFloats());
    }

    /**
     * Update the view matrix of the Renderer to follow the position of the device in the current
     * perspective. The view matrix is the inverse of the camera model matrix.
     */
    public void updatePoseViewMatrix() {
        float[] invertModelMat = new float[MATRIX_4X4];
        Matrix.setIdentityM(invertModelMat, 0);

        float[] temporaryMatrix = new float[MATRIX_4X4];
        Matrix.setIdentityM(temporaryMatrix, 0);

        Matrix.setIdentityM(mPoseViewMatrix, 0);
        Matrix.invertM(invertModelMat, 0,
                mCameraModelMatrixCalculator.getModelMatrix(), 0);
        Matrix.multiplyMM(temporaryMatrix, 0, mPoseViewMatrix, 0,
                invertModelMat, 0);
        System.arraycopy(temporaryMatrix, 0, mPoseViewMatrix, 0, MATRIX_4X4);
    }

    /**
     * Use the camera intrinsics to calculate the projection matrix.
     */
    private float[] calculateProjectionMatrix(int width, int height,
                                              double focalLengthX, double focalLengthY) {
        // Uses frustumM to create a projection matrix that takes the calibrated camera intrinsic
        // parameters into account. A pixel offset of x from the image centre maps to
        // x * near / focalLength on the near plane, so the frustum bounds are
        // +/- (width / 2) * near / focalLengthX horizontally and
        // +/- (height / 2) * near / focalLengthY vertically (the principal point is assumed to
        // be at the image centre).
        // Reference: http://ksimek.github.io/2013/06/03/calibrated_cameras_in_opengl/
        float near = 0.1f;
        float far = 100f;

        float xScale = (float) (near / focalLengthX);
        float yScale = (float) (near / focalLengthY);

        float[] projectionMatrix = new float[16];
        Matrix.frustumM(projectionMatrix, 0,
                xScale * -width / 2.0f,
                xScale * width / 2.0f,
                yScale * -height / 2.0f,
                yScale * height / 2.0f,
                near, far);
        return projectionMatrix;
    }
}
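
/*
 * Usage sketch (illustrative only, not part of this file): assuming BaseRenderer implements
 * GLSurfaceView.Renderer, a test activity could wire this renderer up roughly as follows. The
 * GLSurfaceView instance and the points at which waypoint and ring events are detected are
 * assumptions, not APIs defined here.
 *
 *     ComplexMovementRenderer renderer = new ComplexMovementRenderer(context, rings);
 *     glSurfaceView.setEGLContextClientVersion(2);
 *     glSurfaceView.setRenderer(renderer);
 *
 *     // When the user places a waypoint:
 *     renderer.onWaypointPlaced(waypointCount);
 *
 *     // When the device pose passes through a ring:
 *     renderer.onRingEntered(ring);
 */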