/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static android.graphics.ImageFormat.YUV_420_888;
import static android.hardware.camera2.cts.helpers.Preconditions.*;
import static android.hardware.camera2.cts.helpers.AssertHelpers.*;
import static android.hardware.camera2.cts.CameraTestUtils.*;
import static com.android.ex.camera2.blocking.BlockingStateCallback.*;

import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.RectF;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.ColorSpaceTransform;
import android.hardware.camera2.params.RggbChannelVector;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.hardware.camera2.cts.helpers.MaybeNull;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.rs.RenderScriptSingleton;
import android.hardware.camera2.cts.rs.ScriptGraph;
import android.hardware.camera2.cts.rs.ScriptYuvCrop;
import android.hardware.camera2.cts.rs.ScriptYuvMeans1d;
import android.hardware.camera2.cts.rs.ScriptYuvMeans2dTo1d;
import android.hardware.camera2.cts.rs.ScriptYuvToRgb;
import android.os.Handler;
import android.os.HandlerThread;
import android.renderscript.Allocation;
import android.renderscript.Script.LaunchOptions;
import android.test.AndroidTestCase;
import android.util.Log;
import android.util.Rational;
import android.util.Size;
import android.view.Surface;

import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
import com.android.ex.camera2.blocking.BlockingSessionCallback;
import com.android.ex.camera2.blocking.BlockingStateCallback;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Suite of tests for camera2 -> RenderScript APIs.
 *
 * <p>It uses a CameraDevice as the producer; the camera sends data to a Surface
 * provided by an Allocation.
 * Only the format below is tested:</p>
 *
 * <p>YUV_420_888: flexible YUV 4:2:0; a mandatory output format for all cameras.</p>
 */
public class AllocationTest extends AndroidTestCase {
    private static final String TAG = "AllocationTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);

    private CameraManager mCameraManager;
    private CameraDevice mCamera;
    private CameraCaptureSession mSession;
    private BlockingStateCallback mCameraListener;
    private BlockingSessionCallback mSessionListener;

    private String[] mCameraIds;

    private Handler mHandler;
    private HandlerThread mHandlerThread;

    private CameraIterable mCameraIterable;
    private SizeIterable mSizeIterable;
    private ResultIterable mResultIterable;

    @Override
    public synchronized void setContext(Context context) {
        super.setContext(context);
        mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        assertNotNull("Can't connect to camera manager!", mCameraManager);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mCameraIds = mCameraManager.getCameraIdList();
        mHandlerThread = new HandlerThread("AllocationTest");
        mHandlerThread.start();
        mHandler = new Handler(mHandlerThread.getLooper());
        mCameraListener = new BlockingStateCallback();

        mCameraIterable = new CameraIterable();
        mSizeIterable = new SizeIterable();
        mResultIterable = new ResultIterable();

        RenderScriptSingleton.setContext(getContext());
    }

    @Override
    protected void tearDown() throws Exception {
        MaybeNull.close(mCamera);
        RenderScriptSingleton.clearContext();
        mHandlerThread.quitSafely();
        mHandler = null;
        super.tearDown();
    }

    /**
     * Update the request with a default manual request template.
     *
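     * <p>A hypothetical invocation, for illustration only ({@code camera} and the
     * argument values are assumptions; 100 and 2000000 correspond to ISO 100 at a
     * 2 ms exposure):</p>
     *
     * <pre>{@code
     * CaptureRequest.Builder builder =
     *         camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
     * setManualCaptureRequest(builder, 100, 2000000L);
     * }</pre>
     *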
     * @param request A builder for a CaptureRequest
     * @param sensitivity ISO gain units (e.g. 100)
     * @param expTimeNs Exposure time in nanoseconds
     */
    private static void setManualCaptureRequest(CaptureRequest.Builder request, int sensitivity,
            long expTimeNs) {
        final Rational ONE = new Rational(1, 1);
        final Rational ZERO = new Rational(0, 1);

        if (VERBOSE) {
            Log.v(TAG, String.format(
                    "Create manual capture request, sensitivity = %d, expTime = %f ms",
                    sensitivity, expTimeNs / (1000.0 * 1000)));
        }

        request.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
        request.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
        request.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_OFF);
        request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
        request.set(CaptureRequest.CONTROL_EFFECT_MODE, CaptureRequest.CONTROL_EFFECT_MODE_OFF);
        request.set(CaptureRequest.SENSOR_FRAME_DURATION, 0L);
        request.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity);
        request.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTimeNs);
        request.set(CaptureRequest.COLOR_CORRECTION_MODE,
                CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);

        // Identity transform
        request.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
                new ColorSpaceTransform(new Rational[] {
                    ONE, ZERO, ZERO,
                    ZERO, ONE, ZERO,
                    ZERO, ZERO, ONE
                }));

        // Identity gains
        request.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f));
        request.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_FAST);
    }

    /**
     * Calculate the absolute crop window from a {@link Size},
     * and configure {@link LaunchOptions} for it.
     */
    // TODO: split the patch crop window and its application to a particular size into 2 classes
    public static class Patch {
        /**
         * Create a new {@link Patch} from relative crop coordinates.
         *
         * <p>All float values must be normalized coordinates between [0, 1].</p>
         *
         * @param size Size of the original rectangle that is being cropped.
         * @param xNorm The X coordinate defining the left side of the rectangle (in [0, 1]).
         * @param yNorm The Y coordinate defining the top side of the rectangle (in [0, 1]).
         * @param wNorm The width of the crop rectangle (normalized between [0, 1]).
         * @param hNorm The height of the crop rectangle (normalized between [0, 1]).
         *
         * @throws NullPointerException if size was {@code null}.
         * @throws AssertionError if any of the normalized coordinates were out of range
         */
        public Patch(Size size, float xNorm, float yNorm, float wNorm, float hNorm) {
            checkNotNull("size", size);

            assertInRange(xNorm, 0.0f, 1.0f);
            assertInRange(yNorm, 0.0f, 1.0f);
            assertInRange(wNorm, 0.0f, 1.0f);
            assertInRange(hNorm, 0.0f, 1.0f);

            wFull = size.getWidth();
            hFull = size.getHeight();

            xTile = (int) Math.ceil(xNorm * wFull);
            yTile = (int) Math.ceil(yNorm * hFull);

            wTile = (int) Math.ceil(wNorm * wFull);
            hTile = (int) Math.ceil(hNorm * hFull);

            mSourceSize = size;
        }
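
        // Illustrative example of the crop math above (not used by any test): for
        // size = 640x480 and (xNorm, yNorm, wNorm, hNorm) = (0.25f, 0.25f, 0.5f, 0.5f),
        // xTile = ceil(0.25 * 640) = 160, yTile = ceil(0.25 * 480) = 120,
        // wTile = ceil(0.50 * 640) = 320, hTile = ceil(0.50 * 480) = 240.
        // (0.25 and 0.5 are exactly representable as floats, so the results are exact.)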

        /**
         * Get the original size used to create this {@link Patch}.
         *
         * @return source size
         */
        public Size getSourceSize() {
            return mSourceSize;
        }

        /**
         * Get the cropped size after applying the normalized crop window.
         *
         * @return cropped size
         */
        public Size getSize() {
            return new Size(wTile, hTile);
        }

        /**
         * Get the {@link LaunchOptions} that can be used with a
         * {@link android.renderscript.Script} to apply a kernel over a subset of an
         * {@link Allocation}.
         *
         * @return launch options
         */
        public LaunchOptions getLaunchOptions() {
            return (new LaunchOptions())
                    .setX(xTile, xTile + wTile)
                    .setY(yTile, yTile + hTile);
        }

        /**
         * Get the cropped width after applying the normalized crop window.
         *
         * @return cropped width
         */
        public int getWidth() {
            return wTile;
        }

        /**
         * Get the cropped height after applying the normalized crop window.
         *
         * @return cropped height
         */
        public int getHeight() {
            return hTile;
        }

        /**
         * Convert to a {@link RectF} where each corner is represented by a
         * normalized coordinate in between [0.0, 1.0] inclusive.
         *
         * @return a new rectangle
         */
        public RectF toRectF() {
            return new RectF(
                    xTile * 1.0f / wFull,
                    yTile * 1.0f / hFull,
                    (xTile + wTile) * 1.0f / wFull,
                    (yTile + hTile) * 1.0f / hFull);
        }

        private final Size mSourceSize;
        private final int wFull;
        private final int hFull;
        private final int xTile;
        private final int yTile;
        private final int wTile;
        private final int hTile;
    }

    /**
     * Convert a single YUV pixel (3 byte elements) to an RGB pixel.
     *
     * <p>The color channels must be in the following order:
     * <ul><li>Y - 0th channel
     * <li>U - 1st channel
     * <li>V - 2nd channel
     * </ul></p>
     *
     * <p>Each channel has data in the range 0-255.</p>
     *
     * <p>Output data is a 3-element pixel with each channel in the range of [0, 1].
     * Each channel is saturated to avoid over/underflow.</p>
     *
     * <p>The conversion is done using JFIF File Interchange Format's "Conversion to and from RGB":
     * <ul>
     * <li>R = Y + 1.402 (Cr - 128)
     * <li>G = Y - 0.34414 (Cb - 128) - 0.71414 (Cr - 128)
     * <li>B = Y + 1.772 (Cb - 128)
     * </ul>
     *
     * Where Cr and Cb are aliases of V and U respectively.
     * </p>
     *
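     * <p>Worked example: for {@code yuvData = {128, 128, 128}}, both chroma terms
     * vanish (Cb - 128 = Cr - 128 = 0), so R = G = B = 128 and each normalized
     * output channel is 128 / 255, roughly 0.502.</p>
     *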
     * @param yuvData An array of a YUV pixel (at least 3 bytes large)
     *
     * @return an RGB888 pixel with each channel in the range of [0, 1]
     */
    private static float[] convertPixelYuvToRgb(byte[] yuvData) {
        final int CHANNELS = 3; // yuv
        final float COLOR_RANGE = 255f;

        assertTrue("YUV pixel must be at least 3 bytes large", CHANNELS <= yuvData.length);

        float[] rgb = new float[CHANNELS];

        float y = yuvData[0] & 0xFF;  // Y channel
        float cb = yuvData[1] & 0xFF; // U channel
        float cr = yuvData[2] & 0xFF; // V channel

        // convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
        float r = y + 1.402f * (cr - 128);
        float g = y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128);
        float b = y + 1.772f * (cb - 128);

        // normalize [0,255] -> [0,1]
        rgb[0] = r / COLOR_RANGE;
        rgb[1] = g / COLOR_RANGE;
        rgb[2] = b / COLOR_RANGE;

        // Clamp to range [0,1]
        for (int i = 0; i < CHANNELS; ++i) {
            rgb[i] = Math.max(0.0f, Math.min(1.0f, rgb[i]));
        }

        if (VERBOSE) {
            Log.v(TAG, String.format("RGB calculated (r,g,b) = (%f, %f, %f)", rgb[0], rgb[1],
                    rgb[2]));
        }

        return rgb;
    }

    /**
     * Configure the camera with the target surface;
     * create a capture request builder with {@code cameraTarget} as the sole surface target.
     *
     * <p>Outputs are configured with the new surface targets, and this function blocks until
     * the camera has finished configuring.</p>
     *
     * <p>The capture request is created from the {@link CameraDevice#TEMPLATE_PREVIEW} template.
     * No other keys are set.
     * </p>
     */
    private CaptureRequest.Builder configureAndCreateRequestForSurface(Surface cameraTarget)
            throws CameraAccessException {
        List<Surface> outputSurfaces = new ArrayList<Surface>(/*capacity*/1);
        assertNotNull("Failed to get Surface", cameraTarget);
        outputSurfaces.add(cameraTarget);

        mSessionListener = new BlockingSessionCallback();
        mCamera.createCaptureSession(outputSurfaces, mSessionListener, mHandler);
        mSession = mSessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
        CaptureRequest.Builder captureBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        assertNotNull("Failed to create capture request", captureBuilder);
        captureBuilder.addTarget(cameraTarget);

        if (VERBOSE) Log.v(TAG, "configureAndCreateRequestForSurface - done");

        return captureBuilder;
    }

    /**
     * Submit a single request to the camera, block until the buffer is available.
     *
     * <p>Upon return from this function, the script graph has been executed against the
     * latest buffer.</p>
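     *
     * <p>If the request uses a manual exposure time (with auto-exposure or all controls
     * disabled), that exposure time, converted to milliseconds, is used as the buffer
     * wait timeout below, so a long exposure is not cut off by a too-short wait.</p>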
     */
    private void captureSingleShotAndExecute(CaptureRequest request, ScriptGraph graph)
            throws CameraAccessException {
        checkNotNull("request", request);
        checkNotNull("graph", graph);

        long exposureTimeNs = -1;
        int controlMode = -1;
        int aeMode = -1;
        if (request.get(CaptureRequest.CONTROL_MODE) != null) {
            controlMode = request.get(CaptureRequest.CONTROL_MODE);
        }
        if (request.get(CaptureRequest.CONTROL_AE_MODE) != null) {
            aeMode = request.get(CaptureRequest.CONTROL_AE_MODE);
        }
        if ((request.get(CaptureRequest.SENSOR_EXPOSURE_TIME) != null) &&
                ((controlMode == CaptureRequest.CONTROL_MODE_OFF) ||
                 (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF))) {
            exposureTimeNs = request.get(CaptureRequest.SENSOR_EXPOSURE_TIME);
        }
        mSession.capture(request, new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                    TotalCaptureResult result) {
                if (VERBOSE) Log.v(TAG, "Capture completed");
            }
        }, mHandler);

        if (VERBOSE) Log.v(TAG, "Waiting for single shot buffer");
        if (exposureTimeNs > 0) {
            graph.advanceInputWaiting(
                    java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(exposureTimeNs));
        } else {
            graph.advanceInputWaiting();
        }
        if (VERBOSE) Log.v(TAG, "Got the buffer");
        graph.execute();
    }

    private void stopCapture() throws CameraAccessException {
        if (VERBOSE) Log.v(TAG, "Stopping capture and waiting for idle");
        // Stop repeating, wait for captures to complete, and disconnect from surfaces
        mSession.close();
        mSessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_CLOSED,
                SESSION_CLOSE_TIMEOUT_MS);
        mSession = null;
        mSessionListener = null;
    }

    /**
     * Extremely dumb validator. Makes sure there is at least one nonzero RGB pixel value.
     */
    private void validateInputOutputNotZeroes(ScriptGraph scriptGraph, Size size) {
        final int BPP = 8; // bits per byte

        int width = size.getWidth();
        int height = size.getHeight();

        /*
         * Check that the input allocation is sane.
         * - Byte size matches what we expect.
         * - The input is not all zeroes.
         */

        // Check that input data was updated first. If it wasn't, the rest of the test will fail.
        byte[] data = scriptGraph.getInputData();
        assertArrayNotAllZeroes("Input allocation data was not updated", data);

        // Minimal required size to represent a YUV 4:2:0 image
        int packedSize =
                width * height * ImageFormat.getBitsPerPixel(YUV_420_888) / BPP;
        if (VERBOSE) Log.v(TAG, "Expected image size = " + packedSize);
        int actualSize = data.length;
        // Actual size may be larger due to strides or planes being non-contiguous
        assertTrue(
                String.format(
                        "Actual input size (%d) should be at least as large as the YUV 4:2:0 " +
                        "packed size (%d)", actualSize, packedSize),
                packedSize <= actualSize);
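
        // Illustrative example of the packed-size math: a 640x480 YUV_420_888 frame
        // at 12 bits per pixel needs at least 640 * 480 * 12 / 8 = 460800 bytes.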

        /*
         * Check the output allocation by converting to RGBA.
         * - Byte size matches what we expect
         * - The output is not all zeroes
         */
        final int RGBA_CHANNELS = 4;

        int actualSizeOut = scriptGraph.getOutputAllocation().getBytesSize();
        int packedSizeOut = width * height * RGBA_CHANNELS;

        byte[] dataOut = scriptGraph.getOutputData();
        assertEquals("RGB mismatched byte[] and expected size",
                packedSizeOut, dataOut.length);

        if (VERBOSE) {
            Log.v(TAG, "checkAllocationByConvertingToRgba - RGB data size " + dataOut.length);
        }

        assertArrayNotAllZeroes("RGBA data was not updated", dataOut);
        // RGBA8888 stride should be equal to the width
        assertEquals("RGBA 8888 mismatched byte[] and expected size", packedSizeOut, actualSizeOut);

        if (VERBOSE) Log.v(TAG, "validating buffer, size = " + actualSize);
    }

    public void testAllocationFromCameraFlexibleYuv() throws Exception {

        /** Number of frames (for streaming requests) to be verified. */
        final int NUM_FRAME_VERIFIED = 1;

        mCameraIterable.forEachCamera(new CameraBlock() {
            @Override
            public void run(CameraDevice camera) throws CameraAccessException {

                // Iterate over each size in the camera
                mSizeIterable.forEachSize(YUV_420_888, new SizeBlock() {
                    @Override
                    public void run(final Size size) throws CameraAccessException {
                        // Create a script graph that converts YUV to RGB
                        try (ScriptGraph scriptGraph = ScriptGraph.create()
                                .configureInputWithSurface(size, YUV_420_888)
                                .chainScript(ScriptYuvToRgb.class)
                                .buildGraph()) {

                            if (VERBOSE) Log.v(TAG, "Prepared ScriptYuvToRgb for size " + size);

                            // Run the graph against camera input and validate we get some input
                            CaptureRequest request = configureAndCreateRequestForSurface(
                                    scriptGraph.getInputSurface()).build();

                            // Block until we get 1 result, then iterate over the result
                            mResultIterable.forEachResultRepeating(
                                    request, NUM_FRAME_VERIFIED, new ResultBlock() {
                                @Override
                                public void run(CaptureResult result)
                                        throws CameraAccessException {
                                    scriptGraph.advanceInputWaiting();
                                    scriptGraph.execute();
                                    validateInputOutputNotZeroes(scriptGraph, size);
                                    scriptGraph.advanceInputAndDrop();
                                }
                            });

                            stopCapture();
                            if (VERBOSE) Log.v(TAG, "Cleanup RenderScript cache");
                            scriptGraph.close();
                            RenderScriptSingleton.clearContext();
                            RenderScriptSingleton.setContext(getContext());
                        }
                    }
                });
            }
        });
    }

    /**
     * Take two shots and ensure per-frame-control with exposure/gain is working correctly.
     *
     * <p>Takes a shot with very low ISO and exposure time. Expects it to be nearly black.</p>
     *
     * <p>Then takes a shot with very high ISO and exposure time.
     * Expects it to be nearly white.</p>
     *
     * @throws CameraAccessException if the camera could not be accessed
     */
    public void testBlackWhite() throws CameraAccessException {

        /** Low ISO + low exposure (first shot) */
        final float THRESHOLD_LOW = 0.025f;
        /** High ISO + high exposure (second shot) */
        final float THRESHOLD_HIGH = 0.975f;

        mCameraIterable.forEachCamera(/*fullHwLevel*/false, new CameraBlock() {
            @Override
            public void run(CameraDevice camera) throws CameraAccessException {
                final StaticMetadata staticInfo =
                        new StaticMetadata(
                                mCameraManager.getCameraCharacteristics(camera.getId()));

                // This test requires PFC and manual sensor control
                if (!staticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) ||
                        !staticInfo.isPerFrameControlSupported()) {
                    return;
                }

                final Size maxSize = getMaxSize(
                        getSupportedSizeForFormat(YUV_420_888, camera.getId(), mCameraManager));

                try (ScriptGraph scriptGraph = createGraphForYuvCroppedMeans(maxSize)) {

                    CaptureRequest.Builder req =
                            configureAndCreateRequestForSurface(scriptGraph.getInputSurface());

                    // Take a shot with very low ISO and exposure time. Expect it to be black.
                    int minimumSensitivity = staticInfo.getSensitivityMinimumOrDefault();
                    long minimumExposure = staticInfo.getExposureMinimumOrDefault();
                    setManualCaptureRequest(req, minimumSensitivity, minimumExposure);

                    CaptureRequest lowIsoExposureShot = req.build();
                    captureSingleShotAndExecute(lowIsoExposureShot, scriptGraph);

                    float[] blackMeans = convertPixelYuvToRgb(scriptGraph.getOutputData());

                    // Take a shot with very high ISO and exposure time. Expect it to be white.
                    int maximumSensitivity = staticInfo.getSensitivityMaximumOrDefault();
                    long maximumExposure = staticInfo.getExposureMaximumOrDefault();
                    setManualCaptureRequest(req, maximumSensitivity, maximumExposure);

                    CaptureRequest highIsoExposureShot = req.build();
                    captureSingleShotAndExecute(highIsoExposureShot, scriptGraph);

                    float[] whiteMeans = convertPixelYuvToRgb(scriptGraph.getOutputData());

                    // Low ISO + low exposure (first shot): just check and log the error.
                    for (int i = 0; i < blackMeans.length; ++i) {
                        if (blackMeans[i] >= THRESHOLD_LOW) {
                            Log.e(TAG,
                                    String.format("Black means too high: (%s should be less"
                                            + " than %s; item index %d in %s)", blackMeans[i],
                                            THRESHOLD_LOW, i,
                                            Arrays.toString(blackMeans)));
                        }
                    }

                    // High ISO + high exposure (second shot): just check and log the error
                    for (int i = 0; i < whiteMeans.length; ++i) {
                        if (whiteMeans[i] <= THRESHOLD_HIGH) {
                            Log.e(TAG,
                                    String.format("White means too low: (%s should be greater"
                                            + " than %s; item index %d in %s)", whiteMeans[i],
                                            THRESHOLD_HIGH, i,
                                            Arrays.toString(whiteMeans)));
                        }
                    }
                }
            }
        });
    }

    /**
     * Test that the android.sensor.sensitivity parameter is applied.
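     *
     * <p>Steps the sensitivity from its minimum to its maximum in {@code NUM_STEPS}
     * equal increments at a fixed 2 ms exposure, then expects the measured RGB means
     * to be non-decreasing and the min-to-max difference and ratio to exceed fixed
     * thresholds; violations are logged rather than failing the test.</p>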
     */
    public void testParamSensitivity() throws CameraAccessException {
        final float THRESHOLD_MAX_MIN_DIFF = 0.3f;
        final float THRESHOLD_MAX_MIN_RATIO = 2.0f;
        final int NUM_STEPS = 5;
        final long EXPOSURE_TIME_NS = 2000000; // 2 ms
        final int RGB_CHANNELS = 3;

        mCameraIterable.forEachCamera(/*fullHwLevel*/false, new CameraBlock() {
            @Override
            public void run(CameraDevice camera) throws CameraAccessException {
                final StaticMetadata staticInfo =
                        new StaticMetadata(
                                mCameraManager.getCameraCharacteristics(camera.getId()));
                // This test requires PFC and manual sensor control
                if (!staticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) ||
                        !staticInfo.isPerFrameControlSupported()) {
                    return;
                }

                final List<float[]> rgbMeans = new ArrayList<float[]>();
                final Size maxSize = getMaxSize(
                        getSupportedSizeForFormat(YUV_420_888, camera.getId(), mCameraManager));

                final int sensitivityMin = staticInfo.getSensitivityMinimumOrDefault();
                final int sensitivityMax = staticInfo.getSensitivityMaximumOrDefault();

                // List each sensitivity from min to max in NUM_STEPS increments
                int[] sensitivities = new int[NUM_STEPS];
                for (int i = 0; i < NUM_STEPS; ++i) {
                    int delta = (sensitivityMax - sensitivityMin) / (NUM_STEPS - 1);
                    sensitivities[i] = sensitivityMin + delta * i;
                }

                try (ScriptGraph scriptGraph = createGraphForYuvCroppedMeans(maxSize)) {

                    CaptureRequest.Builder req =
                            configureAndCreateRequestForSurface(scriptGraph.getInputSurface());

                    // Take shots with increasing sensitivity one after another.
                    for (int i = 0; i < NUM_STEPS; ++i) {
                        setManualCaptureRequest(req, sensitivities[i], EXPOSURE_TIME_NS);
                        captureSingleShotAndExecute(req.build(), scriptGraph);
                        float[] means = convertPixelYuvToRgb(scriptGraph.getOutputData());
                        rgbMeans.add(means);

                        if (VERBOSE) {
                            Log.v(TAG, "testParamSensitivity - captured image " + i +
                                    " with RGB means: " + Arrays.toString(means));
                        }
                    }

                    // Test that every consecutive image gets brighter.
                    for (int i = 0; i < rgbMeans.size() - 1; ++i) {
                        float[] curMeans = rgbMeans.get(i);
                        float[] nextMeans = rgbMeans.get(i + 1);

                        float[] left = curMeans;
                        float[] right = nextMeans;
                        String leftString = Arrays.toString(left);
                        String rightString = Arrays.toString(right);

                        String msgHeader =
                                String.format("Shot with sensitivity %d should not have higher " +
                                        "average means than shot with sensitivity %d",
                                        sensitivities[i], sensitivities[i + 1]);
                        for (int m = 0; m < left.length; ++m) {
                            String msg = String.format(
                                    "%s: (%s should be less than or equal to %s; item index %d;"
                                            + " left = %s; right = %s)",
                                    msgHeader, left[m], right[m], m, leftString, rightString);
                            if (left[m] > right[m]) {
                                Log.e(TAG, msg);
                            }
                        }
                    }

                    // Test that the min-max diff and ratio are within expected thresholds
                    float[] lastMeans = rgbMeans.get(NUM_STEPS - 1);
                    float[] firstMeans = rgbMeans.get(/*location*/0);
                    for (int i = 0; i < RGB_CHANNELS; ++i) {
                        if (lastMeans[i] - firstMeans[i] <= THRESHOLD_MAX_MIN_DIFF) {
                            Log.w(TAG, String.format("Sensitivity max-min diff too small "
                                    + "(max=%f, min=%f)", lastMeans[i], firstMeans[i]));
                        }
                        if (lastMeans[i] / firstMeans[i] <= THRESHOLD_MAX_MIN_RATIO) {
                            Log.w(TAG, String.format("Sensitivity max-min ratio too small "
                                    + "(max=%f, min=%f)", lastMeans[i], firstMeans[i]));
                        }
                    }
                }
            }
        });
    }

    /**
     * Common script graph for manual-capture based tests that determine the average pixel
     * values of a cropped sub-region.
     *
     * <p>Processing chain:
     *
     * <pre>
     * input:  YUV_420_888 surface
     * output: mean YUV value of a central section of the image,
     *         YUV 4:4:4 encoded as U8_3
     * steps:
     *   1) crop [0.45, 0.45] - [0.55, 0.55]
     *   2) average columns
     *   3) average rows
     * </pre>
     * </p>
     */
    private static ScriptGraph createGraphForYuvCroppedMeans(final Size size) {
        ScriptGraph scriptGraph = ScriptGraph.create()
                .configureInputWithSurface(size, YUV_420_888)
                .configureScript(ScriptYuvCrop.class)
                    .set(ScriptYuvCrop.CROP_WINDOW,
                            new Patch(size, /*x*/0.45f, /*y*/0.45f, /*w*/0.1f, /*h*/0.1f)
                                    .toRectF())
                    .buildScript()
                .chainScript(ScriptYuvMeans2dTo1d.class)
                .chainScript(ScriptYuvMeans1d.class)
                // TODO: Make a script for YUV 444 -> RGB 888 conversion
                .buildGraph();
        return scriptGraph;
    }

    /*
     * TODO: Refactor the code below into separate classes that do not depend on
     * AllocationTest instance variables.
     *
     * TODO: add javadocs to the methods below
     *
     * TODO: Figure out if there's some elegant way to compose these forEaches together,
     * so that the callers don't have to do a ton of nesting
     */

    interface CameraBlock {
        void run(CameraDevice camera) throws CameraAccessException;
    }

    class CameraIterable {
        public void forEachCamera(CameraBlock runnable)
                throws CameraAccessException {
            forEachCamera(/*fullHwLevel*/false, runnable);
        }

        public void forEachCamera(boolean fullHwLevel, CameraBlock runnable)
                throws CameraAccessException {
            assertNotNull("No camera manager", mCameraManager);
            assertNotNull("No camera IDs", mCameraIds);

            for (int i = 0; i < mCameraIds.length; i++) {
                // Don't execute the runnable against non-FULL cameras if FULL is required
                CameraCharacteristics properties =
                        mCameraManager.getCameraCharacteristics(mCameraIds[i]);
                StaticMetadata staticInfo = new StaticMetadata(properties);
                if (fullHwLevel && !staticInfo.isHardwareLevelAtLeastFull()) {
                    Log.i(TAG, String.format(
                            "Skipping this test for camera %s, needs FULL hw level",
                            mCameraIds[i]));
                    continue;
                }
                if (!staticInfo.isColorOutputSupported()) {
                    Log.i(TAG, String.format(
                            "Skipping this test for camera %s, does not support regular outputs",
                            mCameraIds[i]));
                    continue;
                }
                // Open camera and execute test
                Log.i(TAG, "Testing Camera " + mCameraIds[i]);
                try {
                    openDevice(mCameraIds[i]);

                    runnable.run(mCamera);
                } finally {
                    closeDevice(mCameraIds[i]);
                }
            }
        }

        private void openDevice(String cameraId) {
            if (mCamera != null) {
                throw new IllegalStateException("Already have an open camera device");
            }
            try {
                mCamera = openCamera(
                        mCameraManager, cameraId, mCameraListener, mHandler);
            } catch (CameraAccessException e) {
                fail("Failed to open camera synchronously, " + Log.getStackTraceString(e));
            } catch (BlockingOpenException e) {
                fail("Failed to open camera asynchronously, " + Log.getStackTraceString(e));
            }
            mCameraListener.waitForState(STATE_OPENED, CAMERA_OPEN_TIMEOUT_MS);
        }

        private void closeDevice(String cameraId) {
            if (mCamera != null) {
                mCamera.close();
                mCameraListener.waitForState(STATE_CLOSED, CAMERA_CLOSE_TIMEOUT_MS);
                mCamera = null;
            }
        }
    }

    interface SizeBlock {
        void run(Size size) throws CameraAccessException;
    }

    class SizeIterable {
        public void forEachSize(int format, SizeBlock runnable) throws CameraAccessException {
            assertNotNull("No camera opened", mCamera);
            assertNotNull("No camera manager", mCameraManager);

            CameraCharacteristics properties =
                    mCameraManager.getCameraCharacteristics(mCamera.getId());

            assertNotNull("Can't get camera properties!", properties);

            StreamConfigurationMap config =
                    properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            int[] availableOutputFormats = config.getOutputFormats();
            assertArrayNotEmpty(availableOutputFormats,
                    "availableOutputFormats should not be empty");
            Arrays.sort(availableOutputFormats);
            assertTrue("Can't find the format " + format + " in supported formats " +
                    Arrays.toString(availableOutputFormats),
                    Arrays.binarySearch(availableOutputFormats, format) >= 0);

            Size[] availableSizes = getSupportedSizeForFormat(format, mCamera.getId(),
                    mCameraManager);
            assertArrayNotEmpty(availableSizes, "availableSizes should not be empty");
not be empty"); 846 847 for (Size size : availableSizes) { 848 849 if (VERBOSE) { 850 Log.v(TAG, "Testing size " + size.toString() + 851 " for camera " + mCamera.getId()); 852 } 853 runnable.run(size); 854 } 855 } 856 } 857 858 interface ResultBlock { 859 void run(CaptureResult result) throws CameraAccessException; 860 } 861 862 class ResultIterable { 863 public void forEachResultOnce(CaptureRequest request, ResultBlock block) 864 throws CameraAccessException { 865 forEachResult(request, /*count*/1, /*repeating*/false, block); 866 } 867 868 public void forEachResultRepeating(CaptureRequest request, int count, ResultBlock block) 869 throws CameraAccessException { 870 forEachResult(request, count, /*repeating*/true, block); 871 } 872 873 public void forEachResult(CaptureRequest request, int count, boolean repeating, 874 ResultBlock block) throws CameraAccessException { 875 876 // TODO: start capture, i.e. configureOutputs 877 878 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 879 880 if (!repeating) { 881 for (int i = 0; i < count; ++i) { 882 mSession.capture(request, listener, mHandler); 883 } 884 } else { 885 mSession.setRepeatingRequest(request, listener, mHandler); 886 } 887 888 // Assume that the device is already IDLE. 889 mSessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_ACTIVE, 890 CAMERA_ACTIVE_TIMEOUT_MS); 891 892 for (int i = 0; i < count; ++i) { 893 if (VERBOSE) { 894 Log.v(TAG, String.format("Testing with result %d of %d for camera %s", 895 i, count, mCamera.getId())); 896 } 897 898 CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS); 899 block.run(result); 900 } 901 902 if (repeating) { 903 mSession.stopRepeating(); 904 mSessionListener.getStateWaiter().waitForState( 905 BlockingSessionCallback.SESSION_READY, CAMERA_IDLE_TIMEOUT_MS); 906 } 907 908 // TODO: Make a Configure decorator or some such for configureOutputs 909 } 910 } 911 } 912