/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static android.hardware.camera2.cts.CameraTestUtils.*;
import static android.hardware.camera2.CameraCharacteristics.*;

import android.graphics.Point;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
import android.hardware.camera2.params.BlackLevelPattern;
import android.hardware.camera2.params.ColorSpaceTransform;
import android.hardware.camera2.params.Face;
import android.hardware.camera2.params.LensShadingMap;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.RggbChannelVector;
import android.hardware.camera2.params.TonemapCurve;
import android.media.Image;
import android.os.Parcel;
import android.util.ArraySet;
import android.util.Log;
import android.util.Range;
import android.util.Rational;
import android.util.Size;
import android.view.Surface;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.junit.Test;

/**
 * <p>
 * Basic test for camera CaptureRequest key controls.
 * </p>
 * <p>
 * Several test categories are covered: manual sensor control, 3A control,
 * manual ISP control and other per-frame control and synchronization.
 * </p>
 */
public class CaptureRequestTest extends Camera2SurfaceViewTestCase {
    private static final String TAG = "CaptureRequestTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final int NUM_FRAMES_VERIFIED = 15;
    private static final int NUM_FACE_DETECTION_FRAMES_VERIFIED = 60;
    /** 30ms exposure time must be supported by full capability devices. */
    private static final long DEFAULT_EXP_TIME_NS = 30000000L; // 30ms
    private static final int DEFAULT_SENSITIVITY = 100;
    private static final int RGGB_COLOR_CHANNEL_COUNT = 4;
    private static final int MAX_SHADING_MAP_SIZE = 64 * 64 * RGGB_COLOR_CHANNEL_COUNT;
    private static final int MIN_SHADING_MAP_SIZE = 1 * 1 * RGGB_COLOR_CHANNEL_COUNT;
    private static final long IGNORE_REQUESTED_EXPOSURE_TIME_CHECK = -1L;
    private static final long EXPOSURE_TIME_BOUNDARY_50HZ_NS = 10000000L; // 10ms
    private static final long EXPOSURE_TIME_BOUNDARY_60HZ_NS = 8333333L; // 8.3ms, Approximation.
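    // Note: the two boundary values above correspond to one half-period of 50 Hz and 60 Hz mains
    // flicker (1/100 s and 1/120 s, respectively).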
    private static final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000L; // 100us, Approximation.
    private static final float EXPOSURE_TIME_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
    private static final float SENSITIVITY_ERROR_MARGIN_RATE = 0.03f; // 3%, Approximation.
    private static final int DEFAULT_NUM_EXPOSURE_TIME_STEPS = 3;
    private static final int DEFAULT_NUM_SENSITIVITY_STEPS = 8;
    private static final int DEFAULT_SENSITIVITY_STEP_SIZE = 100;
    private static final int NUM_RESULTS_WAIT_TIMEOUT = 100;
    private static final int NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY = 8;
    private static final int NUM_FRAMES_WAITED_FOR_TORCH = 100;
    private static final int NUM_TEST_FOCUS_DISTANCES = 10;
    private static final int NUM_FOCUS_DISTANCES_REPEAT = 3;
    // 5 percent error margin for calibrated device
    private static final float FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED = 0.05f;
    // 25 percent error margin for uncalibrated device
    private static final float FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED = 0.25f;
    // 10 percent error margin for approximate device
    private static final float FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE = 0.10f;
    private static final int ANTI_FLICKERING_50HZ = 1;
    private static final int ANTI_FLICKERING_60HZ = 2;
    // 5 percent error margin for resulting crop regions
    private static final float CROP_REGION_ERROR_PERCENT_DELTA = 0.05f;
    // 1 percent error margin for centering the crop region
    private static final float CROP_REGION_ERROR_PERCENT_CENTERED = 0.01f;
    private static final float DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN = 0.25f;
    private static final float DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN = 0.2f;

    // Linear tone mapping curve example.
    private static final float[] TONEMAP_CURVE_LINEAR = {0, 0, 1.0f, 1.0f};
    // Standard sRGB tone mapping, per IEC 61966-2-1:1999, with 16 control points.
    private static final float[] TONEMAP_CURVE_SRGB = {
            0.0000f, 0.0000f, 0.0667f, 0.2864f, 0.1333f, 0.4007f, 0.2000f, 0.4845f,
            0.2667f, 0.5532f, 0.3333f, 0.6125f, 0.4000f, 0.6652f, 0.4667f, 0.7130f,
            0.5333f, 0.7569f, 0.6000f, 0.7977f, 0.6667f, 0.8360f, 0.7333f, 0.8721f,
            0.8000f, 0.9063f, 0.8667f, 0.9389f, 0.9333f, 0.9701f, 1.0000f, 1.0000f
    };
    private final Rational ZERO_R = new Rational(0, 1);
    private final Rational ONE_R = new Rational(1, 1);

    private final int NUM_ALGORITHMS = 3; // AE, AWB and AF
    private final int INDEX_ALGORITHM_AE = 0;
    private final int INDEX_ALGORITHM_AWB = 1;
    private final int INDEX_ALGORITHM_AF = 2;

    private enum TorchSeqState {
        RAMPING_UP,
        FIRED,
        RAMPING_DOWN
    }

    @Override
    public void setUp() throws Exception {
        super.setUp();
    }

    @Override
    public void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * Test CaptureRequest settings parcelling.
     */
    @Test
    public void testSettingsBinderParcel() throws Exception {
        SurfaceTexture outputTexture = new SurfaceTexture(/* random texture ID */ 5);
        Surface surface = new Surface(outputTexture);

        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                openDevice(mCameraIds[i]);
                CaptureRequest.Builder requestBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                requestBuilder.addTarget(surface);

                // Check regular/default case
                CaptureRequest captureRequestOriginal = requestBuilder.build();
                Parcel p;
                p = Parcel.obtain();
                captureRequestOriginal.writeToParcel(p, 0);
                p.setDataPosition(0);
                CaptureRequest captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                assertEquals("Parcelled camera settings should match",
                        captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT),
                        new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW));
                p.recycle();

                // Check capture request with additional physical camera settings
                String physicalId = new String(Integer.toString(i + 1));
                ArraySet<String> physicalIds = new ArraySet<String>();
                physicalIds.add(physicalId);

                requestBuilder = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW,
                        physicalIds);
                requestBuilder.addTarget(surface);
                captureRequestOriginal = requestBuilder.build();
                p = Parcel.obtain();
                captureRequestOriginal.writeToParcel(p, 0);
                p.setDataPosition(0);
                captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                assertEquals("Parcelled camera settings should match",
                        captureRequestParcelled.get(CaptureRequest.CONTROL_CAPTURE_INTENT),
                        new Integer(CameraMetadata.CONTROL_CAPTURE_INTENT_PREVIEW));
                p.recycle();

                // Check various invalid cases
                p = Parcel.obtain();
                p.writeInt(-1);
                p.setDataPosition(0);
                try {
                    captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                    fail("should get RuntimeException due to invalid number of settings");
                } catch (RuntimeException e) {
                    // Expected
                }
                p.recycle();

                p = Parcel.obtain();
                p.writeInt(0);
                p.setDataPosition(0);
                try {
                    captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                    fail("should get RuntimeException due to invalid number of settings");
                } catch (RuntimeException e) {
                    // Expected
                }
                p.recycle();

                p = Parcel.obtain();
                p.writeInt(1);
                p.setDataPosition(0);
                try {
                    captureRequestParcelled = CaptureRequest.CREATOR.createFromParcel(p);
                    fail("should get RuntimeException due to absent settings");
                } catch (RuntimeException e) {
                    // Expected
                }
                p.recycle();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test black level lock when the exposure value changes.
     * <p>
     * When {@link CaptureRequest#BLACK_LEVEL_LOCK} is true in a request, the
     * camera device should lock the black level. When the exposure values are changed,
     * the camera device may need to reset the black level, since changes to certain
     * capture parameters (such as exposure time) may require resetting of black level
     * compensation. However, the black level must remain locked after exposure
     * value changes (when requests have lock ON).
     * </p>
     */
    @Test
    public void testBlackLevelLock() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                if (!mAllStaticInfo.get(mCameraIds[i]).isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    continue;
                }

                openDevice(mCameraIds[i]);
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                CaptureRequest.Builder requestBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

                // Start with default manual exposure time, with black level being locked.
                requestBuilder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
                changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY);

                Size previewSz =
                        getMaxPreviewSize(mCamera.getId(), mCameraManager,
                        getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));

                startPreview(requestBuilder, previewSz, listener);
                waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                // No lock OFF state is allowed as the exposure is not changed.
                verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/0);

                // Double the exposure time and gain, with black level still being locked.
                changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS * 2, DEFAULT_SENSITIVITY * 2);
                listener = new SimpleCaptureCallback();
                startPreview(requestBuilder, previewSz, listener);
                waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                // Allow at most one lock OFF state as the exposure is changed once.
                verifyBlackLevelLockResults(listener, NUM_FRAMES_VERIFIED, /*maxLockOffCnt*/1);

                stopPreview();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test dynamic black/white levels if they are supported.
     *
     * <p>
     * If the dynamic black and white levels are reported, test the following:
     * 1. The dynamic black and white levels shouldn't deviate too much from the global values
     * for different sensitivities.
     * 2. If RAW_SENSOR output and optical black regions are supported, capture RAW images and
     * calculate the optical black level values. The reported dynamic black level should be
     * close enough to the optical black level values.
     * </p>
     */
    @Test
    public void testDynamicBlackWhiteLevel() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (!mAllStaticInfo.get(id).isDynamicBlackLevelSupported()) {
                    continue;
                }
                openDevice(id);
                dynamicBlackWhiteLevelTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Basic lens shading map request test.
     * <p>
     * When {@link CaptureRequest#SHADING_MODE} is set to OFF, no lens shading correction will
     * be applied by the camera device, and identity lens shading map data
     * will be provided if {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} is ON.
     * </p>
     * <p>
     * When {@link CaptureRequest#SHADING_MODE} is set to other modes, lens shading correction
     * will be applied by the camera device. The lens shading map data can be
     * requested by setting {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE} to ON.
     * </p>
     */
    @Test
    public void testLensShadingMap() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                StaticMetadata staticInfo = mAllStaticInfo.get(mCameraIds[i]);
                if (!staticInfo.isManualLensShadingMapSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " doesn't support lens shading controls, skipping test");
                    continue;
                }

                List<Integer> lensShadingMapModes = Arrays.asList(CameraTestUtils.toObject(
                        staticInfo.getAvailableLensShadingMapModesChecked()));

                if (!lensShadingMapModes.contains(STATISTICS_LENS_SHADING_MAP_MODE_ON)) {
                    continue;
                }

                openDevice(mCameraIds[i]);
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                CaptureRequest.Builder requestBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                requestBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                        STATISTICS_LENS_SHADING_MAP_MODE_ON);

                Size previewSz =
                        getMaxPreviewSize(mCamera.getId(), mCameraManager,
                        getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
                List<Integer> lensShadingModes = Arrays.asList(CameraTestUtils.toObject(
                        mStaticInfo.getAvailableLensShadingModesChecked()));

                // Shading map mode OFF, lensShadingMapMode ON, camera device
                // should output unity maps.
                if (lensShadingModes.contains(SHADING_MODE_OFF)) {
                    requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_OFF);
                    listener = new SimpleCaptureCallback();
                    startPreview(requestBuilder, previewSz, listener);
                    waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                    verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_OFF);
                }

                // Shading map mode FAST, lensShadingMapMode ON, camera device
                // should output valid maps.
                if (lensShadingModes.contains(SHADING_MODE_FAST)) {
                    requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_FAST);

                    listener = new SimpleCaptureCallback();
                    startPreview(requestBuilder, previewSz, listener);
                    waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                    verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_FAST);
                }

                // Shading map mode HIGH_QUALITY, lensShadingMapMode ON, camera device
                // should output valid maps.
                if (lensShadingModes.contains(SHADING_MODE_HIGH_QUALITY)) {
                    requestBuilder.set(CaptureRequest.SHADING_MODE, SHADING_MODE_HIGH_QUALITY);

                    listener = new SimpleCaptureCallback();
                    startPreview(requestBuilder, previewSz, listener);
                    waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
                    verifyShadingMap(listener, NUM_FRAMES_VERIFIED, SHADING_MODE_HIGH_QUALITY);
                }

                stopPreview();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE} control.
     * <p>
     * Test all available anti-banding modes, and check that the exposure time adjustment is
     * correct.
     * </p>
     */
    @Test
    public void testAntiBandingModes() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                // Without manual sensor control, exposure time cannot be verified
                if (!mAllStaticInfo.get(mCameraIds[i]).isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    continue;
                }

                openDevice(mCameraIds[i]);
                int[] modes = mStaticInfo.getAeAvailableAntiBandingModesChecked();

                Size previewSz =
                        getMaxPreviewSize(mCamera.getId(), mCameraManager,
                        getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));

                for (int mode : modes) {
                    antiBandingTestByMode(previewSz, mode);
                }
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test AE mode and lock.
     *
     * <p>
     * For AE lock, when it is locked, exposure parameters shouldn't be changed.
     * For AE modes, each mode should satisfy the per-frame controls defined in
     * the API specifications.
     * </p>
     */
    @Test(timeout=60*60*1000) // timeout = 60 mins for long running tests
    public void testAeModeAndLock() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                if (!mAllStaticInfo.get(mCameraIds[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }

                openDevice(mCameraIds[i]);
                Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.

                // Update preview surface with given size for all sub-tests.
                updatePreviewSurface(maxPreviewSz);

                // Test aeMode and lock
                int[] aeModes = mStaticInfo.getAeAvailableModesChecked();
                for (int mode : aeModes) {
                    aeModeAndLockTestByMode(mode);
                }
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test {@link CaptureRequest#FLASH_MODE} control.
     * <p>
     * For each {@link CaptureRequest#FLASH_MODE} mode, test the flash control
     * and {@link CaptureResult#FLASH_STATE} result.
     * </p>
     */
    @Test
    public void testFlashControl() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                if (!mAllStaticInfo.get(mCameraIds[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }

                openDevice(mCameraIds[i]);
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                CaptureRequest.Builder requestBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

                Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.

                startPreview(requestBuilder, maxPreviewSz, listener);

                // Flash control can only be used when the AE mode is ON or OFF.
                flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_ON);

                // LEGACY won't support AE mode OFF
                boolean aeOffModeSupported = false;
                for (int aeMode : mStaticInfo.getAeAvailableModesChecked()) {
                    if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) {
                        aeOffModeSupported = true;
                    }
                }
                if (aeOffModeSupported) {
                    flashTestByAeMode(listener, CaptureRequest.CONTROL_AE_MODE_OFF);
                }

                stopPreview();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test that the flash can be successfully turned off given various initial and final
     * AE_CONTROL modes for repeating CaptureRequests.
     */
    @Test
    public void testFlashTurnOff() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                if (!mAllStaticInfo.get(mCameraIds[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }

                openDevice(mCameraIds[i]);
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                CaptureRequest.Builder requestBuilder =
                        mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

                Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.

                startPreview(requestBuilder, maxPreviewSz, listener);
                flashTurnOffTest(listener,
                        /* initialAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
                        /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);

                flashTurnOffTest(listener,
                        /* initialAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
                        /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

                flashTurnOffTest(listener,
                        /* initialAeControl */CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH,
                        /* offAeControl */CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);

                stopPreview();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test face detection modes and results.
     */
    @Test
    public void testFaceDetection() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                if (!mAllStaticInfo.get(mCameraIds[i]).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                openDevice(mCameraIds[i]);
                faceDetectionTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test tone map modes and controls.
     */
    @Test
    public void testToneMapControl() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (!mAllStaticInfo.get(id).isManualToneMapSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " doesn't support tone mapping controls, skipping test");
                    continue;
                }
                openDevice(id);
                toneMapTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test color correction modes and controls.
     */
    @Test
    public void testColorCorrectionControl() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (!mAllStaticInfo.get(id).isColorCorrectionSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " doesn't support color correction controls, skipping test");
                    continue;
                }
                openDevice(id);
                colorCorrectionTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test edge mode control for FPS ranges not exceeding 30.
     */
    @Test
    public void testEdgeModeControl() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " doesn't support EDGE_MODE controls, skipping test");
                    continue;
                }

                openDevice(id);
                List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo);
                edgeModesTestByCamera(fpsRanges);
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test edge mode control for FPS ranges greater than 30.
     */
    @Test
    public void testEdgeModeControlFastFps() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (!mAllStaticInfo.get(id).isEdgeModeControlSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " doesn't support EDGE_MODE controls, skipping test");
                    continue;
                }

                openDevice(id);
                List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo);
                edgeModesTestByCamera(fpsRanges);
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test focus distance control.
     */
    @Test
    public void testFocusDistanceControl() throws Exception {
        for (String id : mCameraIds) {
            try {
                StaticMetadata staticInfo = mAllStaticInfo.get(id);
                if (!staticInfo.hasFocuser()) {
                    Log.i(TAG, "Camera " + id + " has no focuser, skipping test");
                    continue;
                }

                if (!staticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    Log.i(TAG, "Camera " + id +
                            " does not support MANUAL_SENSOR, skipping test");
                    continue;
                }

                openDevice(id);
                focusDistanceTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test noise reduction mode for FPS ranges not exceeding 30.
     */
    @Test
    public void testNoiseReductionModeControl() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " doesn't support noise reduction mode, skipping test");
                    continue;
                }

                openDevice(id);
                List<Range<Integer>> fpsRanges = getTargetFpsRangesUpTo30(mStaticInfo);
                noiseReductionModeTestByCamera(fpsRanges);
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test noise reduction mode for FPS ranges greater than 30.
     */
    @Test
    public void testNoiseReductionModeControlFastFps() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (!mAllStaticInfo.get(id).isNoiseReductionModeControlSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " doesn't support noise reduction mode, skipping test");
                    continue;
                }

                openDevice(id);
                List<Range<Integer>> fpsRanges = getTargetFpsRangesGreaterThan30(mStaticInfo);
                noiseReductionModeTestByCamera(fpsRanges);
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test AWB lock control.
     *
     * <p>The color correction gain and transform shouldn't be changed when AWB is locked.</p>
     */
    @Test
    public void testAwbModeAndLock() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                awbModeAndLockTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test different AF modes.
     */
    @Test
    public void testAfModes() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                afModeTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test video and optical stabilizations.
     */
    @Test
    public void testCameraStabilizations() throws Exception {
        for (String id : mCameraIds) {
            try {
                StaticMetadata staticInfo = mAllStaticInfo.get(id);
                List<Key<?>> keys = staticInfo.getCharacteristics().getKeys();
                if (!(keys.contains(
                        CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES) ||
                        keys.contains(
                        CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION))) {
                    Log.i(TAG, "Camera " + id + " doesn't support any stabilization modes");
                    continue;
                }
                if (!staticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                stabilizationTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test digital zoom (center-wise and non-center-wise), and validate the returned crop regions.
     * The max preview size is used for each camera.
     */
    @Test
    public void testDigitalZoom() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                Size maxPreviewSize = mOrderedPreviewSizes.get(0);
                digitalZoomTestByCamera(maxPreviewSize);
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test digital zoom and all preview size combinations.
     * TODO: this and the above test should be moved to a preview test class.
     */
    @Test
    public void testDigitalZoomPreviewCombinations() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                digitalZoomPreviewCombinationTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test scene mode controls.
     */
    @Test
    public void testSceneModes() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (mAllStaticInfo.get(id).isSceneModeSupported()) {
                    openDevice(id);
                    sceneModeTestByCamera();
                }
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test effect mode controls.
     */
    @Test
    public void testEffectModes() throws Exception {
        for (String id : mCameraIds) {
            try {
                if (!mAllStaticInfo.get(id).isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                openDevice(id);
                effectModeTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    // TODO: add 3A state machine test.

    /**
     * Per-camera dynamic black and white level test.
     */
    private void dynamicBlackWhiteLevelTestByCamera() throws Exception {
        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
        SimpleImageReaderListener imageListener = null;
        CaptureRequest.Builder previewBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        CaptureRequest.Builder rawBuilder = null;
        Size previewSize =
                getMaxPreviewSize(mCamera.getId(), mCameraManager,
                getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
        Size rawSize = null;
        boolean canCaptureBlackRaw =
                mStaticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW) &&
                mStaticInfo.isOpticalBlackRegionSupported();
        if (canCaptureBlackRaw) {
            // Capture RAW16, then calculate the optical black level, and use it to check against
            // the dynamic black level.
            rawBuilder =
                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            rawSize = mStaticInfo.getRawDimensChecked();
            imageListener = new SimpleImageReaderListener();
            prepareRawCaptureAndStartPreview(previewBuilder, rawBuilder, previewSize, rawSize,
                    resultListener, imageListener);
        } else {
            startPreview(previewBuilder, previewSize, resultListener);
        }

        // Capture a sequence of frames with different sensitivities and validate the black/white
        // level values
        int[] sensitivities = getSensitivityTestValues();
        float[][] dynamicBlackLevels = new float[sensitivities.length][];
        int[] dynamicWhiteLevels = new int[sensitivities.length];
        float[][] opticalBlackLevels = new float[sensitivities.length][];
        for (int i = 0; i < sensitivities.length; i++) {
            CaptureResult result = null;
            if (canCaptureBlackRaw) {
                changeExposure(rawBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]);
                CaptureRequest rawRequest = rawBuilder.build();
                mSession.capture(rawRequest, resultListener, mHandler);
                result = resultListener.getCaptureResultForRequest(rawRequest,
                        NUM_RESULTS_WAIT_TIMEOUT);
                Image rawImage = imageListener.getImage(CAPTURE_IMAGE_TIMEOUT_MS);

                // Get max (area-wise) optical black region
                Rect[] opticalBlackRegions = mStaticInfo.getCharacteristics().get(
                        CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS);
                Rect maxRegion = opticalBlackRegions[0];
                for (Rect region : opticalBlackRegions) {
                    if (region.width() * region.height() >
                            maxRegion.width() * maxRegion.height()) {
                        maxRegion = region;
                    }
                }

                // Get average black pixel values in the region (region is a multiple of 2x2)
                Image.Plane rawPlane = rawImage.getPlanes()[0];
                ByteBuffer rawBuffer = rawPlane.getBuffer();
                float[] avgBlackLevels = {0, 0, 0, 0};
                final int rowSize = rawPlane.getRowStride();
                final int bytePerPixel = rawPlane.getPixelStride();
                if (VERBOSE) {
                    Log.v(TAG, "maxRegion: " + maxRegion + ", Row stride: " +
                            rawPlane.getRowStride());
                }
                for (int row = maxRegion.top; row < maxRegion.bottom; row += 2) {
                    for (int col = maxRegion.left; col < maxRegion.right; col += 2) {
                        int startOffset = row * rowSize + col * bytePerPixel;
                        avgBlackLevels[0] += rawBuffer.getShort(startOffset);
                        avgBlackLevels[1] += rawBuffer.getShort(startOffset + bytePerPixel);
                        startOffset += rowSize;
                        avgBlackLevels[2] += rawBuffer.getShort(startOffset);
                        avgBlackLevels[3] += rawBuffer.getShort(startOffset + bytePerPixel);
                    }
                }
                int numBlackBlocks = maxRegion.width() * maxRegion.height() / (2 * 2);
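                // Normalize the accumulated per-channel sums into averages over all the
                // sampled 2x2 blocks.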
                for (int m = 0; m < avgBlackLevels.length; m++) {
                    avgBlackLevels[m] /= numBlackBlocks;
                }
                opticalBlackLevels[i] = avgBlackLevels;

                if (VERBOSE) {
                    Log.v(TAG, String.format(
                            "Optical black level results for sensitivity (%d): %s",
                            sensitivities[i], Arrays.toString(avgBlackLevels)));
                }

                rawImage.close();
            } else {
                changeExposure(previewBuilder, DEFAULT_EXP_TIME_NS, sensitivities[i]);
                CaptureRequest previewRequest = previewBuilder.build();
                mSession.capture(previewRequest, resultListener, mHandler);
                result = resultListener.getCaptureResultForRequest(previewRequest,
                        NUM_RESULTS_WAIT_TIMEOUT);
            }

            dynamicBlackLevels[i] = getValueNotNull(result,
                    CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
            dynamicWhiteLevels[i] = getValueNotNull(result,
                    CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL);
        }

        if (VERBOSE) {
            Log.v(TAG, "Different sensitivities tested: " + Arrays.toString(sensitivities));
            Log.v(TAG, "Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels));
            Log.v(TAG, "Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels));
            if (canCaptureBlackRaw) {
                Log.v(TAG, "Optical black level results " +
                        Arrays.deepToString(opticalBlackLevels));
            }
        }

        // Check the dynamic black level against the global black level.
        // Implicit guarantee: if the dynamic black level is supported, the fixed black level must
        // be supported as well (tested in ExtendedCameraCharacteristicsTest#testOpticalBlackRegions).
        BlackLevelPattern blackPattern = mStaticInfo.getCharacteristics().get(
                CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
        int[] fixedBlackLevels = new int[4];
        int fixedWhiteLevel = mStaticInfo.getCharacteristics().get(
                CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL);
        blackPattern.copyTo(fixedBlackLevels, 0);
        float maxBlackDeviation = 0;
        int maxWhiteDeviation = 0;
        for (int i = 0; i < dynamicBlackLevels.length; i++) {
            for (int j = 0; j < dynamicBlackLevels[i].length; j++) {
                if (maxBlackDeviation < Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j])) {
                    maxBlackDeviation = Math.abs(fixedBlackLevels[j] - dynamicBlackLevels[i][j]);
                }
            }
            if (maxWhiteDeviation < Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel)) {
                maxWhiteDeviation = Math.abs(dynamicWhiteLevels[i] - fixedWhiteLevel);
            }
        }
        mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs fixed black level"
                + " exceeds threshold."
                + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels),
                fixedBlackLevels[0] * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN, maxBlackDeviation);
        mCollector.expectLessOrEqual("Max deviation of the dynamic white level exceeds threshold."
                + " Dynamic white level results: " + Arrays.toString(dynamicWhiteLevels),
                fixedWhiteLevel * DYNAMIC_VS_FIXED_BLK_WH_LVL_ERROR_MARGIN,
                (float) maxWhiteDeviation);

        // Validate against optical black levels if they are available
        if (canCaptureBlackRaw) {
            maxBlackDeviation = 0;
            for (int i = 0; i < dynamicBlackLevels.length; i++) {
                for (int j = 0; j < dynamicBlackLevels[i].length; j++) {
                    if (maxBlackDeviation <
                            Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j])) {
                        maxBlackDeviation =
                                Math.abs(opticalBlackLevels[i][j] - dynamicBlackLevels[i][j]);
                    }
                }
            }

            mCollector.expectLessOrEqual("Max deviation of the dynamic black level vs optical black"
                    + " exceeds threshold."
1027 + " Dynamic black level results: " + Arrays.deepToString(dynamicBlackLevels) 1028 + " Optical black level results: " + Arrays.deepToString(opticalBlackLevels), 1029 fixedBlackLevels[0] * DYNAMIC_VS_OPTICAL_BLK_LVL_ERROR_MARGIN, 1030 maxBlackDeviation); 1031 } 1032 } 1033 1034 private void noiseReductionModeTestByCamera(List<Range<Integer>> fpsRanges) throws Exception { 1035 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1036 CaptureRequest.Builder requestBuilder = 1037 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1038 int[] availableModes = mStaticInfo.getAvailableNoiseReductionModesChecked(); 1039 1040 for (int mode : availableModes) { 1041 requestBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, mode); 1042 1043 // Test that OFF and FAST mode should not slow down the frame rate. 1044 if (mode == CaptureRequest.NOISE_REDUCTION_MODE_OFF || 1045 mode == CaptureRequest.NOISE_REDUCTION_MODE_FAST) { 1046 verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges); 1047 } 1048 1049 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1050 startPreview(requestBuilder, maxPrevSize, resultListener); 1051 mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler); 1052 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1053 1054 verifyCaptureResultForKey(CaptureResult.NOISE_REDUCTION_MODE, mode, 1055 resultListener, NUM_FRAMES_VERIFIED); 1056 } 1057 1058 stopPreview(); 1059 } 1060 1061 private void focusDistanceTestByCamera() throws Exception { 1062 CaptureRequest.Builder requestBuilder = 1063 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1064 requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF); 1065 int calibrationStatus = mStaticInfo.getFocusDistanceCalibrationChecked(); 1066 float errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_UNCALIBRATED; 1067 if (calibrationStatus == 1068 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) { 1069 errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_CALIBRATED; 1070 } else if (calibrationStatus == 1071 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE) { 1072 errorMargin = FOCUS_DISTANCE_ERROR_PERCENT_APPROXIMATE; 1073 } 1074 1075 // Test changing focus distance with repeating request 1076 focusDistanceTestRepeating(requestBuilder, errorMargin); 1077 1078 if (calibrationStatus == 1079 CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED) { 1080 // Test changing focus distance with burst request 1081 focusDistanceTestBurst(requestBuilder, errorMargin); 1082 } 1083 } 1084 1085 private void focusDistanceTestRepeating(CaptureRequest.Builder requestBuilder, 1086 float errorMargin) throws Exception { 1087 CaptureRequest request; 1088 float[] testDistances = getFocusDistanceTestValuesInOrder(0, 0); 1089 Size maxPrevSize = mOrderedPreviewSizes.get(0); 1090 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1091 startPreview(requestBuilder, maxPrevSize, resultListener); 1092 1093 float[] resultDistances = new float[testDistances.length]; 1094 int[] resultLensStates = new int[testDistances.length]; 1095 1096 // Collect results 1097 for (int i = 0; i < testDistances.length; i++) { 1098 requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]); 1099 request = requestBuilder.build(); 1100 resultListener = new SimpleCaptureCallback(); 1101 mSession.setRepeatingRequest(request, resultListener, mHandler); 1102 waitForSettingsApplied(resultListener, 
            waitForResultValue(resultListener, CaptureResult.LENS_STATE,
                    CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
            CaptureResult result = resultListener.getCaptureResultForRequest(request,
                    NUM_RESULTS_WAIT_TIMEOUT);

            resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
            resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE);

            if (VERBOSE) {
                Log.v(TAG, "Capture repeating request focus distance: " + testDistances[i]
                        + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]);
            }
        }

        verifyFocusDistance(testDistances, resultDistances, resultLensStates,
                /*ascendingOrder*/true, /*noOvershoot*/false, /*repeatStart*/0, /*repeatEnd*/0,
                errorMargin);

        if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) {

            // Test hyperfocal distance optionally
            float hyperFocalDistance = mStaticInfo.getHyperfocalDistanceChecked();
            if (hyperFocalDistance > 0) {
                requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, hyperFocalDistance);
                request = requestBuilder.build();
                resultListener = new SimpleCaptureCallback();
                mSession.setRepeatingRequest(request, resultListener, mHandler);
                waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);

                // Then wait for the lens.state to be stationary.
                waitForResultValue(resultListener, CaptureResult.LENS_STATE,
                        CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);
                CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
                Float focusDistance = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
                mCollector.expectInRange("Focus distance for hyperfocal should be close enough to"
                        + " the requested value", focusDistance,
                        hyperFocalDistance * (1.0f - errorMargin),
                        hyperFocalDistance * (1.0f + errorMargin));
            }
        }
    }

    private void focusDistanceTestBurst(CaptureRequest.Builder requestBuilder,
            float errorMargin) throws Exception {

        Size maxPrevSize = mOrderedPreviewSizes.get(0);
        float[] testDistances = getFocusDistanceTestValuesInOrder(NUM_FOCUS_DISTANCES_REPEAT,
                NUM_FOCUS_DISTANCES_REPEAT);
        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
        startPreview(requestBuilder, maxPrevSize, resultListener);

        float[] resultDistances = new float[testDistances.length];
        int[] resultLensStates = new int[testDistances.length];

        final int maxPipelineDepth = mStaticInfo.getCharacteristics().get(
                CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);

        // Move lens to starting position, and wait for the lens.state to be stationary.
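        // (testDistances begins and ends with repeated values -- see verifyFocusDistance() --
        // so the lens can settle at both ends of the sweep.)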
        CaptureRequest request;
        requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[0]);
        request = requestBuilder.build();
        mSession.setRepeatingRequest(request, resultListener, mHandler);
        waitForResultValue(resultListener, CaptureResult.LENS_STATE,
                CaptureResult.LENS_STATE_STATIONARY, NUM_RESULTS_WAIT_TIMEOUT);

        // Submit burst of requests with different focus distances
        List<CaptureRequest> burst = new ArrayList<>();
        for (int i = 0; i < testDistances.length; i++) {
            requestBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, testDistances[i]);
            burst.add(requestBuilder.build());
        }
        mSession.captureBurst(burst, resultListener, mHandler);

        for (int i = 0; i < testDistances.length; i++) {
            CaptureResult result = resultListener.getCaptureResultForRequest(
                    burst.get(i), maxPipelineDepth + 1);

            resultDistances[i] = getValueNotNull(result, CaptureResult.LENS_FOCUS_DISTANCE);
            resultLensStates[i] = getValueNotNull(result, CaptureResult.LENS_STATE);

            if (VERBOSE) {
                Log.v(TAG, "Capture burst request focus distance: " + testDistances[i]
                        + " result: " + resultDistances[i] + " lens state " + resultLensStates[i]);
            }
        }

        verifyFocusDistance(testDistances, resultDistances, resultLensStates,
                /*ascendingOrder*/true, /*noOvershoot*/true,
                /*repeatStart*/NUM_FOCUS_DISTANCES_REPEAT, /*repeatEnd*/NUM_FOCUS_DISTANCES_REPEAT,
                errorMargin);
    }

    /**
     * Verify focus distance control.
     *
     * Assumptions:
     * - The first repeatStart+1 elements of requestedDistances share the same value.
     * - The last repeatEnd+1 elements of requestedDistances share the same value.
     * - All elements in between are monotonically increasing/decreasing depending on
     *   ascendingOrder.
     * - The focuser is at requestedDistances[0] at the beginning of the test.
     *
     * @param requestedDistances The requested focus distances
     * @param resultDistances The result focus distances
     * @param lensStates The result lens states
     * @param ascendingOrder The order of the expected focus distance request/output
     * @param noOvershoot Assert that focus control doesn't overshoot the requested value
     * @param repeatStart The number of times the starting focus distance is repeated
     * @param repeatEnd The number of times the ending focus distance is repeated
     * @param errorMargin The error margin between request and result
     */
    private void verifyFocusDistance(float[] requestedDistances, float[] resultDistances,
            int[] lensStates, boolean ascendingOrder, boolean noOvershoot, int repeatStart,
            int repeatEnd, float errorMargin) {

        float minValue = 0;
        float maxValue = mStaticInfo.getMinimumFocusDistanceChecked();
        float hyperfocalDistance = 0;
        if (mStaticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE)) {
            hyperfocalDistance = mStaticInfo.getHyperfocalDistanceChecked();
        }

        // Verify lens and focus distance do not change for the first repeatStart results.
        for (int i = 0; i < repeatStart; i++) {
            float marginMin = requestedDistances[i] * (1.0f - errorMargin);
            // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal].
            float marginMax =
                    Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin);

            mCollector.expectEquals("Lens moves even though focus_distance didn't change",
                    lensStates[i], CaptureResult.LENS_STATE_STATIONARY);
            if (noOvershoot) {
                mCollector.expectInRange("Focus distance in result should be close enough to " +
                        "requested value", resultDistances[i], marginMin, marginMax);
            }
            mCollector.expectInRange("Result focus distance is out of range",
                    resultDistances[i], minValue, maxValue);
        }

        for (int i = repeatStart; i < resultDistances.length - 1; i++) {
            float marginMin = requestedDistances[i] * (1.0f - errorMargin);
            // HAL may choose to use hyperfocal distance for all distances between [0, hyperfocal].
            float marginMax =
                    Math.max(requestedDistances[i], hyperfocalDistance) * (1.0f + errorMargin);
            if (noOvershoot) {
                // Result focus distance shouldn't overshoot the request
                boolean condition;
                if (ascendingOrder) {
                    condition = resultDistances[i] <= marginMax;
                } else {
                    condition = resultDistances[i] >= marginMin;
                }
                mCollector.expectTrue(String.format(
                        "Lens shouldn't move past request focus distance. result " +
                        resultDistances[i] + " vs target of " +
                        (ascendingOrder ? marginMax : marginMin)), condition);
            }

            // Verify monotonically increasing focus distance setting
            boolean condition;
            float compareDistance = resultDistances[i + 1] - resultDistances[i];
            if (i < resultDistances.length - 1 - repeatEnd) {
                condition = (ascendingOrder ? compareDistance > 0 : compareDistance < 0);
            } else {
                condition = (ascendingOrder ? compareDistance >= 0 : compareDistance <= 0);
            }
            mCollector.expectTrue(String.format("Adjacent [resultDistances, lens_state] results ["
                    + resultDistances[i] + "," + lensStates[i] + "], [" + resultDistances[i + 1]
                    + "," + lensStates[i + 1] + "] monotonicity is broken"), condition);
        }

        mCollector.expectTrue(String.format("All values of this array are equal: " +
                resultDistances[0] + " " + resultDistances[resultDistances.length - 1]),
                resultDistances[0] != resultDistances[resultDistances.length - 1]);

        // Verify lens moved to destination location.
        mCollector.expectInRange("Focus distance " + resultDistances[resultDistances.length - 1] +
                " for minFocusDistance should be close enough to requested value " +
                requestedDistances[requestedDistances.length - 1],
                resultDistances[resultDistances.length - 1],
                requestedDistances[requestedDistances.length - 1] * (1.0f - errorMargin),
                requestedDistances[requestedDistances.length - 1] * (1.0f + errorMargin));
    }

    /**
     * Verify edge mode control results for fpsRanges.
     */
    private void edgeModesTestByCamera(List<Range<Integer>> fpsRanges) throws Exception {
        Size maxPrevSize = mOrderedPreviewSizes.get(0);
        int[] edgeModes = mStaticInfo.getAvailableEdgeModesChecked();
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        for (int mode : edgeModes) {
            requestBuilder.set(CaptureRequest.EDGE_MODE, mode);

            // Test that OFF and FAST modes do not slow down the frame rate.
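            // (HIGH_QUALITY mode is documented as possibly reducing the output frame rate, so it
            // is not held to the FPS check.)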
            if (mode == CaptureRequest.EDGE_MODE_OFF ||
                    mode == CaptureRequest.EDGE_MODE_FAST) {
                verifyFpsNotSlowDown(requestBuilder, NUM_FRAMES_VERIFIED, fpsRanges);
            }

            SimpleCaptureCallback resultListener = new SimpleCaptureCallback();
            startPreview(requestBuilder, maxPrevSize, resultListener);
            mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);
            waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);

            verifyCaptureResultForKey(CaptureResult.EDGE_MODE, mode, resultListener,
                    NUM_FRAMES_VERIFIED);
        }

        stopPreview();
    }

    /**
     * Test color correction controls.
     *
     * <p>Test different color correction modes. For TRANSFORM_MATRIX, only test
     * the unit gain and identity transform.</p>
     */
    private void colorCorrectionTestByCamera() throws Exception {
        CaptureRequest request;
        CaptureResult result;
        Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
        updatePreviewSurface(maxPreviewSz);
        CaptureRequest.Builder manualRequestBuilder = createRequestForPreview();
        CaptureRequest.Builder previewRequestBuilder = createRequestForPreview();
        SimpleCaptureCallback listener = new SimpleCaptureCallback();

        startPreview(previewRequestBuilder, maxPreviewSz, listener);

        // Default preview result should give valid color correction metadata.
        result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        validateColorCorrectionResult(result,
                previewRequestBuilder.get(CaptureRequest.COLOR_CORRECTION_MODE));
        int colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
        // TRANSFORM_MATRIX mode
        // Only test unit gain and identity transform
        List<Integer> availableControlModes = Arrays.asList(
                CameraTestUtils.toObject(mStaticInfo.getAvailableControlModesChecked()));
        List<Integer> availableAwbModes = Arrays.asList(
                CameraTestUtils.toObject(mStaticInfo.getAwbAvailableModesChecked()));
        boolean isManualCCSupported =
                availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF) ||
                availableAwbModes.contains(CaptureRequest.CONTROL_AWB_MODE_OFF);
        if (isManualCCSupported) {
            if (!availableControlModes.contains(CaptureRequest.CONTROL_MODE_OFF)) {
                // Only manual AWB mode is supported
                manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
                        CaptureRequest.CONTROL_MODE_AUTO);
                manualRequestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
                        CaptureRequest.CONTROL_AWB_MODE_OFF);
            } else {
                // All 3A manual controls are supported; it doesn't matter what we set for AWB mode.
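                // CONTROL_MODE_OFF disables the auto-exposure, auto-white-balance and auto-focus
                // routines entirely, so the manual gains/transform set below take effect directly.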
                manualRequestBuilder.set(CaptureRequest.CONTROL_MODE,
                        CaptureRequest.CONTROL_MODE_OFF);
            }

            RggbChannelVector UNIT_GAIN = new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f);

            ColorSpaceTransform IDENTITY_TRANSFORM = new ColorSpaceTransform(
                    new Rational[] {
                        ONE_R, ZERO_R, ZERO_R,
                        ZERO_R, ONE_R, ZERO_R,
                        ZERO_R, ZERO_R, ONE_R
                    });

            manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
            manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, UNIT_GAIN);
            manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, IDENTITY_TRANSFORM);
            request = manualRequestBuilder.build();
            mSession.capture(request, listener, mHandler);
            result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
            RggbChannelVector gains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
            ColorSpaceTransform transform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
            validateColorCorrectionResult(result, colorCorrectionMode);
            mCollector.expectEquals("control mode result/request mismatch",
                    CaptureResult.CONTROL_MODE_OFF, result.get(CaptureResult.CONTROL_MODE));
            mCollector.expectEquals("Color correction gain result/request mismatch",
                    UNIT_GAIN, gains);
            mCollector.expectEquals("Color correction transform result/request mismatch",
                    IDENTITY_TRANSFORM, transform);
        }

        // FAST mode
        colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_FAST;
        manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
        manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
        request = manualRequestBuilder.build();
        mSession.capture(request, listener, mHandler);
        result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
        validateColorCorrectionResult(result, colorCorrectionMode);
        mCollector.expectEquals("control mode result/request mismatch",
                CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));

        // HIGH_QUALITY mode
        colorCorrectionMode = CaptureRequest.COLOR_CORRECTION_MODE_HIGH_QUALITY;
        manualRequestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
        manualRequestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, colorCorrectionMode);
        request = manualRequestBuilder.build();
        mSession.capture(request, listener, mHandler);
        result = listener.getCaptureResultForRequest(request, NUM_RESULTS_WAIT_TIMEOUT);
        validateColorCorrectionResult(result, colorCorrectionMode);
        mCollector.expectEquals("control mode result/request mismatch",
                CaptureResult.CONTROL_MODE_AUTO, result.get(CaptureResult.CONTROL_MODE));
    }

    private void validateColorCorrectionResult(CaptureResult result, int colorCorrectionMode) {
        final RggbChannelVector ZERO_GAINS = new RggbChannelVector(0, 0, 0, 0);
        final int TRANSFORM_SIZE = 9;
        Rational[] zeroTransform = new Rational[TRANSFORM_SIZE];
        Arrays.fill(zeroTransform, ZERO_R);
        final ColorSpaceTransform ZERO_TRANSFORM = new ColorSpaceTransform(zeroTransform);

        RggbChannelVector resultGain;
        if ((resultGain = mCollector.expectKeyValueNotNull(result,
                CaptureResult.COLOR_CORRECTION_GAINS)) != null) {
            mCollector.expectKeyValueNotEquals(result,
                    CaptureResult.COLOR_CORRECTION_GAINS, ZERO_GAINS);
        }

        ColorSpaceTransform resultTransform;
        if ((resultTransform = mCollector.expectKeyValueNotNull(result,
                CaptureResult.COLOR_CORRECTION_TRANSFORM)) != null) {
            mCollector.expectKeyValueNotEquals(result,
                    CaptureResult.COLOR_CORRECTION_TRANSFORM, ZERO_TRANSFORM);
        }

        mCollector.expectEquals("color correction mode result/request mismatch",
                colorCorrectionMode, result.get(CaptureResult.COLOR_CORRECTION_MODE));
    }

    /**
     * Test that flash can be turned off successfully with a given initial and final AE_CONTROL
     * state.
     *
     * This function expects that neither initialAeControl nor flashOffAeControl is
     * CaptureRequest.CONTROL_AE_MODE_ON or CaptureRequest.CONTROL_AE_MODE_OFF.
     *
     * @param listener The capture listener that is used to wait for capture results
     * @param initialAeControl The initial AE_CONTROL mode to start repeating requests with.
     * @param flashOffAeControl The final AE_CONTROL mode which is expected to turn flash off for
     *        TEMPLATE_PREVIEW repeating requests.
     */
    private void flashTurnOffTest(SimpleCaptureCallback listener, int initialAeControl,
            int flashOffAeControl) throws Exception {
        CaptureResult result;
        final int NUM_FLASH_REQUESTS_TESTED = 10;
        CaptureRequest.Builder requestBuilder = createRequestForPreview();
        requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
        requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, initialAeControl);

        mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler);
        waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);

        // For a camera that doesn't have a flash unit, the flash state should always be
        // UNAVAILABLE.
        if (mStaticInfo.getFlashInfoChecked() == false) {
            for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) {
                result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);
                mCollector.expectEquals("No flash unit available, flash state must be UNAVAILABLE"
                        + " for AE mode " + initialAeControl,
                        CaptureResult.FLASH_STATE_UNAVAILABLE,
                        result.get(CaptureResult.FLASH_STATE));
            }
            return;
        }

        // Turn on the torch using FLASH_MODE_TORCH
        requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
        requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
        CaptureRequest torchOnRequest = requestBuilder.build();
        mSession.setRepeatingRequest(torchOnRequest, listener, mHandler);
        waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_TORCH);
        result = listener.getCaptureResultForRequest(torchOnRequest, NUM_RESULTS_WAIT_TIMEOUT);
        // Test that the flash actually turned on continuously.
        mCollector.expectEquals("Flash state result must be FIRED",
                CaptureResult.FLASH_STATE_FIRED,
                result.get(CaptureResult.FLASH_STATE));

        // Turn off the torch
        requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, flashOffAeControl);
        // TODO: jchowdhary@, b/130323585, this line can be removed.
1486 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 1487 CaptureRequest flashOffRequest = requestBuilder.build(); 1488 mSession.setRepeatingRequest(flashOffRequest, listener, mHandler); 1489 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_TORCH); 1490 result = listener.getCaptureResultForRequest(flashOffRequest, NUM_RESULTS_WAIT_TIMEOUT); 1491 mCollector.expectEquals("Flash state result must be READY", CaptureResult.FLASH_STATE_READY, 1492 result.get(CaptureResult.FLASH_STATE)); 1493 } 1494 1495 /** 1496 * Test flash mode control by AE mode. 1497 * <p> 1498 * Only allow AE mode ON or OFF, because other AE mode could run into conflict with 1499 * flash manual control. This function expects the camera to already have an active 1500 * repeating request and be sending results to the listener. 1501 * </p> 1502 * 1503 * @param listener The Capture listener that is used to wait for capture result 1504 * @param aeMode The AE mode for flash to test with 1505 */ 1506 private void flashTestByAeMode(SimpleCaptureCallback listener, int aeMode) throws Exception { 1507 CaptureResult result; 1508 final int NUM_FLASH_REQUESTS_TESTED = 10; 1509 CaptureRequest.Builder requestBuilder = createRequestForPreview(); 1510 1511 if (aeMode == CaptureRequest.CONTROL_AE_MODE_ON) { 1512 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, aeMode); 1513 } else if (aeMode == CaptureRequest.CONTROL_AE_MODE_OFF) { 1514 changeExposure(requestBuilder, DEFAULT_EXP_TIME_NS, DEFAULT_SENSITIVITY); 1515 } else { 1516 throw new IllegalArgumentException("This test only works when AE mode is ON or OFF"); 1517 } 1518 1519 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 1520 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1521 1522 // For camera that doesn't have flash unit, flash state should always be UNAVAILABLE. 1523 if (mStaticInfo.getFlashInfoChecked() == false) { 1524 for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) { 1525 result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS); 1526 mCollector.expectEquals("No flash unit available, flash state must be UNAVAILABLE" 1527 + "for AE mode " + aeMode, CaptureResult.FLASH_STATE_UNAVAILABLE, 1528 result.get(CaptureResult.FLASH_STATE)); 1529 } 1530 1531 return; 1532 } 1533 1534 // Test flash SINGLE mode control. Wait for flash state to be READY first. 1535 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 1536 waitForResultValue(listener, CaptureResult.FLASH_STATE, CaptureResult.FLASH_STATE_READY, 1537 NUM_RESULTS_WAIT_TIMEOUT); 1538 } // else the settings were already waited on earlier 1539 1540 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE); 1541 CaptureRequest flashSinglerequest = requestBuilder.build(); 1542 1543 int flashModeSingleRequests = captureRequestsSynchronized( 1544 flashSinglerequest, listener, mHandler); 1545 waitForNumResults(listener, flashModeSingleRequests - 1); 1546 result = listener.getCaptureResultForRequest(flashSinglerequest, NUM_RESULTS_WAIT_TIMEOUT); 1547 // Result mode must be SINGLE, state must be FIRED. 1548 mCollector.expectEquals("Flash mode result must be SINGLE", 1549 CaptureResult.FLASH_MODE_SINGLE, result.get(CaptureResult.FLASH_MODE)); 1550 mCollector.expectEquals("Flash state result must be FIRED", 1551 CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE)); 1552 1553 // Test flash TORCH mode control. 
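        // The synchronized TORCH burst below is checked as a simple state sequence: results may
        // report FLASH_STATE_PARTIAL while the torch ramps up, must reach FLASH_STATE_FIRED at
        // some point (on the very first result for per-frame-control devices), and may drop back
        // to PARTIAL while ramping down; every result must report FLASH_MODE_TORCH.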
1554 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH); 1555 CaptureRequest torchRequest = requestBuilder.build(); 1556 1557 int flashModeTorchRequests = captureRequestsSynchronized(torchRequest, 1558 NUM_FLASH_REQUESTS_TESTED, listener, mHandler); 1559 waitForNumResults(listener, flashModeTorchRequests - NUM_FLASH_REQUESTS_TESTED); 1560 1561 // Verify the results 1562 TorchSeqState state = TorchSeqState.RAMPING_UP; 1563 for (int i = 0; i < NUM_FLASH_REQUESTS_TESTED; i++) { 1564 result = listener.getCaptureResultForRequest(torchRequest, 1565 NUM_RESULTS_WAIT_TIMEOUT); 1566 int flashMode = result.get(CaptureResult.FLASH_MODE); 1567 int flashState = result.get(CaptureResult.FLASH_STATE); 1568 // Result mode must be TORCH 1569 mCollector.expectEquals("Flash mode result " + i + " must be TORCH", 1570 CaptureResult.FLASH_MODE_TORCH, result.get(CaptureResult.FLASH_MODE)); 1571 if (state == TorchSeqState.RAMPING_UP && 1572 flashState == CaptureResult.FLASH_STATE_FIRED) { 1573 state = TorchSeqState.FIRED; 1574 } else if (state == TorchSeqState.FIRED && 1575 flashState == CaptureResult.FLASH_STATE_PARTIAL) { 1576 state = TorchSeqState.RAMPING_DOWN; 1577 } 1578 1579 if (i == 0 && mStaticInfo.isPerFrameControlSupported()) { 1580 mCollector.expectTrue( 1581 "Per frame control device must enter FIRED state on first torch request", 1582 state == TorchSeqState.FIRED); 1583 } 1584 1585 if (state == TorchSeqState.FIRED) { 1586 mCollector.expectEquals("Flash state result " + i + " must be FIRED", 1587 CaptureResult.FLASH_STATE_FIRED, result.get(CaptureResult.FLASH_STATE)); 1588 } else { 1589 mCollector.expectEquals("Flash state result " + i + " must be PARTIAL", 1590 CaptureResult.FLASH_STATE_PARTIAL, result.get(CaptureResult.FLASH_STATE)); 1591 } 1592 } 1593 mCollector.expectTrue("Torch state FIRED never seen", 1594 state == TorchSeqState.FIRED || state == TorchSeqState.RAMPING_DOWN); 1595 1596 // Test flash OFF mode control 1597 requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 1598 CaptureRequest flashOffrequest = requestBuilder.build(); 1599 1600 int flashModeOffRequests = captureRequestsSynchronized(flashOffrequest, listener, mHandler); 1601 waitForNumResults(listener, flashModeOffRequests - 1); 1602 result = listener.getCaptureResultForRequest(flashOffrequest, NUM_RESULTS_WAIT_TIMEOUT); 1603 mCollector.expectEquals("Flash mode result must be OFF", CaptureResult.FLASH_MODE_OFF, 1604 result.get(CaptureResult.FLASH_MODE)); 1605 } 1606 1607 private void verifyAntiBandingMode(SimpleCaptureCallback listener, int numFramesVerified, 1608 int mode, boolean isAeManual, long requestExpTime) throws Exception { 1609 // Skip the first a couple of frames as antibanding may not be fully up yet. 1610 final int NUM_FRAMES_SKIPPED = 5; 1611 for (int i = 0; i < NUM_FRAMES_SKIPPED; i++) { 1612 listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1613 } 1614 1615 for (int i = 0; i < numFramesVerified; i++) { 1616 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1617 Long resultExpTime = result.get(CaptureResult.SENSOR_EXPOSURE_TIME); 1618 assertNotNull("Exposure time shouldn't be null", resultExpTime); 1619 Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER); 1620 // Scene flicker result should be always available. 
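            // Summary of the per-frame checks below: the antibanding mode must echo the request;
            // with manual AE the result exposure time is validated directly against the request,
            // while with auto AE it is expected to be flicker-compensated for a 50Hz or 60Hz
            // source (for AUTO mode, whichever source STATISTICS_SCENE_FLICKER reports).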
1621 assertNotNull("Scene flicker must not be null", flicker); 1622 assertTrue("Scene flicker is invalid", flicker >= STATISTICS_SCENE_FLICKER_NONE && 1623 flicker <= STATISTICS_SCENE_FLICKER_60HZ); 1624 1625 Integer antiBandMode = result.get(CaptureResult.CONTROL_AE_ANTIBANDING_MODE); 1626 assertNotNull("antiBanding mode shouldn't be null", antiBandMode); 1627 assertTrue("antiBanding Mode invalid, should be == " + mode + ", is: " + antiBandMode, 1628 antiBandMode == mode); 1629 if (isAeManual) { 1630 // First, round down not up, second, need close enough. 1631 validateExposureTime(requestExpTime, resultExpTime); 1632 return; 1633 } 1634 1635 long expectedExpTime = resultExpTime; // Default, no exposure adjustment. 1636 if (mode == CONTROL_AE_ANTIBANDING_MODE_50HZ) { 1637 // result exposure time must be adjusted by 50Hz illuminant source. 1638 expectedExpTime = 1639 getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime); 1640 } else if (mode == CONTROL_AE_ANTIBANDING_MODE_60HZ) { 1641 // result exposure time must be adjusted by 60Hz illuminant source. 1642 expectedExpTime = 1643 getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime); 1644 } else if (mode == CONTROL_AE_ANTIBANDING_MODE_AUTO){ 1645 /** 1646 * Use STATISTICS_SCENE_FLICKER to tell the illuminant source 1647 * and do the exposure adjustment. 1648 */ 1649 expectedExpTime = resultExpTime; 1650 if (flicker == STATISTICS_SCENE_FLICKER_60HZ) { 1651 expectedExpTime = 1652 getAntiFlickeringExposureTime(ANTI_FLICKERING_60HZ, resultExpTime); 1653 } else if (flicker == STATISTICS_SCENE_FLICKER_50HZ) { 1654 expectedExpTime = 1655 getAntiFlickeringExposureTime(ANTI_FLICKERING_50HZ, resultExpTime); 1656 } 1657 } 1658 1659 if (Math.abs(resultExpTime - expectedExpTime) > EXPOSURE_TIME_ERROR_MARGIN_NS) { 1660 mCollector.addMessage(String.format("Result exposure time %dns diverges too much" 1661 + " from expected exposure time %dns for mode %d when AE is auto", 1662 resultExpTime, expectedExpTime, mode)); 1663 } 1664 } 1665 } 1666 1667 private void antiBandingTestByMode(Size size, int mode) 1668 throws Exception { 1669 if(VERBOSE) { 1670 Log.v(TAG, "Anti-banding test for mode " + mode + " for camera " + mCamera.getId()); 1671 } 1672 CaptureRequest.Builder requestBuilder = 1673 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1674 1675 requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, mode); 1676 1677 // Test auto AE mode anti-banding behavior 1678 SimpleCaptureCallback resultListener = new SimpleCaptureCallback(); 1679 startPreview(requestBuilder, size, resultListener); 1680 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1681 verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/false, 1682 IGNORE_REQUESTED_EXPOSURE_TIME_CHECK); 1683 1684 // Test manual AE mode anti-banding behavior 1685 // 65ms, must be supported by full capability devices. 1686 final long TEST_MANUAL_EXP_TIME_NS = 65000000L; 1687 long manualExpTime = mStaticInfo.getExposureClampToRange(TEST_MANUAL_EXP_TIME_NS); 1688 changeExposure(requestBuilder, manualExpTime); 1689 resultListener = new SimpleCaptureCallback(); 1690 startPreview(requestBuilder, size, resultListener); 1691 waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1692 verifyAntiBandingMode(resultListener, NUM_FRAMES_VERIFIED, mode, /*isAeManual*/true, 1693 manualExpTime); 1694 1695 stopPreview(); 1696 } 1697 1698 /** 1699 * Test the all available AE modes and AE lock. 
1700 * <p> 1701 * For manual AE mode, test iterates through different sensitivities and 1702 * exposure times, validate the result exposure time correctness. For 1703 * CONTROL_AE_MODE_ON_ALWAYS_FLASH mode, the AE lock and flash are tested. 1704 * For the rest of the AUTO mode, AE lock is tested. 1705 * </p> 1706 * 1707 * @param mode 1708 */ 1709 private void aeModeAndLockTestByMode(int mode) 1710 throws Exception { 1711 switch (mode) { 1712 case CONTROL_AE_MODE_OFF: 1713 if (mStaticInfo.isCapabilitySupported( 1714 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) { 1715 // Test manual exposure control. 1716 aeManualControlTest(); 1717 } else { 1718 Log.w(TAG, 1719 "aeModeAndLockTestByMode - can't test AE mode OFF without " + 1720 "manual sensor control"); 1721 } 1722 break; 1723 case CONTROL_AE_MODE_ON: 1724 case CONTROL_AE_MODE_ON_AUTO_FLASH: 1725 case CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE: 1726 case CONTROL_AE_MODE_ON_ALWAYS_FLASH: 1727 case CONTROL_AE_MODE_ON_EXTERNAL_FLASH: 1728 // Test AE lock for above AUTO modes. 1729 aeAutoModeTestLock(mode); 1730 break; 1731 default: 1732 throw new UnsupportedOperationException("Unhandled AE mode " + mode); 1733 } 1734 } 1735 1736 /** 1737 * Test AE auto modes. 1738 * <p> 1739 * Use single request rather than repeating request to test AE lock per frame control. 1740 * </p> 1741 */ 1742 private void aeAutoModeTestLock(int mode) throws Exception { 1743 CaptureRequest.Builder requestBuilder = 1744 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1745 if (mStaticInfo.isAeLockSupported()) { 1746 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); 1747 } 1748 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mode); 1749 configurePreviewOutput(requestBuilder); 1750 1751 final int MAX_NUM_CAPTURES_DURING_LOCK = 5; 1752 for (int i = 1; i <= MAX_NUM_CAPTURES_DURING_LOCK; i++) { 1753 autoAeMultipleCapturesThenTestLock(requestBuilder, mode, i); 1754 } 1755 } 1756 1757 /** 1758 * Issue multiple auto AE captures, then lock AE, validate the AE lock vs. 1759 * the first capture result after the AE lock. The right AE lock behavior is: 1760 * When it is locked, it locks to the current exposure value, and all subsequent 1761 * request with lock ON will have the same exposure value locked. 1762 */ 1763 private void autoAeMultipleCapturesThenTestLock( 1764 CaptureRequest.Builder requestBuilder, int aeMode, int numCapturesDuringLock) 1765 throws Exception { 1766 if (numCapturesDuringLock < 1) { 1767 throw new IllegalArgumentException("numCapturesBeforeLock must be no less than 1"); 1768 } 1769 if (VERBOSE) { 1770 Log.v(TAG, "Camera " + mCamera.getId() + ": Testing auto AE mode and lock for mode " 1771 + aeMode + " with " + numCapturesDuringLock + " captures before lock"); 1772 } 1773 1774 final int NUM_CAPTURES_BEFORE_LOCK = 2; 1775 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 1776 1777 CaptureResult[] resultsDuringLock = new CaptureResult[numCapturesDuringLock]; 1778 boolean canSetAeLock = mStaticInfo.isAeLockSupported(); 1779 1780 // Reset the AE lock to OFF, since we are reusing this builder many times 1781 if (canSetAeLock) { 1782 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false); 1783 } 1784 1785 // Just send several captures with auto AE, lock off. 
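        // Overall flow below: NUM_CAPTURES_BEFORE_LOCK unlocked auto-AE captures, then
        // numCapturesDuringLock captures with CONTROL_AE_LOCK set to true; on devices with
        // READ_SENSOR_SETTINGS, every locked result must report the same exposure time and
        // sensitivity as the first locked result.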
1786 CaptureRequest request = requestBuilder.build(); 1787 for (int i = 0; i < NUM_CAPTURES_BEFORE_LOCK; i++) { 1788 mSession.capture(request, listener, mHandler); 1789 } 1790 waitForNumResults(listener, NUM_CAPTURES_BEFORE_LOCK); 1791 1792 if (!canSetAeLock) { 1793 // Without AE lock, the remaining tests items won't work 1794 return; 1795 } 1796 1797 // Then fire several capture to lock the AE. 1798 requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true); 1799 1800 int requestCount = captureRequestsSynchronized( 1801 requestBuilder.build(), numCapturesDuringLock, listener, mHandler); 1802 1803 int[] sensitivities = new int[numCapturesDuringLock]; 1804 long[] expTimes = new long[numCapturesDuringLock]; 1805 Arrays.fill(sensitivities, -1); 1806 Arrays.fill(expTimes, -1L); 1807 1808 // Get the AE lock on result and validate the exposure values. 1809 waitForNumResults(listener, requestCount - numCapturesDuringLock); 1810 for (int i = 0; i < resultsDuringLock.length; i++) { 1811 resultsDuringLock[i] = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1812 } 1813 1814 for (int i = 0; i < numCapturesDuringLock; i++) { 1815 mCollector.expectKeyValueEquals( 1816 resultsDuringLock[i], CaptureResult.CONTROL_AE_LOCK, true); 1817 } 1818 1819 // Can't read manual sensor/exposure settings without manual sensor 1820 if (mStaticInfo.isCapabilitySupported( 1821 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) { 1822 int sensitivityLocked = 1823 getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_SENSITIVITY); 1824 long expTimeLocked = 1825 getValueNotNull(resultsDuringLock[0], CaptureResult.SENSOR_EXPOSURE_TIME); 1826 for (int i = 1; i < resultsDuringLock.length; i++) { 1827 mCollector.expectKeyValueEquals( 1828 resultsDuringLock[i], CaptureResult.SENSOR_EXPOSURE_TIME, expTimeLocked); 1829 mCollector.expectKeyValueEquals( 1830 resultsDuringLock[i], CaptureResult.SENSOR_SENSITIVITY, sensitivityLocked); 1831 } 1832 } 1833 } 1834 1835 /** 1836 * Iterate through exposure times and sensitivities for manual AE control. 1837 * <p> 1838 * Use single request rather than repeating request to test manual exposure 1839 * value change per frame control. 1840 * </p> 1841 */ 1842 private void aeManualControlTest() 1843 throws Exception { 1844 CaptureRequest.Builder requestBuilder = 1845 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1846 configurePreviewOutput(requestBuilder); 1847 1848 // Warm up pipeline for more accurate timing 1849 SimpleCaptureCallback warmupListener = new SimpleCaptureCallback(); 1850 mSession.setRepeatingRequest(requestBuilder.build(), warmupListener, mHandler); 1851 warmupListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1852 1853 // Do manual captures 1854 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF); 1855 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 1856 1857 long[] expTimesNs = getExposureTimeTestValues(); 1858 int[] sensitivities = getSensitivityTestValues(); 1859 // Submit single request at a time, then verify the result. 
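        // Illustrative sizing: getExposureTimeTestValues() returns
        // DEFAULT_NUM_EXPOSURE_TIME_STEPS + 1 exposure times and getSensitivityTestValues()
        // returns at most DEFAULT_NUM_SENSITIVITY_STEPS + 1 sensitivities, so with the default
        // constants the nested loops below issue at most 4 * 9 = 36 single captures.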
1860 for (int i = 0; i < expTimesNs.length; i++) { 1861 for (int j = 0; j < sensitivities.length; j++) { 1862 if (VERBOSE) { 1863 Log.v(TAG, "Camera " + mCamera.getId() + ": Testing sensitivity " 1864 + sensitivities[j] + ", exposure time " + expTimesNs[i] + "ns"); 1865 } 1866 1867 changeExposure(requestBuilder, expTimesNs[i], sensitivities[j]); 1868 mSession.capture(requestBuilder.build(), listener, mHandler); 1869 1870 // make sure timeout is long enough for long exposure time - add a 2x safety margin 1871 // to exposure time 1872 long timeoutMs = WAIT_FOR_RESULT_TIMEOUT_MS + 2 * expTimesNs[i] / 1000000; 1873 CaptureResult result = listener.getCaptureResult(timeoutMs); 1874 long resultExpTimeNs = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME); 1875 int resultSensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY); 1876 validateExposureTime(expTimesNs[i], resultExpTimeNs); 1877 validateSensitivity(sensitivities[j], resultSensitivity); 1878 validateFrameDurationForCapture(result); 1879 } 1880 } 1881 mSession.stopRepeating(); 1882 1883 // TODO: Add another case to test where we can submit all requests, then wait for 1884 // results, which will hide the pipeline latency. this is not only faster, but also 1885 // test high speed per frame control and synchronization. 1886 } 1887 1888 1889 /** 1890 * Verify black level lock control. 1891 */ 1892 private void verifyBlackLevelLockResults(SimpleCaptureCallback listener, int numFramesVerified, 1893 int maxLockOffCnt) throws Exception { 1894 int noLockCnt = 0; 1895 for (int i = 0; i < numFramesVerified; i++) { 1896 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1897 Boolean blackLevelLock = result.get(CaptureResult.BLACK_LEVEL_LOCK); 1898 assertNotNull("Black level lock result shouldn't be null", blackLevelLock); 1899 1900 // Count the lock == false result, which could possibly occur at most once. 1901 if (blackLevelLock == false) { 1902 noLockCnt++; 1903 } 1904 1905 if(VERBOSE) { 1906 Log.v(TAG, "Black level lock result: " + blackLevelLock); 1907 } 1908 } 1909 assertTrue("Black level lock OFF occurs " + noLockCnt + " times, expect at most " 1910 + maxLockOffCnt + " for camera " + mCamera.getId(), noLockCnt <= maxLockOffCnt); 1911 } 1912 1913 /** 1914 * Verify shading map for different shading modes. 
1915 */ 1916 private void verifyShadingMap(SimpleCaptureCallback listener, int numFramesVerified, 1917 int shadingMode) throws Exception { 1918 1919 for (int i = 0; i < numFramesVerified; i++) { 1920 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 1921 mCollector.expectEquals("Shading mode result doesn't match request", 1922 shadingMode, result.get(CaptureResult.SHADING_MODE)); 1923 LensShadingMap mapObj = result.get( 1924 CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP); 1925 assertNotNull("Map object must not be null", mapObj); 1926 int numElementsInMap = mapObj.getGainFactorCount(); 1927 float[] map = new float[numElementsInMap]; 1928 mapObj.copyGainFactors(map, /*offset*/0); 1929 assertNotNull("Map must not be null", map); 1930 assertFalse(String.format( 1931 "Map size %d should be less than %d", numElementsInMap, MAX_SHADING_MAP_SIZE), 1932 numElementsInMap >= MAX_SHADING_MAP_SIZE); 1933 assertFalse(String.format("Map size %d should be no less than %d", numElementsInMap, 1934 MIN_SHADING_MAP_SIZE), numElementsInMap < MIN_SHADING_MAP_SIZE); 1935 1936 if (shadingMode == CaptureRequest.SHADING_MODE_FAST || 1937 shadingMode == CaptureRequest.SHADING_MODE_HIGH_QUALITY) { 1938 // shading mode is FAST or HIGH_QUALITY, expect to receive a map with all 1939 // elements >= 1.0f 1940 1941 int badValueCnt = 0; 1942 // Detect the bad values of the map data. 1943 for (int j = 0; j < numElementsInMap; j++) { 1944 if (Float.isNaN(map[j]) || map[j] < 1.0f) { 1945 badValueCnt++; 1946 } 1947 } 1948 assertEquals("Number of value in the map is " + badValueCnt + " out of " 1949 + numElementsInMap, /*expected*/0, /*actual*/badValueCnt); 1950 } else if (shadingMode == CaptureRequest.SHADING_MODE_OFF) { 1951 float[] unityMap = new float[numElementsInMap]; 1952 Arrays.fill(unityMap, 1.0f); 1953 // shading mode is OFF, expect to receive a unity map. 1954 assertTrue("Result map " + Arrays.toString(map) + " must be an unity map", 1955 Arrays.equals(unityMap, map)); 1956 } 1957 } 1958 } 1959 1960 /** 1961 * Test face detection for a camera. 1962 */ 1963 private void faceDetectionTestByCamera() throws Exception { 1964 int[] faceDetectModes = mStaticInfo.getAvailableFaceDetectModesChecked(); 1965 1966 SimpleCaptureCallback listener; 1967 CaptureRequest.Builder requestBuilder = 1968 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 1969 1970 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size. 1971 for (int mode : faceDetectModes) { 1972 requestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mode); 1973 if (VERBOSE) { 1974 Log.v(TAG, "Start testing face detection mode " + mode); 1975 } 1976 1977 // Create a new listener for each run to avoid the results from one run spill 1978 // into another run. 1979 listener = new SimpleCaptureCallback(); 1980 startPreview(requestBuilder, maxPreviewSz, listener); 1981 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 1982 verifyFaceDetectionResults(listener, NUM_FACE_DETECTION_FRAMES_VERIFIED, mode); 1983 } 1984 1985 stopPreview(); 1986 } 1987 1988 /** 1989 * Verify face detection results for different face detection modes. 
1990 *
1991 * @param listener The listener to get capture result
1992 * @param numFramesVerified Number of results to be verified
1993 * @param faceDetectionMode Face detection mode to be verified against
1994 */
1995 private void verifyFaceDetectionResults(SimpleCaptureCallback listener, int numFramesVerified,
1996 int faceDetectionMode) {
1997 for (int i = 0; i < numFramesVerified; i++) {
1998 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
1999 mCollector.expectEquals("Result face detection mode should match the request",
2000 faceDetectionMode, result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE));
2001
2002 Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
2003 List<Integer> faceIds = new ArrayList<Integer>(faces.length);
2004 List<Integer> faceScores = new ArrayList<Integer>(faces.length);
2005 if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
2006 mCollector.expectEquals("Number of detected faces should always be 0 for OFF mode",
2007 0, faces.length);
2008 } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
2009 for (Face face : faces) {
2010 mCollector.expectNotNull("Face rectangle shouldn't be null", face.getBounds());
2011 faceScores.add(face.getScore());
2012 mCollector.expectTrue("Face id is expected to be -1 for SIMPLE mode",
2013 face.getId() == Face.ID_UNSUPPORTED);
2014 }
2015 } else if (faceDetectionMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
2016 if (VERBOSE) {
2017 Log.v(TAG, "Number of faces detected: " + faces.length);
2018 }
2019
2020 for (Face face : faces) {
2021 Rect faceBound;
2022 boolean faceRectAvailable = mCollector.expectTrue("Face rectangle "
2023 + "shouldn't be null", face.getBounds() != null);
2024 if (!faceRectAvailable) {
2025 continue;
2026 }
2027 faceBound = face.getBounds();
2028
2029 faceScores.add(face.getScore());
2030 faceIds.add(face.getId());
2031
2032 mCollector.expectTrue("Face id shouldn't be -1 for FULL mode",
2033 face.getId() != Face.ID_UNSUPPORTED);
2034 boolean leftEyeAvailable =
2035 mCollector.expectTrue("Left eye position shouldn't be null",
2036 face.getLeftEyePosition() != null);
2037 boolean rightEyeAvailable =
2038 mCollector.expectTrue("Right eye position shouldn't be null",
2039 face.getRightEyePosition() != null);
2040 boolean mouthAvailable =
2041 mCollector.expectTrue("Mouth position shouldn't be null",
2042 face.getMouthPosition() != null);
2043 // Eyes/mouth position should be inside of the face rect.
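                    // Illustrative example: a face with bounds (100, 100, 400, 400) must report
                    // eye and mouth positions such as (180, 220) that fall inside that rectangle;
                    // the checks below use Rect.contains() on each available landmark.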
2044 if (leftEyeAvailable) { 2045 Point leftEye = face.getLeftEyePosition(); 2046 mCollector.expectTrue("Left eye " + leftEye + "should be" 2047 + "inside of face rect " + faceBound, 2048 faceBound.contains(leftEye.x, leftEye.y)); 2049 } 2050 if (rightEyeAvailable) { 2051 Point rightEye = face.getRightEyePosition(); 2052 mCollector.expectTrue("Right eye " + rightEye + "should be" 2053 + "inside of face rect " + faceBound, 2054 faceBound.contains(rightEye.x, rightEye.y)); 2055 } 2056 if (mouthAvailable) { 2057 Point mouth = face.getMouthPosition(); 2058 mCollector.expectTrue("Mouth " + mouth + " should be inside of" 2059 + " face rect " + faceBound, 2060 faceBound.contains(mouth.x, mouth.y)); 2061 } 2062 } 2063 } 2064 mCollector.expectValuesInRange("Face scores are invalid", faceScores, 2065 Face.SCORE_MIN, Face.SCORE_MAX); 2066 mCollector.expectValuesUnique("Face ids are invalid", faceIds); 2067 } 2068 } 2069 2070 /** 2071 * Test tone map mode and result by camera 2072 */ 2073 private void toneMapTestByCamera() throws Exception { 2074 if (!mStaticInfo.isManualToneMapSupported()) { 2075 return; 2076 } 2077 2078 CaptureRequest.Builder requestBuilder = 2079 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2080 int[] toneMapModes = mStaticInfo.getAvailableToneMapModesChecked(); 2081 // Test AUTO modes first. Note that FAST/HQ must both present or not present 2082 for (int i = 0; i < toneMapModes.length; i++) { 2083 if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_FAST && i > 0) { 2084 int tmpMode = toneMapModes[0]; 2085 toneMapModes[0] = CaptureRequest.TONEMAP_MODE_FAST; 2086 toneMapModes[i] = tmpMode; 2087 } 2088 if (toneMapModes[i] == CaptureRequest.TONEMAP_MODE_HIGH_QUALITY && i > 1) { 2089 int tmpMode = toneMapModes[1]; 2090 toneMapModes[1] = CaptureRequest.TONEMAP_MODE_HIGH_QUALITY; 2091 toneMapModes[i] = tmpMode; 2092 } 2093 } 2094 for (int mode : toneMapModes) { 2095 if (VERBOSE) { 2096 Log.v(TAG, "Testing tonemap mode " + mode); 2097 } 2098 2099 requestBuilder.set(CaptureRequest.TONEMAP_MODE, mode); 2100 switch (mode) { 2101 case CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE: 2102 TonemapCurve toneCurve = new TonemapCurve(TONEMAP_CURVE_LINEAR, 2103 TONEMAP_CURVE_LINEAR, TONEMAP_CURVE_LINEAR); 2104 requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve); 2105 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2106 2107 toneCurve = new TonemapCurve(TONEMAP_CURVE_SRGB, 2108 TONEMAP_CURVE_SRGB, TONEMAP_CURVE_SRGB); 2109 requestBuilder.set(CaptureRequest.TONEMAP_CURVE, toneCurve); 2110 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2111 break; 2112 case CaptureRequest.TONEMAP_MODE_GAMMA_VALUE: 2113 requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 1.0f); 2114 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2115 requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f); 2116 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2117 requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 5.0f); 2118 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2119 break; 2120 case CaptureRequest.TONEMAP_MODE_PRESET_CURVE: 2121 requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE, 2122 CaptureRequest.TONEMAP_PRESET_CURVE_REC709); 2123 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2124 requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE, 2125 CaptureRequest.TONEMAP_PRESET_CURVE_SRGB); 2126 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2127 break; 2128 default: 2129 testToneMapMode(NUM_FRAMES_VERIFIED, requestBuilder); 2130 break; 2131 } 2132 } 2133 2134 2135 } 
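    // Note on the curve encoding used above: each float[] passed to TonemapCurve is a flattened
    // list of (Pin, Pout) control points, so TONEMAP_CURVE_LINEAR = {0, 0, 1.0f, 1.0f} encodes
    // the two points (0, 0) and (1, 1), and new TonemapCurve(curve, curve, curve) applies the
    // same curve to the red, green, and blue channels.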
2136
2137 /**
2138 * Test tonemap mode with specified request settings
2139 *
2140 * @param numFramesVerified Number of results to be verified
2141 * @param requestBuilder the request builder of settings to be tested
2142 */
2143 private void testToneMapMode(int numFramesVerified,
2144 CaptureRequest.Builder requestBuilder) throws Exception {
2145 final int MIN_TONEMAP_CURVE_POINTS = 2;
2146 final Float ZERO = new Float(0);
2147 final Float ONE = new Float(1.0f);
2148
2149 SimpleCaptureCallback listener = new SimpleCaptureCallback();
2150 int tonemapMode = requestBuilder.get(CaptureRequest.TONEMAP_MODE);
2151 Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
2152 startPreview(requestBuilder, maxPreviewSz, listener);
2153 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
2154
2155 int maxCurvePoints = mStaticInfo.getMaxTonemapCurvePointChecked();
2156 for (int i = 0; i < numFramesVerified; i++) {
2157 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
2158 mCollector.expectEquals("Capture result tonemap mode should match request", tonemapMode,
2159 result.get(CaptureResult.TONEMAP_MODE));
2160 TonemapCurve tc = getValueNotNull(result, CaptureResult.TONEMAP_CURVE);
2161 int pointCount = tc.getPointCount(TonemapCurve.CHANNEL_RED);
2162 float[] mapRed = new float[pointCount * TonemapCurve.POINT_SIZE];
2163 pointCount = tc.getPointCount(TonemapCurve.CHANNEL_GREEN);
2164 float[] mapGreen = new float[pointCount * TonemapCurve.POINT_SIZE];
2165 pointCount = tc.getPointCount(TonemapCurve.CHANNEL_BLUE);
2166 float[] mapBlue = new float[pointCount * TonemapCurve.POINT_SIZE];
2167 tc.copyColorCurve(TonemapCurve.CHANNEL_RED, mapRed, 0);
2168 tc.copyColorCurve(TonemapCurve.CHANNEL_GREEN, mapGreen, 0);
2169 tc.copyColorCurve(TonemapCurve.CHANNEL_BLUE, mapBlue, 0);
2170 if (tonemapMode == CaptureResult.TONEMAP_MODE_CONTRAST_CURVE) {
2171 /**
2172 * TODO: need to figure out a good way to measure the difference
2173 * between request and result, as they may have different array
2174 * sizes.
2175 */
2176 } else if (tonemapMode == CaptureResult.TONEMAP_MODE_GAMMA_VALUE) {
2177 mCollector.expectEquals("Capture result gamma value should match request",
2178 requestBuilder.get(CaptureRequest.TONEMAP_GAMMA),
2179 result.get(CaptureResult.TONEMAP_GAMMA));
2180 } else if (tonemapMode == CaptureResult.TONEMAP_MODE_PRESET_CURVE) {
2181 mCollector.expectEquals("Capture result preset curve should match request",
2182 requestBuilder.get(CaptureRequest.TONEMAP_PRESET_CURVE),
2183 result.get(CaptureResult.TONEMAP_PRESET_CURVE));
2184 }
2185
2186 // Tonemap curve result availability and basic sanity check for all modes.
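            // Each per-channel array copied above has pointCount * TonemapCurve.POINT_SIZE
            // entries, i.e. a flattened list of (Pin, Pout) pairs; hence the length bound of
            // maxCurvePoints * 2 below, with every value required to lie in [0.0, 1.0].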
2187 mCollector.expectValuesInRange("Tonemap curve red values are out of range", 2188 CameraTestUtils.toObject(mapRed), /*min*/ZERO, /*max*/ONE); 2189 mCollector.expectInRange("Tonemap curve red length is out of range", 2190 mapRed.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2191 mCollector.expectValuesInRange("Tonemap curve green values are out of range", 2192 CameraTestUtils.toObject(mapGreen), /*min*/ZERO, /*max*/ONE); 2193 mCollector.expectInRange("Tonemap curve green length is out of range", 2194 mapGreen.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2195 mCollector.expectValuesInRange("Tonemap curve blue values are out of range", 2196 CameraTestUtils.toObject(mapBlue), /*min*/ZERO, /*max*/ONE); 2197 mCollector.expectInRange("Tonemap curve blue length is out of range", 2198 mapBlue.length, MIN_TONEMAP_CURVE_POINTS, maxCurvePoints * 2); 2199 2200 // Make sure capture result tonemap has identical channels. 2201 if (mStaticInfo.isMonochromeCamera()) { 2202 mCollector.expectEquals("Capture result tonemap of monochrome camera should " + 2203 "have same dimension for all channels", mapRed.length, mapGreen.length); 2204 mCollector.expectEquals("Capture result tonemap of monochrome camera should " + 2205 "have same dimension for all channels", mapRed.length, mapBlue.length); 2206 2207 if (mapRed.length == mapGreen.length && mapRed.length == mapBlue.length) { 2208 boolean isIdentical = true; 2209 for (int j = 0; j < mapRed.length; j++) { 2210 isIdentical = (mapRed[j] == mapGreen[j] && mapRed[j] == mapBlue[j]); 2211 if (!isIdentical) 2212 break; 2213 } 2214 mCollector.expectTrue("Capture result tonemap of monochrome camera should " + 2215 "be identical between all channels", isIdentical); 2216 } 2217 } 2218 } 2219 stopPreview(); 2220 } 2221 2222 /** 2223 * Test awb mode control. 2224 * <p> 2225 * Test each supported AWB mode, verify the AWB mode in capture result 2226 * matches request. When AWB is locked, the color correction gains and 2227 * transform should remain unchanged. 2228 * </p> 2229 */ 2230 private void awbModeAndLockTestByCamera() throws Exception { 2231 int[] awbModes = mStaticInfo.getAwbAvailableModesChecked(); 2232 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2233 boolean canSetAwbLock = mStaticInfo.isAwbLockSupported(); 2234 CaptureRequest.Builder requestBuilder = 2235 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2236 startPreview(requestBuilder, maxPreviewSize, /*listener*/null); 2237 2238 for (int mode : awbModes) { 2239 SimpleCaptureCallback listener; 2240 requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE, mode); 2241 listener = new SimpleCaptureCallback(); 2242 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2243 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2244 2245 // Verify AWB mode in capture result. 2246 verifyCaptureResultForKey(CaptureResult.CONTROL_AWB_MODE, mode, listener, 2247 NUM_FRAMES_VERIFIED); 2248 2249 if (mode == CameraMetadata.CONTROL_AWB_MODE_AUTO && canSetAwbLock) { 2250 // Verify color correction transform and gains stay unchanged after a lock. 
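                // Lock AWB, wait until CONTROL_AWB_STATE reports LOCKED (when that key is
                // available), and then rely on verifyAwbCaptureResultUnchanged() below to confirm
                // that COLOR_CORRECTION_GAINS and COLOR_CORRECTION_TRANSFORM stay constant while
                // locked.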
2251 requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true); 2252 listener = new SimpleCaptureCallback(); 2253 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2254 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2255 2256 if (mStaticInfo.areKeysAvailable(CaptureResult.CONTROL_AWB_STATE)) { 2257 waitForResultValue(listener, CaptureResult.CONTROL_AWB_STATE, 2258 CaptureResult.CONTROL_AWB_STATE_LOCKED, NUM_RESULTS_WAIT_TIMEOUT); 2259 } 2260 2261 } 2262 // Don't verify auto mode result if AWB lock is not supported 2263 if (mode != CameraMetadata.CONTROL_AWB_MODE_AUTO || canSetAwbLock) { 2264 verifyAwbCaptureResultUnchanged(listener, NUM_FRAMES_VERIFIED); 2265 } 2266 } 2267 } 2268 2269 private void verifyAwbCaptureResultUnchanged(SimpleCaptureCallback listener, 2270 int numFramesVerified) { 2271 // Skip check if cc gains/transform/mode are not available 2272 if (!mStaticInfo.areKeysAvailable( 2273 CaptureResult.COLOR_CORRECTION_GAINS, 2274 CaptureResult.COLOR_CORRECTION_TRANSFORM, 2275 CaptureResult.COLOR_CORRECTION_MODE)) { 2276 return; 2277 } 2278 2279 CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2280 RggbChannelVector lockedGains = 2281 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS); 2282 ColorSpaceTransform lockedTransform = 2283 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM); 2284 2285 for (int i = 0; i < numFramesVerified; i++) { 2286 result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS); 2287 // Color correction mode check is skipped here, as it is checked in colorCorrectionTest. 2288 validateColorCorrectionResult(result, result.get(CaptureResult.COLOR_CORRECTION_MODE)); 2289 2290 RggbChannelVector gains = getValueNotNull(result, CaptureResult.COLOR_CORRECTION_GAINS); 2291 ColorSpaceTransform transform = 2292 getValueNotNull(result, CaptureResult.COLOR_CORRECTION_TRANSFORM); 2293 mCollector.expectEquals("Color correction gains should remain unchanged after awb lock", 2294 lockedGains, gains); 2295 mCollector.expectEquals("Color correction transform should remain unchanged after" 2296 + " awb lock", lockedTransform, transform); 2297 } 2298 } 2299 2300 /** 2301 * Test AF mode control. 2302 * <p> 2303 * Test all supported AF modes, verify the AF mode in capture result matches 2304 * request. When AF mode is one of the CONTROL_AF_MODE_CONTINUOUS_* mode, 2305 * verify if the AF can converge to PASSIVE_FOCUSED or PASSIVE_UNFOCUSED 2306 * state within certain amount of frames. 2307 * </p> 2308 */ 2309 private void afModeTestByCamera() throws Exception { 2310 int[] afModes = mStaticInfo.getAfAvailableModesChecked(); 2311 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2312 CaptureRequest.Builder requestBuilder = 2313 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2314 startPreview(requestBuilder, maxPreviewSize, /*listener*/null); 2315 2316 for (int mode : afModes) { 2317 SimpleCaptureCallback listener; 2318 requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mode); 2319 listener = new SimpleCaptureCallback(); 2320 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2321 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2322 2323 // Verify AF mode in capture result. 2324 verifyCaptureResultForKey(CaptureResult.CONTROL_AF_MODE, mode, listener, 2325 NUM_FRAMES_VERIFIED); 2326 2327 // Verify AF can finish a scan for CONTROL_AF_MODE_CONTINUOUS_* modes. 
2328 // In LEGACY mode, a transition to one of the continuous AF modes does not necessarily 2329 // result in a passive AF call if the camera has already been focused, and the scene has 2330 // not changed enough to trigger an AF pass. Skip this constraint for LEGACY. 2331 if (mStaticInfo.isHardwareLevelAtLeastLimited() && 2332 (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE || 2333 mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO)) { 2334 List<Integer> afStateList = new ArrayList<Integer>(); 2335 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED); 2336 afStateList.add(CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED); 2337 waitForAnyResultValue(listener, CaptureResult.CONTROL_AF_STATE, afStateList, 2338 NUM_RESULTS_WAIT_TIMEOUT); 2339 } 2340 } 2341 } 2342 2343 /** 2344 * Test video and optical stabilizations if they are supported by a given camera. 2345 */ 2346 private void stabilizationTestByCamera() throws Exception { 2347 // video stabilization test. 2348 List<Key<?>> keys = mStaticInfo.getCharacteristics().getKeys(); 2349 2350 Integer[] videoStabModes = (keys.contains(CameraCharacteristics. 2351 CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES)) ? 2352 CameraTestUtils.toObject(mStaticInfo.getAvailableVideoStabilizationModesChecked()) : 2353 new Integer[0]; 2354 int[] opticalStabModes = (keys.contains( 2355 CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION)) ? 2356 mStaticInfo.getAvailableOpticalStabilizationChecked() : new int[0]; 2357 2358 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2359 CaptureRequest.Builder requestBuilder = 2360 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2361 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2362 startPreview(requestBuilder, maxPreviewSize, listener); 2363 2364 for (Integer mode : videoStabModes) { 2365 listener = new SimpleCaptureCallback(); 2366 requestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, mode); 2367 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2368 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2369 // Video stabilization could return any modes. 2370 verifyAnyCaptureResultForKey(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE, 2371 videoStabModes, listener, NUM_FRAMES_VERIFIED); 2372 } 2373 2374 for (int mode : opticalStabModes) { 2375 listener = new SimpleCaptureCallback(); 2376 requestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, mode); 2377 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2378 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2379 verifyCaptureResultForKey(CaptureResult.LENS_OPTICAL_STABILIZATION_MODE, mode, 2380 listener, NUM_FRAMES_VERIFIED); 2381 } 2382 2383 stopPreview(); 2384 } 2385 2386 private void digitalZoomTestByCamera(Size previewSize) throws Exception { 2387 final int ZOOM_STEPS = 15; 2388 final PointF[] TEST_ZOOM_CENTERS; 2389 final float maxZoom = mStaticInfo.getAvailableMaxDigitalZoomChecked(); 2390 final float ZOOM_ERROR_MARGIN = 0.01f; 2391 if (Math.abs(maxZoom - 1.0f) < ZOOM_ERROR_MARGIN) { 2392 // It doesn't make much sense to test the zoom if the device effectively supports 2393 // no zoom. 2394 return; 2395 } 2396 2397 final int croppingType = mStaticInfo.getScalerCroppingTypeChecked(); 2398 if (croppingType == CameraCharacteristics.SCALER_CROPPING_TYPE_FREEFORM) { 2399 // Set the four corners in a way that the minimally allowed zoom factor is 2x. 
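            // Reasoning for the values below: with zoom centers at 0.25/0.75 of the active array,
            // the largest crop that stays inside the array is half the array in each dimension,
            // i.e. a 2x zoom factor; when maxZoom < 2 the centers are pulled further inward so
            // the maximum supported zoom is still exercised at all four corners.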
2400 float normalizedLeft = 0.25f; 2401 float normalizedTop = 0.25f; 2402 float normalizedRight = 0.75f; 2403 float normalizedBottom = 0.75f; 2404 // If the max supported zoom is too small, make sure we at least test the max 2405 // Zoom is tested for the four corners. 2406 if (maxZoom < 2.0f) { 2407 normalizedLeft = 0.5f / maxZoom; 2408 normalizedTop = 0.5f / maxZoom; 2409 normalizedRight = 1.0f - normalizedLeft; 2410 normalizedBottom = 1.0f - normalizedTop; 2411 } 2412 TEST_ZOOM_CENTERS = new PointF[] { 2413 new PointF(0.5f, 0.5f), // Center point 2414 new PointF(normalizedLeft, normalizedTop), // top left corner zoom 2415 new PointF(normalizedRight, normalizedTop), // top right corner zoom 2416 new PointF(normalizedLeft, normalizedBottom), // bottom left corner zoom 2417 new PointF(normalizedRight, normalizedBottom), // bottom right corner zoom 2418 }; 2419 2420 if (VERBOSE) { 2421 Log.v(TAG, "Testing zoom with CROPPING_TYPE = FREEFORM"); 2422 } 2423 } else { 2424 // CENTER_ONLY 2425 TEST_ZOOM_CENTERS = new PointF[] { 2426 new PointF(0.5f, 0.5f), // Center point 2427 }; 2428 2429 if (VERBOSE) { 2430 Log.v(TAG, "Testing zoom with CROPPING_TYPE = CENTER_ONLY"); 2431 } 2432 } 2433 2434 final Rect activeArraySize = mStaticInfo.getActiveArraySizeChecked(); 2435 Rect[] cropRegions = new Rect[ZOOM_STEPS]; 2436 MeteringRectangle[][] expectRegions = new MeteringRectangle[ZOOM_STEPS][]; 2437 CaptureRequest.Builder requestBuilder = 2438 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2439 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2440 2441 updatePreviewSurface(previewSize); 2442 configurePreviewOutput(requestBuilder); 2443 2444 CaptureRequest[] requests = new CaptureRequest[ZOOM_STEPS]; 2445 2446 // Set algorithm regions to full active region 2447 // TODO: test more different 3A regions 2448 final MeteringRectangle[] defaultMeteringRect = new MeteringRectangle[] { 2449 new MeteringRectangle ( 2450 /*x*/0, /*y*/0, activeArraySize.width(), activeArraySize.height(), 2451 /*meteringWeight*/1) 2452 }; 2453 2454 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 2455 update3aRegion(requestBuilder, algo, defaultMeteringRect); 2456 } 2457 2458 final int CAPTURE_SUBMIT_REPEAT; 2459 { 2460 int maxLatency = mStaticInfo.getSyncMaxLatency(); 2461 if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) { 2462 CAPTURE_SUBMIT_REPEAT = NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY + 1; 2463 } else { 2464 CAPTURE_SUBMIT_REPEAT = maxLatency + 1; 2465 } 2466 } 2467 2468 if (VERBOSE) { 2469 Log.v(TAG, "Testing zoom with CAPTURE_SUBMIT_REPEAT = " + CAPTURE_SUBMIT_REPEAT); 2470 } 2471 2472 for (PointF center : TEST_ZOOM_CENTERS) { 2473 Rect previousCrop = null; 2474 2475 for (int i = 0; i < ZOOM_STEPS; i++) { 2476 /* 2477 * Submit capture request 2478 */ 2479 float zoomFactor = (float) (1.0f + (maxZoom - 1.0) * i / ZOOM_STEPS); 2480 cropRegions[i] = getCropRegionForZoom(zoomFactor, center, maxZoom, activeArraySize); 2481 if (VERBOSE) { 2482 Log.v(TAG, "Testing Zoom for factor " + zoomFactor + " and center " + 2483 center + " The cropRegion is " + cropRegions[i] + 2484 " Preview size is " + previewSize); 2485 } 2486 requestBuilder.set(CaptureRequest.SCALER_CROP_REGION, cropRegions[i]); 2487 requests[i] = requestBuilder.build(); 2488 for (int j = 0; j < CAPTURE_SUBMIT_REPEAT; ++j) { 2489 if (VERBOSE) { 2490 Log.v(TAG, "submit crop region " + cropRegions[i]); 2491 } 2492 mSession.capture(requests[i], listener, mHandler); 2493 } 2494 2495 /* 2496 * Validate capture result 2497 */ 2498 
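                // The same request was submitted CAPTURE_SUBMIT_REPEAT times above, so the first
                // CAPTURE_SUBMIT_REPEAT - 1 results are drained and only the settled result for
                // this crop region is validated.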
waitForNumResults(listener, CAPTURE_SUBMIT_REPEAT - 1); // Drop first few frames 2499 CaptureResult result = listener.getCaptureResultForRequest( 2500 requests[i], NUM_RESULTS_WAIT_TIMEOUT); 2501 Rect cropRegion = getValueNotNull(result, CaptureResult.SCALER_CROP_REGION); 2502 2503 /* 2504 * Validate resulting crop regions 2505 */ 2506 if (previousCrop != null) { 2507 Rect currentCrop = cropRegion; 2508 mCollector.expectTrue(String.format( 2509 "Crop region should shrink or stay the same " + 2510 "(previous = %s, current = %s)", 2511 previousCrop, currentCrop), 2512 previousCrop.equals(currentCrop) || 2513 (previousCrop.width() > currentCrop.width() && 2514 previousCrop.height() > currentCrop.height())); 2515 } 2516 2517 if (mStaticInfo.isHardwareLevelAtLeastLimited()) { 2518 mCollector.expectRectsAreSimilar( 2519 "Request and result crop region should be similar", 2520 cropRegions[i], cropRegion, CROP_REGION_ERROR_PERCENT_DELTA); 2521 } 2522 2523 if (croppingType == SCALER_CROPPING_TYPE_CENTER_ONLY) { 2524 mCollector.expectRectCentered( 2525 "Result crop region should be centered inside the active array", 2526 new Size(activeArraySize.width(), activeArraySize.height()), 2527 cropRegion, CROP_REGION_ERROR_PERCENT_CENTERED); 2528 } 2529 2530 /* 2531 * Validate resulting metering regions 2532 */ 2533 2534 // Use the actual reported crop region to calculate the resulting metering region 2535 expectRegions[i] = getExpectedOutputRegion( 2536 /*requestRegion*/defaultMeteringRect, 2537 /*cropRect*/ cropRegion); 2538 2539 // Verify Output 3A region is intersection of input 3A region and crop region 2540 for (int algo = 0; algo < NUM_ALGORITHMS; algo++) { 2541 validate3aRegion(result, algo, expectRegions[i]); 2542 } 2543 2544 previousCrop = cropRegion; 2545 } 2546 2547 if (maxZoom > 1.0f) { 2548 mCollector.expectTrue( 2549 String.format("Most zoomed-in crop region should be smaller" + 2550 "than active array w/h" + 2551 "(last crop = %s, active array = %s)", 2552 previousCrop, activeArraySize), 2553 (previousCrop.width() < activeArraySize.width() && 2554 previousCrop.height() < activeArraySize.height())); 2555 } 2556 } 2557 } 2558 2559 private void digitalZoomPreviewCombinationTestByCamera() throws Exception { 2560 final double ASPECT_RATIO_THRESHOLD = 0.001; 2561 List<Double> aspectRatiosTested = new ArrayList<Double>(); 2562 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2563 aspectRatiosTested.add((double)(maxPreviewSize.getWidth()) / maxPreviewSize.getHeight()); 2564 2565 for (Size size : mOrderedPreviewSizes) { 2566 // Max preview size was already tested in testDigitalZoom test. skip it. 2567 if (size.equals(maxPreviewSize)) { 2568 continue; 2569 } 2570 2571 // Only test the largest size for each aspect ratio. 
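            // mOrderedPreviewSizes is ordered with the largest size first (see maxPreviewSize
            // above), so the first size seen for each aspect ratio is its largest; later sizes
            // whose ratio is already in aspectRatiosTested (within ASPECT_RATIO_THRESHOLD) are
            // skipped.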
2572 double aspectRatio = (double)(size.getWidth()) / size.getHeight(); 2573 if (isAspectRatioContained(aspectRatiosTested, aspectRatio, ASPECT_RATIO_THRESHOLD)) { 2574 continue; 2575 } 2576 2577 if (VERBOSE) { 2578 Log.v(TAG, "Test preview size " + size.toString() + " digital zoom"); 2579 } 2580 2581 aspectRatiosTested.add(aspectRatio); 2582 digitalZoomTestByCamera(size); 2583 } 2584 } 2585 2586 private static boolean isAspectRatioContained(List<Double> aspectRatioList, 2587 double aspectRatio, double delta) { 2588 for (Double ratio : aspectRatioList) { 2589 if (Math.abs(ratio - aspectRatio) < delta) { 2590 return true; 2591 } 2592 } 2593 2594 return false; 2595 } 2596 2597 private void sceneModeTestByCamera() throws Exception { 2598 int[] sceneModes = mStaticInfo.getAvailableSceneModesChecked(); 2599 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2600 CaptureRequest.Builder requestBuilder = 2601 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2602 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2603 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE); 2604 startPreview(requestBuilder, maxPreviewSize, listener); 2605 2606 for(int mode : sceneModes) { 2607 requestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, mode); 2608 listener = new SimpleCaptureCallback(); 2609 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2610 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2611 2612 verifyCaptureResultForKey(CaptureResult.CONTROL_SCENE_MODE, 2613 mode, listener, NUM_FRAMES_VERIFIED); 2614 // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED 2615 verifyCaptureResultForKey(CaptureResult.CONTROL_MODE, 2616 CaptureRequest.CONTROL_MODE_USE_SCENE_MODE, listener, NUM_FRAMES_VERIFIED); 2617 } 2618 } 2619 2620 private void effectModeTestByCamera() throws Exception { 2621 int[] effectModes = mStaticInfo.getAvailableEffectModesChecked(); 2622 Size maxPreviewSize = mOrderedPreviewSizes.get(0); 2623 CaptureRequest.Builder requestBuilder = 2624 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); 2625 requestBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 2626 SimpleCaptureCallback listener = new SimpleCaptureCallback(); 2627 startPreview(requestBuilder, maxPreviewSize, listener); 2628 2629 for(int mode : effectModes) { 2630 requestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, mode); 2631 listener = new SimpleCaptureCallback(); 2632 mSession.setRepeatingRequest(requestBuilder.build(), listener, mHandler); 2633 waitForSettingsApplied(listener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY); 2634 2635 verifyCaptureResultForKey(CaptureResult.CONTROL_EFFECT_MODE, 2636 mode, listener, NUM_FRAMES_VERIFIED); 2637 // This also serves as purpose of showing preview for NUM_FRAMES_VERIFIED 2638 verifyCaptureResultForKey(CaptureResult.CONTROL_MODE, 2639 CaptureRequest.CONTROL_MODE_AUTO, listener, NUM_FRAMES_VERIFIED); 2640 } 2641 } 2642 2643 //---------------------------------------------------------------- 2644 //---------Below are common functions for all tests.-------------- 2645 //---------------------------------------------------------------- 2646 2647 /** 2648 * Enable exposure manual control and change exposure and sensitivity and 2649 * clamp the value into the supported range. 
2650 */ 2651 private void changeExposure(CaptureRequest.Builder requestBuilder, 2652 long expTime, int sensitivity) { 2653 // Check if the max analog sensitivity is available and no larger than max sensitivity. 2654 // The max analog sensitivity is not actually used here. This is only an extra sanity check. 2655 mStaticInfo.getMaxAnalogSensitivityChecked(); 2656 2657 expTime = mStaticInfo.getExposureClampToRange(expTime); 2658 sensitivity = mStaticInfo.getSensitivityClampToRange(sensitivity); 2659 2660 requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF); 2661 requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, expTime); 2662 requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, sensitivity); 2663 } 2664 /** 2665 * Enable exposure manual control and change exposure time and 2666 * clamp the value into the supported range. 2667 * 2668 * <p>The sensitivity is set to default value.</p> 2669 */ 2670 private void changeExposure(CaptureRequest.Builder requestBuilder, long expTime) { 2671 changeExposure(requestBuilder, expTime, DEFAULT_SENSITIVITY); 2672 } 2673 2674 /** 2675 * Get the exposure time array that contains multiple exposure time steps in 2676 * the exposure time range, in nanoseconds. 2677 */ 2678 private long[] getExposureTimeTestValues() { 2679 long[] testValues = new long[DEFAULT_NUM_EXPOSURE_TIME_STEPS + 1]; 2680 long maxExpTime = mStaticInfo.getExposureMaximumOrDefault(DEFAULT_EXP_TIME_NS); 2681 long minExpTime = mStaticInfo.getExposureMinimumOrDefault(DEFAULT_EXP_TIME_NS); 2682 2683 long range = maxExpTime - minExpTime; 2684 double stepSize = range / (double)DEFAULT_NUM_EXPOSURE_TIME_STEPS; 2685 for (int i = 0; i < testValues.length; i++) { 2686 testValues[i] = maxExpTime - (long)(stepSize * i); 2687 testValues[i] = mStaticInfo.getExposureClampToRange(testValues[i]); 2688 } 2689 2690 return testValues; 2691 } 2692 2693 /** 2694 * Generate test focus distances in range of [0, minFocusDistance] in increasing order. 2695 * 2696 * @param repeatMin number of times minValue will be repeated. 2697 * @param repeatMax number of times maxValue will be repeated. 2698 */ 2699 private float[] getFocusDistanceTestValuesInOrder(int repeatMin, int repeatMax) { 2700 int totalCount = NUM_TEST_FOCUS_DISTANCES + 1 + repeatMin + repeatMax; 2701 float[] testValues = new float[totalCount]; 2702 float minValue = 0; 2703 float maxValue = mStaticInfo.getMinimumFocusDistanceChecked(); 2704 2705 float range = maxValue - minValue; 2706 float stepSize = range / NUM_TEST_FOCUS_DISTANCES; 2707 2708 for (int i = 0; i < repeatMin; i++) { 2709 testValues[i] = minValue; 2710 } 2711 for (int i = 0; i <= NUM_TEST_FOCUS_DISTANCES; i++) { 2712 testValues[repeatMin+i] = minValue + stepSize * i; 2713 } 2714 for (int i = 0; i < repeatMax; i++) { 2715 testValues[repeatMin+NUM_TEST_FOCUS_DISTANCES+1+i] = 2716 maxValue; 2717 } 2718 2719 return testValues; 2720 } 2721 2722 /** 2723 * Get the sensitivity array that contains multiple sensitivity steps in the 2724 * sensitivity range. 2725 * <p> 2726 * Sensitivity number of test values is determined by 2727 * {@value #DEFAULT_SENSITIVITY_STEP_SIZE} and sensitivity range, and 2728 * bounded by {@value #DEFAULT_NUM_SENSITIVITY_STEPS}. 
2729 * </p>
2730 */
2731 private int[] getSensitivityTestValues() {
2732 int maxSensitivity = mStaticInfo.getSensitivityMaximumOrDefault(
2733 DEFAULT_SENSITIVITY);
2734 int minSensitivity = mStaticInfo.getSensitivityMinimumOrDefault(
2735 DEFAULT_SENSITIVITY);
2736
2737 int range = maxSensitivity - minSensitivity;
2738 int stepSize = DEFAULT_SENSITIVITY_STEP_SIZE;
2739 int numSteps = range / stepSize;
2740 // Bound the number of test steps to avoid an overly long test.
2741 if (numSteps > DEFAULT_NUM_SENSITIVITY_STEPS) {
2742 numSteps = DEFAULT_NUM_SENSITIVITY_STEPS;
2743 stepSize = range / numSteps;
2744 }
2745 int[] testValues = new int[numSteps + 1];
2746 for (int i = 0; i < testValues.length; i++) {
2747 testValues[i] = maxSensitivity - stepSize * i;
2748 testValues[i] = mStaticInfo.getSensitivityClampToRange(testValues[i]);
2749 }
2750
2751 return testValues;
2752 }
2753
2754 /**
2755 * Validate the AE manual control exposure time.
2756 *
2757 * <p>The result exposure time should be close to the requested value and may only round down, never up.</p>
2758 *
2759 * @param request Request exposure time
2760 * @param result Result exposure time
2761 */
2762 private void validateExposureTime(long request, long result) {
2763 long expTimeDelta = request - result;
2764 long expTimeErrorMargin = (long)(Math.max(EXPOSURE_TIME_ERROR_MARGIN_NS, request
2765 * EXPOSURE_TIME_ERROR_MARGIN_RATE));
2766 // The result may only round down (not up), and must be close enough to the request.
2767 mCollector.expectTrue("Exposure time is invalid for AE manual control test, request: "
2768 + request + " result: " + result,
2769 expTimeDelta < expTimeErrorMargin && expTimeDelta >= 0);
2770 }
2771
2772 /**
2773 * Validate AE manual control sensitivity.
2774 *
2775 * @param request Request sensitivity
2776 * @param result Result sensitivity
2777 */
2778 private void validateSensitivity(int request, int result) {
2779 float sensitivityDelta = request - result;
2780 float sensitivityErrorMargin = request * SENSITIVITY_ERROR_MARGIN_RATE;
2781 // The result may only round down (not up), and must be close enough to the request.
2782 mCollector.expectTrue("Sensitivity is invalid for AE manual control test, request: "
2783 + request + " result: " + result,
2784 sensitivityDelta < sensitivityErrorMargin && sensitivityDelta >= 0);
2785 }
2786
2787 /**
2788 * Validate frame duration for a given capture.
2789 *
2790 * <p>Frame duration should be no shorter than the exposure time.</p>
2791 *
2792 * @param result The capture result for a given capture
2793 */
2794 private void validateFrameDurationForCapture(CaptureResult result) {
2795 long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
2796 long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
2797 if (VERBOSE) {
2798 Log.v(TAG, "frame duration: " + frameDuration + " Exposure time: " + expTime);
2799 }
2800
2801 mCollector.expectTrue(String.format("Frame duration (%d) should be no less than exposure"
2802 + " time (%d) for a given capture", frameDuration, expTime),
2803 frameDuration >= expTime);
2804
2805 validatePipelineDepth(result);
2806 }
2807
2808 /**
2809 * Basic verification for the control mode capture result.

    /**
     * Basic verification for the control mode capture result.
     *
     * @param key The capture result key to be verified against
     * @param requestMode The request mode for this result
     * @param listener The capture listener to get capture results
     * @param numFramesVerified The number of capture results to be verified
     */
    private <T> void verifyCaptureResultForKey(CaptureResult.Key<T> key, T requestMode,
            SimpleCaptureCallback listener, int numFramesVerified) {
        for (int i = 0; i < numFramesVerified; i++) {
            CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            validatePipelineDepth(result);
            T resultMode = getValueNotNull(result, key);
            if (VERBOSE) {
                Log.v(TAG, "Expect value: " + requestMode.toString() + " result value: "
                        + resultMode.toString());
            }
            mCollector.expectEquals("Key " + key.getName() + " result should match request",
                    requestMode, resultMode);
        }
    }

    /**
     * Basic verification that the value of a capture result key is one of the expected
     * values.
     *
     * @param key The capture result key to be verified against
     * @param expectedModes The list of possible expected modes for this result
     * @param listener The capture listener to get capture results
     * @param numFramesVerified The number of capture results to be verified
     */
    private <T> void verifyAnyCaptureResultForKey(CaptureResult.Key<T> key, T[] expectedModes,
            SimpleCaptureCallback listener, int numFramesVerified) {
        for (int i = 0; i < numFramesVerified; i++) {
            CaptureResult result = listener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            validatePipelineDepth(result);
            T resultMode = getValueNotNull(result, key);
            if (VERBOSE) {
                Log.v(TAG, "Expect values: " + Arrays.toString(expectedModes) + " result value: "
                        + resultMode.toString());
            }
            // Capture result should be one of the expected values.
            mCollector.expectContains(expectedModes, resultMode);
        }
    }
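
    // Illustrative usage sketch for the helpers above (hypothetical snippet, not taken
    // from a specific test in this file): after starting a repeating request with a
    // given control value, the matching result is checked for a number of frames, e.g.:
    //
    //     requestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, mode);
    //     SimpleCaptureCallback listener = new SimpleCaptureCallback();
    //     startPreview(requestBuilder, previewSize, listener);
    //     verifyCaptureResultForKey(CaptureResult.CONTROL_EFFECT_MODE, mode,
    //             listener, NUM_FRAMES_VERIFIED);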

    /**
     * Verify that the frame rate does not slow down for a given input request with
     * certain controls set.
     * <p>
     * This method selects a max preview size for each fps range, and then
     * configures the preview stream. Preview is started with the max preview
     * size, and the result frame durations are verified to be within the frame
     * duration range.
     * </p>
     *
     * @param requestBuilder The request builder that contains post-processing
     *            controls that could impact the output frame rate, such as
     *            {@link CaptureRequest#NOISE_REDUCTION_MODE}. These controls
     *            must be set to values that do not slow down the frame rate.
     * @param numFramesVerified The number of frames to be verified
     * @param fpsRanges The fps ranges to be verified
     */
    private void verifyFpsNotSlowDown(CaptureRequest.Builder requestBuilder,
            int numFramesVerified, List<Range<Integer>> fpsRanges) throws Exception {
        boolean frameDurationAvailable = true;
        // Allow a few frames for AE to settle on the target FPS range
        final int NUM_FRAME_TO_SKIP = 6;
        float frameDurationErrorMargin = FRAME_DURATION_ERROR_MARGIN;
        if (!mStaticInfo.areKeysAvailable(CaptureResult.SENSOR_FRAME_DURATION)) {
            frameDurationAvailable = false;
            // Allow a larger error margin (1.5%) for timestamps
            frameDurationErrorMargin = 0.015f;
        }
        if (mStaticInfo.isExternalCamera()) {
            // Allow an even larger error margin (15%) for external camera timestamps
            frameDurationErrorMargin = 0.15f;
        }

        boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
        Range<Integer> fpsRange;
        SimpleCaptureCallback resultListener;

        for (int i = 0; i < fpsRanges.size(); i += 1) {
            fpsRange = fpsRanges.get(i);
            Size previewSz = getMaxPreviewSizeForFpsRange(fpsRange);
            // If unable to find a preview size, then log the failure, and skip this run.
            if (previewSz == null) {
                if (mStaticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    mCollector.addMessage(String.format(
                            "Unable to find a preview size supporting given fps range %s",
                            fpsRange));
                }
                continue;
            }

            if (VERBOSE) {
                Log.v(TAG, String.format("Test fps range %s for preview size %s",
                        fpsRange, previewSz.toString()));
            }
            requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
            // Turn off auto antibanding to avoid exposure time and frame duration interference
            // from the antibanding algorithm.
            if (antiBandingOffIsSupported) {
                requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
                        CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
            } else {
                // The device doesn't implement the OFF mode; the test continues, but the
                // antibanding algorithm must not slow down the fps.
                Log.i(TAG, "OFF antibanding mode is not supported, the camera device output must" +
                        " not slow down the frame rate regardless of its current antibanding" +
                        " mode");
            }

            resultListener = new SimpleCaptureCallback();
            startPreview(requestBuilder, previewSz, resultListener);
            waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
            // Wait several more frames for AE to settle on target FPS range
            waitForNumResults(resultListener, NUM_FRAME_TO_SKIP);

            long[] frameDurationRange = new long[]{
                    (long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
            long captureTime = 0, prevCaptureTime = 0;
            for (int j = 0; j < numFramesVerified; j++) {
                long frameDuration = frameDurationRange[0];
                CaptureResult result =
                        resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
                validatePipelineDepth(result);
                if (frameDurationAvailable) {
                    frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
                } else {
                    // If frame duration is not available, check timestamp deltas instead.
                    captureTime = getValueNotNull(result, CaptureResult.SENSOR_TIMESTAMP);
                    if (j > 0) {
                        frameDuration = captureTime - prevCaptureTime;
                    }
                    prevCaptureTime = captureTime;
                }
                mCollector.expectInRange(
                        "Frame duration must be in the range of " +
                                Arrays.toString(frameDurationRange),
                        frameDuration,
                        (long) (frameDurationRange[0] * (1 - frameDurationErrorMargin)),
                        (long) (frameDurationRange[1] * (1 + frameDurationErrorMargin)));
            }
        }

        stopPreview();
    }
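
    // Worked example for the frame duration check in verifyFpsNotSlowDown above, using
    // a hypothetical [15, 30] fps target range: the nominal window is
    // [1e9 / 30, 1e9 / 15] = [~33.3ms, ~66.7ms], widened on both ends by the applicable
    // error margin (and checked against timestamp deltas when SENSOR_FRAME_DURATION is
    // not available).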

    /**
     * Validate the pipeline depth result.
     *
     * @param result The capture result to get pipeline depth data
     */
    private void validatePipelineDepth(CaptureResult result) {
        final byte MIN_PIPELINE_DEPTH = 1;
        byte maxPipelineDepth = mStaticInfo.getPipelineMaxDepthChecked();
        Byte pipelineDepth = getValueNotNull(result, CaptureResult.REQUEST_PIPELINE_DEPTH);
        mCollector.expectInRange(String.format("Pipeline depth must be in the range of [%d, %d]",
                MIN_PIPELINE_DEPTH, maxPipelineDepth), pipelineDepth, MIN_PIPELINE_DEPTH,
                maxPipelineDepth);
    }

    /**
     * Calculate the anti-flickering corrected exposure time.
     * <p>
     * If the input exposure time is very short (shorter than the flickering
     * boundary), which indicates a bright scene that is very likely outdoors,
     * skip the correction, as it wouldn't be meaningful.
     * </p>
     * <p>
     * For a long exposure time (larger than the flickering boundary), find the
     * multiple of the flickering boundary that is closest to the input.
     * </p>
     *
     * @param flickeringMode The flickering mode
     * @param exposureTime The input exposureTime to be corrected
     * @return anti-flickering corrected exposure time
     */
    private long getAntiFlickeringExposureTime(int flickeringMode, long exposureTime) {
        if (flickeringMode != ANTI_FLICKERING_50HZ && flickeringMode != ANTI_FLICKERING_60HZ) {
            throw new IllegalArgumentException("Input anti-flickering mode must be 50 or 60Hz");
        }
        long flickeringBoundary = EXPOSURE_TIME_BOUNDARY_50HZ_NS;
        if (flickeringMode == ANTI_FLICKERING_60HZ) {
            flickeringBoundary = EXPOSURE_TIME_BOUNDARY_60HZ_NS;
        }

        if (exposureTime <= flickeringBoundary) {
            return exposureTime;
        }

        // Find the closest anti-flickering corrected exposure time
        long correctedExpTime = exposureTime + (flickeringBoundary / 2);
        correctedExpTime = correctedExpTime - (correctedExpTime % flickeringBoundary);
        return correctedExpTime;
    }

    /**
     * Update one 3A region in the capture request builder if that region is supported. Do nothing
     * if the specified 3A region is not supported by the camera device.
     * @param requestBuilder The request to be updated
     * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
     * @param regions The 3A regions to be set
     */
    private void update3aRegion(
            CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions)
    {
        int maxRegions;
        CaptureRequest.Key<MeteringRectangle[]> key;

        if (regions == null || regions.length == 0) {
            throw new IllegalArgumentException("Invalid input 3A region!");
        }

        switch (algoIdx) {
            case INDEX_ALGORITHM_AE:
                maxRegions = mStaticInfo.getAeMaxRegionsChecked();
                key = CaptureRequest.CONTROL_AE_REGIONS;
                break;
            case INDEX_ALGORITHM_AWB:
                maxRegions = mStaticInfo.getAwbMaxRegionsChecked();
                key = CaptureRequest.CONTROL_AWB_REGIONS;
                break;
            case INDEX_ALGORITHM_AF:
                maxRegions = mStaticInfo.getAfMaxRegionsChecked();
                key = CaptureRequest.CONTROL_AF_REGIONS;
                break;
            default:
                throw new IllegalArgumentException("Unknown 3A Algorithm!");
        }

        if (maxRegions >= regions.length) {
            requestBuilder.set(key, regions);
        }
    }
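
    // Illustrative sketch (not invoked by any test): a worked example of the rounding
    // done by getAntiFlickeringExposureTime above. The 33.3ms input is a hypothetical value.
    private static long exampleAntiFlickering50HzCorrection() {
        final long flickeringBoundary = EXPOSURE_TIME_BOUNDARY_50HZ_NS; // 10ms
        long exposureTime = 33333333L; // ~33.3ms, hypothetical uncorrected exposure
        // 33.3ms + 5ms = 38.3ms, then dropping the remainder modulo 10ms gives 30ms,
        // the multiple of the 50Hz flicker period closest to the input.
        long correctedExpTime = exposureTime + (flickeringBoundary / 2);
        correctedExpTime = correctedExpTime - (correctedExpTime % flickeringBoundary);
        return correctedExpTime; // 30000000 ns
    }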

    /**
     * Validate that one 3A region in the capture result equals the expected region, if that
     * region type is supported. Do nothing if the specified 3A region is not supported by the
     * camera device.
     * @param result The capture result to be validated
     * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
     * @param expectRegions The 3A regions expected in the capture result
     */
    private void validate3aRegion(
            CaptureResult result, int algoIdx, MeteringRectangle[] expectRegions)
    {
        final int maxCorrectionDist = 2;
        int maxRegions;
        CaptureResult.Key<MeteringRectangle[]> key;
        MeteringRectangle[] actualRegion;

        switch (algoIdx) {
            case INDEX_ALGORITHM_AE:
                maxRegions = mStaticInfo.getAeMaxRegionsChecked();
                key = CaptureResult.CONTROL_AE_REGIONS;
                break;
            case INDEX_ALGORITHM_AWB:
                maxRegions = mStaticInfo.getAwbMaxRegionsChecked();
                key = CaptureResult.CONTROL_AWB_REGIONS;
                break;
            case INDEX_ALGORITHM_AF:
                maxRegions = mStaticInfo.getAfMaxRegionsChecked();
                key = CaptureResult.CONTROL_AF_REGIONS;
                break;
            default:
                throw new IllegalArgumentException("Unknown 3A Algorithm!");
        }

        Integer distortionCorrectionMode = result.get(CaptureResult.DISTORTION_CORRECTION_MODE);
        boolean correctionEnabled =
                distortionCorrectionMode != null &&
                distortionCorrectionMode != CaptureResult.DISTORTION_CORRECTION_MODE_OFF;

        if (maxRegions > 0)
        {
            actualRegion = getValueNotNull(result, key);
            if (correctionEnabled) {
                for (int i = 0; i < actualRegion.length; i++) {
                    Rect a = actualRegion[i].getRect();
                    Rect e = expectRegions[i].getRect();
                    if (!mCollector.expectLessOrEqual(
                            "Expected 3A regions: " + Arrays.toString(expectRegions) +
                            " are not close enough to the actual ones: " +
                            Arrays.toString(actualRegion),
                            maxCorrectionDist, Math.abs(a.left - e.left))) continue;
                    if (!mCollector.expectLessOrEqual(
                            "Expected 3A regions: " + Arrays.toString(expectRegions) +
                            " are not close enough to the actual ones: " +
                            Arrays.toString(actualRegion),
                            maxCorrectionDist, Math.abs(a.right - e.right))) continue;
                    if (!mCollector.expectLessOrEqual(
                            "Expected 3A regions: " + Arrays.toString(expectRegions) +
                            " are not close enough to the actual ones: " +
                            Arrays.toString(actualRegion),
                            maxCorrectionDist, Math.abs(a.top - e.top))) continue;
                    if (!mCollector.expectLessOrEqual(
                            "Expected 3A regions: " + Arrays.toString(expectRegions) +
                            " are not close enough to the actual ones: " +
                            Arrays.toString(actualRegion),
                            maxCorrectionDist, Math.abs(a.bottom - e.bottom))) continue;
                }
            } else {
                mCollector.expectEquals(
                        "Expected 3A regions: " + Arrays.toString(expectRegions) +
                        " do not match the actual ones: " + Arrays.toString(actualRegion),
                        expectRegions, actualRegion);
            }
        }
    }
}