/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.BitmapRegionDecoder;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.rs.BitmapUtils;
import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.ConditionVariable;
import android.util.Log;
import android.util.Size;
import android.view.Surface;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import static android.hardware.camera2.cts.CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS;
import static android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import static android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
import static android.hardware.camera2.cts.CameraTestUtils.dumpFile;
import static android.hardware.camera2.cts.CameraTestUtils.getValueNotNull;

/**
 * <p>Basic test for ImageReader APIs. It uses the CameraDevice as the producer: the camera
 * sends data to the Surface provided by the ImageReader. The following image formats
 * are tested:</p>
 *
 * <p>YUV_420_888: flexible YUV420, a mandatory format for camera devices.</p>
 * <p>JPEG: used for JPEG still capture, also a mandatory format.</p>
 * <p>Some invalid-access cases are also tested.</p>
 * <p>TODO: Add more format tests?</p>
 */
public class ImageReaderTest extends Camera2AndroidTestCase {
    private static final String TAG = "ImageReaderTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);

    // Number of frames (for streaming requests) to be verified.
    private static final int NUM_FRAME_VERIFIED = 2;
    // Max number of images that can be accessed simultaneously from the ImageReader.
    private static final int MAX_NUM_IMAGES = 5;
    // Max difference allowed between YUV and JPEG patches. This tolerance is intentionally very
    // generous to avoid false positives due to punch/saturation operations vendors apply to the
    // JPEG outputs.
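    // (The metric used below, BitmapUtils.calcDifferenceMetric, is described as an average of
    // per-pixel differences; assuming it is reported on a 0-255 scale, a tolerance of 30 is
    // roughly 12% of the full color range.)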
    private static final double IMAGE_DIFFERENCE_TOLERANCE = 30;

    private SimpleImageListener mListener;

    @Override
    public void setContext(Context context) {
        super.setContext(context);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
    }

    public void testFlexibleYuv() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.i(TAG, "Testing Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.YUV_420_888, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testJpeg() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing jpeg capture for Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.JPEG, /*repeating*/false);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testRaw() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing raw capture for camera " + id);
                openDevice(id);

                bufferFormatTestByCamera(ImageFormat.RAW_SENSOR, /*repeating*/false);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testRepeatingJpeg() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing repeating jpeg capture for Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.JPEG, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testRepeatingRaw() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing repeating raw capture for camera " + id);
                openDevice(id);

                bufferFormatTestByCamera(ImageFormat.RAW_SENSOR, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Test invalid access of image byte buffers: once an image is closed, any further access
     * to its byte buffers should throw an IllegalStateException. The basic assumption of this
     * test is that the ImageReader always provides direct byte buffers, which is always true
     * for the camera case. If the produced image byte buffer is not a direct byte buffer,
     * there is no guarantee that this invalid access results in an ISE.
     */
    public void testInvalidAccessTest() throws Exception {
        // Test byte buffer access after an image is released; it should throw an ISE.
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing invalid image access for Camera " + id);
                openDevice(id);
                bufferAccessAfterRelease();
                fail("ImageReader should throw IllegalStateException when accessing a byte buffer"
                        + " after the image is closed");
            } catch (IllegalStateException e) {
                // Expected.
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Test simultaneous two-stream (YUV420_888 and JPEG) capture using ImageReader.
     *
     * <p>Both stream formats are mandatory for the Camera2 API.</p>
     */
    public void testYuvAndJpeg() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "YUV and JPEG testing for camera " + id);
                openDevice(id);

                bufferFormatWithYuvTestByCamera(ImageFormat.JPEG);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Test simultaneous two-stream (YUV420_888 and RAW_SENSOR) capture using ImageReader.
     */
    public void testImageReaderYuvAndRaw() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "YUV and RAW testing for camera " + id);
                openDevice(id);

                bufferFormatWithYuvTestByCamera(ImageFormat.RAW_SENSOR);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Check that the center patches of the YUV and JPEG outputs for the same frame match, for
     * each supported YUV resolution and format.
     */
    public void testAllOutputYUVResolutions() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing all YUV image resolutions for camera " + id);
                openDevice(id);

                // Only warm up on LEGACY devices; non-LEGACY (LIMITED/FULL) devices skip it.
                int warmupCaptureNumber = (mStaticInfo.isHardwareLevelLegacy()) ?
                        MAX_NUM_IMAGES - 1 : 0;

                // NV21 isn't supported by ImageReader.
                final int[] YUVFormats = new int[] {ImageFormat.YUV_420_888, ImageFormat.YV12};

                CameraCharacteristics.Key<StreamConfigurationMap> key =
                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
                StreamConfigurationMap config = mStaticInfo.getValueFromKeyNonNull(key);
                int[] supportedFormats = config.getOutputFormats();
                List<Integer> supportedYUVFormats = new ArrayList<>();
                for (int format : YUVFormats) {
                    if (CameraTestUtils.contains(supportedFormats, format)) {
                        supportedYUVFormats.add(format);
                    }
                }

                Size[] jpegSizes = mStaticInfo.getAvailableSizesForFormatChecked(ImageFormat.JPEG,
                        StaticMetadata.StreamDirection.Output);
                assertFalse("JPEG output not supported for camera " + id +
                        ", at least one JPEG output is required.", jpegSizes.length == 0);

                Size maxJpegSize = CameraTestUtils.getMaxSize(jpegSizes);

                for (int format : supportedYUVFormats) {
                    Size[] targetCaptureSizes =
                            mStaticInfo.getAvailableSizesForFormatChecked(format,
                                    StaticMetadata.StreamDirection.Output);

                    for (Size captureSz : targetCaptureSizes) {
                        if (VERBOSE) {
                            Log.v(TAG, "Testing yuv size " + captureSz + " and jpeg size "
                                    + maxJpegSize + " for camera " + mCamera.getId());
                        }

                        ImageReader jpegReader = null;
                        ImageReader yuvReader = null;
                        try {
                            // Create the YUV image reader.
                            SimpleImageReaderListener yuvListener = new SimpleImageReaderListener();
                            yuvReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
                                    yuvListener);
                            Surface yuvSurface = yuvReader.getSurface();

                            // Create the JPEG image reader.
                            SimpleImageReaderListener jpegListener =
                                    new SimpleImageReaderListener();
                            jpegReader = createImageReader(maxJpegSize,
                                    ImageFormat.JPEG, MAX_NUM_IMAGES, jpegListener);
                            Surface jpegSurface = jpegReader.getSurface();

                            // Set up the session.
                            List<Surface> outputSurfaces = new ArrayList<Surface>();
                            outputSurfaces.add(yuvSurface);
                            outputSurfaces.add(jpegSurface);
                            createSession(outputSurfaces);

                            // Warm up the camera preview (mainly to give legacy devices time
                            // to do 3A).
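                            // For non-LEGACY devices warmupCaptureNumber is 0, so the warmup
                            // loops below are effectively skipped.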
                            CaptureRequest.Builder warmupRequest =
                                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                            warmupRequest.addTarget(yuvSurface);
                            assertNotNull("Fail to get CaptureRequest.Builder", warmupRequest);
                            SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

                            for (int i = 0; i < warmupCaptureNumber; i++) {
                                startCapture(warmupRequest.build(), /*repeating*/false,
                                        resultListener, mHandler);
                            }
                            for (int i = 0; i < warmupCaptureNumber; i++) {
                                resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
                                Image image = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
                                image.close();
                            }

                            // Capture image.
                            CaptureRequest.Builder mainRequest =
                                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                            for (Surface s : outputSurfaces) {
                                mainRequest.addTarget(s);
                            }

                            startCapture(mainRequest.build(), /*repeating*/false, resultListener,
                                    mHandler);

                            // Verify capture result and images.
                            resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);

                            Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
                            Image jpegImage = jpegListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);

                            // Validate captured images.
                            CameraTestUtils.validateImage(yuvImage, captureSz.getWidth(),
                                    captureSz.getHeight(), format, /*filePath*/null);
                            CameraTestUtils.validateImage(jpegImage, maxJpegSize.getWidth(),
                                    maxJpegSize.getHeight(), ImageFormat.JPEG, /*filePath*/null);

                            // Compare the image centers.
                            RectF jpegDimens = new RectF(0, 0, jpegImage.getWidth(),
                                    jpegImage.getHeight());
                            RectF yuvDimens = new RectF(0, 0, yuvImage.getWidth(),
                                    yuvImage.getHeight());

                            // Find scale difference between YUV and JPEG output.
                            Matrix m = new Matrix();
                            m.setRectToRect(yuvDimens, jpegDimens, Matrix.ScaleToFit.START);
                            RectF scaledYuv = new RectF();
                            m.mapRect(scaledYuv, yuvDimens);
                            float scale = scaledYuv.width() / yuvDimens.width();

                            final int PATCH_DIMEN = 40; // pixels in YUV

                            // Find matching square patches of pixels in YUV and JPEG output.
                            RectF tempPatch = new RectF(0, 0, PATCH_DIMEN, PATCH_DIMEN);
                            tempPatch.offset(yuvDimens.centerX() - tempPatch.centerX(),
                                    yuvDimens.centerY() - tempPatch.centerY());
                            Rect yuvPatch = new Rect();
                            tempPatch.roundOut(yuvPatch);

                            tempPatch.set(0, 0, PATCH_DIMEN * scale, PATCH_DIMEN * scale);
                            tempPatch.offset(jpegDimens.centerX() - tempPatch.centerX(),
                                    jpegDimens.centerY() - tempPatch.centerY());
                            Rect jpegPatch = new Rect();
                            tempPatch.roundOut(jpegPatch);

                            // Decode the center patches.
                            int[] yuvColors = convertPixelYuvToRgba(yuvPatch.width(),
                                    yuvPatch.height(), yuvPatch.left, yuvPatch.top, yuvImage);
                            Bitmap yuvBmap = Bitmap.createBitmap(yuvColors, yuvPatch.width(),
                                    yuvPatch.height(), Bitmap.Config.ARGB_8888);

                            byte[] compressedJpegData = CameraTestUtils.getDataFromImage(jpegImage);
                            BitmapRegionDecoder decoder = BitmapRegionDecoder.newInstance(
                                    compressedJpegData, /*offset*/0, compressedJpegData.length,
                                    /*isShareable*/true);
                            BitmapFactory.Options opt = new BitmapFactory.Options();
                            opt.inPreferredConfig = Bitmap.Config.ARGB_8888;
                            Bitmap fullSizeJpegBmap = decoder.decodeRegion(jpegPatch, opt);
                            Bitmap jpegBmap = Bitmap.createScaledBitmap(fullSizeJpegBmap,
                                    yuvPatch.width(), yuvPatch.height(), /*filter*/true);

                            // Compare the two patches using the average of per-pixel differences.
                            double difference =
                                    BitmapUtils.calcDifferenceMetric(yuvBmap, jpegBmap);
                            Log.i(TAG, "Difference for resolution " + captureSz + " is: " +
                                    difference);
                            if (difference > IMAGE_DIFFERENCE_TOLERANCE) {
                                // Dump files if running in debug mode.
                                if (DEBUG) {
                                    String jpegFileName = DEBUG_FILE_NAME_BASE + "/" + captureSz +
                                            "_jpeg.jpg";
                                    dumpFile(jpegFileName, jpegBmap);
                                    String fullSizeJpegFileName = DEBUG_FILE_NAME_BASE + "/" +
                                            captureSz + "_full_jpeg.jpg";
                                    dumpFile(fullSizeJpegFileName, compressedJpegData);
                                    String yuvFileName = DEBUG_FILE_NAME_BASE + "/" + captureSz +
                                            "_yuv.jpg";
                                    dumpFile(yuvFileName, yuvBmap);
                                    String fullSizeYuvFileName = DEBUG_FILE_NAME_BASE + "/" +
                                            captureSz + "_full_yuv.jpg";
                                    int[] fullYUVColors = convertPixelYuvToRgba(yuvImage.getWidth(),
                                            yuvImage.getHeight(), 0, 0, yuvImage);
                                    Bitmap fullYUVBmap = Bitmap.createBitmap(fullYUVColors,
                                            yuvImage.getWidth(), yuvImage.getHeight(),
                                            Bitmap.Config.ARGB_8888);
                                    dumpFile(fullSizeYuvFileName, fullYUVBmap);
                                }
                                fail("Camera " + mCamera.getId() + ": YUV and JPEG image at " +
                                        "capture size " + captureSz + " for the same frame are " +
                                        "not similar, center patches have difference metric of " +
                                        difference);
                            }

                            // Stop the capture and delete the streams.
                            stopCapture(/*fast*/false);
                        } finally {
                            closeImageReader(jpegReader);
                            jpegReader = null;
                            closeImageReader(yuvReader);
                            yuvReader = null;
                        }
                    }
                }

            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Convert a rectangular patch in a YUV image to an ARGB color array.
     *
     * @param w width of the patch.
     * @param h height of the patch.
     * @param wOffset offset of the left side of the patch.
     * @param hOffset offset of the top of the patch.
     * @param yuvImage a YUV image to select a patch from.
     * @return the image patch converted to RGB as an ARGB color array.
     */
    private static int[] convertPixelYuvToRgba(int w, int h, int wOffset, int hOffset,
            Image yuvImage) {
        final int CHANNELS = 3; // yuv
        final float COLOR_RANGE = 255f;

        assertTrue("Invalid argument to convertPixelYuvToRgba",
                w > 0 && h > 0 && wOffset >= 0 && hOffset >= 0);
        assertNotNull(yuvImage);

        int imageFormat = yuvImage.getFormat();
        assertTrue("YUV image must have a YUV-type format",
                imageFormat == ImageFormat.YUV_420_888 || imageFormat == ImageFormat.YV12 ||
                        imageFormat == ImageFormat.NV21);

        int height = yuvImage.getHeight();
        int width = yuvImage.getWidth();

        Rect imageBounds = new Rect(/*left*/0, /*top*/0, /*right*/width, /*bottom*/height);
        Rect crop = new Rect(/*left*/wOffset, /*top*/hOffset, /*right*/wOffset + w,
                /*bottom*/hOffset + h);
        assertTrue("Output rectangle " + crop + " must lie within image bounds " + imageBounds,
                imageBounds.contains(crop));
        Image.Plane[] planes = yuvImage.getPlanes();

        Image.Plane yPlane = planes[0];
        Image.Plane cbPlane = planes[1];
        Image.Plane crPlane = planes[2];

        ByteBuffer yBuf = yPlane.getBuffer();
        int yPixStride = yPlane.getPixelStride();
        int yRowStride = yPlane.getRowStride();
        ByteBuffer cbBuf = cbPlane.getBuffer();
        int cbPixStride = cbPlane.getPixelStride();
        int cbRowStride = cbPlane.getRowStride();
        ByteBuffer crBuf = crPlane.getBuffer();
        int crPixStride = crPlane.getPixelStride();
        int crRowStride = crPlane.getRowStride();

        int[] output = new int[w * h];

        // TODO: Optimize this with renderscript intrinsics
        byte[] yRow = new byte[yPixStride * w];
        byte[] cbRow = new byte[cbPixStride * w / 2];
        byte[] crRow = new byte[crPixStride * w / 2];
        yBuf.mark();
        cbBuf.mark();
        crBuf.mark();
        int initialYPos = yBuf.position();
        int initialCbPos = cbBuf.position();
        int initialCrPos = crBuf.position();
        int outputPos = 0;
        for (int i = hOffset; i < hOffset + h; i++) {
            yBuf.position(initialYPos + i * yRowStride + wOffset * yPixStride);
            yBuf.get(yRow);
            if ((i & 1) == (hOffset & 1)) {
                // Chroma planes are vertically subsampled by 2; refresh the Cb/Cr rows on
                // every other luma row.
                cbBuf.position(initialCbPos + (i / 2) * cbRowStride + wOffset * cbPixStride / 2);
                cbBuf.get(cbRow);
                crBuf.position(initialCrPos + (i / 2) * crRowStride + wOffset * crPixStride / 2);
                crBuf.get(crRow);
            }
            for (int j = 0, yPix = 0, crPix = 0, cbPix = 0; j < w; j++, yPix += yPixStride) {
                float y = yRow[yPix] & 0xFF;
                float cb = cbRow[cbPix] & 0xFF;
                float cr = crRow[crPix] & 0xFF;

                // Convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section).
                int r = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.402f * (cr - 128)));
                int g = (int) Math.max(0.0f,
                        Math.min(COLOR_RANGE, y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128)));
                int b = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.772f * (cb - 128)));

                // Convert to an ARGB pixel color (use opaque alpha).
                output[outputPos++] = Color.rgb(r, g, b);

                if ((j & 1) == 1) {
                    // Chroma planes are horizontally subsampled by 2; advance Cb/Cr every
                    // other output pixel.
                    crPix += crPixStride;
                    cbPix += cbPixStride;
                }
            }
        }
        yBuf.rewind();
        cbBuf.rewind();
        crBuf.rewind();

        return output;
    }

    /**
     * Test capture of a stream of the given format simultaneously with a YUV stream.
     *
     * <p>Uses a fixed YUV size and varies the capture size of the target format. Only single
     * (non-repeating) captures are tested.</p>
     *
     * @param format The capture format to be tested along with the YUV format.
     */
    private void bufferFormatWithYuvTestByCamera(int format) throws Exception {
        if (format != ImageFormat.JPEG && format != ImageFormat.RAW_SENSOR
                && format != ImageFormat.YUV_420_888) {
            throw new IllegalArgumentException("Unsupported format: " + format);
        }

        final int NUM_SINGLE_CAPTURE_TESTED = MAX_NUM_IMAGES - 1;
        Size maxYuvSz = mOrderedPreviewSizes.get(0);
        Size[] targetCaptureSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
                StaticMetadata.StreamDirection.Output);

        for (Size captureSz : targetCaptureSizes) {
            if (VERBOSE) {
                Log.v(TAG, "Testing yuv size " + maxYuvSz.toString() + " and capture size "
                        + captureSz.toString() + " for camera " + mCamera.getId());
            }

            ImageReader captureReader = null;
            ImageReader yuvReader = null;
            try {
                // Create the YUV image reader.
                SimpleImageReaderListener yuvListener = new SimpleImageReaderListener();
                yuvReader = createImageReader(maxYuvSz, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
                        yuvListener);
                Surface yuvSurface = yuvReader.getSurface();

                // Create the capture image reader.
                SimpleImageReaderListener captureListener = new SimpleImageReaderListener();
                captureReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
                        captureListener);
                Surface captureSurface = captureReader.getSurface();

                // Capture images.
                List<Surface> outputSurfaces = new ArrayList<Surface>();
                outputSurfaces.add(yuvSurface);
                outputSurfaces.add(captureSurface);
                CaptureRequest.Builder request = prepareCaptureRequestForSurfaces(outputSurfaces);
                SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

                for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
                    startCapture(request.build(), /*repeating*/false, resultListener, mHandler);
                }

                // Verify the capture results and images.
                for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
                    resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
                    if (VERBOSE) {
                        Log.v(TAG, " Got the capture result back for " + i + "th capture");
                    }

                    Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
                    if (VERBOSE) {
                        Log.v(TAG, " Got the yuv image back for " + i + "th capture");
                    }

                    Image captureImage = captureListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
                    if (VERBOSE) {
                        Log.v(TAG, " Got the capture image back for " + i + "th capture");
                    }

                    // Validate the captured images.
                    CameraTestUtils.validateImage(yuvImage, maxYuvSz.getWidth(),
                            maxYuvSz.getHeight(), ImageFormat.YUV_420_888, /*filePath*/null);
                    CameraTestUtils.validateImage(captureImage, captureSz.getWidth(),
                            captureSz.getHeight(), format, /*filePath*/null);
                }

                // Stop the capture and delete the streams.
                stopCapture(/*fast*/false);
            } finally {
                closeImageReader(captureReader);
                captureReader = null;
                closeImageReader(yuvReader);
                yuvReader = null;
            }
        }
    }

    /**
     * Test buffer access after an image is released; a single YUV420_888 capture is used.
     * This method should throw an ISE.
     */
    private void bufferAccessAfterRelease() throws Exception {
        final int FORMAT = ImageFormat.YUV_420_888;
        Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
                StaticMetadata.StreamDirection.Output);

        try {
            // Create the ImageReader.
            mListener = new SimpleImageListener();
            createDefaultImageReader(availableSizes[0], FORMAT, MAX_NUM_IMAGES, mListener);

            // Start the capture.
            CaptureRequest request = prepareCaptureRequest();
            SimpleCaptureCallback listener = new SimpleCaptureCallback();
            startCapture(request, /*repeating*/false, listener, mHandler);

            mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
            Image img = mReader.acquireNextImage();
            ByteBuffer buffer = img.getPlanes()[0].getBuffer();
            img.close();

            byte data = buffer.get(); // An ISE should be thrown here.
        } finally {
            closeDefaultImageReader();
        }
    }

    private void bufferFormatTestByCamera(int format, boolean repeating) throws Exception {

        Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
                StaticMetadata.StreamDirection.Output);

        // For each resolution, test the ImageReader:
        for (Size sz : availableSizes) {
            try {
                if (VERBOSE) {
                    Log.v(TAG, "Testing size " + sz.toString() + " format " + format
                            + " for camera " + mCamera.getId());
                }

                // Create the ImageReader.
                mListener = new SimpleImageListener();
                createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);

                // Start the capture.
                CaptureRequest request = prepareCaptureRequest();
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                startCapture(request, repeating, listener, mHandler);

                int numFrameVerified = repeating ? NUM_FRAME_VERIFIED : 1;

                // Validate the images.
                validateImage(sz, format, numFrameVerified, repeating);

                // Validate the capture results.
                validateCaptureResult(format, sz, listener, numFrameVerified);

                // Stop the capture.
                stopCapture(/*fast*/false);
            } finally {
                closeDefaultImageReader();
            }

        }
    }

    /**
     * Validate capture results.
     *
     * @param format The format of this capture.
     * @param size The capture size.
     * @param listener The capture listener used to get capture result callbacks.
     * @param numFrameVerified The number of frames to verify.
     */
    private void validateCaptureResult(int format, Size size, SimpleCaptureCallback listener,
            int numFrameVerified) {
        for (int i = 0; i < numFrameVerified; i++) {
            CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);

            // TODO: Update this to use availableResultKeys once the shim supports this.
            if (mStaticInfo.isCapabilitySupported(
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                Long exposureTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
                Integer sensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
                mCollector.expectInRange(
                        String.format(
                                "Capture for format %d, size %s exposure time is invalid.",
                                format, size.toString()),
                        exposureTime,
                        mStaticInfo.getExposureMinimumOrDefault(),
                        mStaticInfo.getExposureMaximumOrDefault()
                );
                mCollector.expectInRange(
                        String.format("Capture for format %d, size %s sensitivity is invalid.",
                                format, size.toString()),
                        sensitivity,
                        mStaticInfo.getSensitivityMinimumOrDefault(),
                        mStaticInfo.getSensitivityMaximumOrDefault()
                );
            }
            // TODO: add more key validations.
        }
    }

    private final class SimpleImageListener implements ImageReader.OnImageAvailableListener {
        private final ConditionVariable imageAvailable = new ConditionVariable();

        @Override
        public void onImageAvailable(ImageReader reader) {
            if (mReader != reader) {
                return;
            }

            if (VERBOSE) Log.v(TAG, "new image available");
            imageAvailable.open();
        }

        public void waitForAnyImageAvailable(long timeout) {
            if (imageAvailable.block(timeout)) {
                imageAvailable.close();
            } else {
                fail("wait for image available timed out after " + timeout + "ms");
            }
        }

        public void closePendingImages() {
            Image image = mReader.acquireLatestImage();
            if (image != null) {
                image.close();
            }
        }
    }

    private CaptureRequest prepareCaptureRequest() throws Exception {
        List<Surface> outputSurfaces = new ArrayList<Surface>();
        Surface surface = mReader.getSurface();
        assertNotNull("Fail to get surface from ImageReader", surface);
        outputSurfaces.add(surface);
        return prepareCaptureRequestForSurfaces(outputSurfaces).build();
    }

    private CaptureRequest.Builder prepareCaptureRequestForSurfaces(List<Surface> surfaces)
            throws Exception {
        createSession(surfaces);

        CaptureRequest.Builder captureBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        assertNotNull("Fail to get captureRequest", captureBuilder);
        for (Surface surface : surfaces) {
            captureBuilder.addTarget(surface);
        }

        return captureBuilder;
    }

    private void validateImage(Size sz, int format, int captureCount, boolean repeating)
            throws Exception {
        // TODO: Add more formats here, and wrap each one as a function.
        Image img;
        final int MAX_RETRY_COUNT = 20;
        int numImageVerified = 0;
        int reTryCount = 0;
        while (numImageVerified < captureCount) {
            assertNotNull("Image listener is null", mListener);
            if (VERBOSE) Log.v(TAG, "Waiting for an Image");
            mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
            if (repeating) {
                /*
                 * Acquire the latest image in case the validation is slower than
                 * the image producing rate.
                 */
                img = mReader.acquireLatestImage();
                /*
                 * If multiple onImageAvailable callbacks are queued up, acquireLatestImage
                 * may clear all of the buffers before the corresponding callback is executed.
                 * Wait for a new frame in that case.
                 */
                if (img == null && reTryCount < MAX_RETRY_COUNT) {
                    reTryCount++;
                    continue;
                }
            } else {
                img = mReader.acquireNextImage();
            }
            assertNotNull("Unable to acquire the latest image", img);
            if (VERBOSE) Log.v(TAG, "Got the latest image");
            CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
                    DEBUG_FILE_NAME_BASE);
            if (VERBOSE) Log.v(TAG, "finished validation of image " + numImageVerified);
            img.close();
            numImageVerified++;
            reTryCount = 0;
        }

        // Return all pending images to the ImageReader, as validateImage may take a while
        // and there could be many images pending.
        mListener.closePendingImages();
    }
}