/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.BitmapRegionDecoder;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.HardwareBuffer;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.cts.CameraTestUtils.ImageDropperListener;
import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.rs.BitmapUtils;
import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.os.ConditionVariable;
import android.platform.test.annotations.AppModeFull;
import android.util.Log;
import android.util.Size;
import android.view.Surface;

import com.android.ex.camera2.blocking.BlockingSessionCallback;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static android.hardware.camera2.cts.CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS;
import static android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import static android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
import static android.hardware.camera2.cts.CameraTestUtils.dumpFile;
import static android.hardware.camera2.cts.CameraTestUtils.getValueNotNull;

/**
 * <p>Basic test for ImageReader APIs. It uses the CameraDevice as the producer; the camera
 * sends data to the Surface provided by the ImageReader. The following image formats are
 * tested:</p>
 *
 * <p>YUV_420_888: flexible YUV420, a mandatory format for camera devices.</p>
 * <p>JPEG: used for JPEG still capture, also a mandatory format.</p>
 * <p>Some invalid-access tests are included as well.</p>
 * <p>TODO: Add more format tests?</p>
 */
@AppModeFull
public class ImageReaderTest extends Camera2AndroidTestCase {
    private static final String TAG = "ImageReaderTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);

    // Number of frames (for streaming requests) to be verified.
    private static final int NUM_FRAME_VERIFIED = 2;
    // Number of frames (for streaming requests) to be verified with long processing time.
    private static final int NUM_LONG_PROCESS_TIME_FRAME_VERIFIED = 10;
    // The time to hold each image to simulate long processing time.
    private static final int LONG_PROCESS_TIME_MS = 300;
    // Max number of images that can be accessed simultaneously from the ImageReader.
    private static final int MAX_NUM_IMAGES = 5;
    // Max difference allowed between YUV and JPEG patches. This tolerance is intentionally very
    // generous to avoid false positives due to punch/saturation operations vendors apply to the
    // JPEG outputs.
    private static final double IMAGE_DIFFERENCE_TOLERANCE = 40;
    // Legacy-level devices need an even larger tolerance because JPEG and YUV are not captured
    // from the same frame in legacy mode.
    private static final double IMAGE_DIFFERENCE_TOLERANCE_LEGACY = 60;

    private SimpleImageListener mListener;

    @Override
    public void setContext(Context context) {
        super.setContext(context);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
    }

    public void testFlexibleYuv() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.i(TAG, "Testing Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.YUV_420_888, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testDepth16() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.i(TAG, "Testing Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.DEPTH16, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testDepthPointCloud() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.i(TAG, "Testing Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.DEPTH_POINT_CLOUD, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testJpeg() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing jpeg capture for Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.JPEG, /*repeating*/false);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testRaw() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing raw capture for camera " + id);
                openDevice(id);

                bufferFormatTestByCamera(ImageFormat.RAW_SENSOR, /*repeating*/false);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testRawPrivate() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing raw capture for camera " + id);
                openDevice(id);

                bufferFormatTestByCamera(ImageFormat.RAW_PRIVATE, /*repeating*/false);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testRepeatingJpeg() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing repeating jpeg capture for Camera " + id);
                openDevice(id);
                bufferFormatTestByCamera(ImageFormat.JPEG, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testRepeatingRaw() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing repeating raw capture for camera " + id);
                openDevice(id);

                bufferFormatTestByCamera(ImageFormat.RAW_SENSOR, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testRepeatingRawPrivate() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing repeating raw capture for camera " + id);
                openDevice(id);

                bufferFormatTestByCamera(ImageFormat.RAW_PRIVATE, /*repeating*/true);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testLongProcessingRepeatingRaw() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing long processing on repeating raw for camera " + id);
                openDevice(id);

                if (!mStaticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    continue;
                }

                bufferFormatLongProcessingTimeTestByCamera(ImageFormat.RAW_SENSOR);
            } finally {
                closeDevice(id);
            }
        }
    }

    public void testLongProcessingRepeatingFlexibleYuv() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing long processing on repeating YUV for camera " + id);
                openDevice(id);

                if (!mStaticInfo.isCapabilitySupported(
                        CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
                    continue;
                }

                bufferFormatLongProcessingTimeTestByCamera(ImageFormat.YUV_420_888);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Test invalid access of an image after the image is closed; any further access
     * of the image should throw an IllegalStateException. The basic assumption of
     * this test is that the ImageReader always provides direct byte buffers, which is always
     * true for the camera case. If the produced image byte buffer is not a direct byte buffer,
     * there is no guarantee that this invalid access will throw an ISE.
     */
    public void testInvalidAccessTest() throws Exception {
        // Test byte buffer access after an image is released; it should throw an ISE.
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing invalid image access for Camera " + id);
                openDevice(id);
                invalidAccessTestAfterClose();
            } finally {
                closeDevice(id);
                closeDefaultImageReader();
            }
        }
    }

    /**
     * Test capture of two image streams (YUV_420_888 and JPEG) by using ImageReader.
     *
     * <p>Both stream formats are mandatory for the Camera2 API.</p>
     */
    public void testYuvAndJpeg() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "YUV and JPEG testing for camera " + id);
                openDevice(id);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " does not support color outputs, skipping");
                    continue;
                }
                bufferFormatWithYuvTestByCamera(ImageFormat.JPEG);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Test capture of two image streams (YUV_420_888 and RAW_SENSOR) by using ImageReader.
     */
    public void testImageReaderYuvAndRaw() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "YUV and RAW testing for camera " + id);
                openDevice(id);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id +
                            " does not support color outputs, skipping");
                    continue;
                }
                bufferFormatWithYuvTestByCamera(ImageFormat.RAW_SENSOR);
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Check that the center patches for YUV and JPEG outputs for the same frame match for each YUV
     * resolution and format supported.
     */
    public void testAllOutputYUVResolutions() throws Exception {
        Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
                BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing all YUV image resolutions for camera " + id);
                openDevice(id);

                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }

                // Only legacy devices need a preview warmup before capture; skip it otherwise.
                int warmupCaptureNumber = (mStaticInfo.isHardwareLevelLegacy()) ?
                        MAX_NUM_IMAGES - 1 : 0;

                // NV21 isn't supported by ImageReader.
                final int[] YUVFormats = new int[] {ImageFormat.YUV_420_888, ImageFormat.YV12};

                CameraCharacteristics.Key<StreamConfigurationMap> key =
                        CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
                StreamConfigurationMap config = mStaticInfo.getValueFromKeyNonNull(key);
                int[] supportedFormats = config.getOutputFormats();
                List<Integer> supportedYUVFormats = new ArrayList<>();
                for (int format : YUVFormats) {
                    if (CameraTestUtils.contains(supportedFormats, format)) {
                        supportedYUVFormats.add(format);
                    }
                }

                Size[] jpegSizes = mStaticInfo.getAvailableSizesForFormatChecked(ImageFormat.JPEG,
                        StaticMetadata.StreamDirection.Output);
                assertFalse("JPEG output not supported for camera " + id +
                        ", at least one JPEG output is required.", jpegSizes.length == 0);

                Size maxJpegSize = CameraTestUtils.getMaxSize(jpegSizes);
                Size maxPreviewSize = mOrderedPreviewSizes.get(0);

                for (int format : supportedYUVFormats) {
                    Size[] targetCaptureSizes =
                            mStaticInfo.getAvailableSizesForFormatChecked(format,
                            StaticMetadata.StreamDirection.Output);

                    for (Size captureSz : targetCaptureSizes) {
                        if (VERBOSE) {
                            Log.v(TAG, "Testing yuv size " + captureSz + " and jpeg size "
                                    + maxJpegSize + " for camera " + mCamera.getId());
                        }

                        ImageReader jpegReader = null;
                        ImageReader yuvReader = null;
                        try {
                            // Create YUV image reader
                            SimpleImageReaderListener yuvListener = new SimpleImageReaderListener();
                            yuvReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
                                    yuvListener);
                            Surface yuvSurface = yuvReader.getSurface();

                            // Create JPEG image reader
                            SimpleImageReaderListener jpegListener =
                                    new SimpleImageReaderListener();
                            jpegReader = createImageReader(maxJpegSize,
                                    ImageFormat.JPEG, MAX_NUM_IMAGES, jpegListener);
                            Surface jpegSurface = jpegReader.getSurface();

                            // Setup session
                            List<Surface> outputSurfaces = new ArrayList<Surface>();
                            outputSurfaces.add(yuvSurface);
                            outputSurfaces.add(jpegSurface);
                            createSession(outputSurfaces);

                            int state = mCameraSessionListener.getStateWaiter().waitForAnyOfStates(
                                        Arrays.asList(sessionStates),
                                        CameraTestUtils.SESSION_CONFIGURE_TIMEOUT_MS);

                            if (state == BlockingSessionCallback.SESSION_CONFIGURE_FAILED) {
                                if (captureSz.getWidth() > maxPreviewSize.getWidth() ||
                                        captureSz.getHeight() > maxPreviewSize.getHeight()) {
                                    Log.v(TAG, "Skip testing {yuv:" + captureSz
                                            + " ,jpeg:" + maxJpegSize + "} for camera "
                                            + mCamera.getId() +
                                            " because full size jpeg + yuv larger than "
                                            + "max preview size (" + maxPreviewSize
                                            + ") is not supported");
                                    continue;
                                } else {
                                    fail("Camera " + mCamera.getId() +
                                            ":session configuration failed for {jpeg: " +
                                            maxJpegSize + ", yuv: " + captureSz + "}");
                                }
                            }

                            // Warm up camera preview (mainly to give legacy devices time to do 3A).
                            CaptureRequest.Builder warmupRequest =
                                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                            assertNotNull("Fail to get CaptureRequest.Builder", warmupRequest);
                            warmupRequest.addTarget(yuvSurface);
                            SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

                            for (int i = 0; i < warmupCaptureNumber; i++) {
                                startCapture(warmupRequest.build(), /*repeating*/false,
                                        resultListener, mHandler);
                            }
                            for (int i = 0; i < warmupCaptureNumber; i++) {
                                resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
                                Image image = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
                                image.close();
                            }

                            // Capture image.
                            CaptureRequest.Builder mainRequest =
                                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                            for (Surface s : outputSurfaces) {
                                mainRequest.addTarget(s);
                            }

                            startCapture(mainRequest.build(), /*repeating*/false, resultListener,
                                    mHandler);

                            // Verify capture result and images
                            resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);

                            Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
                            Image jpegImage = jpegListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);

                            // Validate captured images.
                            CameraTestUtils.validateImage(yuvImage, captureSz.getWidth(),
                                    captureSz.getHeight(), format, /*filePath*/null);
                            CameraTestUtils.validateImage(jpegImage, maxJpegSize.getWidth(),
                                    maxJpegSize.getHeight(), ImageFormat.JPEG, /*filePath*/null);

                            // Compare the image centers.
                            RectF jpegDimens = new RectF(0, 0, jpegImage.getWidth(),
                                    jpegImage.getHeight());
                            RectF yuvDimens = new RectF(0, 0, yuvImage.getWidth(),
                                    yuvImage.getHeight());

                            // Find scale difference between YUV and JPEG output
                            Matrix m = new Matrix();
                            m.setRectToRect(yuvDimens, jpegDimens, Matrix.ScaleToFit.START);
                            RectF scaledYuv = new RectF();
                            m.mapRect(scaledYuv, yuvDimens);
                            float scale = scaledYuv.width() / yuvDimens.width();

                            final int PATCH_DIMEN = 40; // pixels in YUV

                            // Find matching square patch of pixels in YUV and JPEG output
                            RectF tempPatch = new RectF(0, 0, PATCH_DIMEN, PATCH_DIMEN);
                            tempPatch.offset(yuvDimens.centerX() - tempPatch.centerX(),
                                    yuvDimens.centerY() - tempPatch.centerY());
                            Rect yuvPatch = new Rect();
                            tempPatch.roundOut(yuvPatch);

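                            // Scale the JPEG patch by the same YUV->JPEG scale factor so both
                            // patches cover the same region of the scene.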
                            tempPatch.set(0, 0, PATCH_DIMEN * scale, PATCH_DIMEN * scale);
                            tempPatch.offset(jpegDimens.centerX() - tempPatch.centerX(),
                                    jpegDimens.centerY() - tempPatch.centerY());
                            Rect jpegPatch = new Rect();
                            tempPatch.roundOut(jpegPatch);

                            // Decode center patches
                            int[] yuvColors = convertPixelYuvToRgba(yuvPatch.width(),
                                    yuvPatch.height(), yuvPatch.left, yuvPatch.top, yuvImage);
                            Bitmap yuvBmap = Bitmap.createBitmap(yuvColors, yuvPatch.width(),
                                    yuvPatch.height(), Bitmap.Config.ARGB_8888);

                            byte[] compressedJpegData = CameraTestUtils.getDataFromImage(jpegImage);
                            BitmapRegionDecoder decoder = BitmapRegionDecoder.newInstance(
                                    compressedJpegData, /*offset*/0, compressedJpegData.length,
                                    /*isShareable*/true);
                            BitmapFactory.Options opt = new BitmapFactory.Options();
                            opt.inPreferredConfig = Bitmap.Config.ARGB_8888;
                            Bitmap fullSizeJpegBmap = decoder.decodeRegion(jpegPatch, opt);
                            Bitmap jpegBmap = Bitmap.createScaledBitmap(fullSizeJpegBmap,
                                    yuvPatch.width(), yuvPatch.height(), /*filter*/true);

                            // Compare two patches using average of per-pixel differences
                            double difference = BitmapUtils.calcDifferenceMetric(yuvBmap, jpegBmap);
                            double tolerance = IMAGE_DIFFERENCE_TOLERANCE;
                            if (mStaticInfo.isHardwareLevelLegacy()) {
                                tolerance = IMAGE_DIFFERENCE_TOLERANCE_LEGACY;
                            }
                            Log.i(TAG, "Difference for resolution " + captureSz + " is: " +
                                    difference);
                            if (difference > tolerance) {
                                // Dump files if running in debug mode
                                if (DEBUG) {
                                    String jpegFileName = DEBUG_FILE_NAME_BASE + "/" + captureSz +
                                            "_jpeg.jpg";
                                    dumpFile(jpegFileName, jpegBmap);
                                    String fullSizeJpegFileName = DEBUG_FILE_NAME_BASE + "/" +
                                            captureSz + "_full_jpeg.jpg";
                                    dumpFile(fullSizeJpegFileName, compressedJpegData);
                                    String yuvFileName = DEBUG_FILE_NAME_BASE + "/" + captureSz +
                                            "_yuv.jpg";
                                    dumpFile(yuvFileName, yuvBmap);
                                    String fullSizeYuvFileName = DEBUG_FILE_NAME_BASE + "/" +
                                            captureSz + "_full_yuv.jpg";
                                    int[] fullYUVColors = convertPixelYuvToRgba(yuvImage.getWidth(),
                                            yuvImage.getHeight(), 0, 0, yuvImage);
                                    Bitmap fullYUVBmap = Bitmap.createBitmap(fullYUVColors,
                                            yuvImage.getWidth(), yuvImage.getHeight(),
                                            Bitmap.Config.ARGB_8888);
                                    dumpFile(fullSizeYuvFileName, fullYUVBmap);
                                }
                                fail("Camera " + mCamera.getId() + ": YUV and JPEG image at " +
                                        "capture size " + captureSz + " for the same frame are " +
                                        "not similar, center patches have difference metric of " +
                                        difference + ", tolerance is " + tolerance);
                            }

                            // Stop capture, delete the streams.
                            stopCapture(/*fast*/false);
                            yuvImage.close();
                            jpegImage.close();
                            yuvListener.drain();
                            jpegListener.drain();
                        } finally {
                            closeImageReader(jpegReader);
                            jpegReader = null;
                            closeImageReader(yuvReader);
                            yuvReader = null;
                        }
                    }
                }

            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Test that images captured after discarding free buffers are valid.
     */
    public void testDiscardFreeBuffers() throws Exception {
        for (String id : mCameraIds) {
            try {
                Log.v(TAG, "Testing discardFreeBuffers for Camera " + id);
                openDevice(id);
                discardFreeBuffersTestByCamera();
            } finally {
                closeDevice(id);
            }
        }
    }

    /**
     * Convert a rectangular patch in a YUV image to an ARGB color array.
     *
     * @param w width of the patch.
     * @param h height of the patch.
     * @param wOffset offset of the left side of the patch.
     * @param hOffset offset of the top of the patch.
     * @param yuvImage a YUV image to select a patch from.
     * @return the image patch converted to RGB as an ARGB color array.
     */
    private static int[] convertPixelYuvToRgba(int w, int h, int wOffset, int hOffset,
                                               Image yuvImage) {
        final int CHANNELS = 3; // yuv
        final float COLOR_RANGE = 255f;

        assertTrue("Invalid argument to convertPixelYuvToRgba",
                w > 0 && h > 0 && wOffset >= 0 && hOffset >= 0);
        assertNotNull(yuvImage);

        int imageFormat = yuvImage.getFormat();
        assertTrue("YUV image must have YUV-type format",
                imageFormat == ImageFormat.YUV_420_888 || imageFormat == ImageFormat.YV12 ||
                        imageFormat == ImageFormat.NV21);

        int height = yuvImage.getHeight();
        int width = yuvImage.getWidth();

        Rect imageBounds = new Rect(/*left*/0, /*top*/0, /*right*/width, /*bottom*/height);
        Rect crop = new Rect(/*left*/wOffset, /*top*/hOffset, /*right*/wOffset + w,
                /*bottom*/hOffset + h);
        assertTrue("Output rectangle " + crop + " must lie within image bounds " + imageBounds,
                imageBounds.contains(crop));
        Image.Plane[] planes = yuvImage.getPlanes();

        Image.Plane yPlane = planes[0];
        Image.Plane cbPlane = planes[1];
        Image.Plane crPlane = planes[2];

        ByteBuffer yBuf = yPlane.getBuffer();
        int yPixStride = yPlane.getPixelStride();
        int yRowStride = yPlane.getRowStride();
        ByteBuffer cbBuf = cbPlane.getBuffer();
        int cbPixStride = cbPlane.getPixelStride();
        int cbRowStride = cbPlane.getRowStride();
        ByteBuffer crBuf = crPlane.getBuffer();
        int crPixStride = crPlane.getPixelStride();
        int crRowStride = crPlane.getRowStride();

        int[] output = new int[w * h];

        // TODO: Optimize this with renderscript intrinsics
        byte[] yRow = new byte[yPixStride * w];
        byte[] cbRow = new byte[cbPixStride * w / 2];
        byte[] crRow = new byte[crPixStride * w / 2];
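        // 4:2:0 chroma: each Cb/Cr row holds half as many samples as a luma row.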
        yBuf.mark();
        cbBuf.mark();
        crBuf.mark();
        int initialYPos = yBuf.position();
        int initialCbPos = cbBuf.position();
        int initialCrPos = crBuf.position();
        int outputPos = 0;
        for (int i = hOffset; i < hOffset + h; i++) {
            yBuf.position(initialYPos + i * yRowStride + wOffset * yPixStride);
            yBuf.get(yRow);
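            // Chroma planes are subsampled 2x vertically, so fetch a new Cb/Cr row only on
            // every other luma row.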
            if ((i & 1) == (hOffset & 1)) {
                cbBuf.position(initialCbPos + (i / 2) * cbRowStride + wOffset * cbPixStride / 2);
                cbBuf.get(cbRow);
                crBuf.position(initialCrPos + (i / 2) * crRowStride + wOffset * crPixStride / 2);
                crBuf.get(crRow);
            }
            for (int j = 0, yPix = 0, crPix = 0, cbPix = 0; j < w; j++, yPix += yPixStride) {
                float y = yRow[yPix] & 0xFF;
                float cb = cbRow[cbPix] & 0xFF;
                float cr = crRow[crPix] & 0xFF;

                // convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
                int r = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.402f * (cr - 128)));
                int g = (int) Math.max(0.0f,
                        Math.min(COLOR_RANGE, y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128)));
                int b = (int) Math.max(0.0f, Math.min(COLOR_RANGE, y + 1.772f * (cb - 128)));

                // Convert to ARGB pixel color (use opaque alpha)
                output[outputPos++] = Color.rgb(r, g, b);

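                // Chroma is also subsampled 2x horizontally: advance the Cb/Cr indices only
                // after every other output pixel.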
                if ((j & 1) == 1) {
                    crPix += crPixStride;
                    cbPix += cbPixStride;
                }
            }
        }
        yBuf.rewind();
        cbBuf.rewind();
        crBuf.rewind();

        return output;
    }

    /**
     * Test capturing a stream of the given format simultaneously with a YUV stream.
     *
     * <p>Uses a fixed YUV size and varies the capture size of the target format. Single captures
     * are tested.</p>
     *
     * @param format The capture format to be tested along with the YUV format.
     */
    private void bufferFormatWithYuvTestByCamera(int format) throws Exception {
        if (format != ImageFormat.JPEG && format != ImageFormat.RAW_SENSOR
                && format != ImageFormat.YUV_420_888) {
            throw new IllegalArgumentException("Unsupported format: " + format);
        }

        final int NUM_SINGLE_CAPTURE_TESTED = MAX_NUM_IMAGES - 1;
        Size maxYuvSz = mOrderedPreviewSizes.get(0);
        Size[] targetCaptureSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
                StaticMetadata.StreamDirection.Output);

        for (Size captureSz : targetCaptureSizes) {
            if (VERBOSE) {
                Log.v(TAG, "Testing yuv size " + maxYuvSz.toString() + " and capture size "
                        + captureSz.toString() + " for camera " + mCamera.getId());
            }

            ImageReader captureReader = null;
            ImageReader yuvReader = null;
            try {
                // Create YUV image reader
                SimpleImageReaderListener yuvListener = new SimpleImageReaderListener();
                yuvReader = createImageReader(maxYuvSz, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
                        yuvListener);
                Surface yuvSurface = yuvReader.getSurface();

                // Create capture image reader
                SimpleImageReaderListener captureListener = new SimpleImageReaderListener();
                captureReader = createImageReader(captureSz, format, MAX_NUM_IMAGES,
                        captureListener);
                Surface captureSurface = captureReader.getSurface();

                // Capture images.
                List<Surface> outputSurfaces = new ArrayList<Surface>();
                outputSurfaces.add(yuvSurface);
                outputSurfaces.add(captureSurface);
                CaptureRequest.Builder request = prepareCaptureRequestForSurfaces(outputSurfaces,
                        CameraDevice.TEMPLATE_PREVIEW);
                SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

                for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
                    startCapture(request.build(), /*repeating*/false, resultListener, mHandler);
                }

                // Verify capture result and images
                for (int i = 0; i < NUM_SINGLE_CAPTURE_TESTED; i++) {
                    resultListener.getCaptureResult(CAPTURE_WAIT_TIMEOUT_MS);
                    if (VERBOSE) {
                        Log.v(TAG, " Got the capture result back for " + i + "th capture");
                    }

                    Image yuvImage = yuvListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
                    if (VERBOSE) {
                        Log.v(TAG, " Got the yuv image back for " + i + "th capture");
                    }

                    Image captureImage = captureListener.getImage(CAPTURE_WAIT_TIMEOUT_MS);
                    if (VERBOSE) {
                        Log.v(TAG, " Got the capture image back for " + i + "th capture");
                    }

                    // Validate captured images.
                    CameraTestUtils.validateImage(yuvImage, maxYuvSz.getWidth(),
                            maxYuvSz.getHeight(), ImageFormat.YUV_420_888, /*filePath*/null);
                    CameraTestUtils.validateImage(captureImage, captureSz.getWidth(),
                            captureSz.getHeight(), format, /*filePath*/null);
                    yuvImage.close();
                    captureImage.close();
                }

                // Stop capture, delete the streams.
                stopCapture(/*fast*/false);
            } finally {
                closeImageReader(captureReader);
                captureReader = null;
                closeImageReader(yuvReader);
                yuvReader = null;
            }
        }
    }

    private void invalidAccessTestAfterClose() throws Exception {
        final int FORMAT = mStaticInfo.isColorOutputSupported() ?
            ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;

        Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
                StaticMetadata.StreamDirection.Output);
        Image img = null;
        // Create ImageReader.
        mListener = new SimpleImageListener();
        createDefaultImageReader(availableSizes[0], FORMAT, MAX_NUM_IMAGES, mListener);

        // Start capture.
        CaptureRequest request = prepareCaptureRequest();
        SimpleCaptureCallback listener = new SimpleCaptureCallback();
        startCapture(request, /* repeating */false, listener, mHandler);

        mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
        img = mReader.acquireNextImage();
        Plane firstPlane = img.getPlanes()[0];
        ByteBuffer buffer = firstPlane.getBuffer();
        img.close();

        imageInvalidAccessTestAfterClose(img, firstPlane, buffer);
    }

    /**
     * Test that images captured after discarding free buffers are valid.
     */
    private void discardFreeBuffersTestByCamera() throws Exception {
        final int FORMAT = mStaticInfo.isColorOutputSupported() ?
            ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;

        final Size SIZE = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
                StaticMetadata.StreamDirection.Output)[0];
        Image img = null;
        // Create ImageReader.
        mListener = new SimpleImageListener();
        createDefaultImageReader(SIZE, FORMAT, MAX_NUM_IMAGES, mListener);

        // Start capture.
        final boolean REPEATING = true;
        CaptureRequest request = prepareCaptureRequest();
        SimpleCaptureCallback listener = new SimpleCaptureCallback();
        startCapture(request, REPEATING, listener, mHandler);

        // Validate images and capture results.
        validateImage(SIZE, FORMAT, NUM_FRAME_VERIFIED, REPEATING);
        validateCaptureResult(FORMAT, SIZE, listener, NUM_FRAME_VERIFIED);

        // Discard free buffers.
        mReader.discardFreeBuffers();

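        // Freed buffers should be reallocated transparently as new frames arrive, so the
        // stream is expected to keep producing valid images after the discard.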
        // Validate images and capture results again.
        validateImage(SIZE, FORMAT, NUM_FRAME_VERIFIED, REPEATING);
        validateCaptureResult(FORMAT, SIZE, listener, NUM_FRAME_VERIFIED);
    }

    private void bufferFormatTestByCamera(int format, boolean repeating) throws Exception {

        Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
                StaticMetadata.StreamDirection.Output);

        // For each resolution, test the ImageReader:
        for (Size sz : availableSizes) {
            try {
                if (VERBOSE) {
                    Log.v(TAG, "Testing size " + sz.toString() + " format " + format
                            + " for camera " + mCamera.getId());
                }

                // Create ImageReader.
                mListener = new SimpleImageListener();
                createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);

                // Start capture.
                CaptureRequest request = prepareCaptureRequest();
                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                startCapture(request, repeating, listener, mHandler);

                int numFrameVerified = repeating ? NUM_FRAME_VERIFIED : 1;

                // Validate images.
                validateImage(sz, format, numFrameVerified, repeating);

                // Validate capture result.
                validateCaptureResult(format, sz, listener, numFrameVerified);

                // Stop capture.
                stopCapture(/*fast*/false);
            } finally {
                closeDefaultImageReader();
            }

        }
    }

    private void bufferFormatLongProcessingTimeTestByCamera(int format)
            throws Exception {

        final int TEST_SENSITIVITY_VALUE = mStaticInfo.getSensitivityClampToRange(204);
        final long TEST_EXPOSURE_TIME_NS = mStaticInfo.getExposureClampToRange(28000000);
        final long EXPOSURE_TIME_ERROR_MARGIN_NS = 100000;

        Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(format,
                StaticMetadata.StreamDirection.Output);

        // For each resolution, test the ImageReader:
        for (Size sz : availableSizes) {
            Log.v(TAG, "testing size " + sz.toString());
            try {
                if (VERBOSE) {
                    Log.v(TAG, "Testing long processing time: size " + sz.toString() + " format " +
                            format + " for camera " + mCamera.getId());
                }

                // Create ImageReader.
                mListener = new SimpleImageListener();
                createDefaultImageReader(sz, format, MAX_NUM_IMAGES, mListener);

                // Setting manual controls
                List<Surface> outputSurfaces = new ArrayList<Surface>();
                outputSurfaces.add(mReader.getSurface());
                CaptureRequest.Builder requestBuilder = prepareCaptureRequestForSurfaces(
                        outputSurfaces, CameraDevice.TEMPLATE_STILL_CAPTURE);

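                // Lock/disable 3A and apply fixed manual sensitivity and exposure so the
                // capture results can be checked against the requested values below.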
                requestBuilder.set(
                        CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_OFF);
                requestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
                requestBuilder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
                requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                        CaptureRequest.CONTROL_AE_MODE_OFF);
                requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
                        CaptureRequest.CONTROL_AWB_MODE_OFF);
                requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, TEST_SENSITIVITY_VALUE);
                requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, TEST_EXPOSURE_TIME_NS);

                SimpleCaptureCallback listener = new SimpleCaptureCallback();
                startCapture(requestBuilder.build(), /*repeating*/true, listener, mHandler);

                for (int i = 0; i < NUM_LONG_PROCESS_TIME_FRAME_VERIFIED; i++) {
                    mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);

                    // Verify image.
                    Image img = mReader.acquireNextImage();
                    assertNotNull("Unable to acquire next image", img);
                    CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
                            DEBUG_FILE_NAME_BASE);

                    // Verify the exposure time and ISO match the requested values.
                    CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);

                    long exposureTimeDiff = TEST_EXPOSURE_TIME_NS -
                            getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
                    int sensitivityDiff = TEST_SENSITIVITY_VALUE -
                            getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);

                    mCollector.expectTrue(
                            String.format("Long processing frame %d format %d size %s " +
                                    "exposure time was %d expecting %d.", i, format, sz.toString(),
                                    getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME),
                                    TEST_EXPOSURE_TIME_NS),
                            exposureTimeDiff < EXPOSURE_TIME_ERROR_MARGIN_NS &&
                            exposureTimeDiff >= 0);

                    mCollector.expectTrue(
                            String.format("Long processing frame %d format %d size %s " +
                                    "sensitivity was %d expecting %d.", i, format, sz.toString(),
                                    getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY),
                                    TEST_SENSITIVITY_VALUE),
                            sensitivityDiff >= 0);

                    // Sleep to simulate long processing before closing the image.
                    Thread.sleep(LONG_PROCESS_TIME_MS);
                    img.close();
                }
                // Stop capture.
                // Drain the reader queue in case the full queue blocks
                // HAL from delivering new results
                ImageDropperListener imageDropperListener = new ImageDropperListener();
                mReader.setOnImageAvailableListener(imageDropperListener, mHandler);
                Image img = mReader.acquireLatestImage();
                if (img != null) {
                    img.close();
                }
                stopCapture(/*fast*/false);
            } finally {
                closeDefaultImageReader();
            }
        }
    }

    /**
     * Validate capture results.
     *
     * @param format The format of this capture.
     * @param size The capture size.
     * @param listener The capture listener to get capture result callbacks.
     * @param numFrameVerified The number of frames to verify.
     */
    private void validateCaptureResult(int format, Size size, SimpleCaptureCallback listener,
            int numFrameVerified) {
        for (int i = 0; i < numFrameVerified; i++) {
            CaptureResult result = listener.getCaptureResult(CAPTURE_RESULT_TIMEOUT_MS);

            // TODO: Update this to use availableResultKeys once shim supports this.
            if (mStaticInfo.isCapabilitySupported(
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS)) {
                Long exposureTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
                Integer sensitivity = getValueNotNull(result, CaptureResult.SENSOR_SENSITIVITY);
                mCollector.expectInRange(
                        String.format(
                                "Capture for format %d, size %s exposure time is invalid.",
                                format, size.toString()),
                        exposureTime,
                        mStaticInfo.getExposureMinimumOrDefault(),
                        mStaticInfo.getExposureMaximumOrDefault()
                );
                mCollector.expectInRange(
                        String.format("Capture for format %d, size %s sensitivity is invalid.",
                                format, size.toString()),
                        sensitivity,
                        mStaticInfo.getSensitivityMinimumOrDefault(),
                        mStaticInfo.getSensitivityMaximumOrDefault()
                );
            }
            // TODO: add more key validations.
        }
    }

    private final class SimpleImageListener implements ImageReader.OnImageAvailableListener {
        private final ConditionVariable imageAvailable = new ConditionVariable();
        @Override
        public void onImageAvailable(ImageReader reader) {
            if (mReader != reader) {
                return;
            }

            if (VERBOSE) Log.v(TAG, "new image available");
            imageAvailable.open();
        }

        public void waitForAnyImageAvailable(long timeout) {
            if (imageAvailable.block(timeout)) {
                imageAvailable.close();
            } else {
                fail("wait for image available timed out after " + timeout + "ms");
            }
        }

        public void closePendingImages() {
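            // acquireLatestImage() drops any older images still queued, so closing the
            // returned image is enough to release all pending buffers.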
            Image image = mReader.acquireLatestImage();
            if (image != null) {
                image.close();
            }
        }
    }

    private void validateImage(Size sz, int format, int captureCount, boolean repeating)
            throws Exception {
        // TODO: Add more formats here, and wrap each one as a function.
        Image img;
        final int MAX_RETRY_COUNT = 20;
        int numImageVerified = 0;
        int reTryCount = 0;
        while (numImageVerified < captureCount) {
            assertNotNull("Image listener is null", mListener);
            if (VERBOSE) Log.v(TAG, "Waiting for an Image");
            mListener.waitForAnyImageAvailable(CAPTURE_WAIT_TIMEOUT_MS);
            if (repeating) {
                /*
                 * Acquire the latest image in case the validation is slower than
                 * the image producing rate.
                 */
                img = mReader.acquireLatestImage();
                /*
                 * Sometimes, if multiple onImageAvailable callbacks are queued,
                 * acquireLatestImage will clear all buffers before the corresponding callback
                 * is executed. Wait for a new frame in that case.
                 */
                if (img == null && reTryCount < MAX_RETRY_COUNT) {
                    reTryCount++;
                    continue;
                }
            } else {
                img = mReader.acquireNextImage();
            }
            assertNotNull("Unable to acquire the latest image", img);
            if (VERBOSE) Log.v(TAG, "Got the latest image");
            CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
                    DEBUG_FILE_NAME_BASE);
            HardwareBuffer hwb = img.getHardwareBuffer();
            assertNotNull("Unable to retrieve the Image's HardwareBuffer", hwb);
            if (VERBOSE) Log.v(TAG, "finish validation of image " + numImageVerified);
            img.close();
            numImageVerified++;
            reTryCount = 0;
        }

        // Return all pending images to the ImageReader as the validateImage may
        // take a while to return and there could be many images pending.
        mListener.closePendingImages();
    }
}