Home | History | Annotate | Download | only in cts
      1 /*
      2  * Copyright (C) 2014 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package android.hardware.camera2.cts;
     18 
     19 import static com.android.ex.camera2.blocking.BlockingSessionCallback.SESSION_CLOSED;
     20 
     21 import android.graphics.ImageFormat;
     22 import android.hardware.camera2.CameraAccessException;
     23 import android.hardware.camera2.CameraCaptureSession;
     24 import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
     25 import android.hardware.camera2.CameraCharacteristics;
     26 import android.hardware.camera2.CameraDevice;
     27 import android.hardware.camera2.CaptureRequest;
     28 import android.hardware.camera2.CaptureResult;
     29 import android.hardware.camera2.TotalCaptureResult;
     30 import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
     31 import android.hardware.camera2.cts.CameraTestUtils.SimpleImageReaderListener;
     32 import android.hardware.camera2.cts.helpers.StaticMetadata;
     33 import android.hardware.camera2.cts.helpers.StaticMetadata.CheckLevel;
     34 import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
     35 import android.hardware.camera2.params.InputConfiguration;
     36 import android.hardware.camera2.params.StreamConfigurationMap;
     37 import android.media.Image;
     38 import android.media.ImageReader;
     39 import android.media.ImageWriter;
     40 import android.os.ConditionVariable;
     41 import android.os.SystemClock;
     42 import android.platform.test.annotations.AppModeFull;
     43 import android.util.Log;
     44 import android.util.Pair;
     45 import android.util.Range;
     46 import android.util.Size;
     47 import android.view.Surface;
     48 
     49 import com.android.compatibility.common.util.DeviceReportLog;
     50 import com.android.compatibility.common.util.ResultType;
     51 import com.android.compatibility.common.util.ResultUnit;
     52 import com.android.compatibility.common.util.Stat;
     53 import com.android.ex.camera2.blocking.BlockingSessionCallback;
     54 import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
     55 
     56 import java.util.ArrayList;
     57 import java.util.Arrays;
     58 import java.util.List;
     59 import java.util.concurrent.LinkedBlockingQueue;
     60 import java.util.concurrent.TimeUnit;
     61 
     62 /**
     63  * Test camera2 API use case performance KPIs, such as camera open time, session creation time,
     64  * shutter lag etc. The KPI data will be reported in cts results.
     65  */
     66 @AppModeFull
     67 public class PerformanceTest extends Camera2SurfaceViewTestCase {
     68     private static final String TAG = "PerformanceTest";
     69     private static final String REPORT_LOG_NAME = "CtsCameraTestCases";
     70     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
     71     private static final int NUM_TEST_LOOPS = 10;
     72     private static final int NUM_MAX_IMAGES = 4;
     73     private static final int NUM_RESULTS_WAIT = 30;
     74     private static final int[] REPROCESS_FORMATS = {ImageFormat.YUV_420_888, ImageFormat.PRIVATE};
     75     private final int MAX_REPROCESS_IMAGES = 6;
     76     private final int MAX_JPEG_IMAGES = MAX_REPROCESS_IMAGES;
     77     private final int MAX_INPUT_IMAGES = MAX_REPROCESS_IMAGES;
     78     // ZSL queue depth should be bigger than the max simultaneous reprocessing capture request
     79     // count to maintain reasonable number of candidate image for the worse-case.
     80     private final int MAX_ZSL_IMAGES = MAX_REPROCESS_IMAGES * 3 / 2;
     81     private final double REPROCESS_STALL_MARGIN = 0.1;
     82 
     83     private DeviceReportLog mReportLog;
     84 
     85     // Used for reading camera output buffers.
     86     private ImageReader mCameraZslReader;
     87     private SimpleImageReaderListener mCameraZslImageListener;
     88     // Used for reprocessing (jpeg) output.
     89     private ImageReader mJpegReader;
     90     private SimpleImageReaderListener mJpegListener;
     91     // Used for reprocessing input.
     92     private ImageWriter mWriter;
     93     private SimpleCaptureCallback mZslResultListener;
     94 
    @Override
    protected void setUp() throws Exception {
        // No test-specific setup; rely entirely on the shared
        // Camera2SurfaceViewTestCase initialization (camera manager, handler, etc.).
        super.setUp();
    }
     99 
    @Override
    protected void tearDown() throws Exception {
        // No test-specific cleanup; each test method closes its own readers/devices
        // in finally blocks, so only the base-class teardown is needed here.
        super.tearDown();
    }
    104 
    105     /**
    106      * Test camera launch KPI: the time duration between a camera device is
    107      * being opened and first preview frame is available.
    108      * <p>
    109      * It includes camera open time, session creation time, and sending first
    110      * preview request processing latency etc. For the SurfaceView based preview use
    111      * case, there is no way for client to know the exact preview frame
    112      * arrival time. To approximate this time, a companion YUV420_888 stream is
    113      * created. The first YUV420_888 Image coming out of the ImageReader is treated
    114      * as the first preview arrival time.</p>
    115      * <p>
    116      * For depth-only devices, timing is done with the DEPTH16 format instead.
    117      * </p>
    118      */
    119     public void testCameraLaunch() throws Exception {
    120         double[] avgCameraLaunchTimes = new double[mCameraIds.length];
    121 
    122         int counter = 0;
    123         for (String id : mCameraIds) {
    124             // Do NOT move these variables to outer scope
    125             // They will be passed to DeviceReportLog and their references will be stored
    126             String streamName = "test_camera_launch";
    127             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
    128             mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
    129             double[] cameraOpenTimes = new double[NUM_TEST_LOOPS];
    130             double[] configureStreamTimes = new double[NUM_TEST_LOOPS];
    131             double[] startPreviewTimes = new double[NUM_TEST_LOOPS];
    132             double[] stopPreviewTimes = new double[NUM_TEST_LOOPS];
    133             double[] cameraCloseTimes = new double[NUM_TEST_LOOPS];
    134             double[] cameraLaunchTimes = new double[NUM_TEST_LOOPS];
    135             try {
    136                 mStaticInfo = new StaticMetadata(mCameraManager.getCameraCharacteristics(id));
    137                 if (mStaticInfo.isColorOutputSupported()) {
    138                     initializeImageReader(id, ImageFormat.YUV_420_888);
    139                 } else {
    140                     assertTrue("Depth output must be supported if regular output isn't!",
    141                             mStaticInfo.isDepthOutputSupported());
    142                     initializeImageReader(id, ImageFormat.DEPTH16);
    143                 }
    144 
    145                 SimpleImageListener imageListener = null;
    146                 long startTimeMs, openTimeMs, configureTimeMs, previewStartedTimeMs;
    147                 for (int i = 0; i < NUM_TEST_LOOPS; i++) {
    148                     try {
    149                         // Need create a new listener every iteration to be able to wait
    150                         // for the first image comes out.
    151                         imageListener = new SimpleImageListener();
    152                         mReader.setOnImageAvailableListener(imageListener, mHandler);
    153                         startTimeMs = SystemClock.elapsedRealtime();
    154 
    155                         // Blocking open camera
    156                         simpleOpenCamera(id);
    157                         openTimeMs = SystemClock.elapsedRealtime();
    158                         cameraOpenTimes[i] = openTimeMs - startTimeMs;
    159 
    160                         // Blocking configure outputs.
    161                         configureReaderAndPreviewOutputs();
    162                         configureTimeMs = SystemClock.elapsedRealtime();
    163                         configureStreamTimes[i] = configureTimeMs - openTimeMs;
    164 
    165                         // Blocking start preview (start preview to first image arrives)
    166                         SimpleCaptureCallback resultListener =
    167                                 new SimpleCaptureCallback();
    168                         blockingStartPreview(resultListener, imageListener);
    169                         previewStartedTimeMs = SystemClock.elapsedRealtime();
    170                         startPreviewTimes[i] = previewStartedTimeMs - configureTimeMs;
    171                         cameraLaunchTimes[i] = previewStartedTimeMs - startTimeMs;
    172 
    173                         // Let preview on for a couple of frames
    174                         waitForNumResults(resultListener, NUM_RESULTS_WAIT);
    175 
    176                         // Blocking stop preview
    177                         startTimeMs = SystemClock.elapsedRealtime();
    178                         blockingStopPreview();
    179                         stopPreviewTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
    180                     }
    181                     finally {
    182                         // Blocking camera close
    183                         startTimeMs = SystemClock.elapsedRealtime();
    184                         closeDevice();
    185                         cameraCloseTimes[i] = SystemClock.elapsedRealtime() - startTimeMs;
    186                     }
    187                 }
    188 
    189                 avgCameraLaunchTimes[counter] = Stat.getAverage(cameraLaunchTimes);
    190                 // Finish the data collection, report the KPIs.
    191                 // ReportLog keys have to be lowercase underscored format.
    192                 mReportLog.addValues("camera_open_time", cameraOpenTimes, ResultType.LOWER_BETTER,
    193                         ResultUnit.MS);
    194                 mReportLog.addValues("camera_configure_stream_time", configureStreamTimes,
    195                         ResultType.LOWER_BETTER, ResultUnit.MS);
    196                 mReportLog.addValues("camera_start_preview_time", startPreviewTimes,
    197                         ResultType.LOWER_BETTER, ResultUnit.MS);
    198                 mReportLog.addValues("camera_camera_stop_preview", stopPreviewTimes,
    199                         ResultType.LOWER_BETTER, ResultUnit.MS);
    200                 mReportLog.addValues("camera_camera_close_time", cameraCloseTimes,
    201                         ResultType.LOWER_BETTER, ResultUnit.MS);
    202                 mReportLog.addValues("camera_launch_time", cameraLaunchTimes,
    203                         ResultType.LOWER_BETTER, ResultUnit.MS);
    204             }
    205             finally {
    206                 closeImageReader();
    207             }
    208             counter++;
    209             mReportLog.submit(getInstrumentation());
    210 
    211             if (VERBOSE) {
    212                 Log.v(TAG, "Camera " + id + " device open times(ms): "
    213                         + Arrays.toString(cameraOpenTimes)
    214                         + ". Average(ms): " + Stat.getAverage(cameraOpenTimes)
    215                         + ". Min(ms): " + Stat.getMin(cameraOpenTimes)
    216                         + ". Max(ms): " + Stat.getMax(cameraOpenTimes));
    217                 Log.v(TAG, "Camera " + id + " configure stream times(ms): "
    218                         + Arrays.toString(configureStreamTimes)
    219                         + ". Average(ms): " + Stat.getAverage(configureStreamTimes)
    220                         + ". Min(ms): " + Stat.getMin(configureStreamTimes)
    221                         + ". Max(ms): " + Stat.getMax(configureStreamTimes));
    222                 Log.v(TAG, "Camera " + id + " start preview times(ms): "
    223                         + Arrays.toString(startPreviewTimes)
    224                         + ". Average(ms): " + Stat.getAverage(startPreviewTimes)
    225                         + ". Min(ms): " + Stat.getMin(startPreviewTimes)
    226                         + ". Max(ms): " + Stat.getMax(startPreviewTimes));
    227                 Log.v(TAG, "Camera " + id + " stop preview times(ms): "
    228                         + Arrays.toString(stopPreviewTimes)
    229                         + ". Average(ms): " + Stat.getAverage(stopPreviewTimes)
    230                         + ". nMin(ms): " + Stat.getMin(stopPreviewTimes)
    231                         + ". nMax(ms): " + Stat.getMax(stopPreviewTimes));
    232                 Log.v(TAG, "Camera " + id + " device close times(ms): "
    233                         + Arrays.toString(cameraCloseTimes)
    234                         + ". Average(ms): " + Stat.getAverage(cameraCloseTimes)
    235                         + ". Min(ms): " + Stat.getMin(cameraCloseTimes)
    236                         + ". Max(ms): " + Stat.getMax(cameraCloseTimes));
    237                 Log.v(TAG, "Camera " + id + " camera launch times(ms): "
    238                         + Arrays.toString(cameraLaunchTimes)
    239                         + ". Average(ms): " + Stat.getAverage(cameraLaunchTimes)
    240                         + ". Min(ms): " + Stat.getMin(cameraLaunchTimes)
    241                         + ". Max(ms): " + Stat.getMax(cameraLaunchTimes));
    242             }
    243         }
    244         if (mCameraIds.length != 0) {
    245             String streamName = "test_camera_launch_average";
    246             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
    247             mReportLog.setSummary("camera_launch_average_time_for_all_cameras",
    248                     Stat.getAverage(avgCameraLaunchTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
    249             mReportLog.submit(getInstrumentation());
    250         }
    251     }
    252 
    253     /**
    254      * Test camera capture KPI for YUV_420_888 format: the time duration between
    255      * sending out a single image capture request and receiving image data and
    256      * capture result.
    257      * <p>
    258      * It enumerates the following metrics: capture latency, computed by
    259      * measuring the time between sending out the capture request and getting
    260      * the image data; partial result latency, computed by measuring the time
    261      * between sending out the capture request and getting the partial result;
    262      * capture result latency, computed by measuring the time between sending
    263      * out the capture request and getting the full capture result.
    264      * </p>
    265      */
    266     public void testSingleCapture() throws Exception {
    267         double[] avgResultTimes = new double[mCameraIds.length];
    268 
    269         int counter = 0;
    270         for (String id : mCameraIds) {
    271             // Do NOT move these variables to outer scope
    272             // They will be passed to DeviceReportLog and their references will be stored
    273             String streamName = "test_single_capture";
    274             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
    275             mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
    276             double[] captureTimes = new double[NUM_TEST_LOOPS];
    277             double[] getPartialTimes = new double[NUM_TEST_LOOPS];
    278             double[] getResultTimes = new double[NUM_TEST_LOOPS];
    279             try {
    280                 openDevice(id);
    281 
    282                 if (!mStaticInfo.isColorOutputSupported()) {
    283                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
    284                     continue;
    285                 }
    286 
    287 
    288                 boolean partialsExpected = mStaticInfo.getPartialResultCount() > 1;
    289                 long startTimeMs;
    290                 boolean isPartialTimingValid = partialsExpected;
    291                 for (int i = 0; i < NUM_TEST_LOOPS; i++) {
    292 
    293                     // setup builders and listeners
    294                     CaptureRequest.Builder previewBuilder =
    295                             mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    296                     CaptureRequest.Builder captureBuilder =
    297                             mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    298                     SimpleCaptureCallback previewResultListener =
    299                             new SimpleCaptureCallback();
    300                     SimpleTimingResultListener captureResultListener =
    301                             new SimpleTimingResultListener();
    302                     SimpleImageListener imageListener = new SimpleImageListener();
    303 
    304                     Size maxYuvSize = CameraTestUtils.getSortedSizesForFormat(
    305                         id, mCameraManager, ImageFormat.YUV_420_888, /*bound*/null).get(0);
    306 
    307                     prepareCaptureAndStartPreview(previewBuilder, captureBuilder,
    308                             mOrderedPreviewSizes.get(0), maxYuvSize,
    309                             ImageFormat.YUV_420_888, previewResultListener,
    310                             NUM_MAX_IMAGES, imageListener);
    311 
    312                     // Capture an image and get image data
    313                     startTimeMs = SystemClock.elapsedRealtime();
    314                     CaptureRequest request = captureBuilder.build();
    315                     mSession.capture(request, captureResultListener, mHandler);
    316 
    317                     Pair<CaptureResult, Long> partialResultNTime = null;
    318                     if (partialsExpected) {
    319                         partialResultNTime = captureResultListener.getPartialResultNTimeForRequest(
    320                             request, NUM_RESULTS_WAIT);
    321                         // Even if maxPartials > 1, may not see partials for some devices
    322                         if (partialResultNTime == null) {
    323                             partialsExpected = false;
    324                             isPartialTimingValid = false;
    325                         }
    326                     }
    327                     Pair<CaptureResult, Long> captureResultNTime =
    328                             captureResultListener.getCaptureResultNTimeForRequest(
    329                                     request, NUM_RESULTS_WAIT);
    330                     imageListener.waitForImageAvailable(
    331                             CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
    332 
    333                     captureTimes[i] = imageListener.getTimeReceivedImage() - startTimeMs;
    334                     if (partialsExpected) {
    335                         getPartialTimes[i] = partialResultNTime.second - startTimeMs;
    336                         if (getPartialTimes[i] < 0) {
    337                             isPartialTimingValid = false;
    338                         }
    339                     }
    340                     getResultTimes[i] = captureResultNTime.second - startTimeMs;
    341 
    342                     // simulate real scenario (preview runs a bit)
    343                     waitForNumResults(previewResultListener, NUM_RESULTS_WAIT);
    344 
    345                     stopPreview();
    346 
    347                 }
    348                 mReportLog.addValues("camera_capture_latency", captureTimes,
    349                         ResultType.LOWER_BETTER, ResultUnit.MS);
    350                 // If any of the partial results do not contain AE and AF state, then no report
    351                 if (isPartialTimingValid) {
    352                     mReportLog.addValues("camera_partial_result_latency", getPartialTimes,
    353                             ResultType.LOWER_BETTER, ResultUnit.MS);
    354                 }
    355                 mReportLog.addValues("camera_capture_result_latency", getResultTimes,
    356                         ResultType.LOWER_BETTER, ResultUnit.MS);
    357 
    358                 avgResultTimes[counter] = Stat.getAverage(getResultTimes);
    359             }
    360             finally {
    361                 closeImageReader();
    362                 closeDevice();
    363             }
    364             counter++;
    365             mReportLog.submit(getInstrumentation());
    366         }
    367 
    368         // Result will not be reported in CTS report if no summary is printed.
    369         if (mCameraIds.length != 0) {
    370             String streamName = "test_single_capture_average";
    371             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
    372             mReportLog.setSummary("camera_capture_result_average_latency_for_all_cameras",
    373                     Stat.getAverage(avgResultTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
    374             mReportLog.submit(getInstrumentation());
    375         }
    376     }
    377 
    378     /**
    379      * Test multiple capture KPI for YUV_420_888 format: the average time duration
    380      * between sending out image capture requests and receiving capture results.
    381      * <p>
    382      * It measures capture latency, which is the time between sending out the capture
    383      * request and getting the full capture result, and the frame duration, which is the timestamp
    384      * gap between results.
    385      * </p>
    386      */
    387     public void testMultipleCapture() throws Exception {
    388         double[] avgResultTimes = new double[mCameraIds.length];
    389         double[] avgDurationMs = new double[mCameraIds.length];
    390 
    391         // A simple CaptureSession StateCallback to handle onCaptureQueueEmpty
        // Tracks onCaptureQueueEmpty callbacks so the test can pace capture
        // submissions: one waitForCaptureQueueEmpty() call consumes one callback.
        class MultipleCaptureStateCallback extends CameraCaptureSession.StateCallback {
            // Signaled by onCaptureQueueEmpty; blocked on (with timeout) by waiters.
            private ConditionVariable captureQueueEmptyCond = new ConditionVariable();
            // Number of onCaptureQueueEmpty callbacks not yet consumed by a waiter.
            // NOTE(review): this field is written on the camera handler thread and
            // read/written on the test thread with no synchronization; presumably
            // safe for this test's strictly alternating capture/wait pattern, but
            // confirm before reusing this class elsewhere.
            private int captureQueueEmptied = 0;

            @Override
            public void onConfigured(CameraCaptureSession session) {
                // Empty implementation
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
                // Empty implementation
            }

            @Override
            public void onCaptureQueueEmpty(CameraCaptureSession session) {
                captureQueueEmptied++;
                if (VERBOSE) {
                    Log.v(TAG, "onCaptureQueueEmpty received. captureQueueEmptied = "
                        + captureQueueEmptied);
                }

                captureQueueEmptyCond.open();
            }

            /* Wait for onCaptureQueueEmpty, return immediately if an onCaptureQueueEmpty was
             * already received, otherwise, wait for one to arrive. */
            public void waitForCaptureQueueEmpty(long timeout) {
                if (captureQueueEmptied > 0) {
                    captureQueueEmptied--;
                    return;
                }

                if (captureQueueEmptyCond.block(timeout)) {
                    // Consume the signal: reset both the condition and the counter,
                    // since the callback that opened the condition also incremented it.
                    captureQueueEmptyCond.close();
                    captureQueueEmptied = 0;
                } else {
                    throw new TimeoutRuntimeException("Unable to receive onCaptureQueueEmpty after "
                        + timeout + "ms");
                }
            }
        }
    434 
    435         final MultipleCaptureStateCallback sessionListener = new MultipleCaptureStateCallback();
    436 
    437         int counter = 0;
    438         for (String id : mCameraIds) {
    439             // Do NOT move these variables to outer scope
    440             // They will be passed to DeviceReportLog and their references will be stored
    441             String streamName = "test_multiple_capture";
    442             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
    443             mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
    444             long[] startTimes = new long[NUM_MAX_IMAGES];
    445             double[] getResultTimes = new double[NUM_MAX_IMAGES];
    446             double[] frameDurationMs = new double[NUM_MAX_IMAGES-1];
    447             try {
    448                 openDevice(id);
    449 
    450                 if (!mStaticInfo.isColorOutputSupported()) {
    451                     Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
    452                     continue;
    453                 }
    454 
    455                 for (int i = 0; i < NUM_TEST_LOOPS; i++) {
    456 
    457                     // setup builders and listeners
    458                     CaptureRequest.Builder previewBuilder =
    459                             mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    460                     CaptureRequest.Builder captureBuilder =
    461                             mCamera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    462                     SimpleCaptureCallback previewResultListener =
    463                             new SimpleCaptureCallback();
    464                     SimpleTimingResultListener captureResultListener =
    465                             new SimpleTimingResultListener();
    466                     SimpleImageReaderListener imageListener =
    467                             new SimpleImageReaderListener(/*asyncMode*/true, NUM_MAX_IMAGES);
    468 
    469                     Size maxYuvSize = CameraTestUtils.getSortedSizesForFormat(
    470                         id, mCameraManager, ImageFormat.YUV_420_888, /*bound*/null).get(0);
    471                     // Find minimum frame duration for YUV_420_888
    472                     StreamConfigurationMap config = mStaticInfo.getCharacteristics().get(
    473                             CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    474 
    475                     final long minStillFrameDuration =
    476                             config.getOutputMinFrameDuration(ImageFormat.YUV_420_888, maxYuvSize);
    477                     if (minStillFrameDuration > 0) {
    478                         Range<Integer> targetRange = getSuitableFpsRangeForDuration(id,
    479                                 minStillFrameDuration);
    480                         previewBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
    481                         captureBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, targetRange);
    482                     }
    483 
    484                     prepareCaptureAndStartPreview(previewBuilder, captureBuilder,
    485                             mOrderedPreviewSizes.get(0), maxYuvSize,
    486                             ImageFormat.YUV_420_888, previewResultListener,
    487                             sessionListener, NUM_MAX_IMAGES, imageListener);
    488 
    489                     // Converge AE
    490                     waitForAeStable(previewResultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);
    491 
    492                     if (mStaticInfo.isAeLockSupported()) {
    493                         // Lock AE if possible to improve stability
    494                         previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
    495                         mSession.setRepeatingRequest(previewBuilder.build(), previewResultListener,
    496                                 mHandler);
    497                         if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
    498                             // Legacy mode doesn't output AE state
    499                             waitForResultValue(previewResultListener, CaptureResult.CONTROL_AE_STATE,
    500                                     CaptureResult.CONTROL_AE_STATE_LOCKED, NUM_RESULTS_WAIT_TIMEOUT);
    501                         }
    502                     }
    503 
    504                     // Capture NUM_MAX_IMAGES images based on onCaptureQueueEmpty callback
    505                     for (int j = 0; j < NUM_MAX_IMAGES; j++) {
    506 
    507                         // Capture an image and get image data
    508                         startTimes[j] = SystemClock.elapsedRealtime();
    509                         CaptureRequest request = captureBuilder.build();
    510                         mSession.capture(request, captureResultListener, mHandler);
    511 
    512                         // Wait for capture queue empty for the current request
    513                         sessionListener.waitForCaptureQueueEmpty(
    514                                 CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
    515                     }
    516 
    517                     // Acquire the capture result time and frame duration
    518                     long prevTimestamp = -1;
    519                     for (int j = 0; j < NUM_MAX_IMAGES; j++) {
    520                         Pair<CaptureResult, Long> captureResultNTime =
    521                                 captureResultListener.getCaptureResultNTime(
    522                                         CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
    523 
    524                         getResultTimes[j] +=
    525                                 (double)(captureResultNTime.second - startTimes[j])/NUM_TEST_LOOPS;
    526 
    527                         // Collect inter-frame timestamp
    528                         long timestamp = captureResultNTime.first.get(CaptureResult.SENSOR_TIMESTAMP);
    529                         if (prevTimestamp != -1) {
    530                             frameDurationMs[j-1] +=
    531                                     (double)(timestamp - prevTimestamp)/(NUM_TEST_LOOPS * 1000000.0);
    532                         }
    533                         prevTimestamp = timestamp;
    534                     }
    535 
    536                     // simulate real scenario (preview runs a bit)
    537                     waitForNumResults(previewResultListener, NUM_RESULTS_WAIT);
    538 
    539                     stopPreview();
    540                 }
    541 
    542                 for (int i = 0; i < getResultTimes.length; i++) {
    543                     Log.v(TAG, "Camera " + id + " result time[" + i + "] is " +
    544                             getResultTimes[i] + " ms");
    545                 }
    546                 for (int i = 0; i < NUM_MAX_IMAGES-1; i++) {
    547                     Log.v(TAG, "Camera " + id + " frame duration time[" + i + "] is " +
    548                             frameDurationMs[i] + " ms");
    549                 }
    550 
    551                 mReportLog.addValues("camera_multiple_capture_result_latency", getResultTimes,
    552                         ResultType.LOWER_BETTER, ResultUnit.MS);
    553                 mReportLog.addValues("camera_multiple_capture_frame_duration", frameDurationMs,
    554                         ResultType.LOWER_BETTER, ResultUnit.MS);
    555 
    556 
    557                 avgResultTimes[counter] = Stat.getAverage(getResultTimes);
    558                 avgDurationMs[counter] = Stat.getAverage(frameDurationMs);
    559             }
    560             finally {
    561                 closeImageReader();
    562                 closeDevice();
    563             }
    564             counter++;
    565             mReportLog.submit(getInstrumentation());
    566         }
    567 
    568         // Result will not be reported in CTS report if no summary is printed.
    569         if (mCameraIds.length != 0) {
    570             String streamName = "test_multiple_capture_average";
    571             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
    572             mReportLog.setSummary("camera_multiple_capture_result_average_latency_for_all_cameras",
    573                     Stat.getAverage(avgResultTimes), ResultType.LOWER_BETTER, ResultUnit.MS);
    574             mReportLog.submit(getInstrumentation());
    575             mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
    576             mReportLog.setSummary("camera_multiple_capture_frame_duration_average_for_all_cameras",
    577                     Stat.getAverage(avgDurationMs), ResultType.LOWER_BETTER, ResultUnit.MS);
    578             mReportLog.submit(getInstrumentation());
    579         }
    580     }
    581 
    582     /**
    583      * Test reprocessing shot-to-shot latency with default NR and edge options, i.e., from the time
    584      * a reprocess request is issued to the time the reprocess image is returned.
    585      */
    586     public void testReprocessingLatency() throws Exception {
    587         for (String id : mCameraIds) {
    588             for (int format : REPROCESS_FORMATS) {
    589                 if (!isReprocessSupported(id, format)) {
    590                     continue;
    591                 }
    592 
    593                 try {
    594                     openDevice(id);
    595                     String streamName = "test_reprocessing_latency";
    596                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
    597                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
    598                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
    599                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
    600                             /*highQuality*/false);
    601                 } finally {
    602                     closeReaderWriters();
    603                     closeDevice();
    604                     mReportLog.submit(getInstrumentation());
    605                 }
    606             }
    607         }
    608     }
    609 
    610     /**
    611      * Test reprocessing throughput with default NR and edge options, i.e., how many frames can be reprocessed
    612      * during a given amount of time.
    613      *
    614      */
    615     public void testReprocessingThroughput() throws Exception {
    616         for (String id : mCameraIds) {
    617             for (int format : REPROCESS_FORMATS) {
    618                 if (!isReprocessSupported(id, format)) {
    619                     continue;
    620                 }
    621 
    622                 try {
    623                     openDevice(id);
    624                     String streamName = "test_reprocessing_throughput";
    625                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
    626                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
    627                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
    628                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
    629                             /*highQuality*/false);
    630                 } finally {
    631                     closeReaderWriters();
    632                     closeDevice();
    633                     mReportLog.submit(getInstrumentation());
    634                 }
    635             }
    636         }
    637     }
    638 
    639     /**
    640      * Test reprocessing shot-to-shot latency with High Quality NR and edge options, i.e., from the
    641      * time a reprocess request is issued to the time the reprocess image is returned.
    642      */
    643     public void testHighQualityReprocessingLatency() throws Exception {
    644         for (String id : mCameraIds) {
    645             for (int format : REPROCESS_FORMATS) {
    646                 if (!isReprocessSupported(id, format)) {
    647                     continue;
    648                 }
    649 
    650                 try {
    651                     openDevice(id);
    652                     String streamName = "test_high_quality_reprocessing_latency";
    653                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
    654                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
    655                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
    656                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/false,
    657                             /*requireHighQuality*/true);
    658                 } finally {
    659                     closeReaderWriters();
    660                     closeDevice();
    661                     mReportLog.submit(getInstrumentation());
    662                 }
    663             }
    664         }
    665     }
    666 
    667     /**
    668      * Test reprocessing throughput with high quality NR and edge options, i.e., how many frames can
    669      * be reprocessed during a given amount of time.
    670      *
    671      */
    672     public void testHighQualityReprocessingThroughput() throws Exception {
    673         for (String id : mCameraIds) {
    674             for (int format : REPROCESS_FORMATS) {
    675                 if (!isReprocessSupported(id, format)) {
    676                     continue;
    677                 }
    678 
    679                 try {
    680                     openDevice(id);
    681                     String streamName = "test_high_quality_reprocessing_throughput";
    682                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
    683                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
    684                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
    685                     reprocessingPerformanceTestByCamera(format, /*asyncMode*/true,
    686                             /*requireHighQuality*/true);
    687                 } finally {
    688                     closeReaderWriters();
    689                     closeDevice();
    690                     mReportLog.submit(getInstrumentation());
    691                 }
    692             }
    693         }
    694     }
    695 
    696     /**
    697      * Testing reprocessing caused preview stall (frame drops)
    698      */
    699     public void testReprocessingCaptureStall() throws Exception {
    700         for (String id : mCameraIds) {
    701             for (int format : REPROCESS_FORMATS) {
    702                 if (!isReprocessSupported(id, format)) {
    703                     continue;
    704                 }
    705 
    706                 try {
    707                     openDevice(id);
    708                     String streamName = "test_reprocessing_capture_stall";
    709                     mReportLog = new DeviceReportLog(REPORT_LOG_NAME, streamName);
    710                     mReportLog.addValue("camera_id", id, ResultType.NEUTRAL, ResultUnit.NONE);
    711                     mReportLog.addValue("format", format, ResultType.NEUTRAL, ResultUnit.NONE);
    712                     reprocessingCaptureStallTestByCamera(format);
    713                 } finally {
    714                     closeReaderWriters();
    715                     closeDevice();
    716                     mReportLog.submit(getInstrumentation());
    717                 }
    718             }
    719         }
    720     }
    721 
    /**
     * Measure the preview disruption ("stall") caused by issuing reprocess captures while
     * ZSL streaming is active.
     *
     * For each of MAX_REPROCESS_IMAGES / 2 reprocess requests, this records the timestamp
     * gaps and frame durations of the MAX_REPROCESS_RETURN_FRAME_COUNT + 1 preview results
     * following the request, reports max gap and average frame duration, and finally
     * asserts the max gap is within (maxCaptureStall + 1) frame durations plus a margin.
     *
     * @param reprocessInputFormat the reprocess input format (e.g. YUV_420_888 or PRIVATE).
     */
    private void reprocessingCaptureStallTestByCamera(int reprocessInputFormat) throws Exception {
        prepareReprocessCapture(reprocessInputFormat);

        // Let it stream for a while before reprocessing
        startZslStreaming();
        waitForFrames(NUM_RESULTS_WAIT);

        final int NUM_REPROCESS_TESTED = MAX_REPROCESS_IMAGES / 2;
        // Prepare several reprocessing requests up front: pair each ZSL image with its
        // capture result (matched by sensor timestamp), build a reprocess request from
        // that result, and queue the image into the ImageWriter.
        Image[] inputImages = new Image[NUM_REPROCESS_TESTED];
        CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            inputImages[i] =
                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
            TotalCaptureResult zslResult =
                    mZslResultListener.getCaptureResult(
                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
            reprocessReqs[i] = mCamera.createReprocessCaptureRequest(zslResult);
            reprocessReqs[i].addTarget(mJpegReader.getSurface());
            // High-quality NR/edge modes maximize the reprocessing load on the device.
            reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
                    CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
            reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
                    CaptureRequest.EDGE_MODE_HIGH_QUALITY);
            mWriter.queueInputImage(inputImages[i]);
        }

        double[] maxCaptureGapsMs = new double[NUM_REPROCESS_TESTED];
        double[] averageFrameDurationMs = new double[NUM_REPROCESS_TESTED];
        Arrays.fill(averageFrameDurationMs, 0.0);
        final int MAX_REPROCESS_RETURN_FRAME_COUNT = 20;
        SimpleCaptureCallback reprocessResultListener = new SimpleCaptureCallback();
        for (int i = 0; i < NUM_REPROCESS_TESTED; i++) {
            // Drain stale preview results so getTotalNumFrames() counts only frames
            // produced while this reprocess request is in flight.
            mZslResultListener.drain();
            CaptureRequest reprocessRequest = reprocessReqs[i].build();
            mSession.capture(reprocessRequest, reprocessResultListener, mHandler);
            // Wait for reprocess output jpeg and result come back.
            reprocessResultListener.getCaptureResultForRequest(reprocessRequest,
                    CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
            mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
            long numFramesMaybeStalled = mZslResultListener.getTotalNumFrames();
            assertTrue("Reprocess capture result should be returned in "
                    + MAX_REPROCESS_RETURN_FRAME_COUNT + " frames",
                    numFramesMaybeStalled <= MAX_REPROCESS_RETURN_FRAME_COUNT);

            // Need look longer time, as the stutter could happen after the reprocessing
            // output frame is received.
            long[] timestampGap = new long[MAX_REPROCESS_RETURN_FRAME_COUNT + 1];
            Arrays.fill(timestampGap, 0);
            CaptureResult[] results = new CaptureResult[timestampGap.length];
            long[] frameDurationsNs = new long[timestampGap.length];
            for (int j = 0; j < results.length; j++) {
                results[j] = mZslResultListener.getCaptureResult(
                        CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                if (j > 0) {
                    timestampGap[j] = results[j].get(CaptureResult.SENSOR_TIMESTAMP) -
                            results[j - 1].get(CaptureResult.SENSOR_TIMESTAMP);
                    assertTrue("Time stamp should be monotonically increasing",
                            timestampGap[j] > 0);
                }
                frameDurationsNs[j] = results[j].get(CaptureResult.SENSOR_FRAME_DURATION);
            }

            if (VERBOSE) {
                Log.i(TAG, "timestampGap: " + Arrays.toString(timestampGap));
                Log.i(TAG, "frameDurationsNs: " + Arrays.toString(frameDurationsNs));
            }

            // Get the number of candidate results, calculate the average frame duration
            // and max timestamp gap. Sorting puts the largest gap at the end of the array.
            Arrays.sort(timestampGap);
            double maxTimestampGapMs = timestampGap[timestampGap.length - 1] / 1000000.0;
            for (int m = 0; m < frameDurationsNs.length; m++) {
                averageFrameDurationMs[i] += (frameDurationsNs[m] / 1000000.0);
            }
            averageFrameDurationMs[i] /= frameDurationsNs.length;

            maxCaptureGapsMs[i] = maxTimestampGapMs;
        }

        stopZslStreaming();

        String reprocessType = "YUV reprocessing";
        if (reprocessInputFormat == ImageFormat.PRIVATE) {
            reprocessType = "opaque reprocessing";
        }
        mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL, ResultUnit.NONE);
        mReportLog.addValues("max_capture_timestamp_gaps", maxCaptureGapsMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.addValues("capture_average_frame_duration", averageFrameDurationMs,
                ResultType.LOWER_BETTER, ResultUnit.MS);
        mReportLog.setSummary("camera_reprocessing_average_max_capture_timestamp_gaps",
                Stat.getAverage(maxCaptureGapsMs), ResultType.LOWER_BETTER, ResultUnit.MS);

        // The max timestamp gap should be less than (captureStall + 1) x average frame
        // duration * (1 + error margin).
        int maxCaptureStallFrames = mStaticInfo.getMaxCaptureStallOrDefault();
        for (int i = 0; i < maxCaptureGapsMs.length; i++) {
            double stallDurationBound = averageFrameDurationMs[i] *
                    (maxCaptureStallFrames + 1) * (1 + REPROCESS_STALL_MARGIN);
            assertTrue("max capture stall duration should be no larger than " + stallDurationBound,
                    maxCaptureGapsMs[i] <= stallDurationBound);
        }
    }
    825 
    /**
     * Core reprocessing performance measurement for the latency/throughput tests.
     *
     * Builds MAX_REPROCESS_IMAGES reprocess requests from matched ZSL image/result pairs,
     * then measures per-image latency either asynchronously (queue and submit everything
     * first, then time the JPEG arrival intervals — throughput) or synchronously (submit
     * one request at a time and time submit-to-JPEG — shot-to-shot latency), and writes
     * the results into {@code mReportLog}.
     *
     * @param reprocessInputFormat the reprocess input format (e.g. YUV_420_888 or PRIVATE).
     * @param asyncMode true measures throughput; false measures shot-to-shot latency.
     * @param requireHighQuality true applies high-quality NR and edge modes to each request.
     */
    private void reprocessingPerformanceTestByCamera(int reprocessInputFormat, boolean asyncMode,
            boolean requireHighQuality)
            throws Exception {
        // Prepare the reprocessing capture
        prepareReprocessCapture(reprocessInputFormat);

        // Start ZSL streaming
        startZslStreaming();
        waitForFrames(NUM_RESULTS_WAIT);

        CaptureRequest.Builder[] reprocessReqs = new CaptureRequest.Builder[MAX_REPROCESS_IMAGES];
        Image[] inputImages = new Image[MAX_REPROCESS_IMAGES];
        double[] getImageLatenciesMs = new double[MAX_REPROCESS_IMAGES];
        long startTimeMs;
        for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
            // Pair each ZSL image with its capture result by sensor timestamp and build a
            // reprocess request from that result.
            inputImages[i] =
                    mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
            TotalCaptureResult zslResult =
                    mZslResultListener.getCaptureResult(
                            WAIT_FOR_RESULT_TIMEOUT_MS, inputImages[i].getTimestamp());
            reprocessReqs[i] = mCamera.createReprocessCaptureRequest(zslResult);
            if (requireHighQuality) {
                // Reprocessing should support high quality for NR and edge modes.
                reprocessReqs[i].set(CaptureRequest.NOISE_REDUCTION_MODE,
                        CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY);
                reprocessReqs[i].set(CaptureRequest.EDGE_MODE,
                        CaptureRequest.EDGE_MODE_HIGH_QUALITY);
            }
            reprocessReqs[i].addTarget(mJpegReader.getSurface());
        }

        if (asyncMode) {
            // async capture: issue all the reprocess requests as quick as possible, then
            // check the throughput of the output jpegs.
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                // Could be slow for YUV reprocessing, do it in advance.
                mWriter.queueInputImage(inputImages[i]);
            }

            // Submit the requests
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                mSession.capture(reprocessReqs[i].build(), null, null);
            }

            // Get images; each latency is the interval between consecutive JPEG arrivals.
            startTimeMs = SystemClock.elapsedRealtime();
            Image jpegImages[] = new Image[MAX_REPROCESS_IMAGES];
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                jpegImages[i] = mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
                startTimeMs = SystemClock.elapsedRealtime();
            }
            for (Image i : jpegImages) {
                i.close();
            }
        } else {
            // sync capture: issue reprocess request one by one, only submit next one when
            // the previous capture image is returned. This is to test the back to back capture
            // performance.
            Image jpegImages[] = new Image[MAX_REPROCESS_IMAGES];
            for (int i = 0; i < MAX_REPROCESS_IMAGES; i++) {
                startTimeMs = SystemClock.elapsedRealtime();
                mWriter.queueInputImage(inputImages[i]);
                mSession.capture(reprocessReqs[i].build(), null, null);
                jpegImages[i] = mJpegListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
                getImageLatenciesMs[i] = SystemClock.elapsedRealtime() - startTimeMs;
            }
            for (Image i : jpegImages) {
                i.close();
            }
        }

        stopZslStreaming();

        String reprocessType = "YUV reprocessing";
        if (reprocessInputFormat == ImageFormat.PRIVATE) {
            reprocessType = "opaque reprocessing";
        }

        // Report the performance data
        String captureMsg;
        if (asyncMode) {
            captureMsg = "capture latency";
            if (requireHighQuality) {
                captureMsg += " for High Quality noise reduction and edge modes";
            }
            mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
                    ResultUnit.MS);
            mReportLog.setSummary("camera_reprocessing_average_latency",
                    Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER, ResultUnit.MS);
        } else {
            captureMsg = "shot to shot latency";
            if (requireHighQuality) {
                captureMsg += " for High Quality noise reduction and edge modes";
            }
            mReportLog.addValue("reprocess_type", reprocessType, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValue("capture_message", captureMsg, ResultType.NEUTRAL,
                    ResultUnit.NONE);
            mReportLog.addValues("latency", getImageLatenciesMs, ResultType.LOWER_BETTER,
                    ResultUnit.MS);
            mReportLog.setSummary("camera_reprocessing_shot_to_shot_average_latency",
                    Stat.getAverage(getImageLatenciesMs), ResultType.LOWER_BETTER, ResultUnit.MS);
        }
    }
    935 
    936     /**
    937      * Start preview and ZSL streaming
    938      */
    939     private void startZslStreaming() throws Exception {
    940         CaptureRequest.Builder zslBuilder =
    941                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
    942         zslBuilder.addTarget(mPreviewSurface);
    943         zslBuilder.addTarget(mCameraZslReader.getSurface());
    944         mSession.setRepeatingRequest(zslBuilder.build(), mZslResultListener, mHandler);
    945     }
    946 
    /**
     * Stop the repeating ZSL request and block until the session reports SESSION_READY,
     * so no in-flight captures remain when the caller proceeds.
     */
    private void stopZslStreaming() throws Exception {
        mSession.stopRepeating();
        mSessionListener.getStateWaiter().waitForState(
            BlockingSessionCallback.SESSION_READY, CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS);
    }
    952 
    953     /**
    954      * Wait for a certain number of frames, the images and results will be drained from the
    955      * listeners to make sure that next reprocessing can get matched results and images.
    956      *
    957      * @param numFrameWait The number of frames to wait before return, 0 means that
    958      *      this call returns immediately after streaming on.
    959      */
    960     private void waitForFrames(int numFrameWait) throws Exception {
    961         if (numFrameWait < 0) {
    962             throw new IllegalArgumentException("numFrameWait " + numFrameWait +
    963                     " should be non-negative");
    964         }
    965 
    966         for (int i = 0; i < numFrameWait; i++) {
    967             mCameraZslImageListener.getImage(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS).close();
    968         }
    969     }
    970 
    /**
     * Tear down the reprocess pipeline created by prepareReprocessCapture(): drain the
     * image listeners, then close and null out the ZSL reader, JPEG reader, and input
     * ImageWriter.
     */
    private void closeReaderWriters() {
        // Drain before closing so any buffered images are released back to the readers.
        mCameraZslImageListener.drain();
        CameraTestUtils.closeImageReader(mCameraZslReader);
        mCameraZslReader = null;
        mJpegListener.drain();
        CameraTestUtils.closeImageReader(mJpegReader);
        mJpegReader = null;
        CameraTestUtils.closeImageWriter(mWriter);
        mWriter = null;
    }
    981 
    /**
     * Set up the complete reprocessing pipeline for the given input format: output
     * ImageReaders for ZSL and JPEG, a reprocessable capture session, and an ImageWriter
     * feeding the session's input surface. Initializes the mCameraZsl*, mJpeg*,
     * mZslResultListener, mSessionListener, mSession and mWriter fields.
     *
     * @param inputFormat the reprocess input format (e.g. YUV_420_888 or PRIVATE).
     * @throws CameraAccessException if session configuration fails.
     */
    private void prepareReprocessCapture(int inputFormat)
                    throws CameraAccessException {
        // 1. Find the right preview and capture sizes.
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
        Size[] supportedInputSizes =
                mStaticInfo.getAvailableSizesForFormatChecked(inputFormat,
                StaticMetadata.StreamDirection.Input);
        Size maxInputSize = CameraTestUtils.getMaxSize(supportedInputSizes);
        Size maxJpegSize = mOrderedStillSizes.get(0);
        updatePreviewSurface(maxPreviewSize);
        mZslResultListener = new SimpleCaptureCallback();

        // 2. Create camera output ImageReaders.
        // YUV/Opaque output, camera should support output with input size/format.
        // Async mode holds back MAX_REPROCESS_IMAGES buffers for reprocess input use.
        mCameraZslImageListener = new SimpleImageReaderListener(
                /*asyncMode*/true, MAX_ZSL_IMAGES - MAX_REPROCESS_IMAGES);
        mCameraZslReader = CameraTestUtils.makeImageReader(
                maxInputSize, inputFormat, MAX_ZSL_IMAGES, mCameraZslImageListener, mHandler);
        // Jpeg reprocess output
        mJpegListener = new SimpleImageReaderListener();
        mJpegReader = CameraTestUtils.makeImageReader(
                maxJpegSize, ImageFormat.JPEG, MAX_JPEG_IMAGES, mJpegListener, mHandler);

        // create camera reprocess session: preview + ZSL + JPEG outputs, with an input
        // stream matching the max supported input size for this format.
        List<Surface> outSurfaces = new ArrayList<Surface>();
        outSurfaces.add(mPreviewSurface);
        outSurfaces.add(mCameraZslReader.getSurface());
        outSurfaces.add(mJpegReader.getSurface());
        InputConfiguration inputConfig = new InputConfiguration(maxInputSize.getWidth(),
                maxInputSize.getHeight(), inputFormat);
        mSessionListener = new BlockingSessionCallback();
        mSession = CameraTestUtils.configureReprocessableCameraSession(
                mCamera, inputConfig, outSurfaces, mSessionListener, mHandler);

        // 3. Create ImageWriter for input
        mWriter = CameraTestUtils.makeImageWriter(
                mSession.getInputSurface(), MAX_INPUT_IMAGES, /*listener*/null, /*handler*/null);

    }
   1021 
    /**
     * Stop preview and block until the capture session reaches SESSION_CLOSED, so a
     * new session can be configured safely afterwards.
     */
    private void blockingStopPreview() throws Exception {
        stopPreview();
        mSessionListener.getStateWaiter().waitForState(SESSION_CLOSED,
                CameraTestUtils.SESSION_CLOSE_TIMEOUT_MS);
    }
   1027 
   1028     private void blockingStartPreview(CaptureCallback listener, SimpleImageListener imageListener)
   1029             throws Exception {
   1030         if (mPreviewSurface == null || mReaderSurface == null) {
   1031             throw new IllegalStateException("preview and reader surface must be initilized first");
   1032         }
   1033 
   1034         CaptureRequest.Builder previewBuilder =
   1035                 mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
   1036         if (mStaticInfo.isColorOutputSupported()) {
   1037             previewBuilder.addTarget(mPreviewSurface);
   1038         }
   1039         previewBuilder.addTarget(mReaderSurface);
   1040         mSession.setRepeatingRequest(previewBuilder.build(), listener, mHandler);
   1041         imageListener.waitForImageAvailable(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
   1042     }
   1043 
   1044     /**
   1045      * Configure reader and preview outputs and wait until done.
   1046      */
   1047     private void configureReaderAndPreviewOutputs() throws Exception {
   1048         if (mPreviewSurface == null || mReaderSurface == null) {
   1049             throw new IllegalStateException("preview and reader surface must be initilized first");
   1050         }
   1051         mSessionListener = new BlockingSessionCallback();
   1052         List<Surface> outputSurfaces = new ArrayList<>();
   1053         if (mStaticInfo.isColorOutputSupported()) {
   1054             outputSurfaces.add(mPreviewSurface);
   1055         }
   1056         outputSurfaces.add(mReaderSurface);
   1057         mSession = CameraTestUtils.configureCameraSession(mCamera, outputSurfaces,
   1058                 mSessionListener, mHandler);
   1059     }
   1060 
   1061     /**
   1062      * Initialize the ImageReader instance and preview surface.
   1063      * @param cameraId The camera to be opened.
   1064      * @param format The format used to create ImageReader instance.
   1065      */
   1066     private void initializeImageReader(String cameraId, int format) throws Exception {
   1067         mOrderedPreviewSizes = CameraTestUtils.getSortedSizesForFormat(
   1068                 cameraId, mCameraManager, format,
   1069                 CameraTestUtils.getPreviewSizeBound(mWindowManager,
   1070                     CameraTestUtils.PREVIEW_SIZE_BOUND));
   1071         Size maxPreviewSize = mOrderedPreviewSizes.get(0);
   1072         createImageReader(maxPreviewSize, format, NUM_MAX_IMAGES, /*listener*/null);
   1073         updatePreviewSurface(maxPreviewSize);
   1074     }
   1075 
    /**
     * Open the camera and initialize per-camera test state: the collector's camera id,
     * assert-level static metadata, and the minimum frame duration map for YUV_420_888.
     *
     * @param cameraId The camera to be opened.
     */
    private void simpleOpenCamera(String cameraId) throws Exception {
        mCamera = CameraTestUtils.openCamera(
                mCameraManager, cameraId, mCameraListener, mHandler);
        mCollector.setCameraId(cameraId);
        // ASSERT check level makes metadata inconsistencies fail the test immediately.
        mStaticInfo = new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
                CheckLevel.ASSERT, /*collector*/null);
        mMinPreviewFrameDurationMap =
                mStaticInfo.getAvailableMinFrameDurationsForFormatChecked(ImageFormat.YUV_420_888);
    }
   1085 
   1086     /**
   1087      * Simple image listener that can be used to time the availability of first image.
   1088      *
   1089      */
   1090     private static class SimpleImageListener implements ImageReader.OnImageAvailableListener {
   1091         private ConditionVariable imageAvailable = new ConditionVariable();
   1092         private boolean imageReceived = false;
   1093         private long mTimeReceivedImage = 0;
   1094 
   1095         @Override
   1096         public void onImageAvailable(ImageReader reader) {
   1097             Image image = null;
   1098             if (!imageReceived) {
   1099                 if (VERBOSE) {
   1100                     Log.v(TAG, "First image arrives");
   1101                 }
   1102                 imageReceived = true;
   1103                 mTimeReceivedImage = SystemClock.elapsedRealtime();
   1104                 imageAvailable.open();
   1105             }
   1106             image = reader.acquireNextImage();
   1107             if (image != null) {
   1108                 image.close();
   1109             }
   1110         }
   1111 
   1112         /**
   1113          * Wait for image available, return immediately if the image was already
   1114          * received, otherwise wait until an image arrives.
   1115          */
   1116         public void waitForImageAvailable(long timeout) {
   1117             if (imageReceived) {
   1118                 imageReceived = false;
   1119                 return;
   1120             }
   1121 
   1122             if (imageAvailable.block(timeout)) {
   1123                 imageAvailable.close();
   1124                 imageReceived = true;
   1125             } else {
   1126                 throw new TimeoutRuntimeException("Unable to get the first image after "
   1127                         + CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS + "ms");
   1128             }
   1129         }
   1130 
   1131         public long getTimeReceivedImage() {
   1132             return mTimeReceivedImage;
   1133         }
   1134     }
   1135 
   1136     private static class SimpleTimingResultListener
   1137             extends CameraCaptureSession.CaptureCallback {
   1138         private final LinkedBlockingQueue<Pair<CaptureResult, Long> > mPartialResultQueue =
   1139                 new LinkedBlockingQueue<Pair<CaptureResult, Long> >();
   1140         private final LinkedBlockingQueue<Pair<CaptureResult, Long> > mResultQueue =
   1141                 new LinkedBlockingQueue<Pair<CaptureResult, Long> > ();
   1142 
   1143         @Override
   1144         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
   1145                 TotalCaptureResult result) {
   1146             try {
   1147                 Long time = SystemClock.elapsedRealtime();
   1148                 mResultQueue.put(new Pair<CaptureResult, Long>(result, time));
   1149             } catch (InterruptedException e) {
   1150                 throw new UnsupportedOperationException(
   1151                         "Can't handle InterruptedException in onCaptureCompleted");
   1152             }
   1153         }
   1154 
   1155         @Override
   1156         public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
   1157                 CaptureResult partialResult) {
   1158             try {
   1159                 // check if AE and AF state exists
   1160                 Long time = -1L;
   1161                 if (partialResult.get(CaptureResult.CONTROL_AE_STATE) != null &&
   1162                         partialResult.get(CaptureResult.CONTROL_AF_STATE) != null) {
   1163                     time = SystemClock.elapsedRealtime();
   1164                 }
   1165                 mPartialResultQueue.put(new Pair<CaptureResult, Long>(partialResult, time));
   1166             } catch (InterruptedException e) {
   1167                 throw new UnsupportedOperationException(
   1168                         "Can't handle InterruptedException in onCaptureProgressed");
   1169             }
   1170         }
   1171 
   1172         public Pair<CaptureResult, Long> getPartialResultNTime(long timeout) {
   1173             try {
   1174                 Pair<CaptureResult, Long> result =
   1175                         mPartialResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
   1176                 return result;
   1177             } catch (InterruptedException e) {
   1178                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
   1179             }
   1180         }
   1181 
   1182         public Pair<CaptureResult, Long> getCaptureResultNTime(long timeout) {
   1183             try {
   1184                 Pair<CaptureResult, Long> result =
   1185                         mResultQueue.poll(timeout, TimeUnit.MILLISECONDS);
   1186                 assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
   1187                 return result;
   1188             } catch (InterruptedException e) {
   1189                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
   1190             }
   1191         }
   1192 
   1193         public Pair<CaptureResult, Long> getPartialResultNTimeForRequest(CaptureRequest myRequest,
   1194                 int numResultsWait) {
   1195             if (numResultsWait < 0) {
   1196                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
   1197             }
   1198 
   1199             Pair<CaptureResult, Long> result;
   1200             int i = 0;
   1201             do {
   1202                 result = getPartialResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
   1203                 // The result may be null if no partials are produced on this particular path, so
   1204                 // stop trying
   1205                 if (result == null) break;
   1206                 if (result.first.getRequest().equals(myRequest)) {
   1207                     return result;
   1208                 }
   1209             } while (i++ < numResultsWait);
   1210 
   1211             // No partials produced - this may not be an error, since a given device may not
   1212             // produce any partials on this testing path
   1213             return null;
   1214         }
   1215 
   1216         public Pair<CaptureResult, Long> getCaptureResultNTimeForRequest(CaptureRequest myRequest,
   1217                 int numResultsWait) {
   1218             if (numResultsWait < 0) {
   1219                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
   1220             }
   1221 
   1222             Pair<CaptureResult, Long> result;
   1223             int i = 0;
   1224             do {
   1225                 result = getCaptureResultNTime(CameraTestUtils.CAPTURE_RESULT_TIMEOUT_MS);
   1226                 if (result.first.getRequest().equals(myRequest)) {
   1227                     return result;
   1228                 }
   1229             } while (i++ < numResultsWait);
   1230 
   1231             throw new TimeoutRuntimeException("Unable to get the expected capture result after "
   1232                     + "waiting for " + numResultsWait + " results");
   1233         }
   1234 
   1235     }
   1236 }
   1237