/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.cts;

import static android.hardware.camera2.cts.CameraTestUtils.*;

import android.graphics.ImageFormat;
import android.view.Surface;

import com.android.ex.camera2.blocking.BlockingSessionCallback;

import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.util.Size;
import android.hardware.camera2.cts.CameraTestUtils.SimpleCaptureCallback;
import android.hardware.camera2.cts.testcases.Camera2SurfaceViewTestCase;
import android.hardware.camera2.params.OutputConfiguration;
import android.platform.test.annotations.AppModeFull;
import android.util.Log;
import android.util.Pair;
import android.util.Range;
import android.view.SurfaceView;
import android.view.SurfaceHolder;

import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;

import static org.mockito.Mockito.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * CameraDevice preview tests using a SurfaceView as the output target.
 */
@AppModeFull
public class SurfaceViewPreviewTest extends Camera2SurfaceViewTestCase {
    private static final String TAG = "SurfaceViewPreviewTest";
    private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
    private static final int FRAME_TIMEOUT_MS = 1000;
    private static final int NUM_FRAMES_VERIFIED = 30;
    private static final int NUM_TEST_PATTERN_FRAMES_VERIFIED = 60;
    private static final float FRAME_DURATION_ERROR_MARGIN = 0.01f; // 1 percent error margin.
    private static final int PREPARE_TIMEOUT_MS = 10000; // 10 s

    @Override
    protected void setUp() throws Exception {
        super.setUp();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * Test all supported preview sizes for each camera device.
     * <p>
     * For the first {@link #NUM_FRAMES_VERIFIED} capture results, the delivery of the
     * {@link CaptureCallback} callbacks and the monotonically increasing ordering of the
     * capture timestamps are verified.
     * </p>
     */
    public void testCameraPreview() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                Log.i(TAG, "Testing preview for Camera " + mCameraIds[i]);
                openDevice(mCameraIds[i]);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                previewTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Basic test pattern mode preview.
     * <p>
     * Only the test pattern preview and its capture results are checked; the image buffer
     * contents are not validated.
     * </p>
     */
    public void testBasicTestPatternPreview() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                Log.i(TAG, "Testing preview for Camera " + mCameraIds[i]);
                openDevice(mCameraIds[i]);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                previewTestPatternTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE} for preview, validating the
     * preview frame duration and exposure time.
     */
    public void testPreviewFpsRange() throws Exception {
        for (String id : mCameraIds) {
            try {
                openDevice(id);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                previewFpsRangeTestByCamera();
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test surface set streaming use cases.
     *
     * <p>
     * The test creates output configurations with increasing surface group IDs for the preview
     * and YUV streams. The max supported preview size is selected for the preview stream, and
     * the max supported YUV size (depending on the supported hardware level) is selected for
     * the YUV stream. This test also exercises the prepare API.
     * </p>
     */
    public void testSurfaceSet() throws Exception {
        for (String id : mCameraIds) {
            try {
                openDevice(id);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
                    continue;
                }
                surfaceSetTestByCamera(id);
            } finally {
                closeDevice();
            }
        }
    }

    /**
     * Test to verify the {@link CameraCaptureSession#prepare} method works correctly, and has the
     * expected effects on performance.
     *
     * - Ensure that prepare() results in onSurfacePrepared() being invoked
     * - Ensure that prepare() does not cause preview glitches while operating
     * - Ensure that starting to use a newly-prepared output does not cause additional
     *   preview glitches to occur
     */
    public void testPreparePerformance() throws Throwable {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                openDevice(mCameraIds[i]);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                preparePerformanceTestByCamera(mCameraIds[i]);
            } finally {
                closeDevice();
            }
        }
    }

    private void preparePerformanceTestByCamera(String cameraId) throws Exception {
        final int MAX_IMAGES_TO_PREPARE = 10;
        final int UNKNOWN_LATENCY_RESULT_WAIT = 5;
        final int MAX_RESULTS_TO_WAIT = 10;
        final int FRAMES_FOR_AVERAGING = 100;
        final float PREPARE_FRAME_RATE_BOUNDS = 0.05f; // fraction allowed difference
        final float PREPARE_PEAK_RATE_BOUNDS = 0.5f; // fraction allowed difference

        Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, null).get(0);
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);

        // Don't need image data, just drop it right away to minimize overhead
        ImageDropperListener imageListener = new ImageDropperListener();

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        CaptureRequest.Builder previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

        // Configure outputs and session

        updatePreviewSurface(maxPreviewSize);

        createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_IMAGES_TO_PREPARE,
                imageListener);

        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(mReaderSurface);

        CameraCaptureSession.StateCallback mockSessionListener =
                mock(CameraCaptureSession.StateCallback.class);

        mSession = configureCameraSession(mCamera, outputSurfaces, mockSessionListener, mHandler);

        previewRequest.addTarget(mPreviewSurface);
        Range<Integer> maxFpsTarget = mStaticInfo.getAeMaxTargetFpsRange();
        previewRequest.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, maxFpsTarget);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Converge AE
        waitForAeStable(resultListener, UNKNOWN_LATENCY_RESULT_WAIT);

        if (mStaticInfo.isAeLockSupported()) {
            // Lock AE if possible to improve stability
            previewRequest.set(CaptureRequest.CONTROL_AE_LOCK, true);
            mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);
            if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
                // Legacy mode doesn't output AE state
                waitForResultValue(resultListener, CaptureResult.CONTROL_AE_STATE,
                        CaptureResult.CONTROL_AE_STATE_LOCKED, MAX_RESULTS_TO_WAIT);
            }
        }

        // Measure frame rate for a bit
        Pair<Long, Long> frameDurationStats =
                measureMeanFrameInterval(resultListener, FRAMES_FOR_AVERAGING, /*prevTimestamp*/ 0);

        Log.i(TAG, String.format("Frame interval avg during normal preview: %f ms, peak %f ms",
                        frameDurationStats.first / 1e6, frameDurationStats.second / 1e6));

        // Drain results, do prepare
        resultListener.drain();

        mSession.prepare(mReaderSurface);

        verify(mockSessionListener,
                timeout(PREPARE_TIMEOUT_MS).times(1)).
                onSurfacePrepared(eq(mSession), eq(mReaderSurface));

        // Calculate frame rate during prepare

        int resultsReceived = (int) resultListener.getTotalNumFrames();
        if (resultsReceived > 2) {
            // Only verify frame rate if there are a couple of results
            Pair<Long, Long> whilePreparingFrameDurationStats =
                    measureMeanFrameInterval(resultListener, resultsReceived, /*prevTimestamp*/ 0);

            Log.i(TAG, String.format("Frame interval during prepare avg: %f ms, peak %f ms",
                            whilePreparingFrameDurationStats.first / 1e6,
                            whilePreparingFrameDurationStats.second / 1e6));

            if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
                mCollector.expectTrue(
                    String.format("Camera %s: Preview peak frame interval affected by prepare " +
                            "call: preview avg frame duration: %f ms, peak during prepare: %f ms",
                            cameraId,
                            frameDurationStats.first / 1e6,
                            whilePreparingFrameDurationStats.second / 1e6),
                    (whilePreparingFrameDurationStats.second <=
                            frameDurationStats.first * (1 + PREPARE_PEAK_RATE_BOUNDS)));
                mCollector.expectTrue(
                    String.format("Camera %s: Preview average frame interval affected by prepare " +
                            "call: preview avg frame duration: %f ms, during prepare: %f ms",
                            cameraId,
                            frameDurationStats.first / 1e6,
                            whilePreparingFrameDurationStats.first / 1e6),
                    (whilePreparingFrameDurationStats.first <=
                            frameDurationStats.first * (1 + PREPARE_FRAME_RATE_BOUNDS)));
            }
        }

        resultListener.drain();

        // Get at least one more preview result without prepared target
        CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        long prevTimestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);

        // Now use the prepared stream and ensure there are no hiccups from using it
        previewRequest.addTarget(mReaderSurface);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        Pair<Long, Long> preparedFrameDurationStats =
                measureMeanFrameInterval(resultListener, MAX_IMAGES_TO_PREPARE * 2, prevTimestamp);

        Log.i(TAG, String.format("Frame interval with prepared stream added avg: %f ms, peak %f ms",
                        preparedFrameDurationStats.first / 1e6,
                        preparedFrameDurationStats.second / 1e6));

        if (mStaticInfo.isHardwareLevelAtLeastLimited()) {
            mCollector.expectTrue(
                String.format("Camera %s: Preview peak frame interval affected by use of new " +
                        "stream: preview avg frame duration: %f ms, peak with new stream: %f ms",
                        cameraId,
                        frameDurationStats.first / 1e6, preparedFrameDurationStats.second / 1e6),
                (preparedFrameDurationStats.second <=
                        frameDurationStats.first * (1 + PREPARE_PEAK_RATE_BOUNDS)));
            mCollector.expectTrue(
                String.format("Camera %s: Preview average frame interval affected by use of new " +
                        "stream: preview avg frame duration: %f ms, with new stream: %f ms",
                        cameraId,
                        frameDurationStats.first / 1e6, preparedFrameDurationStats.first / 1e6),
                (preparedFrameDurationStats.first <=
                        frameDurationStats.first * (1 + PREPARE_FRAME_RATE_BOUNDS)));
        }
    }

    /**
     * Test to verify correct behavior with the same Surface object being used repeatedly with
     * different native internals, and multiple Surfaces pointing to the same actual consumer
     * object.
     */
    public void testSurfaceEquality() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                openDevice(mCameraIds[i]);
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }
                surfaceEqualityTestByCamera(mCameraIds[i]);
            } finally {
                closeDevice();
            }
        }
    }

    private void surfaceEqualityTestByCamera(String cameraId) throws Exception {
        final int SOME_FRAMES = 10;

        Size maxPreviewSize = mOrderedPreviewSizes.get(0);

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        // Create a SurfaceTexture for a second output
        SurfaceTexture sharedOutputTexture = new SurfaceTexture(/*random texture ID*/ 5);
        sharedOutputTexture.setDefaultBufferSize(maxPreviewSize.getWidth(),
                maxPreviewSize.getHeight());
        Surface sharedOutputSurface1 = new Surface(sharedOutputTexture);

        updatePreviewSurface(maxPreviewSize);

        List<Surface> outputSurfaces = new ArrayList<Surface>();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(sharedOutputSurface1);

        BlockingSessionCallback sessionListener =
                new BlockingSessionCallback();

        mSession = configureCameraSession(mCamera, outputSurfaces, sessionListener, mHandler);
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        CaptureRequest.Builder previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        previewRequest.addTarget(mPreviewSurface);
        previewRequest.addTarget(sharedOutputSurface1);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Wait to get some frames out
        waitForNumResults(resultListener, SOME_FRAMES);

        // Drain
        mSession.abortCaptures();
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        // Hide / unhide the SurfaceView to get a new target Surface
        recreatePreviewSurface();

        // And resize it again
        updatePreviewSurface(maxPreviewSize);

        // Create a second surface that targets the shared SurfaceTexture
        Surface sharedOutputSurface2 = new Surface(sharedOutputTexture);

        // Use the new Surfaces for a new session
        outputSurfaces.clear();
        outputSurfaces.add(mPreviewSurface);
        outputSurfaces.add(sharedOutputSurface2);

        sessionListener = new BlockingSessionCallback();

        mSession = configureCameraSession(mCamera, outputSurfaces, sessionListener, mHandler);

        previewRequest =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        previewRequest.addTarget(mPreviewSurface);
        previewRequest.addTarget(sharedOutputSurface2);

        mSession.setRepeatingRequest(previewRequest.build(), resultListener, mHandler);

        // Wait to get some frames out
        waitForNumResults(resultListener, SOME_FRAMES);
    }

    /**
     * Verify creation of deferred surface capture sessions
     */
    public void testDeferredSurfaces() throws Exception {
        for (int i = 0; i < mCameraIds.length; i++) {
            try {
                openDevice(mCameraIds[i]);
                if (mStaticInfo.isHardwareLevelLegacy()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] + " is legacy, skipping");
                    continue;
                }
                if (!mStaticInfo.isColorOutputSupported()) {
                    Log.i(TAG, "Camera " + mCameraIds[i] +
                            " does not support color outputs, skipping");
                    continue;
                }

                testDeferredSurfacesByCamera(mCameraIds[i]);
            } finally {
                closeDevice();
            }
        }
    }

    private void testDeferredSurfacesByCamera(String cameraId) throws Exception {
        Size maxPreviewSize = m1080pBoundedOrderedPreviewSizes.get(0);

        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        // Create a SurfaceTexture for a second output
        SurfaceTexture sharedOutputTexture = new SurfaceTexture(/*random texture ID*/ 5);
        sharedOutputTexture.setDefaultBufferSize(maxPreviewSize.getWidth(),
                maxPreviewSize.getHeight());
        Surface sharedOutputSurface1 = new Surface(sharedOutputTexture);

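        // Local listener that records whether any frame has been delivered to the shared
        // SurfaceTexture.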
        class TextureAvailableListener implements SurfaceTexture.OnFrameAvailableListener {
            @Override
            public void onFrameAvailable(SurfaceTexture t) {
                mGotFrame = true;
            }
            public boolean gotFrame() { return mGotFrame; }

            private volatile boolean mGotFrame = false;
        }
        TextureAvailableListener textureAvailableListener = new TextureAvailableListener();

        sharedOutputTexture.setOnFrameAvailableListener(textureAvailableListener, mHandler);

        updatePreviewSurface(maxPreviewSize);

        // Create deferred outputs for surface view and surface texture
        OutputConfiguration surfaceViewOutput = new OutputConfiguration(maxPreviewSize,
                SurfaceHolder.class);
        OutputConfiguration surfaceTextureOutput = new OutputConfiguration(maxPreviewSize,
                SurfaceTexture.class);

        List<OutputConfiguration> outputSurfaces = new ArrayList<>();
        outputSurfaces.add(surfaceViewOutput);
        outputSurfaces.add(surfaceTextureOutput);

        // Create non-deferred ImageReader output (JPEG for LIMITED-level compatibility)
        ImageDropperListener imageListener = new ImageDropperListener();
        createImageReader(mOrderedStillSizes.get(0), ImageFormat.JPEG, /*maxImages*/ 3,
                imageListener);
        OutputConfiguration jpegOutput =
                new OutputConfiguration(OutputConfiguration.SURFACE_GROUP_ID_NONE, mReaderSurface);
        outputSurfaces.add(jpegOutput);

        // Confirm that other surface types aren't supported for OutputConfiguration
        Class[] unsupportedClasses =
                {android.media.ImageReader.class, android.media.MediaCodec.class,
                 android.renderscript.Allocation.class, android.media.MediaRecorder.class};

        for (Class klass : unsupportedClasses) {
            try {
                OutputConfiguration bad = new OutputConfiguration(maxPreviewSize, klass);
                fail("OutputConfiguration allowed use of unsupported class " + klass);
            } catch (IllegalArgumentException e) {
                // expected
            }
        }

        // Confirm that zero surface size isn't supported for OutputConfiguration
        Size[] sizeZeros = { new Size(0, 0), new Size(1, 0), new Size(0, 1) };
        for (Size size : sizeZeros) {
            try {
                OutputConfiguration bad = new OutputConfiguration(size, SurfaceHolder.class);
                fail("OutputConfiguration allowed use of zero surfaceSize");
            } catch (IllegalArgumentException e) {
                // expected
            }
        }

        // Create session

        BlockingSessionCallback sessionListener =
                new BlockingSessionCallback();

        mSession = configureCameraSessionWithConfig(mCamera, outputSurfaces, sessionListener,
                mHandler);
        sessionListener.getStateWaiter().waitForState(BlockingSessionCallback.SESSION_READY,
                SESSION_CONFIGURE_TIMEOUT_MS);

        // Submit JPEG requests

        CaptureRequest.Builder request = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        request.addTarget(mReaderSurface);

        final int SOME_FRAMES = 10;
        for (int i = 0; i < SOME_FRAMES; i++) {
            mSession.capture(request.build(), resultListener, mHandler);
        }

        // Wait to get some frames out to ensure we can operate just the one expected surface
        waitForNumResults(resultListener, SOME_FRAMES);
        assertTrue("No images received", imageListener.getImageCount() > 0);

        // Ensure we can't use the deferred surfaces yet
        request.addTarget(sharedOutputSurface1);
        try {
            mSession.capture(request.build(), resultListener, mHandler);
            fail("Should have received IAE for trying to use a deferred target " +
                    "that's not yet configured");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // Add deferred surfaces to their configurations
        surfaceViewOutput.addSurface(mPreviewSurface);
        surfaceTextureOutput.addSurface(sharedOutputSurface1);

        // Verify bad inputs to addSurface
        try {
            surfaceViewOutput.addSurface(null);
            fail("No error from setting a null deferred surface");
        } catch (NullPointerException e) {
            // expected
        }
        try {
            surfaceViewOutput.addSurface(mPreviewSurface);
            fail("Shouldn't be able to set deferred surface twice");
        } catch (IllegalStateException e) {
            // expected
        }

        // Add first deferred surface to session
        List<OutputConfiguration> deferredSurfaces = new ArrayList<>();
        deferredSurfaces.add(surfaceTextureOutput);

        mSession.finalizeOutputConfigurations(deferredSurfaces);

        // Try a second time; this should fail

        try {
            mSession.finalizeOutputConfigurations(deferredSurfaces);
            fail("Should have received IAE for trying to finalize a deferred output twice");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // Use new deferred surface for a bit
        imageListener.resetImageCount();
        for (int i = 0; i < SOME_FRAMES; i++) {
            mSession.capture(request.build(), resultListener, mHandler);
        }
        waitForNumResults(resultListener, SOME_FRAMES);
        assertTrue("No images received", imageListener.getImageCount() > 0);
        assertTrue("No texture update received", textureAvailableListener.gotFrame());

        // Ensure we can't use the last deferred surface yet
        request.addTarget(mPreviewSurface);
        try {
            mSession.capture(request.build(), resultListener, mHandler);
            fail("Should have received IAE for trying to use a deferred target that's" +
                    " not yet configured");
        } catch (IllegalArgumentException e) {
            // expected
        }

        // Add final deferred surface
        deferredSurfaces.clear();
        deferredSurfaces.add(surfaceViewOutput);

        mSession.finalizeOutputConfigurations(deferredSurfaces);

        // Use final deferred surface for a bit
        imageListener.resetImageCount();
        for (int i = 0; i < SOME_FRAMES; i++) {
            mSession.capture(request.build(), resultListener, mHandler);
        }
        waitForNumResults(resultListener, SOME_FRAMES);
        assertTrue("No images received", imageListener.getImageCount() > 0);
        // Can't check GL output since we don't have a context to call updateTexImage on, and
        // the callback only fires once per updateTexImage call.
        // And there's no way to verify data is going to a SurfaceView

        // Check for invalid output configurations being handed to a session
        OutputConfiguration badConfig =
                new OutputConfiguration(maxPreviewSize, SurfaceTexture.class);
        deferredSurfaces.clear();
        try {
            mSession.finalizeOutputConfigurations(deferredSurfaces);
            fail("No error for empty list passed to finalizeOutputConfigurations");
        } catch (IllegalArgumentException e) {
            // expected
        }

        deferredSurfaces.add(badConfig);
        try {
            mSession.finalizeOutputConfigurations(deferredSurfaces);
            fail("No error for invalid output config being passed to finalizeOutputConfigurations");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /**
     * Measure the inter-frame interval based on SENSOR_TIMESTAMP for frameCount frames from the
     * provided capture listener. If prevTimestamp is positive, it is used for the first interval
     * calculation; otherwise, the first result is used to establish the starting time.
     *
     * Returns the mean interval (in nanoseconds) in the first pair entry, and the largest
     * interval in the second pair entry.
     */
    Pair<Long, Long> measureMeanFrameInterval(SimpleCaptureCallback resultListener, int frameCount,
            long prevTimestamp) throws Exception {
        long summedIntervals = 0;
        long maxInterval = 0;
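        // When no prior timestamp is provided, the first result only establishes the baseline,
        // so one fewer interval than frameCount is measured.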
        int measurementCount = frameCount - ((prevTimestamp > 0) ? 0 : 1);

        for (int i = 0; i < frameCount; i++) {
            CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
            long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
            if (prevTimestamp > 0) {
                long interval = timestamp - prevTimestamp;
                if (interval > maxInterval) maxInterval = interval;
                summedIntervals += interval;
            }
            prevTimestamp = timestamp;
        }
        return new Pair<Long, Long>(summedIntervals / measurementCount, maxInterval);
    }

    /**
     * Test preview fps range for all supported ranges. The exposure time and frame duration
     * are validated.
     */
    private void previewFpsRangeTestByCamera() throws Exception {
        Size maxPreviewSz;
        Range<Integer>[] fpsRanges = getDescendingTargetFpsRanges(mStaticInfo);
        boolean antiBandingOffIsSupported = mStaticInfo.isAntiBandingOffModeSupported();
        Range<Integer> fpsRange;
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        SimpleCaptureCallback resultListener = new SimpleCaptureCallback();

        for (int i = 0; i < fpsRanges.length; i += 1) {
            fpsRange = fpsRanges[i];
            if (mStaticInfo.isHardwareLevelLegacy()) {
                // Legacy devices don't report minimum frame duration for preview sizes. The FPS
                // range should be valid for any supported preview size.
                maxPreviewSz = mOrderedPreviewSizes.get(0);
            } else {
                maxPreviewSz = getMaxPreviewSizeForFpsRange(fpsRange);
            }

            requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
            // Turn off auto antibanding to avoid exposure time and frame duration interference
            // from antibanding algorithm.
            if (antiBandingOffIsSupported) {
                requestBuilder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
                        CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_OFF);
            } else {
                // The device doesn't implement the OFF mode; the test continues and must make
                // sure the antibanding algorithm doesn't interfere with the fps range control.
                Log.i(TAG, "OFF antibanding mode is not supported, the camera device output must" +
                        " satisfy the specified fps range regardless of its current antibanding" +
                        " mode");
            }

            startPreview(requestBuilder, maxPreviewSz, resultListener);
            resultListener = new SimpleCaptureCallback();
            mSession.setRepeatingRequest(requestBuilder.build(), resultListener, mHandler);

            waitForSettingsApplied(resultListener, NUM_FRAMES_WAITED_FOR_UNKNOWN_LATENCY);

            verifyPreviewTargetFpsRange(resultListener, NUM_FRAMES_VERIFIED, fpsRange,
                    maxPreviewSz);
            stopPreview();
            resultListener.drain();
        }
    }

    private void verifyPreviewTargetFpsRange(SimpleCaptureCallback resultListener,
            int numFramesVerified, Range<Integer> fpsRange, Size previewSz) {
        CaptureResult result = resultListener.getCaptureResult(WAIT_FOR_RESULT_TIMEOUT_MS);
        List<Integer> capabilities = mStaticInfo.getAvailableCapabilitiesChecked();

        if (capabilities.contains(CaptureRequest.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
            long frameDuration = getValueNotNull(result, CaptureResult.SENSOR_FRAME_DURATION);
            long[] frameDurationRange =
                    new long[]{(long) (1e9 / fpsRange.getUpper()), (long) (1e9 / fpsRange.getLower())};
            mCollector.expectInRange(
                    "Frame duration must be in the range of " + Arrays.toString(frameDurationRange),
                    frameDuration, (long) (frameDurationRange[0] * (1 - FRAME_DURATION_ERROR_MARGIN)),
                    (long) (frameDurationRange[1] * (1 + FRAME_DURATION_ERROR_MARGIN)));
            long expTime = getValueNotNull(result, CaptureResult.SENSOR_EXPOSURE_TIME);
            mCollector.expectTrue(String.format("Exposure time %d must be no larger than frame"
                    + " duration %d", expTime, frameDuration), expTime <= frameDuration);

            Long minFrameDuration = mMinPreviewFrameDurationMap.get(previewSz);
            boolean findDuration = mCollector.expectTrue("Unable to find minFrameDuration for size "
                    + previewSz.toString(), minFrameDuration != null);
            if (findDuration) {
                mCollector.expectTrue("Frame duration " + frameDuration + " must be no smaller than"
                        + " minFrameDuration " + minFrameDuration, frameDuration >= minFrameDuration);
            }
        } else {
            Log.i(TAG, "verifyPreviewTargetFpsRange - MANUAL_SENSOR control is not supported," +
                    " skipping duration and exposure time check.");
        }
    }

    /**
     * Test all supported preview sizes for a camera device.
     *
     * @throws Exception
     */
    private void previewTestByCamera() throws Exception {
        List<Size> previewSizes = getSupportedPreviewSizes(
                mCamera.getId(), mCameraManager, PREVIEW_SIZE_BOUND);

        for (final Size sz : previewSizes) {
            if (VERBOSE) {
                Log.v(TAG, "Testing camera preview size: " + sz.toString());
            }

            // TODO: vary the different settings like crop region to cover more cases.
            CaptureRequest.Builder requestBuilder =
                    mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            CaptureCallback mockCaptureCallback =
                    mock(CameraCaptureSession.CaptureCallback.class);

            startPreview(requestBuilder, sz, mockCaptureCallback);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_FRAMES_VERIFIED,
                    NUM_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
            stopPreview();
        }
    }

    private void previewTestPatternTestByCamera() throws Exception {
        Size maxPreviewSize = mOrderedPreviewSizes.get(0);
        int[] testPatternModes = mStaticInfo.getAvailableTestPatternModesChecked();
        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        CaptureCallback mockCaptureCallback;

        final int[] TEST_PATTERN_DATA = {0, 0xFFFFFFFF, 0xFFFFFFFF, 0}; // G:100%, RB:0.
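        // Per the SENSOR_TEST_PATTERN_DATA documentation, the four entries correspond to the
        // [R, Gr, Gb, B] channels, so this requests a solid green frame.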
        for (int mode : testPatternModes) {
            if (VERBOSE) {
                Log.v(TAG, "Test pattern mode: " + mode);
            }
            requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE, mode);
            if (mode == CaptureRequest.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) {
                // Assign color pattern to SENSOR_TEST_PATTERN_DATA
                requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA, TEST_PATTERN_DATA);
            }
            mockCaptureCallback = mock(CaptureCallback.class);
            startPreview(requestBuilder, maxPreviewSize, mockCaptureCallback);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_TEST_PATTERN_FRAMES_VERIFIED,
                    NUM_TEST_PATTERN_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
        }

        stopPreview();
    }

    private void surfaceSetTestByCamera(String cameraId) throws Exception {
        final int MAX_SURFACE_GROUP_ID = 10;
        Size maxPreviewSz = mOrderedPreviewSizes.get(0);
        Size yuvSizeBound = maxPreviewSz; // Default case: legacy device
        if (mStaticInfo.isHardwareLevelLimited()) {
            yuvSizeBound = mOrderedVideoSizes.get(0);
        } else if (mStaticInfo.isHardwareLevelAtLeastFull()) {
            yuvSizeBound = null;
        }
        Size maxYuvSize = getSupportedPreviewSizes(cameraId, mCameraManager, yuvSizeBound).get(0);

        CaptureRequest.Builder requestBuilder =
                mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        ImageDropperListener imageListener = new ImageDropperListener();

        updatePreviewSurface(maxPreviewSz);
        createImageReader(maxYuvSize, ImageFormat.YUV_420_888, MAX_READER_IMAGES, imageListener);
        List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
        OutputConfiguration previewConfig = new OutputConfiguration(mPreviewSurface);
        OutputConfiguration yuvConfig = new OutputConfiguration(mReaderSurface);
        assertEquals(OutputConfiguration.SURFACE_GROUP_ID_NONE, previewConfig.getSurfaceGroupId());
        assertEquals(OutputConfiguration.SURFACE_GROUP_ID_NONE, yuvConfig.getSurfaceGroupId());
        assertEquals(mPreviewSurface, previewConfig.getSurface());
        assertEquals(mReaderSurface, yuvConfig.getSurface());
        outputConfigs.add(previewConfig);
        outputConfigs.add(yuvConfig);
        requestBuilder.addTarget(mPreviewSurface);
        requestBuilder.addTarget(mReaderSurface);

        // Test different surface group IDs.
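        // Outputs that share a non-negative surface group ID are declared as not receiving
        // buffers simultaneously, which allows the camera device to share buffer memory among
        // them.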
        for (int surfaceGroupId = OutputConfiguration.SURFACE_GROUP_ID_NONE;
                surfaceGroupId < MAX_SURFACE_GROUP_ID; surfaceGroupId++) {
            if (VERBOSE) {
                Log.v(TAG, "test preview with surface group id: " + surfaceGroupId);
            }

            previewConfig = new OutputConfiguration(surfaceGroupId, mPreviewSurface);
            yuvConfig = new OutputConfiguration(surfaceGroupId, mReaderSurface);
            outputConfigs.clear();
            outputConfigs.add(previewConfig);
            outputConfigs.add(yuvConfig);

            for (OutputConfiguration config : outputConfigs) {
                assertEquals(surfaceGroupId, config.getSurfaceGroupId());
            }

            CameraCaptureSession.StateCallback mockSessionListener =
                    mock(CameraCaptureSession.StateCallback.class);

            mSession = configureCameraSessionWithConfig(mCamera, outputConfigs,
                    mockSessionListener, mHandler);

            mSession.prepare(mPreviewSurface);
            verify(mockSessionListener,
                    timeout(PREPARE_TIMEOUT_MS).times(1)).
                    onSurfacePrepared(eq(mSession), eq(mPreviewSurface));

            mSession.prepare(mReaderSurface);
            verify(mockSessionListener,
                    timeout(PREPARE_TIMEOUT_MS).times(1)).
                    onSurfacePrepared(eq(mSession), eq(mReaderSurface));

            CaptureRequest request = requestBuilder.build();
            CaptureCallback mockCaptureCallback =
                    mock(CameraCaptureSession.CaptureCallback.class);
            mSession.setRepeatingRequest(request, mockCaptureCallback, mHandler);
            verifyCaptureResults(mSession, mockCaptureCallback, NUM_FRAMES_VERIFIED,
                    NUM_FRAMES_VERIFIED * FRAME_TIMEOUT_MS);
        }
    }

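    /**
     * Matcher that accepts a TotalCaptureResult only if its sensor timestamp is present and
     * positive.
     */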
    private class IsCaptureResultValid implements ArgumentMatcher<TotalCaptureResult> {
        @Override
        public boolean matches(TotalCaptureResult result) {
            Long timeStamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
            return (timeStamp != null && timeStamp.longValue() > 0L);
        }
    }

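    /**
     * Verify that at least expectResultCount captures start and complete within timeOutMs, that
     * capture timestamps are positive and monotonically increasing, that frame numbers are
     * consecutive, and that no capture failures are reported.
     */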
    private void verifyCaptureResults(
            CameraCaptureSession session,
            CaptureCallback mockListener,
            int expectResultCount,
            int timeOutMs) {
        // Should receive expected number of onCaptureStarted callbacks.
        ArgumentCaptor<Long> timestamps = ArgumentCaptor.forClass(Long.class);
        ArgumentCaptor<Long> frameNumbers = ArgumentCaptor.forClass(Long.class);
        verify(mockListener,
                timeout(timeOutMs).atLeast(expectResultCount))
                        .onCaptureStarted(
                                eq(session),
                                isA(CaptureRequest.class),
                                timestamps.capture(),
                                frameNumbers.capture());

        // Validate timestamps: all timestamps should be larger than 0 and monotonically increase.
        long timestamp = 0;
        for (Long nextTimestamp : timestamps.getAllValues()) {
            assertNotNull("Next timestamp is null!", nextTimestamp);
            assertTrue("Captures are out of order", timestamp < nextTimestamp);
            timestamp = nextTimestamp;
        }

        // Validate frame numbers: all frame numbers should be consecutive and positive.
        long frameNumber = -1;
        for (Long nextFrameNumber : frameNumbers.getAllValues()) {
            assertNotNull("Next frame number is null!", nextFrameNumber);
            assertTrue("Captures are out of order",
                    (frameNumber == -1) || (frameNumber + 1 == nextFrameNumber));
            frameNumber = nextFrameNumber;
        }

        // Should receive expected number of capture results.
        verify(mockListener,
                timeout(timeOutMs).atLeast(expectResultCount))
                        .onCaptureCompleted(
                                eq(session),
                                isA(CaptureRequest.class),
                                argThat(new IsCaptureResultValid()));

        // Should not receive any capture failed callbacks.
        verify(mockListener, never())
                        .onCaptureFailed(
                                eq(session),
                                isA(CaptureRequest.class),
                                isA(CaptureFailure.class));
    }

}