/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <gtest/gtest.h>
#include <inttypes.h>

#define LOG_TAG "CameraBurstTest"
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <utils/Timers.h>

#include <algorithm>
#include <cmath>
#include <iostream>
#include <sstream>

#include "CameraStreamFixture.h"
#include "TestExtensions.h"

#define CAMERA_FRAME_TIMEOUT    1000000000LL //nsecs (1 sec)
#define CAMERA_HEAP_COUNT       2 //HALBUG: 1 means registerBuffers fails
#define CAMERA_BURST_DEBUGGING  0
#define CAMERA_FRAME_BURST_COUNT 10

/* constants for the exposure test */
#define CAMERA_EXPOSURE_DOUBLE  2
#define CAMERA_EXPOSURE_DOUBLING_THRESHOLD 1.0f
#define CAMERA_EXPOSURE_DOUBLING_COUNT 4
#define CAMERA_EXPOSURE_FORMAT CAMERA_STREAM_AUTO_CPU_FORMAT
#define CAMERA_EXPOSURE_STARTING 100000 // 1/10ms, up to 51.2ms with 10 steps

#define USEC 1000LL        // in ns
#define MSEC 1000000LL     // in ns
#define SEC  1000000000LL  // in ns

#if CAMERA_BURST_DEBUGGING
#define dout std::cout
#else
#define dout if (0) std::cout
#endif

#define WARN_UNLESS(condition) if(!(condition)) std::cerr << "Warning: "
#define WARN_LE(exp, act) WARN_UNLESS((exp) <= (act))
#define WARN_LT(exp, act) WARN_UNLESS((exp) < (act))
#define WARN_GT(exp, act) WARN_UNLESS((exp) > (act))
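
// Usage sketch for the warning macros (illustrative values, not from a real run):
// WARN_UNLESS relies on the dangling-if trick, so the streamed message is only
// evaluated and printed to stderr when the check fails, e.g.
//
//   int maxDoublings = 3;  // hypothetical measured result
//   WARN_LE(CAMERA_EXPOSURE_DOUBLING_COUNT, maxDoublings)
//           << "expected at least " << CAMERA_EXPOSURE_DOUBLING_COUNT
//           << " doublings" << std::endl;  // prints, since 4 <= 3 is false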

using namespace android;
using namespace android::camera2;

namespace android {
namespace camera2 {
namespace tests {

static CameraStreamParams STREAM_PARAMETERS = {
    /*mFormat*/     CAMERA_EXPOSURE_FORMAT,
    /*mHeapCount*/  CAMERA_HEAP_COUNT
};

class CameraBurstTest
    : public ::testing::Test,
      public CameraStreamFixture {

public:
    CameraBurstTest() : CameraStreamFixture(STREAM_PARAMETERS) {
        TEST_EXTENSION_FORKING_CONSTRUCTOR;

        if (HasFatalFailure()) {
            return;
        }

        CreateStream();
    }

    ~CameraBurstTest() {
        TEST_EXTENSION_FORKING_DESTRUCTOR;

        if (mDevice.get()) {
            mDevice->waitUntilDrained();
        }
        DeleteStream();
    }

    virtual void SetUp() {
        TEST_EXTENSION_FORKING_SET_UP;
    }
    virtual void TearDown() {
        TEST_EXTENSION_FORKING_TEAR_DOWN;
    }

    /* this assumes the format is YUV420sp or flexible YUV */
    long long TotalBrightness(const CpuConsumer::LockedBuffer& imgBuffer,
                              int *underexposed,
                              int *overexposed) const {

        const uint8_t* buf = imgBuffer.data;
        size_t stride = imgBuffer.stride;

        /* iterate over the Y plane only */
        long long acc = 0;

        *underexposed = 0;
        *overexposed = 0;

        for (size_t y = 0; y < imgBuffer.height; ++y) {
            for (size_t x = 0; x < imgBuffer.width; ++x) {
                const uint8_t p = buf[y * stride + x];

                if (p == 0) {
                    if (underexposed) {
                        ++*underexposed;
                    }
                    continue;
                } else if (p == 255) {
                    if (overexposed) {
                        ++*overexposed;
                    }
                    continue;
                }

                acc += p;
            }
        }

        return acc;
    }
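
    // How the tests below use the return value (a sketch with illustrative
    // numbers): clipped pixels are excluded from the sum, so the average luma is
    // taken over the remaining pixels, e.g. for a 640x480 buffer with 100
    // underexposed and 50 overexposed pixels:
    //
    //   float avg = brightness * 1.0f / (640 * 480 - (100 + 50));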

    // Parses a comma-separated string list into a Vector
    template<typename T>
    void ParseList(const char *src, Vector<T> &list) {
        std::istringstream s(src);
        while (!s.eof()) {
            char c = s.peek();
            if (c == ',' || c == ' ') {
                s.ignore(1, EOF);
                continue;
            }
            T val;
            if (!(s >> val)) break; // stop on EOF or parse failure instead of pushing a stale value
            list.push_back(val);
        }
    }
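
    // A quick sketch of how ParseList is fed (values taken from the VariableBurst
    // documentation below):
    //
    //   Vector<int64_t> expList;
    //   ParseList("10000000,20000000", expList);
    //   // expList now holds { 10000000, 20000000 }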

};

TEST_F(CameraBurstTest, ManualExposureControl) {

    TEST_EXTENSION_FORKING_INIT;

    // Range of valid exposure times, in nanoseconds
    int64_t minExp, maxExp;
    {
        camera_metadata_ro_entry exposureTimeRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);

        ASSERT_EQ(2u, exposureTimeRange.count);
        minExp = exposureTimeRange.data.i64[0];
        maxExp = exposureTimeRange.data.i64[1];
    }

    dout << "Min exposure is " << minExp;
    dout << " max exposure is " << maxExp << std::endl;

    // Calculate some set of valid exposure times for each request
    int64_t exposures[CAMERA_FRAME_BURST_COUNT];
    exposures[0] = CAMERA_EXPOSURE_STARTING;
    for (int i = 1; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        exposures[i] = exposures[i-1] * CAMERA_EXPOSURE_DOUBLE;
    }
    // Our calculated exposure times should be in [minExp, maxExp]
    EXPECT_LE(minExp, exposures[0])
        << "Minimum exposure range is too high, wanted at most "
        << exposures[0] << "ns";
    EXPECT_GE(maxExp, exposures[CAMERA_FRAME_BURST_COUNT-1])
        << "Maximum exposure range is too low, wanted at least "
        << exposures[CAMERA_FRAME_BURST_COUNT-1] << "ns";

    // Create a preview request, turning off all 3A
    CameraMetadata previewRequest;
    ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                                                &previewRequest));
    {
        Vector<int32_t> outputStreamIds;
        outputStreamIds.push(mStreamId);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                                            outputStreamIds));

        // Disable all 3A routines
        uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_CONTROL_MODE,
                                            &cmOff, 1));

        int requestId = 1;
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
                                            &requestId, 1));

        if (CAMERA_BURST_DEBUGGING) {
            int frameCount = 0;
            ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_FRAME_COUNT,
                                                &frameCount, 1));
        }
    }

    if (CAMERA_BURST_DEBUGGING) {
        previewRequest.dump(STDOUT_FILENO);
    }

    // Submit capture requests
    for (int i = 0; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        CameraMetadata tmpRequest = previewRequest;
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_EXPOSURE_TIME,
                                        &exposures[i], 1));
        ALOGV("Submitting capture request %d with exposure %" PRId64, i,
            exposures[i]);
        dout << "Capture request " << i << " exposure is "
             << (exposures[i]/1e6f) << std::endl;
        ASSERT_EQ(OK, mDevice->capture(tmpRequest));
    }

    dout << "Buffer dimensions " << mWidth << "x" << mHeight << std::endl;

    float brightnesses[CAMERA_FRAME_BURST_COUNT];
    // Get each frame (metadata) and then the buffer. Calculate brightness.
    for (int i = 0; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        ALOGV("Reading capture request %d with exposure %" PRId64, i, exposures[i]);
        ASSERT_EQ(OK, mDevice->waitForNextFrame(CAMERA_FRAME_TIMEOUT));
        ALOGV("Reading capture request-1 %d", i);
        CaptureResult result;
        ASSERT_EQ(OK, mDevice->getNextResult(&result));
        ALOGV("Reading capture request-2 %d", i);

        ASSERT_EQ(OK, mFrameListener->waitForFrame(CAMERA_FRAME_TIMEOUT));
        ALOGV("We got the frame now");

        CpuConsumer::LockedBuffer imgBuffer;
        ASSERT_EQ(OK, mCpuConsumer->lockNextBuffer(&imgBuffer));

        int underexposed, overexposed;
        long long brightness = TotalBrightness(imgBuffer, &underexposed,
                                               &overexposed);
        float avgBrightness = brightness * 1.0f /
                              (mWidth * mHeight - (underexposed + overexposed));
        ALOGV("Total brightness for frame %d was %lld (underexposed %d, "
              "overexposed %d), avg %f", i, brightness, underexposed,
              overexposed, avgBrightness);
        dout << "Average brightness (frame " << i << ") was " << avgBrightness
             << " (underexposed " << underexposed << ", overexposed "
             << overexposed << ")" << std::endl;

        ASSERT_EQ(OK, mCpuConsumer->unlockBuffer(imgBuffer));

        brightnesses[i] = avgBrightness;
    }

    // Calculate the longest run of consecutive frames whose brightness doubled
    // along with the exposure
    float prev = brightnesses[0];
    int doubling_count = 1;
    int max_doubling_count = 0;
    for (int i = 1; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        if (fabs(brightnesses[i] - prev*CAMERA_EXPOSURE_DOUBLE)
            <= CAMERA_EXPOSURE_DOUBLING_THRESHOLD) {
            doubling_count++;
        }
        else {
            max_doubling_count = std::max(max_doubling_count, doubling_count);
            doubling_count = 1;
        }
        prev = brightnesses[i];
    }
    // Account for a doubling run that extends through the final frame
    max_doubling_count = std::max(max_doubling_count, doubling_count);

    dout << "max doubling count: " << max_doubling_count << std::endl;

    /**
     * Make this check warning-only, since the brightness calculation is not
     * reliable and a separate test covers this case; making it strict here
     * would complicate the test too much.
     */
    WARN_LE(CAMERA_EXPOSURE_DOUBLING_COUNT, max_doubling_count)
            << "average brightness should double at least "
            << CAMERA_EXPOSURE_DOUBLING_COUNT
            << " times across consecutive frames as the exposure is doubled"
            << std::endl;
}

/**
 * This test varies exposure time, frame duration, and sensitivity for a
 * burst of captures. It picks a default set of values, but the selection can
 * be overridden with the environment variables
 *   CAMERA2_TEST_VARIABLE_BURST_EXPOSURE_TIMES
 *   CAMERA2_TEST_VARIABLE_BURST_FRAME_DURATIONS
 *   CAMERA2_TEST_VARIABLE_BURST_SENSITIVITIES
 * each of which must be a comma-separated list of values, and all three lists
 * must be the same length. In addition, if the environment variable
 *   CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES
 * is set to 1, then the YUV buffers are dumped into files named
 *   "camera2_test_variable_burst_frame_NNN.yuv"
 *
 * For example:
 *   $ setenv CAMERA2_TEST_VARIABLE_BURST_EXPOSURE_TIMES 10000000,20000000
 *   $ setenv CAMERA2_TEST_VARIABLE_BURST_FRAME_DURATIONS 40000000,40000000
 *   $ setenv CAMERA2_TEST_VARIABLE_BURST_SENSITIVITIES 200,100
 *   $ setenv CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES 1
 *   $ /data/nativetest/camera2_test/camera2_test --gtest_filter="*VariableBurst"
 */
TEST_F(CameraBurstTest, VariableBurst) {

    TEST_EXTENSION_FORKING_INIT;

    // Allowed slack above (UPPER) and below (LOWER) the expected frame duration
    // when checking capture timing
    const nsecs_t DURATION_UPPER_BOUND = 10 * MSEC;
    const nsecs_t DURATION_LOWER_BOUND = 20 * MSEC;

    // Threshold for considering two captures to have equivalent exposure value,
    // as a ratio of the smaller EV to the larger EV.
    const float   EV_MATCH_BOUND = 0.95;
    // Maximum allowed difference in measured brightness (0-255 luminance)
    // between two captures with equivalent exposure values.
    const float   BRIGHTNESS_MATCH_BOUND = 5;
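
    // Illustrative EV-match arithmetic (not from a real capture): EV here is
    // exposure_time * sensitivity, so 30 ms at ISO 100 and 15 ms at ISO 200 both
    // give 30000000 * 100 == 15000000 * 200; their ratio is 1.0 > EV_MATCH_BOUND,
    // so the two captures are expected to measure within BRIGHTNESS_MATCH_BOUND
    // of each other.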

    // Environment variables to check for test-setting overrides
    const char *expEnv         = "CAMERA2_TEST_VARIABLE_BURST_EXPOSURE_TIMES";
    const char *durationEnv    = "CAMERA2_TEST_VARIABLE_BURST_FRAME_DURATIONS";
    const char *sensitivityEnv = "CAMERA2_TEST_VARIABLE_BURST_SENSITIVITIES";
    const char *dumpFrameEnv   = "CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES";

    // Range of valid exposure times, in nanoseconds
    int64_t minExp = 0, maxExp = 0;
    // List of valid sensor sensitivities
    Vector<int32_t> sensitivities;
    // Range of valid frame durations, in nanoseconds
    int64_t minDuration = 0, maxDuration = 0;

    {
        camera_metadata_ro_entry exposureTimeRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);

        EXPECT_EQ(2u, exposureTimeRange.count) << "Bad exposure time range tag. "
                "Using default values";
        if (exposureTimeRange.count == 2) {
            minExp = exposureTimeRange.data.i64[0];
            maxExp = exposureTimeRange.data.i64[1];
        }

        EXPECT_LT(0, minExp) << "Minimum exposure time is not positive";
        EXPECT_LT(0, maxExp) << "Maximum exposure time is not positive";
        EXPECT_LE(minExp, maxExp) << "Minimum exposure is greater than maximum";

        if (minExp == 0) {
            minExp = 1 * MSEC; // Fallback minimum exposure time
        }

        if (maxExp == 0) {
            maxExp = 10 * SEC; // Fallback maximum exposure time
        }
    }

    camera_metadata_ro_entry hardwareLevel =
        GetStaticEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
    ASSERT_EQ(1u, hardwareLevel.count);
    uint8_t level = hardwareLevel.data.u8[0];
    ASSERT_GE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED);
    ASSERT_LE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
    if (level == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED) {
        const ::testing::TestInfo* const test_info =
            ::testing::UnitTest::GetInstance()->current_test_info();
        std::cerr << "Skipping test "
                  << test_info->test_case_name() << "."
                  << test_info->name()
                  << " because the HAL's supported hardware level is LIMITED"
                  << std::endl;
        return;
    }

    dout << "Stream size is " << mWidth << " x " << mHeight << std::endl;
    dout << "Valid exposure range is: " <<
            minExp << " - " << maxExp << " ns" << std::endl;

    {
        camera_metadata_ro_entry sensitivityRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
        EXPECT_EQ(2u, sensitivityRange.count) << "No sensitivity range listed. "
                "Falling back to default set.";
        int32_t minSensitivity = 100;
        int32_t maxSensitivity = 800;
        if (sensitivityRange.count == 2) {
            ASSERT_GT(sensitivityRange.data.i32[0], 0);
            ASSERT_GT(sensitivityRange.data.i32[1], 0);
            minSensitivity = sensitivityRange.data.i32[0];
            maxSensitivity = sensitivityRange.data.i32[1];
        }
        int32_t count = (maxSensitivity - minSensitivity + 99) / 100;
        sensitivities.push_back(minSensitivity);
        for (int i = 1; i < count; i++) {
            sensitivities.push_back(minSensitivity + i * 100);
        }
        sensitivities.push_back(maxSensitivity);
    }
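
    // Worked example using the fallback values above (assumed, not device data):
    // minSensitivity = 100, maxSensitivity = 800 gives
    // count = (800 - 100 + 99) / 100 = 7, so the list becomes
    // { 100, 200, 300, 400, 500, 600, 700, 800 }.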

    dout << "Available sensitivities: ";
    for (size_t i = 0; i < sensitivities.size(); i++) {
        dout << sensitivities[i] << " ";
    }
    dout << std::endl;

    {
        if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
            camera_metadata_ro_entry availableProcessedSizes =
                    GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);

            camera_metadata_ro_entry availableProcessedMinFrameDurations =
                    GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);

            EXPECT_EQ(availableProcessedSizes.count,
                    availableProcessedMinFrameDurations.count * 2) <<
                    "The number of minimum frame durations doesn't match the number of "
                    "available sizes. Using fallback values";

            if (availableProcessedSizes.count ==
                    availableProcessedMinFrameDurations.count * 2) {
                bool gotSize = false;
                for (size_t i = 0; i < availableProcessedSizes.count; i += 2) {
                    if (availableProcessedSizes.data.i32[i] == mWidth &&
                            availableProcessedSizes.data.i32[i+1] == mHeight) {
                        gotSize = true;
                        minDuration = availableProcessedMinFrameDurations.data.i64[i/2];
                    }
                }
                EXPECT_TRUE(gotSize) << "Can't find stream size in list of "
                        "available sizes: " << mWidth << ", " << mHeight;
            }
            if (minDuration == 0) {
                minDuration = 1 * SEC / 30; // Fall back to 30 fps as minimum duration
            }
        } else {
            minDuration = getMinFrameDurationFor(
                    HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, mWidth, mHeight);
        }
        ASSERT_LT(0, minDuration);

        camera_metadata_ro_entry maxFrameDuration =
                GetStaticEntry(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION);

        EXPECT_EQ(1u, maxFrameDuration.count) << "No valid maximum frame duration";

        if (maxFrameDuration.count == 1) {
            maxDuration = maxFrameDuration.data.i64[0];
        }

        EXPECT_GT(maxDuration, 0) << "Max duration is 0 or not given, using fallback";

        if (maxDuration == 0) {
            maxDuration = 10 * SEC; // Fall back to 10 seconds as max duration
        }

    }
    dout << "Available frame duration range for configured stream size: "
         << minDuration << " - " << maxDuration << " ns" << std::endl;

    // Get environment variables if set
    const char *expVal = getenv(expEnv);
    const char *durationVal = getenv(durationEnv);
    const char *sensitivityVal = getenv(sensitivityEnv);

    bool gotExp = (expVal != NULL);
    bool gotDuration = (durationVal != NULL);
    bool gotSensitivity = (sensitivityVal != NULL);

    // All or none must be provided if using override envs
    ASSERT_TRUE( (gotDuration && gotExp && gotSensitivity) ||
            (!gotDuration && !gotExp && !gotSensitivity) ) <<
            "Incomplete set of environment variable overrides provided";

    Vector<int64_t> expList, durationList;
    Vector<int32_t> sensitivityList;
    if (gotExp) {
        ParseList(expVal, expList);
        ParseList(durationVal, durationList);
        ParseList(sensitivityVal, sensitivityList);

        ASSERT_TRUE(
            (expList.size() == durationList.size()) &&
            (durationList.size() == sensitivityList.size())) <<
                "Mismatched sizes in env lists, or parse error";

        dout << "Using burst list from environment with " << expList.size() <<
                " captures" << std::endl;
    } else {
        // Create a default set of controls based on the available ranges

        int64_t e;
        int64_t d;
        int32_t s;

        // Exposure ramp

        e = minExp;
        d = minDuration;
        s = sensitivities[0];
        while (e < maxExp) {
            expList.push_back(e);
            durationList.push_back(d);
            sensitivityList.push_back(s);
            e = e * 2;
        }
        e = maxExp;
        expList.push_back(e);
        durationList.push_back(d);
        sensitivityList.push_back(s);

        // Duration ramp

        e = 30 * MSEC;
        d = minDuration;
        s = sensitivities[0];
        while (d < maxDuration) {
            // make sure exposure <= frame duration
            expList.push_back(e > d ? d : e);
            durationList.push_back(d);
            sensitivityList.push_back(s);
            d = d * 2;
        }

        // Sensitivity ramp

        e = 30 * MSEC;
        d = 30 * MSEC;
        d = d > minDuration ? d : minDuration;
        for (size_t i = 0; i < sensitivities.size(); i++) {
            expList.push_back(e);
            durationList.push_back(d);
            sensitivityList.push_back(sensitivities[i]);
        }

        // Constant-EV ramp; duration tracks exposure (clamped to the minimum
        // frame duration)

        e = 30 * MSEC; // at ISO 100
        for (size_t i = 0; i < sensitivities.size(); i++) {
            int64_t e_adj = e * 100 / sensitivities[i];
            expList.push_back(e_adj);
            durationList.push_back(e_adj > minDuration ? e_adj : minDuration);
            sensitivityList.push_back(sensitivities[i]);
        }
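
        // For instance (assuming the sensitivity list starts at ISO 100): at
        // ISO 200 the adjusted exposure is e_adj = 30 ms * 100 / 200 = 15 ms,
        // which keeps exposure_time * sensitivity constant across the ramp.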

        dout << "Default burst sequence created with " << expList.size() <<
                " entries" << std::endl;
    }

    // Validate the list; these checks are non-fatal (EXPECT), so the burst still runs
    for (size_t i = 0; i < expList.size(); i++) {
        EXPECT_GE(maxExp, expList[i])
                << "Capture " << i << " exposure too long: " << expList[i];
        EXPECT_LE(minExp, expList[i])
                << "Capture " << i << " exposure too short: " << expList[i];
        EXPECT_GE(maxDuration, durationList[i])
                << "Capture " << i << " duration too long: " << durationList[i];
        EXPECT_LE(minDuration, durationList[i])
                << "Capture " << i << " duration too short: " << durationList[i];
        bool validSensitivity = false;
        for (size_t j = 0; j < sensitivities.size(); j++) {
            if (sensitivityList[i] == sensitivities[j]) {
                validSensitivity = true;
                break;
            }
        }
        EXPECT_TRUE(validSensitivity)
                << "Capture " << i << " sensitivity not in list: " << sensitivityList[i];
    }

    // Check if debug yuv dumps are requested

    bool dumpFrames = false;
    {
        const char *frameDumpVal = getenv(dumpFrameEnv);
        if (frameDumpVal != NULL) {
            if (frameDumpVal[0] == '1') dumpFrames = true;
        }
    }

    dout << "Dumping YUV frames " <<
            (dumpFrames ? "enabled, not checking timing" : "disabled") << std::endl;

    // Create a base preview request, turning off all 3A
    CameraMetadata previewRequest;
    ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                                                &previewRequest));
    {
        Vector<int32_t> outputStreamIds;
        outputStreamIds.push(mStreamId);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                                            outputStreamIds));

        // Disable all 3A routines
        uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_CONTROL_MODE,
                                            &cmOff, 1));

        int requestId = 1;
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
                                            &requestId, 1));
    }

    // Submit capture requests

    for (size_t i = 0; i < expList.size(); ++i) {
        CameraMetadata tmpRequest = previewRequest;
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_EXPOSURE_TIME,
                                        &expList[i], 1));
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_FRAME_DURATION,
                                        &durationList[i], 1));
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_SENSITIVITY,
                                        &sensitivityList[i], 1));
        ALOGV("Submitting capture %zu with exposure %" PRId64 ", frame duration %" PRId64 ", sensitivity %d",
                i, expList[i], durationList[i], sensitivityList[i]);
        dout << "Capture request " << i <<
                ": exposure is " << (expList[i]/1e6f) << " ms" <<
                ", frame duration is " << (durationList[i]/1e6f) << " ms" <<
                ", sensitivity is " << sensitivityList[i] <<
                std::endl;
        ASSERT_EQ(OK, mDevice->capture(tmpRequest));
    }

    Vector<float> brightnesses;
    Vector<nsecs_t> captureTimes;
    brightnesses.setCapacity(expList.size());
    captureTimes.setCapacity(expList.size());

    // Get each frame (metadata) and then the buffer. Calculate brightness.
    for (size_t i = 0; i < expList.size(); ++i) {

        ALOGV("Reading request %zu", i);
        dout << "Waiting for capture " << i << ": " <<
                " exposure " << (expList[i]/1e6f) << " ms," <<
                " frame duration " << (durationList[i]/1e6f) << " ms," <<
                " sensitivity " << sensitivityList[i] <<
                std::endl;

        // Set wait limit based on expected frame duration, or minimum timeout
        int64_t waitLimit = CAMERA_FRAME_TIMEOUT;
        if (expList[i] * 2 > waitLimit) waitLimit = expList[i] * 2;
        if (durationList[i] * 2 > waitLimit) waitLimit = durationList[i] * 2;
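        // For example, a 10 s exposure (the fallback maxExp above) raises the
        // wait limit to 20 s instead of the 1 s CAMERA_FRAME_TIMEOUT default;
        // the numbers here are illustrative, not measured.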

        ASSERT_EQ(OK, mDevice->waitForNextFrame(waitLimit));
        ALOGV("Reading capture request-1 %zu", i);
        CaptureResult result;
        ASSERT_EQ(OK, mDevice->getNextResult(&result));
        ALOGV("Reading capture request-2 %zu", i);

        ASSERT_EQ(OK, mFrameListener->waitForFrame(CAMERA_FRAME_TIMEOUT));
        ALOGV("We got the frame now");

        captureTimes.push_back(systemTime());

        CpuConsumer::LockedBuffer imgBuffer;
        ASSERT_EQ(OK, mCpuConsumer->lockNextBuffer(&imgBuffer));

        int underexposed, overexposed;
        float avgBrightness = 0;
        long long brightness = TotalBrightness(imgBuffer, &underexposed,
                                               &overexposed);
        int numValidPixels = mWidth * mHeight - (underexposed + overexposed);
        if (numValidPixels != 0) {
            avgBrightness = brightness * 1.0f / numValidPixels;
        } else if (underexposed < overexposed) {
            avgBrightness = 255;
        }

        ALOGV("Total brightness for frame %zu was %lld (underexposed %d, "
              "overexposed %d), avg %f", i, brightness, underexposed,
              overexposed, avgBrightness);
        dout << "Average brightness (frame " << i << ") was " << avgBrightness
             << " (underexposed " << underexposed << ", overexposed "
             << overexposed << ")" << std::endl;
        brightnesses.push_back(avgBrightness);

        if (i != 0) {
            float prevEv = static_cast<float>(expList[i - 1]) * sensitivityList[i - 1];
            float currentEv = static_cast<float>(expList[i]) * sensitivityList[i];
            float evRatio = (prevEv > currentEv) ? (currentEv / prevEv) :
                    (prevEv / currentEv);
            if (evRatio > EV_MATCH_BOUND) {
                WARN_LT(fabs(brightnesses[i] - brightnesses[i - 1]),
                        BRIGHTNESS_MATCH_BOUND) <<
                        "Capture brightness differs from the previous capture, even "
                        "though they have the same EV value. EV now: " << currentEv <<
                        ", previous: " << prevEv << ". Brightness now: " <<
                        brightnesses[i] << ", previous: " << brightnesses[i-1] <<
                        std::endl;
            }
            // Only check timing if not saving to disk, since that slows things
            // down substantially
            if (!dumpFrames) {
                nsecs_t timeDelta = captureTimes[i] - captureTimes[i-1];
                nsecs_t expectedDelta = expList[i] > durationList[i] ?
                        expList[i] : durationList[i];
                WARN_LT(timeDelta, expectedDelta + DURATION_UPPER_BOUND) <<
                        "Capture took " << timeDelta << " ns to receive, but expected"
                        " frame duration was " << expectedDelta << " ns." <<
                        std::endl;
                WARN_GT(timeDelta, expectedDelta - DURATION_LOWER_BOUND) <<
                        "Capture took " << timeDelta << " ns to receive, but expected"
                        " frame duration was " << expectedDelta << " ns." <<
                        std::endl;
                dout << "Time delta from previous frame: " << timeDelta / 1e6 <<
                        " ms.  Expected " << expectedDelta / 1e6 << " ms" << std::endl;
            }
        }

        if (dumpFrames) {
            String8 dumpName =
                    String8::format("/data/local/tmp/camera2_test_variable_burst_frame_%03zu.yuv", i);
            dout << "  Writing YUV dump to " << dumpName << std::endl;
            DumpYuvToFile(dumpName, imgBuffer);
        }

        ASSERT_EQ(OK, mCpuConsumer->unlockBuffer(imgBuffer));
    }

}

} // namespace tests
} // namespace camera2
} // namespace android