/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2_test"
#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <gtest/gtest.h>
#include <iostream>
#include <fstream>

#include <utils/Vector.h>
#include <gui/CpuConsumer.h>
#include <ui/PixelFormat.h>
#include <system/camera_metadata.h>

#include "camera2_utils.h"

namespace android {

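// Functional tests for the camera2 (HAL2) device interface. The fixture loads
// the camera HAL module once per test case and exercises every camera that
// reports device API version 2.0 or higher.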
class Camera2Test: public testing::Test {
  public:
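    // Loads the camera HAL module, checks that it implements module API
    // version 2.0, and records which cameras support the HAL2 device API.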
    static void SetUpTestCase() {
        int res;

        hw_module_t *module = NULL;
        res = hw_get_module(CAMERA_HARDWARE_MODULE_ID,
                (const hw_module_t **)&module);

        ASSERT_EQ(0, res)
                << "Failure opening camera hardware module: " << res;
        ASSERT_TRUE(NULL != module)
                << "No camera module was set by hw_get_module";

        IF_ALOGV() {
            std::cout << "  Camera module name: "
                    << module->name << std::endl;
            std::cout << "  Camera module author: "
                    << module->author << std::endl;
            std::cout << "  Camera module API version: 0x" << std::hex
                    << module->module_api_version << std::endl;
            std::cout << "  Camera module HAL API version: 0x" << std::hex
                    << module->hal_api_version << std::endl;
        }

        int16_t version2_0 = CAMERA_MODULE_API_VERSION_2_0;
        ASSERT_EQ(version2_0, module->module_api_version)
                << "Camera module version is 0x"
                << std::hex << module->module_api_version
                << ", not 2.0. (0x"
                << std::hex << CAMERA_MODULE_API_VERSION_2_0 << ")";

        sCameraModule = reinterpret_cast<camera_module_t*>(module);

        sNumCameras = sCameraModule->get_number_of_cameras();
        ASSERT_LT(0, sNumCameras) << "No camera devices available!";

        IF_ALOGV() {
            std::cout << "  Camera device count: " << sNumCameras << std::endl;
        }

        sCameraSupportsHal2 = new bool[sNumCameras];

        for (int i = 0; i < sNumCameras; i++) {
            camera_info info;
            res = sCameraModule->get_camera_info(i, &info);
            ASSERT_EQ(0, res)
                    << "Failure getting camera info for camera " << i;
            IF_ALOGV() {
                std::cout << "  Camera device: " << std::dec
                          << i << std::endl;
                std::cout << "    Facing: " << std::dec
                          << info.facing << std::endl;
                std::cout << "    Orientation: " << std::dec
                          << info.orientation << std::endl;
                std::cout << "    Version: 0x" << std::hex <<
                        info.device_version << std::endl;
            }
            if (info.device_version >= CAMERA_DEVICE_API_VERSION_2_0) {
                sCameraSupportsHal2[i] = true;
                ASSERT_TRUE(NULL != info.static_camera_characteristics);
                IF_ALOGV() {
                    std::cout << "    Static camera metadata:" << std::endl;
                    dump_indented_camera_metadata(info.static_camera_characteristics,
                            0, 1, 6);
                }
            } else {
                sCameraSupportsHal2[i] = false;
            }
        }
    }

    static const camera_module_t *getCameraModule() {
        return sCameraModule;
    }

    static int getNumCameras() {
        return sNumCameras;
    }

    static bool isHal2Supported(int id) {
        return sCameraSupportsHal2[id];
    }

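    // Opens camera `id` through the module's open() method and returns its
    // HAL2 device handle, or NULL if the camera is unknown or not HAL2-capable.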
    static camera2_device_t *openCameraDevice(int id) {
        ALOGV("Opening camera %d", id);
        if (NULL == sCameraSupportsHal2) return NULL;
        if (id >= sNumCameras) return NULL;
        if (!sCameraSupportsHal2[id]) return NULL;

        hw_device_t *device = NULL;
        const camera_module_t *cam_module = getCameraModule();
        if (cam_module == NULL) {
            return NULL;
        }

        char camId[10];
        int res;

        snprintf(camId, 10, "%d", id);
        res = cam_module->common.methods->open(
            (const hw_module_t*)cam_module,
            camId,
            &device);
        if (res != NO_ERROR || device == NULL) {
            return NULL;
        }
        camera2_device_t *cam_device =
                reinterpret_cast<camera2_device_t*>(device);
        return cam_device;
    }

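    // Wires a device up to the test harness: attaches the request queue as the
    // device's request source and the frame queue as its output metadata
    // destination, registers the notification listener, and installs the
    // device's vendor metadata tag operations.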
    static status_t configureCameraDevice(camera2_device_t *dev,
            MetadataQueue &requestQueue,
            MetadataQueue &frameQueue,
            NotifierListener &listener) {

        status_t err;

        err = dev->ops->set_request_queue_src_ops(dev,
                requestQueue.getToConsumerInterface());
        if (err != OK) return err;

        requestQueue.setFromConsumerInterface(dev);

        err = dev->ops->set_frame_queue_dst_ops(dev,
                frameQueue.getToProducerInterface());
        if (err != OK) return err;

        err = listener.getNotificationsFrom(dev);
        if (err != OK) return err;

        vendor_tag_query_ops_t *vendor_metadata_tag_ops;
        err = dev->ops->get_metadata_vendor_tag_ops(dev, &vendor_metadata_tag_ops);
        if (err != OK) return err;

        err = set_camera_metadata_vendor_tag_ops(vendor_metadata_tag_ops);
        if (err != OK) return err;

        return OK;
    }

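    // Closes a previously opened HAL2 device through its hw_device_t close()
    // method.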
    static status_t closeCameraDevice(camera2_device_t *cam_dev) {
        int res;
        ALOGV("Closing camera %p", cam_dev);

        hw_device_t *dev = reinterpret_cast<hw_device_t *>(cam_dev);
        res = dev->close(dev);
        return res;
    }

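    // Opens camera `id`, caches its static metadata, and connects the
    // fixture's request/frame queues and notification listener to it.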
    void setUpCamera(int id) {
        ASSERT_GT(sNumCameras, id);
        status_t res;

        if (mDevice != NULL) {
            closeCameraDevice(mDevice);
        }
        mDevice = openCameraDevice(id);
        ASSERT_TRUE(NULL != mDevice) << "Failed to open camera device";

        camera_info info;
        res = sCameraModule->get_camera_info(id, &info);
        ASSERT_EQ(OK, res);

        mStaticInfo = info.static_camera_characteristics;

        res = configureCameraDevice(mDevice,
                mRequests,
                mFrames,
                mNotifications);
        ASSERT_EQ(OK, res) << "Failure to configure camera device";
    }

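    // Creates an output stream of the given size and format on the open
    // device, backed by the provided consumer, and returns its stream ID.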
    void setUpStream(sp<ISurfaceTexture> consumer,
            int width, int height, int format, int *id) {
        status_t res;

        StreamAdapter* stream = new StreamAdapter(consumer);

        ALOGV("Creating stream, format 0x%x, %d x %d", format, width, height);
        res = stream->connectToDevice(mDevice, width, height, format);
        ASSERT_EQ(NO_ERROR, res) << "Failed to connect to stream: "
                                 << strerror(-res);
        mStreams.push_back(stream);

        *id = stream->getId();
    }

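    // Disconnects the stream with the given ID from the device; fails the
    // test if no such stream was set up.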
    void disconnectStream(int id) {
        status_t res;
        unsigned int i = 0;
        for (; i < mStreams.size(); i++) {
            if (mStreams[i]->getId() == id) {
                res = mStreams[i]->disconnect();
                ASSERT_EQ(NO_ERROR, res) <<
                        "Failed to disconnect stream " << id;
                break;
            }
        }
        ASSERT_GT(mStreams.size(), i) << "Stream id not found: " << id;
    }

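    // Looks up the supported (width, height) pairs for the given pixel format
    // in the static metadata. Raw and JPEG (blob) formats have dedicated size
    // tags; all other formats use the processed-size list.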
    void getResolutionList(int32_t format,
            const int32_t **list,
            size_t *count) {
        ALOGV("Getting resolutions for format %x", format);
        status_t res;
        if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
            camera_metadata_ro_entry_t availableFormats;
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_FORMATS,
                    &availableFormats);
            ASSERT_EQ(OK, res);

            uint32_t formatIdx;
            for (formatIdx = 0; formatIdx < availableFormats.count; formatIdx++) {
                if (availableFormats.data.i32[formatIdx] == format) break;
            }
            ASSERT_NE(availableFormats.count, formatIdx)
                << "No support found for format 0x" << std::hex << format;
        }

        camera_metadata_ro_entry_t availableSizes;
        if (format == HAL_PIXEL_FORMAT_RAW_SENSOR) {
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_RAW_SIZES,
                    &availableSizes);
        } else if (format == HAL_PIXEL_FORMAT_BLOB) {
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                    &availableSizes);
        } else {
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                    &availableSizes);
        }
        ASSERT_EQ(OK, res);

        *list = availableSizes.data.i32;
        *count = availableSizes.count;
    }

    virtual void SetUp() {
        const ::testing::TestInfo* const testInfo =
                ::testing::UnitTest::GetInstance()->current_test_info();

        ALOGV("*** Starting test %s in test case %s",
                testInfo->name(), testInfo->test_case_name());
        mDevice = NULL;
    }

    virtual void TearDown() {
        for (unsigned int i = 0; i < mStreams.size(); i++) {
            delete mStreams[i];
        }
        if (mDevice != NULL) {
            closeCameraDevice(mDevice);
        }
    }

    camera2_device    *mDevice;
    const camera_metadata_t *mStaticInfo;

    MetadataQueue    mRequests;
    MetadataQueue    mFrames;
    NotifierListener mNotifications;

    Vector<StreamAdapter*> mStreams;

  private:
    static camera_module_t *sCameraModule;
    static int              sNumCameras;
    static bool            *sCameraSupportsHal2;
};

camera_module_t *Camera2Test::sCameraModule = NULL;
bool *Camera2Test::sCameraSupportsHal2      = NULL;
int Camera2Test::sNumCameras                = 0;

static const nsecs_t USEC = 1000;
static const nsecs_t MSEC = 1000*USEC;
static const nsecs_t SEC = 1000*MSEC;

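// Basic sanity check: every HAL2-capable camera can be opened and closed.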
TEST_F(Camera2Test, OpenClose) {
    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        camera2_device_t *d = openCameraDevice(id);
        ASSERT_TRUE(NULL != d) << "Failed to open camera device";

        res = closeCameraDevice(d);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

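// Captures a single raw frame from each HAL2-capable camera: sets up a CPU
// consumer for a raw stream, submits one manually constructed request, and
// checks that both the output metadata frame and the image buffer arrive.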
TEST_F(Camera2Test, Capture1Raw) {
    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<CpuConsumer> rawConsumer = new CpuConsumer(1);
        sp<FrameWaiter> rawWaiter = new FrameWaiter();
        rawConsumer->setFrameAvailableListener(rawWaiter);

        const int32_t *rawResolutions;
        size_t   rawResolutionsCount;

        int format = HAL_PIXEL_FORMAT_RAW_SENSOR;

        getResolutionList(format,
                &rawResolutions, &rawResolutionsCount);
        ASSERT_LT((size_t)0, rawResolutionsCount);

        // Pick first available raw resolution
        int width = rawResolutions[0];
        int height = rawResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(rawConsumer->getProducerInterface(),
                    width, height, format, &streamId) );

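        // Build a capture request by hand: full metadata output, a single
        // output stream, manual exposure/frame duration/sensitivity, plus an
        // emulator-specific hour-of-day vendor tag.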
        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t exposureTime = 10*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_EXPOSURE_TIME,
                (void**)&exposureTime, 1);
        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 2);
        }

        res = mRequests.enqueue(request);
        ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res);

        res = mFrames.waitForBuffer(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

        camera_metadata_t *frame;
        res = mFrames.dequeue(&frame);
        ASSERT_EQ(NO_ERROR, res);
        ASSERT_TRUE(frame != NULL);

        IF_ALOGV() {
            std::cout << "Output frame:" << std::endl;
            dump_indented_camera_metadata(frame, 0, 1, 2);
        }

        res = rawWaiter->waitForFrame(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res);

        CpuConsumer::LockedBuffer buffer;
        res = rawConsumer->lockNextBuffer(&buffer);
        ASSERT_EQ(NO_ERROR, res);

        IF_ALOGV() {
            const char *dumpname =
                    "/data/local/tmp/camera2_test-capture1raw-dump.raw";
            ALOGV("Dumping raw buffer to %s", dumpname);
            // Write to file
            std::ofstream rawFile(dumpname);
            size_t bpp = 2;
            for (unsigned int y = 0; y < buffer.height; y++) {
                rawFile.write(
                        (const char *)(buffer.data + y * buffer.stride * bpp),
                        buffer.width * bpp);
            }
            rawFile.close();
        }

        res = rawConsumer->unlockBuffer(buffer);
        ASSERT_EQ(NO_ERROR, res);

        ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId));

        res = closeCameraDevice(mDevice);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

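// Captures a burst of raw frames with the exposure time doubling on each
// request, and verifies that frames come back in order with matching frame
// counts.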
TEST_F(Camera2Test, CaptureBurstRaw) {
    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<CpuConsumer> rawConsumer = new CpuConsumer(1);
        sp<FrameWaiter> rawWaiter = new FrameWaiter();
        rawConsumer->setFrameAvailableListener(rawWaiter);

        const int32_t *rawResolutions;
        size_t    rawResolutionsCount;

        int format = HAL_PIXEL_FORMAT_RAW_SENSOR;

        getResolutionList(format,
                &rawResolutions, &rawResolutionsCount);
        ASSERT_LT((uint32_t)0, rawResolutionsCount);

        // Pick first available raw resolution
        int width = rawResolutions[0];
        int height = rawResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(rawConsumer->getProducerInterface(),
                    width, height, format, &streamId) );

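        // Build a base request without an exposure time; it is copied and
        // given a doubling exposure time for each capture in the burst below.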
        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request template: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 2);
        }

        int numCaptures = 10;

        // Enqueue numCaptures requests with increasing exposure time

        uint64_t exposureTime = 100 * USEC;
        for (int reqCount = 0; reqCount < numCaptures; reqCount++) {
            camera_metadata_t *req;
            req = allocate_camera_metadata(20, 2000);
            append_camera_metadata(req, request);

            add_camera_metadata_entry(req,
                    ANDROID_SENSOR_EXPOSURE_TIME,
                    (void**)&exposureTime, 1);
            exposureTime *= 2;

            res = mRequests.enqueue(req);
            ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: "
                    << strerror(-res);
        }

        // Get frames and image buffers one by one
        uint64_t expectedExposureTime = 100 * USEC;
        for (int frameCount = 0; frameCount < numCaptures; frameCount++) {
            res = mFrames.waitForBuffer(SEC + expectedExposureTime);
            ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

            camera_metadata_t *frame;
            res = mFrames.dequeue(&frame);
            ASSERT_EQ(NO_ERROR, res);
            ASSERT_TRUE(frame != NULL);

            camera_metadata_entry_t frameNumber;
            res = find_camera_metadata_entry(frame,
                    ANDROID_REQUEST_FRAME_COUNT,
                    &frameNumber);
            ASSERT_EQ(NO_ERROR, res);
            ASSERT_EQ(frameCount, *frameNumber.data.i32);

            res = rawWaiter->waitForFrame(SEC + expectedExposureTime);
            ASSERT_EQ(NO_ERROR, res) <<
                    "Never got raw data for capture " << frameCount;

            CpuConsumer::LockedBuffer buffer;
            res = rawConsumer->lockNextBuffer(&buffer);
            ASSERT_EQ(NO_ERROR, res);

            IF_ALOGV() {
                char dumpname[60];
                snprintf(dumpname, 60,
                        "/data/local/tmp/camera2_test-"
                        "captureBurstRaw-dump_%d.raw",
                        frameCount);
                ALOGV("Dumping raw buffer to %s", dumpname);
                // Write to file
                std::ofstream rawFile(dumpname);
                for (unsigned int y = 0; y < buffer.height; y++) {
                    rawFile.write(
                            (const char *)(buffer.data + y * buffer.stride * 2),
                            buffer.width * 2);
                }
                rawFile.close();
            }

            res = rawConsumer->unlockBuffer(buffer);
            ASSERT_EQ(NO_ERROR, res);

            expectedExposureTime *= 2;
        }
    }
}

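// Checks that the device can construct a non-empty default request for every
// standard request template.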
TEST_F(Camera2Test, ConstructDefaultRequests) {
    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        for (int i = CAMERA2_TEMPLATE_PREVIEW; i < CAMERA2_TEMPLATE_COUNT;
             i++) {
            camera_metadata_t *request = NULL;
            res = mDevice->ops->construct_default_request(mDevice,
                    i,
                    &request);
            EXPECT_EQ(NO_ERROR, res) <<
                    "Unable to construct request from template type " << i;
            EXPECT_TRUE(request != NULL);
            EXPECT_LT((size_t)0, get_camera_metadata_entry_count(request));
            EXPECT_LT((size_t)0, get_camera_metadata_data_count(request));

            IF_ALOGV() {
                std::cout << "  ** Template type " << i << ":" << std::endl;
                dump_indented_camera_metadata(request, 0, 2, 4);
            }

            free_camera_metadata(request);
        }
    }
}

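// Captures a single JPEG (blob-format) image from each HAL2-capable camera,
// following the same flow as Capture1Raw.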
TEST_F(Camera2Test, Capture1Jpeg) {
    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<CpuConsumer> jpegConsumer = new CpuConsumer(1);
        sp<FrameWaiter> jpegWaiter = new FrameWaiter();
        jpegConsumer->setFrameAvailableListener(jpegWaiter);

        const int32_t *jpegResolutions;
        size_t   jpegResolutionsCount;

        int format = HAL_PIXEL_FORMAT_BLOB;

        getResolutionList(format,
                &jpegResolutions, &jpegResolutionsCount);
        ASSERT_LT((size_t)0, jpegResolutionsCount);

        // Pick first available JPEG resolution
        int width = jpegResolutions[0];
        int height = jpegResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(jpegConsumer->getProducerInterface(),
                    width, height, format, &streamId) );

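        // Build a capture request by hand, as in Capture1Raw, targeting the
        // JPEG stream.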
        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t exposureTime = 10*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_EXPOSURE_TIME,
                (void**)&exposureTime, 1);
        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 4);
        }

        res = mRequests.enqueue(request);
        ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res);

        res = mFrames.waitForBuffer(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

        camera_metadata_t *frame;
        res = mFrames.dequeue(&frame);
        ASSERT_EQ(NO_ERROR, res);
        ASSERT_TRUE(frame != NULL);

        IF_ALOGV() {
            std::cout << "Output frame:" << std::endl;
            dump_indented_camera_metadata(frame, 0, 1, 4);
        }

        res = jpegWaiter->waitForFrame(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res);

        CpuConsumer::LockedBuffer buffer;
        res = jpegConsumer->lockNextBuffer(&buffer);
        ASSERT_EQ(NO_ERROR, res);

        IF_ALOGV() {
            const char *dumpname =
                    "/data/local/tmp/camera2_test-capture1jpeg-dump.jpeg";
            ALOGV("Dumping JPEG buffer to %s", dumpname);
            // Write to file
            std::ofstream jpegFile(dumpname);
            size_t bpp = 1;
            for (unsigned int y = 0; y < buffer.height; y++) {
                jpegFile.write(
                        (const char *)(buffer.data + y * buffer.stride * bpp),
                        buffer.width * bpp);
            }
            jpegFile.close();
        }

        res = jpegConsumer->unlockBuffer(buffer);
        ASSERT_EQ(NO_ERROR, res);

        ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId));

        res = closeCameraDevice(mDevice);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}


} // namespace android