/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <netinet/in.h>

#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <gui/Surface.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/Camera2Heap.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/JpegProcessor.h"

namespace android {
namespace camera2 {

JpegProcessor::JpegProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mDevice(client->getCameraDevice()),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mCaptureAvailable(false),
        mCaptureStreamId(NO_STREAM) {
}

JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

void JpegProcessor::onFrameAvailable() {
    Mutex::Autolock l(mInputMutex);
    if (!mCaptureAvailable) {
        mCaptureAvailable = true;
        mCaptureAvailableSignal.signal();
    }
}

status_t JpegProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<CameraDeviceBase> device = mDevice.promote();
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    // Find out buffer size for JPEG
    camera_metadata_ro_entry_t maxJpegSize =
            params.staticInfo(ANDROID_JPEG_MAX_SIZE);
    if (maxJpegSize.count == 0) {
        ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
                __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
        sp<BufferQueue> bq = new BufferQueue();
        mCaptureConsumer = new CpuConsumer(bq, 1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
        mCaptureWindow = new Surface(bq);
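        // The Surface above is the producer half of the queue; it is handed to
        // the HAL through createStream() below, while the CpuConsumer half is
        // drained on this thread in processNewCapture() once onFrameAvailable()
        // signals that a capture is ready.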
        // Create memory for API consumption
        mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0,
                "Camera2Client::CaptureHeap");
        if (mCaptureHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, mId);
            return NO_MEMORY;
        }
    }

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mCaptureStreamId,
                &currentWidth, &currentHeight, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    mId, strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.pictureWidth ||
                currentHeight != (uint32_t)params.pictureHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
                __FUNCTION__, mId, mCaptureStreamId);
            res = device->deleteStream(mCaptureStreamId);
            if (res == -EBUSY) {
                ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                      "after it becomes idle", __FUNCTION__, mId);
                return res;
            } else if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production
        res = device->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0],
                &mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, mId,
                    strerror(-res), res);
            return res;
        }
    }
    return OK;
}

status_t JpegProcessor::deleteStream() {
    ATRACE_CALL();

    Mutex::Autolock l(mInputMutex);

    if (mCaptureStreamId != NO_STREAM) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device == 0) {
            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        device->deleteStream(mCaptureStreamId);

        mCaptureHeap.clear();
        mCaptureWindow.clear();
        mCaptureConsumer.clear();

        mCaptureStreamId = NO_STREAM;
    }
    return OK;
}

int JpegProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mCaptureStreamId;
}

void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}

bool JpegProcessor::threadLoop() {
    status_t res;

    {
        Mutex::Autolock l(mInputMutex);
        while (!mCaptureAvailable) {
            res = mCaptureAvailableSignal.waitRelative(mInputMutex,
                    kWaitDuration);
            if (res == TIMED_OUT) return true;
        }
        mCaptureAvailable = false;
    }

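    // Drain every pending capture: processNewCapture() is called repeatedly
    // until lockNextBuffer() reports that no more buffers are queued (the
    // BAD_VALUE case that is deliberately not logged as an error below).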
    do {
        res = processNewCapture();
    } while (res == OK);

    return true;
}

status_t JpegProcessor::processNewCapture() {
    ATRACE_CALL();
    status_t res;
    sp<Camera2Heap> captureHeap;

    CpuConsumer::LockedBuffer imgBuffer;

    res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
    if (res != OK) {
        if (res != BAD_VALUE) {
            ALOGE("%s: Camera %d: Error receiving still image buffer: "
                    "%s (%d)", __FUNCTION__,
                    mId, strerror(-res), res);
        }
        return res;
    }

    ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
            mId);

    if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGE("%s: Camera %d: Unexpected format for still image: "
                "%x, expected %x", __FUNCTION__, mId,
                imgBuffer.format,
                HAL_PIXEL_FORMAT_BLOB);
        mCaptureConsumer->unlockBuffer(imgBuffer);
        return OK;
    }

    // Find size of JPEG image
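    // Note: for a BLOB-format buffer, imgBuffer.width is the capacity of the
    // buffer in bytes rather than a pixel width, which is why it is used as
    // the scan limit and as the fallback size below.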
    size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
    if (jpegSize == 0) { // failed to find size, default to whole buffer
        jpegSize = imgBuffer.width;
    }
    size_t heapSize = mCaptureHeap->getSize();
    if (jpegSize > heapSize) {
        ALOGW("%s: JPEG image is larger than expected, truncating "
                "(got %zu, expected at most %zu bytes)",
                __FUNCTION__, jpegSize, heapSize);
        jpegSize = heapSize;
    }

    // TODO: Optimize this to avoid memcopy
    sp<MemoryBase> captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
    void* captureMemory = mCaptureHeap->getBase();
    memcpy(captureMemory, imgBuffer.data, jpegSize);

    mCaptureConsumer->unlockBuffer(imgBuffer);

    sp<CaptureSequencer> sequencer = mSequencer.promote();
    if (sequencer != 0) {
        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer);
    }

    return OK;
}

/*
 * JPEG FILE FORMAT OVERVIEW.
 * http://www.jpeg.org/public/jfif.pdf
 * (JPEG is the image compression algorithm; the actual file format is called JFIF)
 *
 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files.  The
 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
 * (inclusive).  Because every marker begins with the same byte, markers are
 * referred to by the second byte's value.
 *
 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
 * Following it, "segment" sections begin with other markers, followed by a
 * 2-byte length (in network byte order), then the segment data.
 *
 * For our purposes we ignore the segment data and just use the length to skip
 * to the next segment.  This is necessary because the data inside a segment is
 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
 *
 * After all the segments are processed, the JPEG-compressed image stream
 * begins.  This can be considered an opaque format with one requirement: all
 * 0xFF bytes in this stream must be followed by a 0x00 byte.  This prevents
 * any of the image data from being interpreted as a segment.  The only
 * exception is the End of Image (EOI) marker at the end of the stream, which
 * is 0xFF followed by a non-zero byte (0xD9).
 */
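
// Illustrative layout of a minimal JFIF stream as parsed below (added for
// clarity; byte values other than the markers and the standard APP0 length
// are made up):
//
//   FF D8                       SOI marker
//   FF E0 00 10 <14 bytes>      APP0 segment; the length 0x0010 counts the two
//                               length bytes plus the 14-byte payload
//   ...                         further segments, each skipped via its length
//   <entropy-coded image data>  any 0xFF byte is followed by a stuffed 0x00
//   FF D9                       EOI marker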

const uint8_t MARK = 0xFF; // First byte of marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker

#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)
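
// Note: segment_t is overlaid directly onto the raw byte stream, so it is
// packed to 1-byte alignment, and its length field is stored big-endian
// (network byte order) and must be converted with ntohs() before use.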

/* HELPER FUNCTIONS */

// check for Start of Image marker
bool checkJpegStart(uint8_t* buf) {
    return buf[0] == MARK && buf[1] == SOI;
}
// check for End of Image marker
bool checkJpegEnd(uint8_t *buf) {
    return buf[0] == MARK && buf[1] == EOI;
}
// check for arbitrary marker, returns marker type (second byte)
// returns 0 if no marker found. Note: 0x00 is not a valid marker type
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) {
        return buf[1];
    }
    return 0;
}

// Return the size of the JPEG, 0 indicates failure
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

    // First check for JPEG transport header at the end of the buffer
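    // The HAL places a camera2_jpeg_blob struct (a CAMERA2_JPEG_BLOB_ID tag
    // plus the actual compressed size) at the very end of the fixed-size BLOB
    // buffer.  If that trailer is present and its size is sane, it gives the
    // JPEG length directly; otherwise fall back to parsing the JFIF structure.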
    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
        size = blob->jpeg_size;
        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = 0;
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Find End of Image
    // Scan JPEG buffer until End of Image (EOI)
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}

}; // namespace camera2
}; // namespace android