/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <netinet/in.h>

#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <gui/Surface.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/Camera2Heap.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/JpegProcessor.h"

namespace android {
namespace camera2 {

JpegProcessor::JpegProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mDevice(client->getCameraDevice()),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mCaptureDone(false),
        mCaptureSuccess(false),
        mCaptureStreamId(NO_STREAM) {
}

JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) {
    Mutex::Autolock l(mInputMutex);
    ALOGV("%s", __FUNCTION__);
    if (!mCaptureDone) {
        mCaptureDone = true;
        mCaptureSuccess = true;
        mCaptureDoneSignal.signal();
    }
}

void JpegProcessor::onBufferAcquired(const BufferInfo& /*bufferInfo*/) {
    // Intentionally left empty
}

void JpegProcessor::onBufferReleased(const BufferInfo& bufferInfo) {
    Mutex::Autolock l(mInputMutex);
    ALOGV("%s", __FUNCTION__);

    if (bufferInfo.mError) {
        mCaptureDone = true;
        mCaptureSuccess = false;
        mCaptureDoneSignal.signal();
    }
}

status_t JpegProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<CameraDeviceBase> device = mDevice.promote();
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    // Find out buffer size for JPEG
    ssize_t maxJpegSize = device->getJpegBufferSize(params.pictureWidth, params.pictureHeight);
    if (maxJpegSize <= 0) {
        ALOGE("%s: Camera %d: Jpeg buffer size (%zd) is invalid",
                __FUNCTION__, mId, maxJpegSize);
        return INVALID_OPERATION;
    }

    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);
        mCaptureConsumer = new CpuConsumer(consumer, 1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
        mCaptureWindow = new Surface(producer);
    }

    // Since ashmem heaps are rounded up to page size, the capture heap may be
    // somewhat larger than the required JPEG buffer. Only reallocate it when
    // it is too small or more than HEAP_SLACK_FACTOR times the required size.
    const size_t HEAP_SLACK_FACTOR = 2;
    if (mCaptureHeap == 0 ||
            (mCaptureHeap->getSize() < static_cast<size_t>(maxJpegSize)) ||
            (mCaptureHeap->getSize() >
                    static_cast<size_t>(maxJpegSize) * HEAP_SLACK_FACTOR) ) {
        // Create memory for API consumption
        mCaptureHeap.clear();
        mCaptureHeap =
                new MemoryHeapBase(maxJpegSize, 0, "Camera2Client::CaptureHeap");
        if (mCaptureHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, mId);
            return NO_MEMORY;
        }
    }
    ALOGV("%s: Camera %d: JPEG capture heap now %zu bytes; requested %zd bytes",
            __FUNCTION__, mId, mCaptureHeap->getSize(), maxJpegSize);

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mCaptureStreamId,
                &currentWidth, &currentHeight, 0, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    mId, strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.pictureWidth ||
                currentHeight != (uint32_t)params.pictureHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
                __FUNCTION__, mId, mCaptureStreamId);
            res = device->deleteStream(mCaptureStreamId);
            if (res == -EBUSY) {
                ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                      "after it becomes idle", __FUNCTION__, mId);
                return res;
            } else if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production
        res = device->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
                CAMERA3_STREAM_ROTATION_0, &mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, mId,
                    strerror(-res), res);
            return res;
        }

        res = device->addBufferListenerForStream(mCaptureStreamId, this);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't add buffer listener: %s (%d)",
                    __FUNCTION__, mId, strerror(-res), res);
            return res;
        }
    }
    return OK;
}

status_t JpegProcessor::deleteStream() {
    ATRACE_CALL();

    Mutex::Autolock l(mInputMutex);

    if (mCaptureStreamId != NO_STREAM) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device == 0) {
            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        device->deleteStream(mCaptureStreamId);

        mCaptureHeap.clear();
        mCaptureWindow.clear();
        mCaptureConsumer.clear();

        mCaptureStreamId = NO_STREAM;
    }
    return OK;
}

int JpegProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mCaptureStreamId;
}

void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}

bool JpegProcessor::threadLoop() {
    status_t res;

    bool captureSuccess = false;
    {
        Mutex::Autolock l(mInputMutex);

        while (!mCaptureDone) {
            res = mCaptureDoneSignal.waitRelative(mInputMutex,
                    kWaitDuration);
            if (res == TIMED_OUT) return true;
        }

        captureSuccess = mCaptureSuccess;
        mCaptureDone = false;
    }

    res = processNewCapture(captureSuccess);

    return true;
}

status_t JpegProcessor::processNewCapture(bool captureSuccess) {
    ATRACE_CALL();
    status_t res;
    sp<Camera2Heap> captureHeap;
    sp<MemoryBase> captureBuffer;

    CpuConsumer::LockedBuffer imgBuffer;

    if (captureSuccess) {
        Mutex::Autolock l(mInputMutex);
        if (mCaptureStreamId == NO_STREAM) {
            ALOGW("%s: Camera %d: No stream is available", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
        if (res != OK) {
            if (res != BAD_VALUE) {
                ALOGE("%s: Camera %d: Error receiving still image buffer: "
                        "%s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
            }
            return res;
        }

        ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
                mId);

        if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
            ALOGE("%s: Camera %d: Unexpected format for still image: "
                    "%x, expected %x", __FUNCTION__, mId,
                    imgBuffer.format,
                    HAL_PIXEL_FORMAT_BLOB);
            mCaptureConsumer->unlockBuffer(imgBuffer);
            return OK;
        }

        // Find size of JPEG image; for BLOB buffers, imgBuffer.width holds the
        // buffer length in bytes
        size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
        if (jpegSize == 0) { // failed to find size, default to whole buffer
            jpegSize = imgBuffer.width;
        }
        size_t heapSize = mCaptureHeap->getSize();
        if (jpegSize > heapSize) {
            ALOGW("%s: JPEG image is larger than expected, truncating "
                    "(got %zu, expected at most %zu bytes)",
                    __FUNCTION__, jpegSize, heapSize);
            jpegSize = heapSize;
        }

        // TODO: Optimize this to avoid the memcpy
        captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
        void* captureMemory = mCaptureHeap->getBase();
        memcpy(captureMemory, imgBuffer.data, jpegSize);

        mCaptureConsumer->unlockBuffer(imgBuffer);
    }

    sp<CaptureSequencer> sequencer = mSequencer.promote();
    if (sequencer != 0) {
        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer, !captureSuccess);
    }

    return OK;
}

/*
 * JPEG FILE FORMAT OVERVIEW.
 * http://www.jpeg.org/public/jfif.pdf
 * (JPEG is the image compression algorithm; the actual file format is called JFIF)
 *
 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files.  The
 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
 * (inclusive).  Because every marker begins with the same byte, they are
 * referred to by the second byte's value.
 *
 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
 * Following it, "segment" sections begin with other markers, followed by a
 * 2-byte length (in network byte order), then the segment data.
 *
 * For our purposes we will ignore the data, and just use the length to skip to
 * the next segment.  This is necessary because the data inside segments is
 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
 *
 * After all the segments are processed, the JPEG compressed image stream begins.
 * This can be considered an opaque format with one requirement: all 0xFF bytes
 * in this stream must be followed by a 0x00 byte.  This prevents any of the
 * image data from being interpreted as a segment.  The only exception is at
 * the end of the image stream, where there is an End of Image (EOI) marker,
 * which is 0xFF followed by a non-zero (0xD9) byte.
 */

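/*
 * Illustrative sketch (not taken from a real capture): a typical JFIF stream
 * that findJpegSize() below has to parse begins roughly like this, with each
 * segment length stored in network byte order and covering the two length
 * bytes plus the payload:
 *
 *   FF D8              SOI  - Start of Image, no length field
 *   FF E0 00 10 ...    APP0 - JFIF header, length 0x0010 (16 bytes)
 *   FF DB 00 43 ...    DQT  - quantization table, length 0x0043 (67 bytes)
 *   ...                further segments, then the entropy-coded image data,
 *                      in which each 0xFF byte is followed by a stuffed 0x00
 *   FF D9              EOI  - End of Image
 *
 * findJpegSize() skips each segment using its length field and then scans the
 * remaining data for the EOI marker to determine the total image size.
 */
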
const uint8_t MARK = 0xFF; // First byte of marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker

#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// check for Start of Image marker
bool checkJpegStart(uint8_t* buf) {
    return buf[0] == MARK && buf[1] == SOI;
}
// check for End of Image marker
bool checkJpegEnd(uint8_t *buf) {
    return buf[0] == MARK && buf[1] == EOI;
}
// check for arbitrary marker, returns marker type (second byte)
// returns 0 if no marker found. Note: 0x00 is not a valid marker type
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) {
        return buf[1];
    }
    return 0;
}

// Return the size of the JPEG, 0 indicates failure
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

    // First check for JPEG transport header at the end of the buffer
    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
        size = blob->jpeg_size;
        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = 0;
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Find End of Image
    // Scan JPEG buffer until End of Image (EOI)
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}

}; // namespace camera2
}; // namespace android