/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-ZslProcessor3"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

#include <inttypes.h>

#include <utils/Log.h>
#include <utils/Trace.h>
#include <gui/Surface.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/ZslProcessor3.h"
#include "device3/Camera3Device.h"

namespace android {
namespace camera2 {

ZslProcessor3::ZslProcessor3(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mLatestClearedBufferTimestamp(0),
        mState(RUNNING),
        mClient(client),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mZslStreamId(NO_STREAM),
        mFrameListHead(0),
        mZslQueueHead(0),
        mZslQueueTail(0),
        mHasFocuser(false) {
    // Initialize buffer queue and frame list based on pipeline max depth.
    size_t pipelineMaxDepth = kDefaultMaxPipelineDepth;
    if (client != 0) {
        sp<Camera3Device> device =
                static_cast<Camera3Device*>(client->getCameraDevice().get());
        if (device != 0) {
            camera_metadata_ro_entry_t entry =
                device->info().find(ANDROID_REQUEST_PIPELINE_MAX_DEPTH);
            if (entry.count == 1) {
                pipelineMaxDepth = entry.data.u8[0];
            } else {
                ALOGW("%s: Unable to find the android.request.pipelineMaxDepth,"
                        " use default pipeline max depth %zu", __FUNCTION__,
                        kDefaultMaxPipelineDepth);
            }

            entry = device->info().find(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
            if (entry.count > 0 && entry.data.f[0] != 0.) {
                mHasFocuser = true;
            }
        }
    }

    ALOGV("%s: Initialize buffer queue and frame list depth based on max pipeline depth (%zu)",
          __FUNCTION__, pipelineMaxDepth);
    // Keep the buffer queue one entry deeper than the metadata queue: a buffer sometimes
    // arrives earlier than its metadata, which would otherwise cause the buffer
    // corresponding to the oldest metadata to be removed.
    mFrameListDepth = pipelineMaxDepth;
    mBufferQueueDepth = mFrameListDepth + 1;

    mZslQueue.insertAt(0, mBufferQueueDepth);
    mFrameList.insertAt(0, mFrameListDepth);
    sp<CaptureSequencer> captureSequencer = mSequencer.promote();
    if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
}

ZslProcessor3::~ZslProcessor3() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

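// Frame-metadata callback, registered via Camera2Client::registerFrameListener for the
// preview request ID range. Each preview result's metadata is stored in the circular
// mFrameList so it can later be matched against ZSL buffer timestamps.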
void ZslProcessor3::onResultAvailable(const CaptureResult &result) {
    ATRACE_CALL();
    ALOGV("%s:", __FUNCTION__);
    Mutex::Autolock l(mInputMutex);
    camera_metadata_ro_entry_t entry;
    entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
    if (entry.count == 0) {
        ALOGE("%s: metadata doesn't have timestamp, skip this result", __FUNCTION__);
        return;
    }
    nsecs_t timestamp = entry.data.i64[0];

    entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
    if (entry.count == 0) {
        ALOGE("%s: metadata doesn't have frame number, skip this result", __FUNCTION__);
        return;
    }
    int32_t frameNumber = entry.data.i32[0];

    ALOGVV("Got preview metadata for frame %d with timestamp %" PRId64, frameNumber, timestamp);

    if (mState != RUNNING) return;

    // Corresponding buffer has been cleared. No need to push into mFrameList
    if (timestamp <= mLatestClearedBufferTimestamp) return;

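    // mFrameList is used as a ring buffer: once it is full, the oldest result
    // metadata is overwritten by the newest one.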
    mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
    mFrameListHead = (mFrameListHead + 1) % mFrameListDepth;
}

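// (Re)creates the ZSL stream at the sensor array size (params.fastInfo.arrayWidth x
// arrayHeight) and registers this processor as a listener for preview result metadata.
// An existing stream with stale dimensions is deleted first.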
status_t ZslProcessor3::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s: Configuring ZSL streams", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<Camera2Client> client = mClient.promote();
    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }
    sp<Camera3Device> device =
        static_cast<Camera3Device*>(client->getCameraDevice().get());
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    if (mZslStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mZslStreamId,
                &currentWidth, &currentHeight, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    client->getCameraId(), strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.fastInfo.arrayWidth ||
                currentHeight != (uint32_t)params.fastInfo.arrayHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer "
                  "dimensions changed",
                __FUNCTION__, client->getCameraId(), mZslStreamId);
            res = device->deleteStream(mZslStreamId);
            if (res == -EBUSY) {
                ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                      "after it becomes idle", __FUNCTION__, mId);
                return res;
            } else if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for ZSL: %s (%d)", __FUNCTION__,
                        client->getCameraId(), strerror(-res), res);
                return res;
            }
            mZslStreamId = NO_STREAM;
        }
    }

    if (mZslStreamId == NO_STREAM) {
        // Create stream for HAL production
        // TODO: Sort out better way to select resolution for ZSL

        // Note that the format is specified internally in Camera3ZslStream
        res = device->createZslStream(
                params.fastInfo.arrayWidth, params.fastInfo.arrayHeight,
                mBufferQueueDepth,
                &mZslStreamId,
                &mZslStream);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create ZSL stream: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }

        // Only add the camera3 buffer listener when the stream is created.
        mZslStream->addBufferListener(this);
    }

    client->registerFrameListener(Camera2Client::kPreviewRequestIdStart,
            Camera2Client::kPreviewRequestIdEnd,
            this,
            /*sendPartials*/false);

    return OK;
}

status_t ZslProcessor3::deleteStream() {
    ATRACE_CALL();
    status_t res;

    Mutex::Autolock l(mInputMutex);

    if (mZslStreamId != NO_STREAM) {
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) {
            ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        sp<Camera3Device> device =
            static_cast<Camera3Device*>(client->getCameraDevice().get());
        if (device == 0) {
            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        res = device->deleteStream(mZslStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    mZslStreamId, strerror(-res), res);
            return res;
        }

        mZslStreamId = NO_STREAM;
    }
    return OK;
}

int ZslProcessor3::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mZslStreamId;
}

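// A ZSL reprocess request starts from preview result settings, so copy the
// quality-critical post-processing modes from the still-capture template into the
// request before it is submitted.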
status_t ZslProcessor3::updateRequestWithDefaultStillRequest(CameraMetadata &request) const {
    sp<Camera2Client> client = mClient.promote();
    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }
    sp<Camera3Device> device =
        static_cast<Camera3Device*>(client->getCameraDevice().get());
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    CameraMetadata stillTemplate;
    device->createDefaultRequest(CAMERA3_TEMPLATE_STILL_CAPTURE, &stillTemplate);

    // Copy the post-processing tags below from the still-capture template into the request,
    // as these modes (noise reduction, aberration/color correction, tonemap, shading,
    // hot pixel and edge enhancement) are very important for image quality.
    uint32_t postProcessingTags[] = {
            ANDROID_NOISE_REDUCTION_MODE,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
            ANDROID_COLOR_CORRECTION_MODE,
            ANDROID_TONEMAP_MODE,
            ANDROID_SHADING_MODE,
            ANDROID_HOT_PIXEL_MODE,
            ANDROID_EDGE_MODE
    };

    camera_metadata_entry_t entry;
    for (size_t i = 0; i < sizeof(postProcessingTags) / sizeof(uint32_t); i++) {
        entry = stillTemplate.find(postProcessingTags[i]);
        if (entry.count > 0) {
            request.update(postProcessingTags[i], entry.data.u8, 1);
        }
    }

    return OK;
}

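// Submits a ZSL reprocess capture: pick the best candidate frame from the result
// queue, hand the matching buffer back to the HAL as a reprocess input, and turn
// that frame's metadata into a still-capture request routed to the capture stream.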
status_t ZslProcessor3::pushToReprocess(int32_t requestId) {
    ALOGV("%s: Send in reprocess request with id %d",
            __FUNCTION__, requestId);
    Mutex::Autolock l(mInputMutex);
    status_t res;
    sp<Camera2Client> client = mClient.promote();

    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    IF_ALOGV() {
        dumpZslQueue(-1);
    }

    size_t metadataIdx;
    nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx);

    if (candidateTimestamp == -1) {
        ALOGE("%s: Could not find good candidate for ZSL reprocessing",
              __FUNCTION__);
        return NOT_ENOUGH_DATA;
    }

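    // Hand the ZSL buffer with the candidate timestamp back to the HAL as the
    // input for the reprocess request.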
    res = mZslStream->enqueueInputBufferByTimestamp(candidateTimestamp,
                                                    /*actualTimestamp*/NULL);

    if (res == mZslStream->NO_BUFFER_AVAILABLE) {
        ALOGV("%s: No ZSL buffers yet", __FUNCTION__);
        return NOT_ENOUGH_DATA;
    } else if (res != OK) {
        ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    {
        CameraMetadata request = mFrameList[metadataIdx];

        // Verify that the frame is reasonable for reprocessing

        camera_metadata_entry_t entry;
        entry = request.find(ANDROID_CONTROL_AE_STATE);
        if (entry.count == 0) {
            ALOGE("%s: ZSL queue frame has no AE state field!",
                    __FUNCTION__);
            return BAD_VALUE;
        }
        if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
                entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
            ALOGV("%s: ZSL queue frame AE state is %d, need full capture",
                    __FUNCTION__, entry.data.u8[0]);
            return NOT_ENOUGH_DATA;
        }

        uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
        res = request.update(ANDROID_REQUEST_TYPE,
                &requestType, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request type",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        int32_t inputStreams[1] =
                { mZslStreamId };
        res = request.update(ANDROID_REQUEST_INPUT_STREAMS,
                inputStreams, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request input streams",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        uint8_t captureIntent =
                static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
        res = request.update(ANDROID_CONTROL_CAPTURE_INTENT,
                &captureIntent, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request capture intent",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        // TODO: Shouldn't we also update the latest preview frame?
        int32_t outputStreams[1] =
                { client->getCaptureStreamId() };
        res = request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                outputStreams, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request output streams",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        res = request.update(ANDROID_REQUEST_ID,
                &requestId, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update frame to a reprocess request",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        res = client->stopStream();
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
                "%s (%d)",
                __FUNCTION__, client->getCameraId(), strerror(-res), res);
            return INVALID_OPERATION;
        }

        // Update JPEG settings
        {
            SharedParameters::Lock l(client->getParameters());
            res = l.mParameters.updateRequestJpeg(&request);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL "
                        "capture request: %s (%d)", __FUNCTION__,
                        client->getCameraId(),
                        strerror(-res), res);
                return res;
            }
        }

        // Update post-processing settings
        res = updateRequestWithDefaultStillRequest(request);
        if (res != OK) {
            ALOGW("%s: Unable to update post-processing tags, the reprocessed image quality "
                    "may be compromised", __FUNCTION__);
        }

        mLatestCapturedRequest = request;
        res = client->getCameraDevice()->capture(request);
        if (res != OK) {
            ALOGE("%s: Unable to send ZSL reprocess request to capture: %s"
                  " (%d)", __FUNCTION__, strerror(-res), res);
            return res;
        }

        mState = LOCKED;
    }

    return OK;
}

status_t ZslProcessor3::clearZslQueue() {
    Mutex::Autolock l(mInputMutex);
    // If in middle of capture, can't clear out queue
    if (mState == LOCKED) return OK;

    return clearZslQueueLocked();
}

status_t ZslProcessor3::clearZslQueueLocked() {
    if (mZslStream != 0) {
        // clear result metadata list first.
        clearZslResultQueueLocked();
        return mZslStream->clearInputRingBuffer(&mLatestClearedBufferTimestamp);
    }
    return OK;
}

void ZslProcessor3::clearZslResultQueueLocked() {
    mFrameList.clear();
    mFrameListHead = 0;
    mFrameList.insertAt(0, mFrameListDepth);
}

void ZslProcessor3::dump(int fd, const Vector<String16>& /*args*/) const {
    Mutex::Autolock l(mInputMutex);
    if (!mLatestCapturedRequest.isEmpty()) {
        String8 result("    Latest ZSL capture request:\n");
        write(fd, result.string(), result.size());
        mLatestCapturedRequest.dump(fd, 2, 6);
    } else {
        String8 result("    Latest ZSL capture request: none yet\n");
        write(fd, result.string(), result.size());
    }
    dumpZslQueue(fd);
}

bool ZslProcessor3::threadLoop() {
    // TODO: remove dependency on thread. For now, shut thread down right
    // away.
    return false;
}

void ZslProcessor3::dumpZslQueue(int fd) const {
    String8 header("ZSL queue contents:");
    String8 indent("    ");
    ALOGV("%s", header.string());
    if (fd != -1) {
        header = indent + header + "\n";
        write(fd, header.string(), header.size());
    }
    for (size_t i = 0; i < mZslQueue.size(); i++) {
        const ZslPair &queueEntry = mZslQueue[i];
        nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
        camera_metadata_ro_entry_t entry;
        nsecs_t frameTimestamp = 0;
        int frameAeState = -1;
        if (!queueEntry.frame.isEmpty()) {
            entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP);
            if (entry.count > 0) frameTimestamp = entry.data.i64[0];
            entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE);
            if (entry.count > 0) frameAeState = entry.data.u8[0];
        }
        String8 result =
                String8::format("   %zu: b: %" PRId64 "\tf: %" PRId64 ", AE state: %d", i,
                        bufferTimestamp, frameTimestamp, frameAeState);
        ALOGV("%s", result.string());
        if (fd != -1) {
            result = indent + result + "\n";
            write(fd, result.string(), result.size());
        }
    }
}

bool ZslProcessor3::isFixedFocusMode(uint8_t afMode) const {
    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
        case ANDROID_CONTROL_AF_MODE_MACRO:
            return false;
        case ANDROID_CONTROL_AF_MODE_OFF:
        case ANDROID_CONTROL_AF_MODE_EDOF:
            return true;
        default:
            ALOGE("%s: unknown focus mode %d", __FUNCTION__, afMode);
            return false;
    }
}

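// Scans mFrameList for the oldest frame whose AE (and, when the device has a focuser
// in a non-fixed focus mode, AF) state makes it usable for ZSL reprocessing; returns
// its timestamp, or -1 if no suitable frame is found.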
nsecs_t ZslProcessor3::getCandidateTimestampLocked(size_t* metadataIdx) const {
    /**
     * Find the smallest timestamp we know about so far
     * - ensure that aeState is either converged or locked
     */

    size_t idx = 0;
    nsecs_t minTimestamp = -1;

    size_t emptyCount = mFrameList.size();

    for (size_t j = 0; j < mFrameList.size(); j++) {
        const CameraMetadata &frame = mFrameList[j];
        if (!frame.isEmpty()) {

            emptyCount--;

            camera_metadata_ro_entry_t entry;
            entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
            if (entry.count == 0) {
                ALOGE("%s: Can't find timestamp in frame!",
                        __FUNCTION__);
                continue;
            }
            nsecs_t frameTimestamp = entry.data.i64[0];
            if (minTimestamp > frameTimestamp || minTimestamp == -1) {

                entry = frame.find(ANDROID_CONTROL_AE_STATE);

                if (entry.count == 0) {
                    /**
                     * This is most likely a HAL bug. The aeState field is
                     * mandatory, so it should always be in a metadata packet.
                     */
                    ALOGW("%s: ZSL queue frame has no AE state field!",
                            __FUNCTION__);
                    continue;
                }
                if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
                        entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
                    ALOGVV("%s: ZSL queue frame AE state is %d, need "
                           "full capture", __FUNCTION__, entry.data.u8[0]);
                    continue;
                }

                entry = frame.find(ANDROID_CONTROL_AF_MODE);
                if (entry.count == 0) {
                    ALOGW("%s: ZSL queue frame has no AF mode field!",
                            __FUNCTION__);
                    continue;
                }
                uint8_t afMode = entry.data.u8[0];
                if (afMode == ANDROID_CONTROL_AF_MODE_OFF) {
                    // Skip all ZSL buffers captured in manual AF mode, as we
                    // don't really know the AF state.
                    continue;
                }

                // Check AF state if device has focuser and focus mode isn't fixed
                if (mHasFocuser && !isFixedFocusMode(afMode)) {
                    // Make sure the candidate frame has good focus.
                    entry = frame.find(ANDROID_CONTROL_AF_STATE);
                    if (entry.count == 0) {
                        ALOGW("%s: ZSL queue frame has no AF state field!",
                                __FUNCTION__);
                        continue;
                    }
                    uint8_t afState = entry.data.u8[0];
                    if (afState != ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
                            afState != ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED &&
                            afState != ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
                        ALOGW("%s: ZSL queue frame AF state %d is not good for capture, skip it",
                                __FUNCTION__, afState);
                        continue;
                    }
                }

                minTimestamp = frameTimestamp;
                idx = j;
            }

            ALOGVV("%s: Saw timestamp %" PRId64, __FUNCTION__, frameTimestamp);
        }
    }

    if (emptyCount == mFrameList.size()) {
        /**
         * This is mildly bad: ZSL was triggered before the camera framework
         * received any result frames.
         *
         * It is a corner case that can happen when:
         * + the user presses the shutter button right as the camera starts
         *     (startPreview followed immediately by takePicture), or
         * + during burst capture (hitting the shutter button as fast as possible).
         *
         * If this happens in the steady state (preview running for a while, then
         *     a single takePicture), it may indicate a framework bug.
         */
        ALOGW("%s: ZSL queue has no metadata frames", __FUNCTION__);
    }

    ALOGV("%s: Candidate timestamp %" PRId64 " (idx %zu), empty frames: %zu",
          __FUNCTION__, minTimestamp, idx, emptyCount);

    if (metadataIdx) {
        *metadataIdx = idx;
    }

    return minTimestamp;
}

void ZslProcessor3::onBufferAcquired(const BufferInfo& /*bufferInfo*/) {
    // Intentionally left empty
    // Although theoretically we could use this to get better dump info
}

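// Buffer listener callback from Camera3ZslStream. Once the HAL returns the reprocess
// input buffer, the ZSL capture is considered complete and the processor can accept
// new ZSL requests again.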
void ZslProcessor3::onBufferReleased(const BufferInfo& bufferInfo) {

    // ignore output buffers
    if (bufferInfo.mOutput) {
        return;
    }

    // Lock mutex only once we know this is an input buffer returned to avoid
    // potential deadlock
    Mutex::Autolock l(mInputMutex);
    // TODO: Verify that the buffer is in our queue by looking at timestamp
    // theoretically unnecessary unless we change the following assumptions:
    // -- only 1 buffer reprocessed at a time (which is the case now)

    // Erase the entire ZSL queue since we've now completed the capture and preview
    // is stopped.
    //
    // We need to guarantee that if we do two back-to-back captures, the second
    // won't use a buffer that's older than, or the same as, the first, which is
    // theoretically possible if we don't clear out the queue and the selection
    // criterion is something like 'newest'. Clearing out the result metadata queue
    // on a completed capture ensures we'll only use new timestamps.
    // Calling clearZslQueueLocked here would deadlock, because this callback is
    // invoked with the Camera3Stream internal lock (mLock) held, and
    // clearZslQueueLocked needs to acquire that same lock.
    // TODO: figure out a way to clear the ZSL buffer queue properly. Right now it
    // is safe not to do so, as back-to-back ZSL captures require stopping and
    // restarting the preview, which flushes the ZSL queue automatically.
    ALOGV("%s: Memory optimization, clearing ZSL queue",
          __FUNCTION__);
    clearZslResultQueueLocked();

    // Required so we accept more ZSL requests
    mState = RUNNING;
}

}; // namespace camera2
}; // namespace android