/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-ZslProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
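// Uncommenting LOG_NDEBUG 0 enables verbose (ALOGV) logging for this file;
// defining LOG_NNDEBUG additionally routes the extra-verbose ALOGVV macro
// defined below to ALOGV.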

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) if (0) ALOGV(__VA_ARGS__)
#endif

#include <inttypes.h>

#include <utils/Log.h>
#include <utils/Trace.h>
#include <gui/Surface.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/ZslProcessor.h"
#include "device3/Camera3Device.h"

namespace android {
namespace camera2 {

ZslProcessor::ZslProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mLatestClearedBufferTimestamp(0),
        mState(RUNNING),
        mClient(client),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mZslStreamId(NO_STREAM),
        mFrameListHead(0),
        mHasFocuser(false) {
    // Initialize buffer queue and frame list based on pipeline max depth.
    size_t pipelineMaxDepth = kDefaultMaxPipelineDepth;
    if (client != 0) {
        sp<Camera3Device> device =
        static_cast<Camera3Device*>(client->getCameraDevice().get());
        if (device != 0) {
            camera_metadata_ro_entry_t entry =
                device->info().find(ANDROID_REQUEST_PIPELINE_MAX_DEPTH);
            if (entry.count == 1) {
                pipelineMaxDepth = entry.data.u8[0];
            } else {
                ALOGW("%s: Unable to find android.request.pipelineMaxDepth,"
                        " using default pipeline max depth %d", __FUNCTION__,
                        kDefaultMaxPipelineDepth);
            }

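            // Per the camera metadata spec, a minimum focus distance of 0 means
            // the lens is fixed-focus; only mark the device as having a focuser
            // when the value is present and non-zero.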
            entry = device->info().find(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
            if (entry.count > 0 && entry.data.f[0] != 0.) {
                mHasFocuser = true;
            }
        }
    }

    ALOGV("%s: Initialize buffer queue and frame list depth based on max pipeline depth (%zu)",
          __FUNCTION__, pipelineMaxDepth);
    // Keep the buffer queue one entry longer than the metadata queue because a
    // buffer sometimes arrives earlier than its metadata, which would otherwise
    // cause the buffer corresponding to the oldest metadata to be dropped.
    mFrameListDepth = pipelineMaxDepth;
    mBufferQueueDepth = mFrameListDepth + 1;
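    // For example, a pipeline max depth of 4 yields a 4-entry result metadata
    // list and a 5-entry ZSL buffer queue.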

    mZslQueue.insertAt(0, mBufferQueueDepth);
    mFrameList.insertAt(0, mFrameListDepth);
    sp<CaptureSequencer> captureSequencer = mSequencer.promote();
    if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
}

ZslProcessor::~ZslProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

void ZslProcessor::onResultAvailable(const CaptureResult &result) {
    ATRACE_CALL();
    ALOGV("%s:", __FUNCTION__);
    Mutex::Autolock l(mInputMutex);
    camera_metadata_ro_entry_t entry;
    entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
    if (entry.count == 0) {
        ALOGE("%s: metadata doesn't have timestamp, skip this result", __FUNCTION__);
        return;
    }
    nsecs_t timestamp = entry.data.i64[0];

    entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
    if (entry.count == 0) {
        ALOGE("%s: metadata doesn't have frame number, skip this result", __FUNCTION__);
        return;
    }
    int32_t frameNumber = entry.data.i32[0];

    ALOGVV("Got preview metadata for frame %d with timestamp %" PRId64, frameNumber, timestamp);

    if (mState != RUNNING) return;

    // Corresponding buffer has been cleared. No need to push into mFrameList
    if (timestamp <= mLatestClearedBufferTimestamp) return;

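    // mFrameList is used as a fixed-size ring buffer: once it is full, the
    // newest result metadata overwrites the oldest entry.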
    mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
    mFrameListHead = (mFrameListHead + 1) % mFrameListDepth;
}

status_t ZslProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s: Configuring ZSL streams", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<Camera2Client> client = mClient.promote();
    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }
    sp<Camera3Device> device =
        static_cast<Camera3Device*>(client->getCameraDevice().get());
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    if (mZslStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mZslStreamId,
                &currentWidth, &currentHeight, 0, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    client->getCameraId(), strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.fastInfo.arrayWidth ||
                currentHeight != (uint32_t)params.fastInfo.arrayHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer "
                  "dimensions changed",
                __FUNCTION__, client->getCameraId(), mZslStreamId);
            res = device->deleteStream(mZslStreamId);
            if (res == -EBUSY) {
                ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                      "after it becomes idle", __FUNCTION__, mId);
                return res;
            } else if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for ZSL: %s (%d)", __FUNCTION__,
                        client->getCameraId(), strerror(-res), res);
                return res;
            }
            mZslStreamId = NO_STREAM;
        }
    }

    if (mZslStreamId == NO_STREAM) {
        // Create stream for HAL production
        // TODO: Sort out better way to select resolution for ZSL

        // Note that the format is specified internally by Camera3ZslStream
        res = device->createZslStream(
                params.fastInfo.arrayWidth, params.fastInfo.arrayHeight,
                mBufferQueueDepth,
                &mZslStreamId,
                &mZslStream);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create ZSL stream: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }

        // Only add the camera3 buffer listener when the stream is created.
        mZslStream->addBufferListener(this);
    }

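    // Listen for results of preview requests so that their metadata can later
    // be matched against ZSL buffers; partial results are not needed for that.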
    client->registerFrameListener(Camera2Client::kPreviewRequestIdStart,
            Camera2Client::kPreviewRequestIdEnd,
            this,
            /*sendPartials*/false);

    return OK;
}

status_t ZslProcessor::deleteStream() {
    ATRACE_CALL();
    status_t res;

    Mutex::Autolock l(mInputMutex);

    if (mZslStreamId != NO_STREAM) {
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) {
            ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        sp<Camera3Device> device =
            static_cast<Camera3Device*>(client->getCameraDevice().get());
        if (device == 0) {
            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        res = device->deleteStream(mZslStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    mZslStreamId, strerror(-res), res);
            return res;
        }

        mZslStreamId = NO_STREAM;
    }
    return OK;
}

int ZslProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mZslStreamId;
}

status_t ZslProcessor::updateRequestWithDefaultStillRequest(CameraMetadata &request) const {
    sp<Camera2Client> client = mClient.promote();
    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }
    sp<Camera3Device> device =
        static_cast<Camera3Device*>(client->getCameraDevice().get());
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    CameraMetadata stillTemplate;
    device->createDefaultRequest(CAMERA3_TEMPLATE_STILL_CAPTURE, &stillTemplate);

    // Find some of the post-processing tags and copy their values from the
    // template into the request. Only a handful of modes (noise reduction,
    // aberration correction, color correction, tonemap, shading, hot pixel,
    // and edge) are checked for now, as they are very important for image quality.
    uint32_t postProcessingTags[] = {
            ANDROID_NOISE_REDUCTION_MODE,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
            ANDROID_COLOR_CORRECTION_MODE,
            ANDROID_TONEMAP_MODE,
            ANDROID_SHADING_MODE,
            ANDROID_HOT_PIXEL_MODE,
            ANDROID_EDGE_MODE
    };

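    // Each of these tags holds a single byte-sized enum value, so copying one
    // u8 entry from the template is sufficient.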
    camera_metadata_entry_t entry;
    for (size_t i = 0; i < sizeof(postProcessingTags) / sizeof(uint32_t); i++) {
        entry = stillTemplate.find(postProcessingTags[i]);
        if (entry.count > 0) {
            request.update(postProcessingTags[i], entry.data.u8, 1);
        }
    }

    return OK;
}

status_t ZslProcessor::pushToReprocess(int32_t requestId) {
    ALOGV("%s: Send in reprocess request with id %d",
            __FUNCTION__, requestId);
    Mutex::Autolock l(mInputMutex);
    status_t res;
    sp<Camera2Client> client = mClient.promote();

    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    IF_ALOGV() {
        dumpZslQueue(-1);
    }

    size_t metadataIdx;
    nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx);

    if (candidateTimestamp == -1) {
        ALOGE("%s: Could not find good candidate for ZSL reprocessing",
              __FUNCTION__);
        return NOT_ENOUGH_DATA;
    }

    res = mZslStream->enqueueInputBufferByTimestamp(candidateTimestamp,
                                                    /*actualTimestamp*/NULL);

    if (res == mZslStream->NO_BUFFER_AVAILABLE) {
        ALOGV("%s: No ZSL buffers yet", __FUNCTION__);
        return NOT_ENOUGH_DATA;
    } else if (res != OK) {
        ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    {
        CameraMetadata request = mFrameList[metadataIdx];

        // Verify that the frame is reasonable for reprocessing

        camera_metadata_entry_t entry;
        entry = request.find(ANDROID_CONTROL_AE_STATE);
        if (entry.count == 0) {
            ALOGE("%s: ZSL queue frame has no AE state field!",
                    __FUNCTION__);
            return BAD_VALUE;
        }
        if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
                entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
            ALOGV("%s: ZSL queue frame AE state is %d, need full capture",
                    __FUNCTION__, entry.data.u8[0]);
            return NOT_ENOUGH_DATA;
        }

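        // Turn the cached preview result into a reprocess request: the HAL will
        // consume the ZSL buffer just enqueued on the input stream and produce
        // a still image on the client's capture output stream.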
        uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
        res = request.update(ANDROID_REQUEST_TYPE,
                &requestType, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request type",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        int32_t inputStreams[1] =
                { mZslStreamId };
        res = request.update(ANDROID_REQUEST_INPUT_STREAMS,
                inputStreams, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request input streams",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        uint8_t captureIntent =
                static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
        res = request.update(ANDROID_CONTROL_CAPTURE_INTENT,
                &captureIntent, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request capture intent",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        // TODO: Shouldn't we also update the latest preview frame?
        int32_t outputStreams[1] =
                { client->getCaptureStreamId() };
        res = request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                outputStreams, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request output streams",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        res = request.update(ANDROID_REQUEST_ID,
                &requestId, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update frame to a reprocess request",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        res = client->stopStream();
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
                "%s (%d)",
                __FUNCTION__, client->getCameraId(), strerror(-res), res);
            return INVALID_OPERATION;
        }

        // Update JPEG settings
        {
            SharedParameters::Lock l(client->getParameters());
            res = l.mParameters.updateRequestJpeg(&request);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL "
                        "capture request: %s (%d)", __FUNCTION__,
                        client->getCameraId(),
                        strerror(-res), res);
                return res;
            }
        }

        // Update post-processing settings
        res = updateRequestWithDefaultStillRequest(request);
        if (res != OK) {
            ALOGW("%s: Unable to update post-processing tags, the reprocessed image quality "
                    "may be compromised", __FUNCTION__);
        }

        mLatestCapturedRequest = request;
        res = client->getCameraDevice()->capture(request);
        if (res != OK) {
            ALOGE("%s: Unable to send ZSL reprocess request to capture: %s"
                  " (%d)", __FUNCTION__, strerror(-res), res);
            return res;
        }

        mState = LOCKED;
    }

    return OK;
}

status_t ZslProcessor::clearZslQueue() {
    Mutex::Autolock l(mInputMutex);
    // If in middle of capture, can't clear out queue
    if (mState == LOCKED) return OK;

    return clearZslQueueLocked();
}

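// Clears both the cached result metadata and the ZSL stream's input ring
// buffer. The timestamp of the most recently cleared buffer is recorded so
// that onResultAvailable() can discard metadata for buffers that are gone.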
status_t ZslProcessor::clearZslQueueLocked() {
    if (mZslStream != 0) {
        // clear result metadata list first.
        clearZslResultQueueLocked();
        return mZslStream->clearInputRingBuffer(&mLatestClearedBufferTimestamp);
    }
    return OK;
}

void ZslProcessor::clearZslResultQueueLocked() {
    mFrameList.clear();
    mFrameListHead = 0;
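    // Refill the list with empty metadata entries so indexing by mFrameListHead
    // remains valid after the clear.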
    mFrameList.insertAt(0, mFrameListDepth);
}

void ZslProcessor::dump(int fd, const Vector<String16>& /*args*/) const {
    Mutex::Autolock l(mInputMutex);
    if (!mLatestCapturedRequest.isEmpty()) {
        String8 result("    Latest ZSL capture request:\n");
        write(fd, result.string(), result.size());
        mLatestCapturedRequest.dump(fd, 2, 6);
    } else {
        String8 result("    Latest ZSL capture request: none yet\n");
        write(fd, result.string(), result.size());
    }
    dumpZslQueue(fd);
}

bool ZslProcessor::threadLoop() {
    // TODO: remove dependency on thread. For now, shut thread down right
    // away.
    return false;
}

void ZslProcessor::dumpZslQueue(int fd) const {
    String8 header("ZSL queue contents:");
    String8 indent("    ");
    ALOGV("%s", header.string());
    if (fd != -1) {
        header = indent + header + "\n";
        write(fd, header.string(), header.size());
    }
    for (size_t i = 0; i < mZslQueue.size(); i++) {
        const ZslPair &queueEntry = mZslQueue[i];
        nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
        camera_metadata_ro_entry_t entry;
        nsecs_t frameTimestamp = 0;
        int frameAeState = -1;
        if (!queueEntry.frame.isEmpty()) {
            entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP);
            if (entry.count > 0) frameTimestamp = entry.data.i64[0];
            entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE);
            if (entry.count > 0) frameAeState = entry.data.u8[0];
        }
        String8 result =
                String8::format("   %zu: b: %" PRId64 "\tf: %" PRId64 ", AE state: %d", i,
                        bufferTimestamp, frameTimestamp, frameAeState);
        ALOGV("%s", result.string());
        if (fd != -1) {
            result = indent + result + "\n";
            write(fd, result.string(), result.size());
        }
    }
}

bool ZslProcessor::isFixedFocusMode(uint8_t afMode) const {
    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
        case ANDROID_CONTROL_AF_MODE_MACRO:
            return false;
        case ANDROID_CONTROL_AF_MODE_OFF:
        case ANDROID_CONTROL_AF_MODE_EDOF:
            return true;
        default:
            ALOGE("%s: unknown focus mode %d", __FUNCTION__, afMode);
            return false;
    }
}

nsecs_t ZslProcessor::getCandidateTimestampLocked(size_t* metadataIdx) const {
    /**
     * Find the smallest timestamp we know about so far
     * - ensure that aeState is either converged or locked
     */

    size_t idx = 0;
    nsecs_t minTimestamp = -1;

    size_t emptyCount = mFrameList.size();
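    // emptyCount tracks the slots that still hold no metadata; if every slot is
    // empty we warn below that no result frames have been received yet.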

    for (size_t j = 0; j < mFrameList.size(); j++) {
        const CameraMetadata &frame = mFrameList[j];
        if (!frame.isEmpty()) {

            emptyCount--;

            camera_metadata_ro_entry_t entry;
            entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
            if (entry.count == 0) {
                ALOGE("%s: Can't find timestamp in frame!",
                        __FUNCTION__);
                continue;
            }
            nsecs_t frameTimestamp = entry.data.i64[0];
            if (minTimestamp > frameTimestamp || minTimestamp == -1) {

                entry = frame.find(ANDROID_CONTROL_AE_STATE);

                if (entry.count == 0) {
                    /**
                     * This is most likely a HAL bug. The aeState field is
                     * mandatory, so it should always be in a metadata packet.
                     */
                    ALOGW("%s: ZSL queue frame has no AE state field!",
                            __FUNCTION__);
                    continue;
                }
                if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
                        entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
                    ALOGVV("%s: ZSL queue frame AE state is %d, need "
                           "full capture", __FUNCTION__, entry.data.u8[0]);
                    continue;
                }

                entry = frame.find(ANDROID_CONTROL_AF_MODE);
                if (entry.count == 0) {
                    ALOGW("%s: ZSL queue frame has no AF mode field!",
                            __FUNCTION__);
                    continue;
                }
                uint8_t afMode = entry.data.u8[0];
                if (afMode == ANDROID_CONTROL_AF_MODE_OFF) {
                    // Skip all ZSL buffers captured in manual AF mode, as we
                    // don't really know the AF state.
                    continue;
                }

                // Check AF state if device has focuser and focus mode isn't fixed
                if (mHasFocuser && !isFixedFocusMode(afMode)) {
                    // Make sure the candidate frame has good focus.
                    entry = frame.find(ANDROID_CONTROL_AF_STATE);
                    if (entry.count == 0) {
                        ALOGW("%s: ZSL queue frame has no AF state field!",
                                __FUNCTION__);
                        continue;
                    }
                    uint8_t afState = entry.data.u8[0];
                    if (afState != ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
                            afState != ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED &&
                            afState != ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
                        ALOGVV("%s: ZSL queue frame AF state %d is not good for capture, skip it",
                                __FUNCTION__, afState);
                        continue;
                    }
                }

                minTimestamp = frameTimestamp;
                idx = j;
            }

            ALOGVV("%s: Saw timestamp %" PRId64, __FUNCTION__, frameTimestamp);
        }
    }

    if (emptyCount == mFrameList.size()) {
        /**
         * This could be mildly bad: it means ZSL was triggered before the
         * camera framework received any result frames.
         *
         * This is a corner case that can happen when:
         * + the user presses the shutter button very quickly after the camera
         *     starts (startPreview followed immediately by takePicture).
         * + burst capture is used (hitting the shutter button as fast as possible).
         *
         * If this happens in the steady state (preview running for a while,
         *     then a single takePicture call), it might be a framework bug.
         */
        ALOGW("%s: ZSL queue has no metadata frames", __FUNCTION__);
    }

    ALOGV("%s: Candidate timestamp %" PRId64 " (idx %zu), empty frames: %zu",
          __FUNCTION__, minTimestamp, idx, emptyCount);

    if (metadataIdx) {
        *metadataIdx = idx;
    }

    return minTimestamp;
}

void ZslProcessor::onBufferAcquired(const BufferInfo& /*bufferInfo*/) {
    // Intentionally left empty
    // Although theoretically we could use this to get better dump info
}

void ZslProcessor::onBufferReleased(const BufferInfo& bufferInfo) {

    // ignore output buffers
    if (bufferInfo.mOutput) {
        return;
    }

    // Lock the mutex only once we know an input buffer has been returned,
    // to avoid a potential deadlock.
    Mutex::Autolock l(mInputMutex);
    // TODO: Verify that the buffer is in our queue by looking at its timestamp.
    // Theoretically unnecessary unless we change the following assumption:
    // -- only 1 buffer is reprocessed at a time (which is the case now).

    // Erase the result metadata queue since we've now completed the capture and
    // preview is stopped.
    //
    // We need to guarantee that if we do two back-to-back captures, the second
    // won't use a buffer that's older than, or the same as, the first, which is
    // theoretically possible if we don't clear out the queue and the selection
    // criterion is something like 'newest'. Clearing out the result metadata
    // queue on a completed capture ensures we'll only use new timestamps.
    // Calling clearZslQueueLocked here would be a guaranteed deadlock, because
    // this callback is invoked with the Camera3Stream internal lock (mLock)
    // held, and clearZslQueueLocked needs to acquire the same lock.
    // TODO: figure out a way to clear the ZSL buffer queue properly. Right now
    // it is safe not to do so, since back-to-back ZSL captures require stopping
    // and restarting preview, which flushes the ZSL queue automatically.
    ALOGV("%s: Memory optimization, clearing ZSL queue",
          __FUNCTION__);
    clearZslResultQueueLocked();

    // Required so we accept more ZSL requests
    mState = RUNNING;
}

} // namespace camera2
} // namespace android