/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-FrameProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <utils/Trace.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/FrameProcessor.h"

namespace android {
namespace camera2 {

FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
                               sp<Camera2Client> client) :
    FrameProcessorBase(device),
    mClient(client),
    mLastFrameNumberOfFaces(0),
    mLast3AFrameNumber(-1) {

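    // If the device will not emit 3A notifications itself, synthesize them
    // here from the 3A state carried in each capture result.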
    sp<CameraDeviceBase> d = device.promote();
    mSynthesize3ANotify = (d != NULL) && !(d->willNotify3A());

    {
        SharedParameters::Lock l(client->getParameters());

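        // Determine partial-result support: HAL3.2+ devices report how many
        // partial results make up one capture; older devices advertise the
        // partial-result quirk in their parameters instead.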
        if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
            mUsePartialResult = (mNumPartialResults > 1);
        } else {
            mUsePartialResult = l.mParameters.quirks.partialResults;
        }

        // Initialize starting 3A state
        m3aState.afTriggerId = l.mParameters.afTriggerCounter;
        m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
        // Check if lens is fixed-focus
        if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
            m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
        }
    }
}

FrameProcessor::~FrameProcessor() {
}

bool FrameProcessor::processSingleFrame(CaptureResult &frame,
                                        const sp<CameraDeviceBase> &device) {

    sp<Camera2Client> client = mClient.promote();
    if (!client.get()) {
        return false;
    }

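    // A partial result carries only a subset of the final metadata, so the
    // face-detect processing below is deferred until the final result arrives.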
    bool isPartialResult = false;
    if (mUsePartialResult) {
        if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
            isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
        } else {
            camera_metadata_entry_t entry;
            entry = frame.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
            if (entry.count > 0 &&
                    entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
                isPartialResult = true;
            }
        }
    }

    if (!isPartialResult && processFaceDetect(frame.mMetadata, client) != OK) {
        return false;
    }

    if (mSynthesize3ANotify) {
        process3aState(frame, client);
    }

    return FrameProcessorBase::processSingleFrame(frame, device);
}

status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {
    status_t res = BAD_VALUE;
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;

    {
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;
    metadata.faces = NULL;

    if (enableFaceDetect &&
        faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {

        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
        if (entry.count == 0) {
            // No faces this frame
            /* warning: locks SharedCameraCallbacks */
            callbackFaceDetection(client, metadata);
            return OK;
        }
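        // Face rectangles are packed as 4 int32s per face
        // (left, top, right, bottom), hence count / 4 faces.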
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

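        // Landmarks and face IDs are only reported when the HAL runs in FULL
        // face detect mode; SIMPLE mode provides rectangles and scores only.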
        if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
            entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATISTICS_FACE_IDS);

            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }

        entry = frame.find(ANDROID_SCALER_CROP_REGION);
        if (entry.count < 4) {
            ALOGE("%s: Camera %d: Unable to read crop region (count = %zu)",
                    __FUNCTION__, client->getCameraId(), entry.count);
            return res;
        }

        Parameters::CropRegion scalerCrop = {
            static_cast<float>(entry.data.i32[0]),
            static_cast<float>(entry.data.i32[1]),
            static_cast<float>(entry.data.i32[2]),
            static_cast<float>(entry.data.i32[3])};

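        // Face coordinates arrive in active-array pixel coordinates; map them
        // into the [-1000, 1000] range the camera1 API expects, relative to
        // the current scaler crop region.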
        faces.setCapacity(metadata.number_of_faces);

        size_t maxFaces = metadata.number_of_faces;
        for (size_t i = 0; i < maxFaces; i++) {
            if (faceScores[i] == 0) {
                metadata.number_of_faces--;
                continue;
            }
            if (faceScores[i] > 100) {
                ALOGW("%s: Face index %zu with out of range score %d",
                        __FUNCTION__, i, faceScores[i]);
            }

            camera_face_t face;

            face.rect[0] = l.mParameters.arrayXToNormalizedWithCrop(
                                faceRects[i*4 + 0], scalerCrop);
            face.rect[1] = l.mParameters.arrayYToNormalizedWithCrop(
                                faceRects[i*4 + 1], scalerCrop);
            face.rect[2] = l.mParameters.arrayXToNormalizedWithCrop(
                                faceRects[i*4 + 2], scalerCrop);
            face.rect[3] = l.mParameters.arrayYToNormalizedWithCrop(
                                faceRects[i*4 + 3], scalerCrop);

            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                face.id = faceIds[i];
                face.left_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 0], scalerCrop);
                face.left_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 1], scalerCrop);
                face.right_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 2], scalerCrop);
                face.right_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 3], scalerCrop);
                face.mouth[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 4], scalerCrop);
                face.mouth[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 5], scalerCrop);
            } else {
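                // SIMPLE mode: no landmarks available. -2000 lies outside the
                // valid [-1000, 1000] range and marks the fields unsupported.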
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        metadata.faces = faces.editArray();
    }

    /* warning: locks SharedCameraCallbacks */
    callbackFaceDetection(client, metadata);

    return OK;
}

status_t FrameProcessor::process3aState(const CaptureResult &frame,
        const sp<Camera2Client> &client) {

    ATRACE_CALL();
    const CameraMetadata &metadata = frame.mMetadata;
    camera_metadata_ro_entry_t entry;
    int cameraId = client->getCameraId();

    entry = metadata.find(ANDROID_REQUEST_FRAME_COUNT);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: No frame count in capture result!",
                __FUNCTION__, cameraId);
        return BAD_VALUE;
    }
    int32_t frameNumber = entry.data.i32[0];

    // Don't send 3A notifications for the same frame number twice
    if (frameNumber <= mLast3AFrameNumber) {
        ALOGV("%s: Already sent 3A for frame number %d, skipping",
                __FUNCTION__, frameNumber);
        return OK;
    }

    mLast3AFrameNumber = frameNumber;

    // Get 3A states from result metadata
    bool gotAllStates = true;

    AlgState new3aState;

    // TODO: Also use AE mode, AE trigger ID

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
            &new3aState.afMode, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
            &new3aState.awbMode, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
            &new3aState.aeState, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
            &new3aState.afState, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
            &new3aState.awbState, frameNumber, cameraId);

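    // HAL3.2+ devices deliver the trigger IDs in the result extras; older
    // devices report them through dedicated metadata tags.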
    if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
        new3aState.afTriggerId = frame.mResultExtras.afTriggerId;
        new3aState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
    } else {
        gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AF_TRIGGER_ID,
                 &new3aState.afTriggerId, frameNumber, cameraId);

        gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AE_PRECAPTURE_ID,
                 &new3aState.aeTriggerId, frameNumber, cameraId);
    }

    if (!gotAllStates) return BAD_VALUE;

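    // Only notify the client about 3A fields that actually changed since the
    // last processed frame.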
    if (new3aState.aeState != m3aState.aeState) {
        ALOGV("%s: Camera %d: AE state %d->%d",
                __FUNCTION__, cameraId,
                m3aState.aeState, new3aState.aeState);
        client->notifyAutoExposure(new3aState.aeState, new3aState.aeTriggerId);
    }

    if (new3aState.afState != m3aState.afState ||
        new3aState.afMode != m3aState.afMode ||
        new3aState.afTriggerId != m3aState.afTriggerId) {
        ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
                __FUNCTION__, cameraId,
                m3aState.afState, new3aState.afState,
                m3aState.afMode, new3aState.afMode,
                m3aState.afTriggerId, new3aState.afTriggerId);
        client->notifyAutoFocus(new3aState.afState, new3aState.afTriggerId);
    }
    if (new3aState.awbState != m3aState.awbState ||
        new3aState.awbMode != m3aState.awbMode) {
        ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
                __FUNCTION__, cameraId,
                m3aState.awbState, new3aState.awbState,
                m3aState.awbMode, new3aState.awbMode);
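        // Note: there is no dedicated AWB trigger ID, so the AE precapture
        // trigger ID is reused here.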
        client->notifyAutoWhitebalance(new3aState.awbState,
                new3aState.aeTriggerId);
    }

    m3aState = new3aState;

    return OK;
}

template<typename Src, typename T>
bool FrameProcessor::get3aResult(const CameraMetadata& result, int32_t tag,
        T* value, int32_t frameNumber, int cameraId) {
    camera_metadata_ro_entry_t entry;
    if (value == NULL) {
        ALOGE("%s: Camera %d: Value to write to is NULL",
                __FUNCTION__, cameraId);
        return false;
    }

    entry = result.find(tag);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: No %s provided by HAL for frame %d!",
                __FUNCTION__, cameraId,
                get_camera_metadata_tag_name(tag), frameNumber);
        return false;
    } else {
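        // Dispatch on sizeof(Src) to read the matching member of the
        // metadata entry's data union.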
        switch (sizeof(Src)) {
            case sizeof(uint8_t):
                *value = static_cast<T>(entry.data.u8[0]);
                break;
            case sizeof(int32_t):
                *value = static_cast<T>(entry.data.i32[0]);
                break;
            default:
                ALOGE("%s: Camera %d: Unsupported source",
                        __FUNCTION__, cameraId);
                return false;
        }
    }
    return true;
}


void FrameProcessor::callbackFaceDetection(sp<Camera2Client> client,
                                     const camera_frame_metadata &metadata) {

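    // The camera1 dataCallback interface takes a non-const metadata pointer,
    // so cast away constness here; the callback is not expected to modify it.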
    camera_frame_metadata *metadata_ptr =
        const_cast<camera_frame_metadata*>(&metadata);

    /**
     * Filter out repeated 0-face callbacks,
     * but not when the last frame was >0
     */
    if (metadata.number_of_faces != 0 ||
        mLastFrameNumberOfFaces != metadata.number_of_faces) {

        Camera2Client::SharedCameraCallbacks::Lock
            l(client->mSharedCameraCallbacks);
        if (l.mRemoteCallback != NULL) {
            l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
                                            NULL,
                                            metadata_ptr);
        }
    }

    mLastFrameNumberOfFaces = metadata.number_of_faces;
}

} // namespace camera2
} // namespace android