/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-FrameProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <utils/Trace.h>

#include "FrameProcessor.h"
#include "../CameraDeviceBase.h"
#include "../Camera2Client.h"

namespace android {
namespace camera2 {

FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
                               wp<Camera2Client> client) :
    ProFrameProcessor(device),
    mClient(client),
    mLastFrameNumberOfFaces(0) {

    // Only synthesize 3A notifications when the device won't deliver them
    // itself; guard against the device already having been destroyed.
    sp<CameraDeviceBase> d = device.promote();
    mSynthesize3ANotify = (d != NULL) && !(d->willNotify3A());
}

FrameProcessor::~FrameProcessor() {
}

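/**
 * Handle a single completed capture result: deliver face detection metadata
 * to the client, synthesize 3A notifications if the device does not send
 * them itself, then hand the frame to the base class for listener dispatch.
 */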
bool FrameProcessor::processSingleFrame(CameraMetadata &frame,
                                        const sp<CameraDeviceBase> &device) {

    sp<Camera2Client> client = mClient.promote();
    if (!client.get()) {
        return false;
    }

    if (processFaceDetect(frame, client) != OK) {
        return false;
    }

    if (mSynthesize3ANotify) {
        // Ignoring missing fields for now
        process3aState(frame, client);
    }

    return ProFrameProcessor::processSingleFrame(frame, device);
}

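/**
 * Extract face detection results from the frame metadata, convert face
 * rectangles and landmarks from active-array coordinates to the normalized
 * coordinate system used by the camera1 callback API, and deliver them to
 * the client through the preview-metadata callback.
 */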
status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {
    status_t res = BAD_VALUE;
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;

    {
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;

    if (enableFaceDetect &&
        faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {

        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
        if (entry.count == 0) {
            // No faces this frame
            /* warning: locks SharedCameraCallbacks */
            callbackFaceDetection(client, metadata);
            return OK;
        }
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

        if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
            entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATISTICS_FACE_IDS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }

        faces.setCapacity(metadata.number_of_faces);

        size_t maxFaces = metadata.number_of_faces;
        for (size_t i = 0; i < maxFaces; i++) {
            // A score of 0 marks an invalid entry; drop it from the callback.
            if (faceScores[i] == 0) {
                metadata.number_of_faces--;
                continue;
            }
            if (faceScores[i] > 100) {
                ALOGW("%s: Face index %zu with out of range score %d",
                        __FUNCTION__, i, faceScores[i]);
            }

            camera_face_t face;

            face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
            face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
            face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
            face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);

            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                face.id = faceIds[i];
                face.left_eye[0] =
                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
                face.left_eye[1] =
                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
                face.right_eye[0] =
                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
                face.right_eye[1] =
                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
                face.mouth[0] =
                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
                face.mouth[1] =
                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
            } else {
                // Simple face detect mode: IDs and landmarks are unavailable,
                // so report the documented "unsupported" sentinel values.
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        metadata.faces = faces.editArray();
    }

    /* warning: locks SharedCameraCallbacks */
    callbackFaceDetection(client, metadata);

    return OK;
}

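/**
 * Compare the AE, AF, and AWB states reported in the frame metadata against
 * the states from the previous frame, and send the corresponding 3A
 * notifications to the client for any state that changed.
 */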
status_t FrameProcessor::process3aState(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {

    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    int cameraId = client->getCameraId();

    // The frame number is only used for logging; guard against a missing entry.
    entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
    int32_t frameNumber = (entry.count > 0) ? entry.data.i32[0] : -1;

    // Get 3A states from result metadata
    bool gotAllStates = true;

    AlgState new3aState;

    entry = frame.find(ANDROID_CONTROL_AE_STATE);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: No AE state provided by HAL for frame %d!",
                __FUNCTION__, cameraId, frameNumber);
        gotAllStates = false;
    } else {
        new3aState.aeState =
                static_cast<camera_metadata_enum_android_control_ae_state>(
                    entry.data.u8[0]);
    }

    entry = frame.find(ANDROID_CONTROL_AF_STATE);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: No AF state provided by HAL for frame %d!",
                __FUNCTION__, cameraId, frameNumber);
        gotAllStates = false;
    } else {
        new3aState.afState =
                static_cast<camera_metadata_enum_android_control_af_state>(
                    entry.data.u8[0]);
    }

    entry = frame.find(ANDROID_CONTROL_AWB_STATE);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: No AWB state provided by HAL for frame %d!",
                __FUNCTION__, cameraId, frameNumber);
        gotAllStates = false;
    } else {
        new3aState.awbState =
                static_cast<camera_metadata_enum_android_control_awb_state>(
                    entry.data.u8[0]);
    }

    int32_t afTriggerId = 0;
    entry = frame.find(ANDROID_CONTROL_AF_TRIGGER_ID);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: No AF trigger ID provided by HAL for frame %d!",
                __FUNCTION__, cameraId, frameNumber);
        gotAllStates = false;
    } else {
        afTriggerId = entry.data.i32[0];
    }

    int32_t aeTriggerId = 0;
    entry = frame.find(ANDROID_CONTROL_AE_PRECAPTURE_ID);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: No AE precapture trigger ID provided by HAL"
                " for frame %d!",
                __FUNCTION__, cameraId, frameNumber);
        gotAllStates = false;
    } else {
        aeTriggerId = entry.data.i32[0];
    }

    if (!gotAllStates) return BAD_VALUE;

    // Only notify the client for states that actually changed since the last
    // processed frame.
    if (new3aState.aeState != m3aState.aeState) {
        ALOGV("%s: AE state changed from 0x%x to 0x%x",
                __FUNCTION__, m3aState.aeState, new3aState.aeState);
        client->notifyAutoExposure(new3aState.aeState, aeTriggerId);
    }
    if (new3aState.afState != m3aState.afState) {
        ALOGV("%s: AF state changed from 0x%x to 0x%x",
                __FUNCTION__, m3aState.afState, new3aState.afState);
        client->notifyAutoFocus(new3aState.afState, afTriggerId);
    }
    if (new3aState.awbState != m3aState.awbState) {
        ALOGV("%s: AWB state changed from 0x%x to 0x%x",
                __FUNCTION__, m3aState.awbState, new3aState.awbState);
        client->notifyAutoWhitebalance(new3aState.awbState, aeTriggerId);
    }

    m3aState = new3aState;

    return OK;
}

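/**
 * Send the face detection metadata to the remote client callback as a
 * CAMERA_MSG_PREVIEW_METADATA message, suppressing repeated callbacks when
 * consecutive frames contain zero faces.
 */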
void FrameProcessor::callbackFaceDetection(sp<Camera2Client> client,
                                     const camera_frame_metadata &metadata) {

    // The remote callback interface takes a non-const pointer, but the
    // metadata is not modified by the call.
    camera_frame_metadata *metadata_ptr =
        const_cast<camera_frame_metadata*>(&metadata);

    /**
     * Filter out repeated 0-face callbacks, but still deliver the first
     * 0-face frame after one that contained faces.
     */
    if (metadata.number_of_faces != 0 ||
        mLastFrameNumberOfFaces != metadata.number_of_faces) {

        Camera2Client::SharedCameraCallbacks::Lock
            l(client->mSharedCameraCallbacks);
        if (l.mRemoteCallback != NULL) {
            l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
                                            NULL,
                                            metadata_ptr);
        }
    }

    mLastFrameNumberOfFaces = metadata.number_of_faces;
}

} // namespace camera2
} // namespace android