/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-FrameProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <utils/Trace.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/FrameProcessor.h"

namespace android {
namespace camera2 {

FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
                               sp<Camera2Client> client) :
    FrameProcessorBase(device),
    mClient(client),
    mLastFrameNumberOfFaces(0),
    mLast3AFrameNumber(-1) {

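    // Determine whether this processor must synthesize 3A (AE/AF/AWB)
    // notify callbacks from result metadata; some HALs emit them natively.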
    sp<CameraDeviceBase> d = device.promote();
    // promote() may fail if the device has already gone away
    mSynthesize3ANotify = (d != NULL) && !(d->willNotify3A());

    {
        SharedParameters::Lock l(client->getParameters());
        mUsePartialQuirk = l.mParameters.quirks.partialResults;

        // Initialize starting 3A state
        m3aState.afTriggerId = l.mParameters.afTriggerCounter;
        m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
        // Check if lens is fixed-focus
        if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
            m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
        }
    }
}

FrameProcessor::~FrameProcessor() {
}
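
/*
 * Usage sketch (illustrative only, not part of this file): an API1 client
 * such as Camera2Client typically creates and starts this processor roughly
 * as follows; the thread name below is made up.
 *
 *   sp<FrameProcessor> processor = new FrameProcessor(device, client);
 *   processor->run("CameraFrameProcessor"); // FrameProcessorBase is a Thread
 *   ...
 *   processor->requestExit();               // stop the thread on teardown
 */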

bool FrameProcessor::processSingleFrame(CameraMetadata &frame,
                                        const sp<CameraDeviceBase> &device) {

    sp<Camera2Client> client = mClient.promote();
    if (!client.get()) {
        return false;
    }

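    // With the partial-result quirk, the HAL may deliver a capture result in
    // several pieces; partial pieces carry only a subset of the metadata, so
    // face-detect processing is skipped until the final (complete) result.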
    bool partialResult = false;
    if (mUsePartialQuirk) {
        camera_metadata_entry_t entry;
        entry = frame.find(ANDROID_QUIRKS_PARTIAL_RESULT);
        if (entry.count > 0 &&
                entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
            partialResult = true;
        }
    }

    if (!partialResult && processFaceDetect(frame, client) != OK) {
        return false;
    }

    if (mSynthesize3ANotify) {
        process3aState(frame, client);
    }

    return FrameProcessorBase::processSingleFrame(frame, device);
}

status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {
    status_t res = BAD_VALUE;
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;

    {
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;

    if (enableFaceDetect &&
        faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {

        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
        if (entry.count == 0) {
            // No faces this frame
            /* warning: locks SharedCameraCallbacks */
            callbackFaceDetection(client, metadata);
            return OK;
        }
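        // Each face rectangle is reported as four int32s:
        // (xmin, ymin, xmax, ymax) in active-array pixel coordinates.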
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

        if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
            entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATISTICS_FACE_IDS);

            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }

        faces.setCapacity(metadata.number_of_faces);

        size_t maxFaces = metadata.number_of_faces;
        for (size_t i = 0; i < maxFaces; i++) {
            if (faceScores[i] == 0) {
                metadata.number_of_faces--;
                continue;
            }
            if (faceScores[i] > 100) {
                ALOGW("%s: Face index %zu with out of range score %d",
                        __FUNCTION__, i, faceScores[i]);
            }

            camera_face_t face;

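            // arrayXToNormalized()/arrayYToNormalized() map active-array
            // pixel coordinates into the [-1000, 1000] range that the
            // camera1 (android.hardware.Camera.Face) callback API expects.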
            face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
            face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
            face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
            face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);

            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                face.id = faceIds[i];
                face.left_eye[0] =
                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
                face.left_eye[1] =
                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
                face.right_eye[0] =
                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
                face.right_eye[1] =
                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
                face.mouth[0] =
                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
                face.mouth[1] =
                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
            } else {
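                // Landmarks are unavailable in SIMPLE mode; -2000 lies
                // outside the valid [-1000, 1000] range and marks the
                // fields as unsupported for this frame.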
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        metadata.faces = faces.editArray();
    }

    /* warning: locks SharedCameraCallbacks */
    callbackFaceDetection(client, metadata);

    return OK;
}

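/*
 * Synthesize 3A (auto-exposure / auto-focus / auto-white-balance) notify
 * callbacks from result metadata, for HALs that do not emit them natively.
 * Only state transitions are forwarded to the client.
 */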
status_t FrameProcessor::process3aState(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {

    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    int cameraId = client->getCameraId();

    entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: Unable to read frame count from result",
                __FUNCTION__, cameraId);
        return BAD_VALUE;
    }
    int32_t frameNumber = entry.data.i32[0];

    // Don't send 3A notifications for the same frame number twice
    if (frameNumber <= mLast3AFrameNumber) {
        ALOGV("%s: Already sent 3A for frame number %d, skipping",
                __FUNCTION__, frameNumber);
        return OK;
    }

    mLast3AFrameNumber = frameNumber;

    // Get 3A states from result metadata
    bool gotAllStates = true;

    AlgState new3aState;

    // TODO: Also use AE mode, AE trigger ID

    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AF_MODE,
            &new3aState.afMode, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AWB_MODE,
            &new3aState.awbMode, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AE_STATE,
            &new3aState.aeState, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AF_STATE,
            &new3aState.afState, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(frame, ANDROID_CONTROL_AWB_STATE,
            &new3aState.awbState, frameNumber, cameraId);

    gotAllStates &= get3aResult<int32_t>(frame, ANDROID_CONTROL_AF_TRIGGER_ID,
            &new3aState.afTriggerId, frameNumber, cameraId);

    gotAllStates &= get3aResult<int32_t>(frame, ANDROID_CONTROL_AE_PRECAPTURE_ID,
            &new3aState.aeTriggerId, frameNumber, cameraId);

    if (!gotAllStates) return BAD_VALUE;

    if (new3aState.aeState != m3aState.aeState) {
        ALOGV("%s: Camera %d: AE state %d->%d",
                __FUNCTION__, cameraId,
                m3aState.aeState, new3aState.aeState);
        client->notifyAutoExposure(new3aState.aeState, new3aState.aeTriggerId);
    }

    if (new3aState.afState != m3aState.afState ||
        new3aState.afMode != m3aState.afMode ||
        new3aState.afTriggerId != m3aState.afTriggerId) {
        ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
                __FUNCTION__, cameraId,
                m3aState.afState, new3aState.afState,
                m3aState.afMode, new3aState.afMode,
                m3aState.afTriggerId, new3aState.afTriggerId);
        client->notifyAutoFocus(new3aState.afState, new3aState.afTriggerId);
    }
    if (new3aState.awbState != m3aState.awbState ||
        new3aState.awbMode != m3aState.awbMode) {
        ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
                __FUNCTION__, cameraId,
                m3aState.awbState, new3aState.awbState,
                m3aState.awbMode, new3aState.awbMode);
        client->notifyAutoWhitebalance(new3aState.awbState,
                new3aState.aeTriggerId);
    }

    m3aState = new3aState;

    return OK;
}

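/*
 * Read one 3A value out of the result metadata. Src selects which union
 * member of the metadata entry to read (dispatched on sizeof(Src)); T is
 * the destination type, deduced from 'value'. Returns false if the tag is
 * absent or the source type is unsupported.
 */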
template<typename Src, typename T>
bool FrameProcessor::get3aResult(const CameraMetadata& result, int32_t tag,
        T* value, int32_t frameNumber, int cameraId) {
    camera_metadata_ro_entry_t entry;
    if (value == NULL) {
        ALOGE("%s: Camera %d: Value to write to is NULL",
                __FUNCTION__, cameraId);
        return false;
    }

    entry = result.find(tag);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: No %s provided by HAL for frame %d!",
                __FUNCTION__, cameraId,
                get_camera_metadata_tag_name(tag), frameNumber);
        return false;
    } else {
        switch (sizeof(Src)) {
            case sizeof(uint8_t):
                *value = static_cast<T>(entry.data.u8[0]);
                break;
            case sizeof(int32_t):
                *value = static_cast<T>(entry.data.i32[0]);
                break;
            default:
                ALOGE("%s: Camera %d: Unsupported source",
                        __FUNCTION__, cameraId);
                return false;
        }
    }
    return true;
}


void FrameProcessor::callbackFaceDetection(sp<Camera2Client> client,
                                     const camera_frame_metadata &metadata) {

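    // The remote callback interface takes a non-const pointer but is not
    // expected to modify the metadata; cast away constness rather than copy.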
    camera_frame_metadata *metadata_ptr =
        const_cast<camera_frame_metadata*>(&metadata);

    /**
     * Filter out repeated zero-face callbacks, but always deliver the first
     * zero-face callback after a frame that had faces.
     */
    if (metadata.number_of_faces != 0 ||
        mLastFrameNumberOfFaces != metadata.number_of_faces) {

        Camera2Client::SharedCameraCallbacks::Lock
            l(client->mSharedCameraCallbacks);
        if (l.mRemoteCallback != NULL) {
            l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
                                            NULL,
                                            metadata_ptr);
        }
    }

    mLastFrameNumberOfFaces = metadata.number_of_faces;
}

} // namespace camera2
} // namespace android