/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.camera2.its;

import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.Rational;
import android.media.Image;
import android.media.ImageReader;
import android.net.Uri;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.os.Message;
import android.util.Log;
import android.view.Surface;

import com.android.ex.camera2.blocking.BlockingCameraManager;
import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
import com.android.ex.camera2.blocking.BlockingStateListener;

import org.json.JSONObject;

import java.io.File;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class ItsService extends Service {
    public static final String TAG = ItsService.class.getSimpleName();
    public static final String PYTAG = "CAMERA-ITS-PY";
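
    // Lines logged with PYTAG form a simple "### ..." text protocol, presumably
    // parsed from logcat by a host-side test script (the consumer is not part of
    // this file). Markers emitted below:
    //   "### RECV"               - an intent was received
    //   "### DONE" / "### FAIL"  - a command completed or failed
    //   "### SIZE <w> <h>"       - output dimensions for a capture command
    //   "### CAPT <i> of <n>"    - progress through a capture sequence
    //   "### FILE <path>"        - an image or metadata file was written
    //   "### 3A-E/3A-F/3A-W ..." - converged AE / AF / AWB values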

    // Supported intents
    public static final String ACTION_CAPTURE = "com.android.camera2.its.CAPTURE";
    public static final String ACTION_3A = "com.android.camera2.its.3A";
    public static final String ACTION_GETPROPS = "com.android.camera2.its.GETPROPS";
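    // Illustrative usage (assumed, not defined in this file): the service is
    // started with one of the actions above plus a file:// data URI pointing at a
    // JSON parameter file, e.g. from a host machine over adb with something like:
    //   adb shell am startservice -a com.android.camera2.its.CAPTURE \
    //       -d file:///sdcard/its/capture.json com.android.camera2.its/.ItsService
    // (see onStartCommand(); the exact path and host tooling are assumptions).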
    private static final int MESSAGE_CAPTURE = 1;
    private static final int MESSAGE_3A = 2;
    private static final int MESSAGE_GETPROPS = 3;

    // Timeouts, in seconds.
    public static final int TIMEOUT_CAPTURE = 10;
    public static final int TIMEOUT_3A = 10;

    // State transition timeouts, in ms.
    private static final long TIMEOUT_IDLE_MS = 2000;
    private static final long TIMEOUT_STATE_MS = 500;

    private static final int MAX_CONCURRENT_READER_BUFFERS = 8;

    public static final String REGION_KEY = "regions";
    public static final String REGION_AE_KEY = "ae";
    public static final String REGION_AWB_KEY = "awb";
    public static final String REGION_AF_KEY = "af";
    public static final String TRIGGER_KEY = "triggers";
    public static final String TRIGGER_AE_KEY = "ae";
    public static final String TRIGGER_AF_KEY = "af";
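    // Illustrative 3A parameter file using the keys above (example values assumed;
    // regions are normalized [x,y,w,h] arrays, see do3A()):
    //   {
    //     "regions":  { "ae": [0, 0, 1, 1], "af": [0.25, 0.25, 0.5, 0.5] },
    //     "triggers": { "ae": true, "af": false }
    //   }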

    private CameraManager mCameraManager = null;
    private HandlerThread mCameraThread = null;
    private BlockingCameraManager mBlockingCameraManager = null;
    private BlockingStateListener mCameraListener = null;
    private CameraDevice mCamera = null;
    private ImageReader mCaptureReader = null;
    private CameraCharacteristics mCameraCharacteristics = null;

    private HandlerThread mCommandThread;
    private Handler mCommandHandler;
    private HandlerThread mSaveThread;
    private Handler mSaveHandler;
    private HandlerThread mResultThread;
    private Handler mResultHandler;

    private ConditionVariable mInterlock3A = new ConditionVariable(true);
    private volatile boolean mIssuedRequest3A = false;
    private volatile boolean mConvergedAE = false;
    private volatile boolean mConvergedAF = false;
    private volatile boolean mConvergedAWB = false;

    private CountDownLatch mCaptureCallbackLatch;

    public interface CaptureListener {
        void onCaptureAvailable(Image capture);
    }

    public abstract class CaptureResultListener extends CameraDevice.CaptureListener {}

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onCreate() {

        try {
            // Get handle to camera manager.
            mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
            if (mCameraManager == null) {
                throw new ItsException("Failed to connect to camera manager");
            }
            mBlockingCameraManager = new BlockingCameraManager(mCameraManager);
            mCameraListener = new BlockingStateListener();

            // Open the camera device, and get its properties.
            String[] devices;
            try {
                devices = mCameraManager.getCameraIdList();
                if (devices == null || devices.length == 0) {
                    throw new ItsException("No camera devices");
                }
            } catch (CameraAccessException e) {
                throw new ItsException("Failed to get device ID list", e);
            }

            mCameraThread = new HandlerThread("ItsCameraThread");
            try {
                mCameraThread.start();
                Handler cameraHandler = new Handler(mCameraThread.getLooper());

                // TODO: Add support for specifying which device to open.
                mCamera = mBlockingCameraManager.openCamera(devices[0], mCameraListener,
                        cameraHandler);
                mCameraCharacteristics = mCameraManager.getCameraCharacteristics(devices[0]);
            } catch (CameraAccessException e) {
                throw new ItsException("Failed to open camera", e);
            } catch (BlockingOpenException e) {
                throw new ItsException("Failed to open camera (after blocking)", e);
            }

            // Create a thread to receive images and save them.
            mSaveThread = new HandlerThread("SaveThread");
            mSaveThread.start();
            mSaveHandler = new Handler(mSaveThread.getLooper());

            // Create a thread to receive capture results and process them.
            mResultThread = new HandlerThread("ResultThread");
            mResultThread.start();
            mResultHandler = new Handler(mResultThread.getLooper());

            // Create a thread to process commands.
            mCommandThread = new HandlerThread("CaptureThread");
            mCommandThread.start();
            mCommandHandler = new Handler(mCommandThread.getLooper(), new Handler.Callback() {
                @Override
                public boolean handleMessage(Message msg) {
                    try {
                        switch (msg.what) {
                            case MESSAGE_CAPTURE:
                                doCapture((Uri) msg.obj);
                                break;
                            case MESSAGE_3A:
                                do3A((Uri) msg.obj);
                                break;
                            case MESSAGE_GETPROPS:
                                doGetProps();
                                break;
                            default:
                                throw new ItsException("Unknown message type");
                        }
                        Log.i(PYTAG, "### DONE");
                        return true;
                    }
                    catch (ItsException e) {
                        Log.e(TAG, "Script failed: ", e);
                        Log.e(PYTAG, "### FAIL");
                        return true;
                    }
                }
            });
        } catch (ItsException e) {
            Log.e(TAG, "Script failed: ", e);
            Log.e(PYTAG, "### FAIL");
        }
    }

    @Override
    public void onDestroy() {
        try {
            if (mCommandThread != null) {
                mCommandThread.quit();
                mCommandThread = null;
            }
            if (mSaveThread != null) {
                mSaveThread.quit();
                mSaveThread = null;
            }
            if (mResultThread != null) {
                mResultThread.quit();
                mResultThread = null;
            }
            if (mCameraThread != null) {
                mCameraThread.quitSafely();
                mCameraThread = null;
            }
            try {
                if (mCamera != null) {
                    mCamera.close();
                    mCamera = null;
                }
            } catch (Exception e) {
                throw new ItsException("Failed to close device", e);
            }
        } catch (ItsException e) {
            Log.e(TAG, "Script failed: ", e);
            Log.e(PYTAG, "### FAIL");
        }
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        try {
            Log.i(PYTAG, "### RECV");
            String action = intent.getAction();
            if (ACTION_CAPTURE.equals(action)) {
                Uri uri = intent.getData();
                Message m = mCommandHandler.obtainMessage(MESSAGE_CAPTURE, uri);
                mCommandHandler.sendMessage(m);
            } else if (ACTION_3A.equals(action)) {
                Uri uri = intent.getData();
                Message m = mCommandHandler.obtainMessage(MESSAGE_3A, uri);
                mCommandHandler.sendMessage(m);
            } else if (ACTION_GETPROPS.equals(action)) {
                Uri uri = intent.getData();
                Message m = mCommandHandler.obtainMessage(MESSAGE_GETPROPS, uri);
                mCommandHandler.sendMessage(m);
            } else {
                throw new ItsException("Unhandled intent: " + intent.toString());
            }
        } catch (ItsException e) {
            Log.e(TAG, "Script failed: ", e);
            Log.e(PYTAG, "### FAIL");
        }
        return START_STICKY;
    }

    private ImageReader.OnImageAvailableListener
            createAvailableListener(final CaptureListener listener) {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = null;
                try {
                    i = reader.acquireNextImage();
                    listener.onCaptureAvailable(i);
                } finally {
                    if (i != null) {
                        i.close();
                    }
                }
            }
        };
    }

    private ImageReader.OnImageAvailableListener
            createAvailableListenerDropper(final CaptureListener listener) {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = reader.acquireNextImage();
                i.close();
            }
        };
    }

    private void doGetProps() throws ItsException {
        String fileName = ItsUtils.getMetadataFileName(0);
        File mdFile = ItsUtils.getOutputFile(ItsService.this, fileName);
        ItsUtils.storeCameraCharacteristics(mCameraCharacteristics, mdFile);
        Log.i(PYTAG,
              String.format("### FILE %s",
                            ItsUtils.getExternallyVisiblePath(ItsService.this, mdFile.toString())));
    }

    private void prepareCaptureReader(int width, int height, int format) {
        if (mCaptureReader == null
                || mCaptureReader.getWidth() != width
                || mCaptureReader.getHeight() != height
                || mCaptureReader.getImageFormat() != format) {
            if (mCaptureReader != null) {
                mCaptureReader.close();
            }
            mCaptureReader = ImageReader.newInstance(width, height, format,
                    MAX_CONCURRENT_READER_BUFFERS);
        }
    }

    private void do3A(Uri uri) throws ItsException {
        try {
            if (uri == null || !uri.toString().endsWith(".json")) {
                throw new ItsException("Invalid URI: " + uri);
            }

            // Start a 3A action, and wait for it to converge.
            // Get the converged values for each "A", and package into JSON result for caller.

            // 3A happens on full-res frames.
            android.hardware.camera2.Size sizes[] = mCameraCharacteristics.get(
                    CameraCharacteristics.SCALER_AVAILABLE_JPEG_SIZES);
            int width = sizes[0].getWidth();
            int height = sizes[0].getHeight();
            int format = ImageFormat.YUV_420_888;

            prepareCaptureReader(width, height, format);
            List<Surface> outputSurfaces = new ArrayList<Surface>(1);
            outputSurfaces.add(mCaptureReader.getSurface());
            mCamera.configureOutputs(outputSurfaces);
            mCameraListener.waitForState(BlockingStateListener.STATE_BUSY,
                    TIMEOUT_STATE_MS);
            mCameraListener.waitForState(BlockingStateListener.STATE_IDLE,
                    TIMEOUT_IDLE_MS);

            // Add a listener that just recycles buffers; they aren't saved anywhere.
            ImageReader.OnImageAvailableListener readerListener =
                    createAvailableListenerDropper(mCaptureListener);
            mCaptureReader.setOnImageAvailableListener(readerListener, mSaveHandler);

            // Get the user-specified regions for AE, AWB, AF.
            // Note that the user specifies normalized [x,y,w,h], which is converted below
            // to an [x0,y0,x1,y1] region in sensor coords. The capture request region
            // also has a fifth "weight" element: [x0,y0,x1,y1,w].
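            // Worked example (illustrative, assuming getJsonRectFromArray scales the
            // normalized values by the frame dimensions): on a 640x480 frame, a
            // normalized region [0.25, 0.25, 0.5, 0.5] becomes the pixel rect
            // [160, 120, 320, 240], which is repacked below as
            // [160, 120, 160+320-1, 120+240-1, 1] = [160, 120, 479, 359, 1].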
            int[] regionAE = new int[]{0,0,width-1,height-1,1};
            int[] regionAF = new int[]{0,0,width-1,height-1,1};
            int[] regionAWB = new int[]{0,0,width-1,height-1,1};
            JSONObject params = ItsUtils.loadJsonFile(uri);
            if (params.has(REGION_KEY)) {
                JSONObject regions = params.getJSONObject(REGION_KEY);
                if (regions.has(REGION_AE_KEY)) {
                    int[] r = ItsUtils.getJsonRectFromArray(
                            regions.getJSONArray(REGION_AE_KEY), true, width, height);
                    regionAE = new int[]{r[0],r[1],r[0]+r[2]-1,r[1]+r[3]-1,1};
                }
                if (regions.has(REGION_AF_KEY)) {
                    int[] r = ItsUtils.getJsonRectFromArray(
                            regions.getJSONArray(REGION_AF_KEY), true, width, height);
                    regionAF = new int[]{r[0],r[1],r[0]+r[2]-1,r[1]+r[3]-1,1};
                }
                if (regions.has(REGION_AWB_KEY)) {
                    int[] r = ItsUtils.getJsonRectFromArray(
                            regions.getJSONArray(REGION_AWB_KEY), true, width, height);
                    regionAWB = new int[]{r[0],r[1],r[0]+r[2]-1,r[1]+r[3]-1,1};
                }
            }
            Log.i(TAG, "AE region: " + Arrays.toString(regionAE));
            Log.i(TAG, "AF region: " + Arrays.toString(regionAF));
            Log.i(TAG, "AWB region: " + Arrays.toString(regionAWB));

            // By default, AE and AF both get triggered, but the user can optionally override this.
            boolean doAE = true;
            boolean doAF = true;
            if (params.has(TRIGGER_KEY)) {
                JSONObject triggers = params.getJSONObject(TRIGGER_KEY);
                if (triggers.has(TRIGGER_AE_KEY)) {
                    doAE = triggers.getBoolean(TRIGGER_AE_KEY);
                }
                if (triggers.has(TRIGGER_AF_KEY)) {
                    doAF = triggers.getBoolean(TRIGGER_AF_KEY);
                }
            }

            mInterlock3A.open();
            mIssuedRequest3A = false;
            mConvergedAE = false;
            mConvergedAWB = false;
            mConvergedAF = false;
            long tstart = System.currentTimeMillis();
            boolean triggeredAE = false;
            boolean triggeredAF = false;

            // Keep issuing capture requests until 3A has converged.
            // First do AE, then do AF and AWB together.
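            // Each iteration closes mInterlock3A, issues one preview-template
            // request, and waits for onCaptureCompleted (which updates
            // mConvergedAE/AF/AWB and reopens the interlock) before looping again;
            // the loop exits once all requested states report converged, or throws
            // after TIMEOUT_3A seconds.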
            while (true) {

                // Block until can take the next 3A frame. Only want one outstanding frame
                // at a time, to simplify the logic here.
                if (!mInterlock3A.block(TIMEOUT_3A * 1000) ||
                        System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException("3A failed to converge (timeout)");
                }
                mInterlock3A.close();

                // If not converged yet, issue another capture request.
                if ((doAE && !mConvergedAE) || !mConvergedAWB || (doAF && !mConvergedAF)) {

                    // Baseline capture request for 3A.
                    CaptureRequest.Builder req = mCamera.createCaptureRequest(
                            CameraDevice.TEMPLATE_PREVIEW);
                    req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                    req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                            CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
                    req.set(CaptureRequest.CONTROL_AE_MODE,
                            CaptureRequest.CONTROL_AE_MODE_ON);
                    req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
                    req.set(CaptureRequest.CONTROL_AE_LOCK, false);
                    req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
                    req.set(CaptureRequest.CONTROL_AF_MODE,
                            CaptureRequest.CONTROL_AF_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
                    req.set(CaptureRequest.CONTROL_AWB_MODE,
                            CaptureRequest.CONTROL_AWB_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
                    req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);

                    // Trigger AE first.
                    if (doAE && !triggeredAE) {
                        Log.i(TAG, "Triggering AE");
                        req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                        triggeredAE = true;
                    }

                    // After AE has converged, trigger AF.
                    if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) {
                        Log.i(TAG, "Triggering AF");
                        req.set(CaptureRequest.CONTROL_AF_TRIGGER,
                                CaptureRequest.CONTROL_AF_TRIGGER_START);
                        triggeredAF = true;
                    }

                    req.addTarget(mCaptureReader.getSurface());

                    mIssuedRequest3A = true;
                    mCamera.capture(req.build(), mCaptureResultListener, mResultHandler);
                } else {
                    Log.i(TAG, "3A converged");
                    break;
                }
            }
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        }
    }

    private void doCapture(Uri uri) throws ItsException {
        try {
            if (uri == null || !uri.toString().endsWith(".json")) {
                throw new ItsException("Invalid URI: " + uri);
            }

            // Parse the JSON to get the list of capture requests.
            List<CaptureRequest.Builder> requests = ItsUtils.loadRequestList(mCamera, uri);

            // Set the output surface and listeners.
            try {
                // Default:
                // Capture full-frame images. Use the reported JPEG size rather than the sensor
                // size since this is more likely to be the unscaled size; the crop from sensor
                // size is probably for the ISP (e.g. demosaicking) rather than the encoder.
                android.hardware.camera2.Size sizes[] = mCameraCharacteristics.get(
                        CameraCharacteristics.SCALER_AVAILABLE_JPEG_SIZES);
                int width = sizes[0].getWidth();
                int height = sizes[0].getHeight();
                int format = ImageFormat.YUV_420_888;

                JSONObject jsonOutputSpecs = ItsUtils.getOutputSpecs(uri);
                if (jsonOutputSpecs != null) {
                    // Use the user's JSON capture spec.
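                    // Illustrative spec (example values assumed):
                    //   {"width": 640, "height": 480, "format": "yuv"}
                    // Missing or zero width/height keep the defaults above; an empty
                    // "format" string keeps the default YUV_420_888.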
                    int width2 = jsonOutputSpecs.optInt("width");
                    int height2 = jsonOutputSpecs.optInt("height");
                    if (width2 > 0) {
                        width = width2;
                    }
                    if (height2 > 0) {
                        height = height2;
                    }
                    String sformat = jsonOutputSpecs.optString("format");
                    if ("yuv".equals(sformat)) {
                        format = ImageFormat.YUV_420_888;
                    } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
                        format = ImageFormat.JPEG;
                    } else if ("".equals(sformat)) {
                        // No format specified.
                    } else {
                        throw new ItsException("Unsupported format: " + sformat);
                    }
                }

                Log.i(PYTAG, String.format("### SIZE %d %d", width, height));

                prepareCaptureReader(width, height, format);
                List<Surface> outputSurfaces = new ArrayList<Surface>(1);
                outputSurfaces.add(mCaptureReader.getSurface());
                mCamera.configureOutputs(outputSurfaces);
                mCameraListener.waitForState(BlockingStateListener.STATE_BUSY,
                        TIMEOUT_STATE_MS);
                mCameraListener.waitForState(BlockingStateListener.STATE_IDLE,
                        TIMEOUT_IDLE_MS);

                ImageReader.OnImageAvailableListener readerListener =
                        createAvailableListener(mCaptureListener);
                mCaptureReader.setOnImageAvailableListener(readerListener, mSaveHandler);

                // Plan for how many callbacks need to be received throughout the duration of this
                // sequence of capture requests.
                int numCaptures = requests.size();
                mCaptureCallbackLatch = new CountDownLatch(
                        numCaptures * ItsUtils.getCallbacksPerCapture(format));

            } catch (CameraAccessException e) {
                throw new ItsException("Error configuring outputs", e);
            }

            // Initiate the captures.
            for (int i = 0; i < requests.size(); i++) {
                CaptureRequest.Builder req = requests.get(i);
                Log.i(PYTAG, String.format("### CAPT %d of %d", i+1, requests.size()));
                req.addTarget(mCaptureReader.getSurface());
                mCamera.capture(req.build(), mCaptureResultListener, mResultHandler);
            }

            // Make sure all callbacks have been hit (wait until captures are done).
            try {
                if (!mCaptureCallbackLatch.await(TIMEOUT_CAPTURE, TimeUnit.SECONDS)) {
                    throw new ItsException(
                            "Timeout hit, but all callbacks not received");
                }
            } catch (InterruptedException e) {
                throw new ItsException("Interrupted: ", e);
            }

        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        }
    }

    private final CaptureListener mCaptureListener = new CaptureListener() {
        @Override
        public void onCaptureAvailable(Image capture) {
            try {
                int format = capture.getFormat();
                String extFileName = null;
                if (format == ImageFormat.JPEG) {
                    String fileName = ItsUtils.getJpegFileName(capture.getTimestamp());
                    ByteBuffer buf = capture.getPlanes()[0].getBuffer();
                    extFileName = ItsUtils.writeImageToFile(ItsService.this, buf, fileName);
                } else if (format == ImageFormat.YUV_420_888) {
                    String fileName = ItsUtils.getYuvFileName(capture.getTimestamp());
                    byte[] img = ItsUtils.getDataFromImage(capture);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    extFileName = ItsUtils.writeImageToFile(ItsService.this, buf, fileName);
                } else {
                    throw new ItsException("Unsupported image format: " + format);
                }
                Log.i(PYTAG, String.format("### FILE %s", extFileName));
                mCaptureCallbackLatch.countDown();
            } catch (ItsException e) {
                Log.e(TAG, "Script error: ", e);
                Log.e(PYTAG, "### FAIL");
            }
        }
    };

    private static float r2f(Rational r) {
        return (float)r.getNumerator() / (float)r.getDenominator();
    }

    private final CaptureResultListener mCaptureResultListener = new CaptureResultListener() {
        @Override
        public void onCaptureStarted(CameraDevice camera, CaptureRequest request, long timestamp) {
        }

        @Override
        public void onCaptureCompleted(CameraDevice camera, CaptureRequest request,
                CaptureResult result) {
            try {
                // Currently result has all 0 values.
                if (request == null || result == null) {
                    throw new ItsException("Request/result is invalid");
                }

                StringBuilder logMsg = new StringBuilder();
                logMsg.append(String.format(
                        "Capt result: AE=%d, AF=%d, AWB=%d, sens=%d, exp=%.1fms, dur=%.1fms, ",
                        result.get(CaptureResult.CONTROL_AE_STATE),
                        result.get(CaptureResult.CONTROL_AF_STATE),
                        result.get(CaptureResult.CONTROL_AWB_STATE),
                        result.get(CaptureResult.SENSOR_SENSITIVITY),
                        result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue() / 1000000.0f,
                        result.get(CaptureResult.SENSOR_FRAME_DURATION).intValue() / 1000000.0f));
                if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) {
                    logMsg.append(String.format(
                            "gains=[%.1f, %.1f, %.1f, %.1f], ",
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[0],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[1],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[2],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[3]));
                } else {
                    logMsg.append("gains=[], ");
                }
                if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
                    logMsg.append(String.format(
                            "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ",
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[0]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[1]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[2]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[3]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[4]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[5]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[6]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[7]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[8])));
                } else {
                    logMsg.append("xform=[], ");
                }
                logMsg.append(String.format(
                        "foc=%.1f",
                        result.get(CaptureResult.LENS_FOCUS_DISTANCE)));
                Log.i(TAG, logMsg.toString());

                mConvergedAE = result.get(CaptureResult.CONTROL_AE_STATE) ==
                                          CaptureResult.CONTROL_AE_STATE_CONVERGED;
                mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE) ==
                                          CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED;
                mConvergedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) ==
                                           CaptureResult.CONTROL_AWB_STATE_CONVERGED;

                if (mConvergedAE) {
                    Log.i(PYTAG, String.format(
                            "### 3A-E %d %d",
                            result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(),
                            result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue()
                            ));
                }

                if (mConvergedAF) {
                    Log.i(PYTAG, String.format(
                            "### 3A-F %f",
                            result.get(CaptureResult.LENS_FOCUS_DISTANCE)
                            ));
                }

                if (mConvergedAWB && result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null
                        && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
                    Log.i(PYTAG, String.format(
                            "### 3A-W %f %f %f %f %f %f %f %f %f %f %f %f %f",
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[0],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[1],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[2],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[3],
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[0]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[1]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[2]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[3]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[4]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[5]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[6]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[7]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[8])
                            ));
                }

                if (mIssuedRequest3A) {
                    mIssuedRequest3A = false;
                    mInterlock3A.open();
                } else {
                    String fileName = ItsUtils.getMetadataFileName(
                            result.get(CaptureResult.SENSOR_TIMESTAMP));
                    File mdFile = ItsUtils.getOutputFile(ItsService.this, fileName);
                    ItsUtils.storeResults(mCameraCharacteristics, request, result, mdFile);
                    mCaptureCallbackLatch.countDown();
                }
            } catch (ItsException e) {
                Log.e(TAG, "Script error: ", e);
                Log.e(PYTAG, "### FAIL");
            } catch (Exception e) {
                Log.e(TAG, "Script error: ", e);
                Log.e(PYTAG, "### FAIL");
            }
        }

        @Override
        public void onCaptureFailed(CameraDevice camera, CaptureRequest request,
                CaptureFailure failure) {
            if (mCaptureCallbackLatch != null) {
                mCaptureCallbackLatch.countDown();
            }
            Log.e(TAG, "Script error: capture failed");
            Log.e(PYTAG, "### FAIL");
        }
    };

}