Home | History | Annotate | Download | only in its
      1 /*
      2  * Copyright (C) 2013 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package com.android.camera2.its;
     18 
     19 import android.app.Service;
     20 import android.content.Context;
     21 import android.content.Intent;
     22 import android.graphics.ImageFormat;
     23 import android.hardware.camera2.CameraAccessException;
     24 import android.hardware.camera2.CameraCharacteristics;
     25 import android.hardware.camera2.CameraDevice;
     26 import android.hardware.camera2.CameraManager;
     27 import android.hardware.camera2.CaptureFailure;
     28 import android.hardware.camera2.CaptureRequest;
     29 import android.hardware.camera2.CaptureResult;
     30 import android.hardware.camera2.Rational;
     31 import android.media.Image;
     32 import android.media.ImageReader;
     33 import android.net.Uri;
     34 import android.os.ConditionVariable;
     35 import android.os.Handler;
     36 import android.os.HandlerThread;
     37 import android.os.IBinder;
     38 import android.os.Message;
     39 import android.util.Log;
     40 import android.view.Surface;
     41 
     42 import com.android.ex.camera2.blocking.BlockingCameraManager;
     43 import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
     44 import com.android.ex.camera2.blocking.BlockingStateListener;
     45 
     46 import org.json.JSONObject;
     47 
     48 import java.io.BufferedReader;
     49 import java.io.BufferedWriter;
     50 import java.io.IOException;
     51 import java.io.InputStreamReader;
     52 import java.io.OutputStreamWriter;
     53 import java.io.PrintWriter;
     54 import java.math.BigInteger;
     55 import java.net.ServerSocket;
     56 import java.net.Socket;
     57 import java.net.SocketTimeoutException;
     58 import java.nio.ByteBuffer;
     59 import java.nio.charset.Charset;
     60 import java.security.MessageDigest;
     61 import java.util.ArrayList;
     62 import java.util.Arrays;
     63 import java.util.List;
     64 import java.util.concurrent.BlockingQueue;
     65 import java.util.concurrent.CountDownLatch;
     66 import java.util.concurrent.LinkedBlockingDeque;
     67 import java.util.concurrent.TimeUnit;
     68 
     69 public class ItsService extends Service {
    public static final String TAG = ItsService.class.getSimpleName();

    // Timeouts, in seconds.
    public static final int TIMEOUT_CAPTURE = 10;
    public static final int TIMEOUT_3A = 10;

    // State transition timeouts, in ms.
    private static final long TIMEOUT_IDLE_MS = 2000;
    private static final long TIMEOUT_STATE_MS = 500;

    // Max number of Images that may be held out of the capture ImageReader at once.
    private static final int MAX_CONCURRENT_READER_BUFFERS = 8;

    // TCP port the host-side ITS script connects to (see SocketRunnable).
    public static final int SERVERPORT = 6000;

    // JSON keys used in commands received over the socket.
    public static final String REGION_KEY = "regions";
    public static final String REGION_AE_KEY = "ae";
    public static final String REGION_AWB_KEY = "awb";
    public static final String REGION_AF_KEY = "af";
    public static final String TRIGGER_KEY = "triggers";
    public static final String TRIGGER_AE_KEY = "ae";
    public static final String TRIGGER_AF_KEY = "af";

    // Camera state: manager, worker thread, opened device, and its static metadata.
    private CameraManager mCameraManager = null;
    private HandlerThread mCameraThread = null;
    private BlockingCameraManager mBlockingCameraManager = null;
    private BlockingStateListener mCameraListener = null;
    private CameraDevice mCamera = null;
    private ImageReader mCaptureReader = null;
    private CameraCharacteristics mCameraCharacteristics = null;

    // Handler threads: one for saving captured images, one for capture results.
    private HandlerThread mSaveThread;
    private Handler mSaveHandler;
    private HandlerThread mResultThread;
    private Handler mResultHandler;

    // Socket server state; volatile because these are touched from several threads.
    private volatile ServerSocket mSocket = null;
    private volatile SocketRunnable mSocketRunnableObj = null;
    private volatile Thread mSocketThread = null;
    private volatile Thread mSocketWriteRunnable = null;
    private volatile boolean mSocketThreadExitFlag = false;
    private volatile BlockingQueue<ByteBuffer> mSocketWriteQueue = new LinkedBlockingDeque<ByteBuffer>();
    private final Object mSocketWriteLock = new Object();

    // 3A convergence state, written from capture-result callbacks and polled by do3A.
    // mInterlock3A gates do3A to a single outstanding 3A request at a time.
    private volatile ConditionVariable mInterlock3A = new ConditionVariable(true);
    private volatile boolean mIssuedRequest3A = false;
    private volatile boolean mConvergedAE = false;
    private volatile boolean mConvergedAF = false;
    private volatile boolean mConvergedAWB = false;

    // Counts down as capture callbacks arrive for a doCapture request sequence.
    private CountDownLatch mCaptureCallbackLatch;
    120 
    // Callback invoked (on the save-thread handler) when a captured Image is available.
    public interface CaptureListener {
        void onCaptureAvailable(Image capture);
    }

    // Convenience base type for capture-result callbacks passed to CameraDevice.capture().
    public abstract class CaptureResultListener extends CameraDevice.CaptureListener {}
    126 
    @Override
    public IBinder onBind(Intent intent) {
        // Binding is not supported; clients interact with this service over the
        // TCP socket instead.
        return null;
    }
    131 
    @Override
    public void onCreate() {
        // Opens the first reported camera device and starts the worker threads plus
        // the TCP socket server that services commands from the host-side ITS script.
        // Failures are logged, leaving the service up but non-functional.
        try {
            // Get handle to camera manager.
            mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
            if (mCameraManager == null) {
                throw new ItsException("Failed to connect to camera manager");
            }
            mBlockingCameraManager = new BlockingCameraManager(mCameraManager);
            mCameraListener = new BlockingStateListener();

            // Open the camera device, and get its properties.
            String[] devices;
            try {
                devices = mCameraManager.getCameraIdList();
                if (devices == null || devices.length == 0) {
                    throw new ItsException("No camera devices");
                }
            } catch (CameraAccessException e) {
                throw new ItsException("Failed to get device ID list", e);
            }

            // The camera's own callbacks run on this dedicated handler thread.
            mCameraThread = new HandlerThread("ItsCameraThread");
            try {
                mCameraThread.start();
                Handler cameraHandler = new Handler(mCameraThread.getLooper());

                // TODO: Add support for specifying which device to open.
                mCamera = mBlockingCameraManager.openCamera(devices[0], mCameraListener,
                        cameraHandler);
                mCameraCharacteristics = mCameraManager.getCameraCharacteristics(devices[0]);
            } catch (CameraAccessException e) {
                throw new ItsException("Failed to open camera", e);
            } catch (BlockingOpenException e) {
                throw new ItsException("Failed to open camera (after blocking)", e);
            }

            // Create a thread to receive images and save them.
            mSaveThread = new HandlerThread("SaveThread");
            mSaveThread.start();
            mSaveHandler = new Handler(mSaveThread.getLooper());

            // Create a thread to receive capture results and process them
            mResultThread = new HandlerThread("ResultThread");
            mResultThread.start();
            mResultHandler = new Handler(mResultThread.getLooper());

            // Create a thread to process commands, listening on a TCP socket.
            mSocketRunnableObj = new SocketRunnable();
            mSocketThread = new Thread(mSocketRunnableObj);
            mSocketThread.start();
        } catch (ItsException e) {
            Log.e(TAG, "Service failed to start: ", e);
        }
    }
    187 
    188     @Override
    189     public void onDestroy() {
    190         try {
    191             mSocketThreadExitFlag = true;
    192             if (mSaveThread != null) {
    193                 mSaveThread.quit();
    194                 mSaveThread = null;
    195             }
    196             if (mCameraThread != null) {
    197                 mCameraThread.quitSafely();
    198                 mCameraThread = null;
    199             }
    200             try {
    201                 mCamera.close();
    202             } catch (Exception e) {
    203                 throw new ItsException("Failed to close device");
    204             }
    205         } catch (ItsException e) {
    206             Log.e(TAG, "Script failed: ", e);
    207         }
    208     }
    209 
    210     @Override
    211     public int onStartCommand(Intent intent, int flags, int startId) {
    212         return START_STICKY;
    213     }
    214 
    215     class SocketWriteRunnable implements Runnable {
    216 
    217         // Use a separate thread to service a queue of objects to be written to the socket,
    218         // writing each sequentially in order. This is needed since different handler functions
    219         // (called on different threads) will need to send data back to the host script.
    220 
    221         public Socket mOpenSocket = null;
    222 
    223         public SocketWriteRunnable(Socket openSocket) {
    224             mOpenSocket = openSocket;
    225         }
    226 
    227         public void run() {
    228             Log.i(TAG, "Socket writer thread starting");
    229             while (true) {
    230                 try {
    231                     ByteBuffer b = mSocketWriteQueue.take();
    232                     //Log.i(TAG, String.format("Writing to socket: %d bytes", b.capacity()));
    233                     if (b.hasArray()) {
    234                         mOpenSocket.getOutputStream().write(b.array());
    235                     } else {
    236                         byte[] barray = new byte[b.capacity()];
    237                         b.get(barray);
    238                         mOpenSocket.getOutputStream().write(barray);
    239                     }
    240                     mOpenSocket.getOutputStream().flush();
    241                 } catch (IOException e) {
    242                     Log.e(TAG, "Error writing to socket");
    243                     break;
    244                 } catch (java.lang.InterruptedException e) {
    245                     Log.e(TAG, "Error writing to socket (interrupted)");
    246                     break;
    247                 }
    248             }
    249             Log.i(TAG, "Socket writer thread terminated");
    250         }
    251     }
    252 
    253     class SocketRunnable implements Runnable {
    254 
    255         // Format of sent messages (over the socket):
    256         // * Serialized JSON object on a single line (newline-terminated)
    257         // * For byte buffers, the binary data then follows
    258         //
    259         // Format of received messages (from the socket):
    260         // * Serialized JSON object on a single line (newline-terminated)
    261 
    262         private Socket mOpenSocket = null;
    263         private SocketWriteRunnable mSocketWriteRunnable = null;
    264 
    265         public void run() {
    266             Log.i(TAG, "Socket thread starting");
    267             try {
    268                 mSocket = new ServerSocket(SERVERPORT);
    269             } catch (IOException e) {
    270                 Log.e(TAG, "Failed to create socket");
    271             }
    272             try {
    273                 Log.i(TAG, "Waiting for client to connect to socket");
    274                 mOpenSocket = mSocket.accept();
    275                 if (mOpenSocket == null) {
    276                     Log.e(TAG, "Socket connection error");
    277                     return;
    278                 }
    279                 Log.i(TAG, "Socket connected");
    280             } catch (IOException e) {
    281                 Log.e(TAG, "Socket open error: " + e);
    282                 return;
    283             }
    284             mSocketThread = new Thread(new SocketWriteRunnable(mOpenSocket));
    285             mSocketThread.start();
    286             while (!mSocketThreadExitFlag) {
    287                 try {
    288                     BufferedReader input = new BufferedReader(
    289                             new InputStreamReader(mOpenSocket.getInputStream()));
    290                     if (input == null) {
    291                         Log.e(TAG, "Failed to get socket input stream");
    292                         break;
    293                     }
    294                     String line = input.readLine();
    295                     if (line == null) {
    296                         Log.e(TAG, "Failed to read socket line");
    297                         break;
    298                     }
    299                     processSocketCommand(line);
    300                 } catch (IOException e) {
    301                     Log.e(TAG, "Socket read error: " + e);
    302                     break;
    303                 } catch (ItsException e) {
    304                     Log.e(TAG, "Script error: " + e);
    305                     break;
    306                 }
    307             }
    308             Log.i(TAG, "Socket server loop exited");
    309             try {
    310                 if (mOpenSocket != null) {
    311                     mOpenSocket.close();
    312                     mOpenSocket = null;
    313                 }
    314             } catch (java.io.IOException e) {
    315                 Log.w(TAG, "Exception closing socket");
    316             }
    317             try {
    318                 if (mSocket != null) {
    319                     mSocket.close();
    320                     mSocket = null;
    321                 }
    322             } catch (java.io.IOException e) {
    323                 Log.w(TAG, "Exception closing socket");
    324             }
    325             Log.i(TAG, "Socket server thread exited");
    326         }
    327 
    328         public void processSocketCommand(String cmd)
    329                 throws ItsException {
    330             // Each command is a serialized JSON object.
    331             try {
    332                 JSONObject cmdObj = new JSONObject(cmd);
    333                 if ("getCameraProperties".equals(cmdObj.getString("cmdName"))) {
    334                     doGetProps();
    335                 }
    336                 else if ("do3A".equals(cmdObj.getString("cmdName"))) {
    337                     do3A(cmdObj);
    338                 }
    339                 else if ("doCapture".equals(cmdObj.getString("cmdName"))) {
    340                     doCapture(cmdObj);
    341                 }
    342                 else {
    343                     throw new ItsException("Unknown command: " + cmd);
    344                 }
    345             } catch (org.json.JSONException e) {
    346                 Log.e(TAG, "Invalid command: ", e);
    347             }
    348         }
    349 
    350         public void sendResponse(String tag, String str, JSONObject obj, ByteBuffer bbuf)
    351                 throws ItsException {
    352             try {
    353                 JSONObject jsonObj = new JSONObject();
    354                 jsonObj.put("tag", tag);
    355                 if (str != null) {
    356                     jsonObj.put("strValue", str);
    357                 }
    358                 if (obj != null) {
    359                     jsonObj.put("objValue", obj);
    360                 }
    361                 if (bbuf != null) {
    362                     jsonObj.put("bufValueSize", bbuf.capacity());
    363                 }
    364                 ByteBuffer bstr = ByteBuffer.wrap(
    365                         (jsonObj.toString()+"\n").getBytes(Charset.defaultCharset()));
    366                 synchronized(mSocketWriteLock) {
    367                     if (bstr != null) {
    368                         mSocketWriteQueue.put(bstr);
    369                     }
    370                     if (bbuf != null) {
    371                         mSocketWriteQueue.put(bbuf);
    372                     }
    373                 }
    374             } catch (org.json.JSONException e) {
    375                 throw new ItsException("JSON error: ", e);
    376             } catch (java.lang.InterruptedException e) {
    377                 throw new ItsException("Socket error: ", e);
    378             }
    379         }
    380 
    381         public void sendResponse(String tag, String str)
    382                 throws ItsException {
    383             sendResponse(tag, str, null, null);
    384         }
    385 
    386         public void sendResponse(String tag, JSONObject obj)
    387                 throws ItsException {
    388             sendResponse(tag, null, obj, null);
    389         }
    390 
    391         public void sendResponse(String tag, ByteBuffer bbuf)
    392                 throws ItsException {
    393             sendResponse(tag, null, null, bbuf);
    394         }
    395 
    396         public void sendResponse(CameraCharacteristics props)
    397                 throws ItsException {
    398             try {
    399                 JSONObject jsonObj = new JSONObject();
    400                 jsonObj.put("cameraProperties", ItsSerializer.serialize(props));
    401                 sendResponse("cameraProperties", null, jsonObj, null);
    402             } catch (org.json.JSONException e) {
    403                 throw new ItsException("JSON error: ", e);
    404             }
    405         }
    406 
    407         public void sendResponse(CameraCharacteristics props,
    408                                  CaptureRequest request,
    409                                  CaptureResult result)
    410                 throws ItsException {
    411             try {
    412                 JSONObject jsonObj = new JSONObject();
    413                 jsonObj.put("cameraProperties", ItsSerializer.serialize(props));
    414                 jsonObj.put("captureRequest", ItsSerializer.serialize(request));
    415                 jsonObj.put("captureResult", ItsSerializer.serialize(result));
    416                 jsonObj.put("width", mCaptureReader.getWidth());
    417                 jsonObj.put("height", mCaptureReader.getHeight());
    418                 sendResponse("captureResults", null, jsonObj, null);
    419             } catch (org.json.JSONException e) {
    420                 throw new ItsException("JSON error: ", e);
    421             }
    422         }
    423     }
    424 
    425     public ImageReader.OnImageAvailableListener
    426             createAvailableListener(final CaptureListener listener) {
    427         return new ImageReader.OnImageAvailableListener() {
    428             @Override
    429             public void onImageAvailable(ImageReader reader) {
    430                 Image i = null;
    431                 try {
    432                     i = reader.acquireNextImage();
    433                     listener.onCaptureAvailable(i);
    434                 } finally {
    435                     if (i != null) {
    436                         i.close();
    437                     }
    438                 }
    439             }
    440         };
    441     }
    442 
    443     private ImageReader.OnImageAvailableListener
    444             createAvailableListenerDropper(final CaptureListener listener) {
    445         return new ImageReader.OnImageAvailableListener() {
    446             @Override
    447             public void onImageAvailable(ImageReader reader) {
    448                 Image i = reader.acquireNextImage();
    449                 i.close();
    450             }
    451         };
    452     }
    453 
    private void doGetProps() throws ItsException {
        // Replies to "getCameraProperties" with the opened camera's static metadata.
        mSocketRunnableObj.sendResponse(mCameraCharacteristics);
    }
    457 
    458     private void prepareCaptureReader(int width, int height, int format) {
    459         if (mCaptureReader == null
    460                 || mCaptureReader.getWidth() != width
    461                 || mCaptureReader.getHeight() != height
    462                 || mCaptureReader.getImageFormat() != format) {
    463             if (mCaptureReader != null) {
    464                 mCaptureReader.close();
    465             }
    466             mCaptureReader = ImageReader.newInstance(width, height, format,
    467                     MAX_CONCURRENT_READER_BUFFERS);
    468         }
    469     }
    470 
    // Handles the "do3A" command: repeatedly issues preview-template capture
    // requests until auto-exposure, auto-white-balance, and (optionally) auto-focus
    // converge, as reported by the capture-result callbacks that set the
    // mConverged* flags. Always sends a "3aDone" response, even on failure.
    private void do3A(JSONObject params) throws ItsException {
        try {
            // Start a 3A action, and wait for it to converge.
            // Get the converged values for each "A", and package into JSON result for caller.

            // 3A happens on full-res frames.
            android.hardware.camera2.Size sizes[] = mCameraCharacteristics.get(
                    CameraCharacteristics.SCALER_AVAILABLE_JPEG_SIZES);
            int width = sizes[0].getWidth();
            int height = sizes[0].getHeight();
            int format = ImageFormat.YUV_420_888;

            // Reconfigure the output surface and wait for the device to settle.
            prepareCaptureReader(width, height, format);
            List<Surface> outputSurfaces = new ArrayList<Surface>(1);
            outputSurfaces.add(mCaptureReader.getSurface());
            mCamera.configureOutputs(outputSurfaces);
            mCameraListener.waitForState(BlockingStateListener.STATE_BUSY,
                    TIMEOUT_STATE_MS);
            mCameraListener.waitForState(BlockingStateListener.STATE_IDLE,
                    TIMEOUT_IDLE_MS);

            // Add a listener that just recycles buffers; they aren't saved anywhere.
            ImageReader.OnImageAvailableListener readerListener =
                    createAvailableListenerDropper(mCaptureListener);
            mCaptureReader.setOnImageAvailableListener(readerListener, mSaveHandler);

            // Get the user-specified regions for AE, AWB, AF.
            // Note that the user specifies normalized [x,y,w,h], which is converted below
            // to an [x0,y0,x1,y1] region in sensor coords. The capture request region
            // also has a fifth "weight" element: [x0,y0,x1,y1,w].
            // Default: the whole frame, weight 1.
            int[] regionAE = new int[]{0,0,width-1,height-1,1};
            int[] regionAF = new int[]{0,0,width-1,height-1,1};
            int[] regionAWB = new int[]{0,0,width-1,height-1,1};
            if (params.has(REGION_KEY)) {
                JSONObject regions = params.getJSONObject(REGION_KEY);
                if (regions.has(REGION_AE_KEY)) {
                    int[] r = ItsUtils.getJsonRectFromArray(
                            regions.getJSONArray(REGION_AE_KEY), true, width, height);
                    regionAE = new int[]{r[0],r[1],r[0]+r[2]-1,r[1]+r[3]-1,1};
                }
                if (regions.has(REGION_AF_KEY)) {
                    int[] r = ItsUtils.getJsonRectFromArray(
                            regions.getJSONArray(REGION_AF_KEY), true, width, height);
                    regionAF = new int[]{r[0],r[1],r[0]+r[2]-1,r[1]+r[3]-1,1};
                }
                if (regions.has(REGION_AWB_KEY)) {
                    int[] r = ItsUtils.getJsonRectFromArray(
                            regions.getJSONArray(REGION_AWB_KEY), true, width, height);
                    regionAWB = new int[]{r[0],r[1],r[0]+r[2]-1,r[1]+r[3]-1,1};
                }
            }
            Log.i(TAG, "AE region: " + Arrays.toString(regionAE));
            Log.i(TAG, "AF region: " + Arrays.toString(regionAF));
            Log.i(TAG, "AWB region: " + Arrays.toString(regionAWB));

            // By default, AE and AF both get triggered, but the user can optionally override this.
            boolean doAE = true;
            boolean doAF = true;
            if (params.has(TRIGGER_KEY)) {
                JSONObject triggers = params.getJSONObject(TRIGGER_KEY);
                if (triggers.has(TRIGGER_AE_KEY)) {
                    doAE = triggers.getBoolean(TRIGGER_AE_KEY);
                }
                if (triggers.has(TRIGGER_AF_KEY)) {
                    doAF = triggers.getBoolean(TRIGGER_AF_KEY);
                }
            }

            // Reset the shared 3A state before starting the convergence loop.
            mInterlock3A.open();
            mIssuedRequest3A = false;
            mConvergedAE = false;
            mConvergedAWB = false;
            mConvergedAF = false;
            long tstart = System.currentTimeMillis();
            boolean triggeredAE = false;
            boolean triggeredAF = false;

            // Keep issuing capture requests until 3A has converged.
            while (true) {

                // Block until can take the next 3A frame. Only want one outstanding frame
                // at a time, to simplify the logic here.
                if (!mInterlock3A.block(TIMEOUT_3A * 1000) ||
                        System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException("3A failed to converge (timeout)");
                }
                mInterlock3A.close();

                // If not converged yet, issue another capture request.
                if ((doAE && !mConvergedAE) || !mConvergedAWB || (doAF && !mConvergedAF)) {

                    // Baseline capture request for 3A.
                    CaptureRequest.Builder req = mCamera.createCaptureRequest(
                            CameraDevice.TEMPLATE_PREVIEW);
                    req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                    req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                            CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
                    req.set(CaptureRequest.CONTROL_AE_MODE,
                            CaptureRequest.CONTROL_AE_MODE_ON);
                    req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
                    req.set(CaptureRequest.CONTROL_AE_LOCK, false);
                    req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
                    req.set(CaptureRequest.CONTROL_AF_MODE,
                            CaptureRequest.CONTROL_AF_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
                    req.set(CaptureRequest.CONTROL_AWB_MODE,
                            CaptureRequest.CONTROL_AWB_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
                    req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);

                    // Trigger AE first.
                    if (doAE && !triggeredAE) {
                        Log.i(TAG, "Triggering AE");
                        req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                        triggeredAE = true;
                    }

                    // After AE has converged, trigger AF.
                    if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) {
                        Log.i(TAG, "Triggering AF");
                        req.set(CaptureRequest.CONTROL_AF_TRIGGER,
                                CaptureRequest.CONTROL_AF_TRIGGER_START);
                        triggeredAF = true;
                    }

                    req.addTarget(mCaptureReader.getSurface());

                    // mCaptureResultListener (defined elsewhere in this file) re-opens
                    // mInterlock3A and updates the mConverged* flags on each result.
                    mIssuedRequest3A = true;
                    mCamera.capture(req.build(), mCaptureResultListener, mResultHandler);
                } else {
                    Log.i(TAG, "3A converged");
                    break;
                }
            }
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        } finally {
            // Always tell the host the 3A sequence has finished (or failed).
            mSocketRunnableObj.sendResponse("3aDone", "");
        }
    }
    615 
    616     private void doCapture(JSONObject params) throws ItsException {
    617         try {
    618             // Parse the JSON to get the list of capture requests.
    619             List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(
    620                     mCamera, params);
    621 
    622             // Set the output surface and listeners.
    623             try {
    624                 // Default:
    625                 // Capture full-frame images. Use the reported JPEG size rather than the sensor
    626                 // size since this is more likely to be the unscaled size; the crop from sensor
    627                 // size is probably for the ISP (e.g. demosaicking) rather than the encoder.
    628                 android.hardware.camera2.Size sizes[] = mCameraCharacteristics.get(
    629                         CameraCharacteristics.SCALER_AVAILABLE_JPEG_SIZES);
    630                 int width = sizes[0].getWidth();
    631                 int height = sizes[0].getHeight();
    632                 int format = ImageFormat.YUV_420_888;
    633 
    634                 JSONObject jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
    635                 if (jsonOutputSpecs != null) {
    636                     // Use the user's JSON capture spec.
    637                     int width2 = jsonOutputSpecs.optInt("width");
    638                     int height2 = jsonOutputSpecs.optInt("height");
    639                     if (width2 > 0) {
    640                         width = width2;
    641                     }
    642                     if (height2 > 0) {
    643                         height = height2;
    644                     }
    645                     String sformat = jsonOutputSpecs.optString("format");
    646                     if ("yuv".equals(sformat)) {
    647                         format = ImageFormat.YUV_420_888;
    648                     } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
    649                         format = ImageFormat.JPEG;
    650                     } else if ("".equals(sformat)) {
    651                         // No format specified.
    652                     } else {
    653                         throw new ItsException("Unsupported format: " + sformat);
    654                     }
    655                 }
    656 
    657                 prepareCaptureReader(width, height, format);
    658                 List<Surface> outputSurfaces = new ArrayList<Surface>(1);
    659                 outputSurfaces.add(mCaptureReader.getSurface());
    660                 mCamera.configureOutputs(outputSurfaces);
    661                 mCameraListener.waitForState(BlockingStateListener.STATE_BUSY,
    662                         TIMEOUT_STATE_MS);
    663                 mCameraListener.waitForState(BlockingStateListener.STATE_IDLE,
    664                         TIMEOUT_IDLE_MS);
    665 
    666                 ImageReader.OnImageAvailableListener readerListener =
    667                         createAvailableListener(mCaptureListener);
    668                 mCaptureReader.setOnImageAvailableListener(readerListener, mSaveHandler);
    669 
    670                 // Plan for how many callbacks need to be received throughout the duration of this
    671                 // sequence of capture requests.
    672                 int numCaptures = requests.size();
    673                 mCaptureCallbackLatch = new CountDownLatch(
    674                         numCaptures * ItsUtils.getCallbacksPerCapture(format));
    675 
    676             } catch (CameraAccessException e) {
    677                 throw new ItsException("Error configuring outputs", e);
    678             }
    679 
    680             // Initiate the captures.
    681             for (int i = 0; i < requests.size(); i++) {
    682                 CaptureRequest.Builder req = requests.get(i);
    683                 req.addTarget(mCaptureReader.getSurface());
    684                 mCamera.capture(req.build(), mCaptureResultListener, mResultHandler);
    685             }
    686 
    687             // Make sure all callbacks have been hit (wait until captures are done).
    688             try {
    689                 if (!mCaptureCallbackLatch.await(TIMEOUT_CAPTURE, TimeUnit.SECONDS)) {
    690                     throw new ItsException(
    691                             "Timeout hit, but all callbacks not received");
    692                 }
    693             } catch (InterruptedException e) {
    694                 throw new ItsException("Interrupted: ", e);
    695             }
    696 
    697         } catch (android.hardware.camera2.CameraAccessException e) {
    698             throw new ItsException("Access error: ", e);
    699         }
    700     }
    701 
    702     private final CaptureListener mCaptureListener = new CaptureListener() {
    703         @Override
    704         public void onCaptureAvailable(Image capture) {
    705             try {
    706                 int format = capture.getFormat();
    707                 String extFileName = null;
    708                 if (format == ImageFormat.JPEG) {
    709                     ByteBuffer buf = capture.getPlanes()[0].getBuffer();
    710                     Log.i(TAG, "Received JPEG capture");
    711                     mSocketRunnableObj.sendResponse("jpegImage", buf);
    712                 } else if (format == ImageFormat.YUV_420_888) {
    713                     byte[] img = ItsUtils.getDataFromImage(capture);
    714                     ByteBuffer buf = ByteBuffer.wrap(img);
    715                     Log.i(TAG, "Received YUV capture");
    716                     mSocketRunnableObj.sendResponse("yuvImage", buf);
    717                 } else {
    718                     throw new ItsException("Unsupported image format: " + format);
    719                 }
    720                 mCaptureCallbackLatch.countDown();
    721             } catch (ItsException e) {
    722                 Log.e(TAG, "Script error: " + e);
    723                 mSocketThreadExitFlag = true;
    724             }
    725         }
    726     };
    727 
    728     private static float r2f(Rational r) {
    729         return (float)r.getNumerator() / (float)r.getDenominator();
    730     }
    731 
    // Listens for per-request capture lifecycle events from the camera device.
    // Completed results are logged, used to track 3A (AE/AF/AWB) convergence,
    // and then either used to release the 3A interlock (for requests issued by
    // the 3A routine) or reported back over the socket (for normal captures).
    private final CaptureResultListener mCaptureResultListener = new CaptureResultListener() {
        @Override
        public void onCaptureStarted(CameraDevice camera, CaptureRequest request, long timestamp) {
            // No-op: only completion and failure are of interest here.
        }

        @Override
        public void onCaptureCompleted(CameraDevice camera, CaptureRequest request,
                CaptureResult result) {
            try {
                // Currently result has all 0 values.
                if (request == null || result == null) {
                    throw new ItsException("Request/result is invalid");
                }

                // Build a single-line logcat summary of the 3A states and the
                // exposure parameters of this result.
                StringBuilder logMsg = new StringBuilder();
                logMsg.append(String.format(
                        "Capt result: AE=%d, AF=%d, AWB=%d, sens=%d, exp=%.1fms, dur=%.1fms, ",
                        result.get(CaptureResult.CONTROL_AE_STATE),
                        result.get(CaptureResult.CONTROL_AF_STATE),
                        result.get(CaptureResult.CONTROL_AWB_STATE),
                        result.get(CaptureResult.SENSOR_SENSITIVITY),
                        // NOTE(review): intValue() truncates the stored value before
                        // the ns->ms conversion; could overflow for very long
                        // exposures/frame durations -- confirm this is acceptable.
                        result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue() / 1000000.0f,
                        result.get(CaptureResult.SENSOR_FRAME_DURATION).intValue() / 1000000.0f));
                // AWB gains and color transform may be absent; log empty brackets then.
                if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) {
                    logMsg.append(String.format(
                            "gains=[%.1f, %.1f, %.1f, %.1f], ",
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[0],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[1],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[2],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[3]));
                } else {
                    logMsg.append("gains=[], ");
                }
                if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
                    logMsg.append(String.format(
                            "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ",
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[0]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[1]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[2]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[3]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[4]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[5]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[6]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[7]),
                             r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[8])));
                } else {
                    logMsg.append("xform=[], ");
                }
                logMsg.append(String.format(
                        "foc=%.1f",
                        result.get(CaptureResult.LENS_FOCUS_DISTANCE)));
                Log.i(TAG, logMsg.toString());

                // Update the 3A convergence flags; each is only touched when the
                // corresponding state key is present in this result.
                if (result.get(CaptureResult.CONTROL_AE_STATE) != null) {
                    mConvergedAE = result.get(CaptureResult.CONTROL_AE_STATE) ==
                                              CaptureResult.CONTROL_AE_STATE_CONVERGED;
                }
                if (result.get(CaptureResult.CONTROL_AF_STATE) != null) {
                    mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE) ==
                                              CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED;
                }
                if (result.get(CaptureResult.CONTROL_AWB_STATE) != null) {
                    mConvergedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) ==
                                               CaptureResult.CONTROL_AWB_STATE_CONVERGED;
                }

                // Report converged 3A values back over the socket: sensitivity
                // and exposure for AE, focus distance for AF, gains+transform
                // for AWB (only when both AWB keys are present).
                if (mConvergedAE) {
                    mSocketRunnableObj.sendResponse("aeResult", String.format("%d %d",
                            result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(),
                            result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue()
                            ));
                }

                if (mConvergedAF) {
                    mSocketRunnableObj.sendResponse("afResult", String.format("%f",
                            result.get(CaptureResult.LENS_FOCUS_DISTANCE)
                            ));
                }

                if (mConvergedAWB && result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null
                        && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
                    mSocketRunnableObj.sendResponse("awbResult", String.format(
                            "%f %f %f %f %f %f %f %f %f %f %f %f %f",
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[0],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[1],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[2],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[3],
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[0]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[1]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[2]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[3]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[4]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[5]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[6]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[7]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[8])
                            ));
                }

                // A result from a 3A-issued request releases the 3A interlock so
                // the next 3A request can be sent; a normal capture's result is
                // serialized to the socket and counted against the latch.
                if (mIssuedRequest3A) {
                    mIssuedRequest3A = false;
                    mInterlock3A.open();
                } else {
                    mSocketRunnableObj.sendResponse(mCameraCharacteristics, request, result);
                    mCaptureCallbackLatch.countDown();
                }
            } catch (ItsException e) {
                Log.e(TAG, "Script error: " + e);
                mSocketThreadExitFlag = true;
            } catch (Exception e) {
                Log.e(TAG, "Script error: " + e);
                mSocketThreadExitFlag = true;
            }
        }

        @Override
        public void onCaptureFailed(CameraDevice camera, CaptureRequest request,
                CaptureFailure failure) {
            // Count the latch down even on failure so doCapture() does not hang
            // waiting for a callback that will never arrive.
            mCaptureCallbackLatch.countDown();
            Log.e(TAG, "Script error: capture failed");
        }
    };
    854 }
    855