1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package com.android.cts.verifier.camera.its; 18 19 import android.app.Notification; 20 import android.app.NotificationChannel; 21 import android.app.NotificationManager; 22 import android.app.Service; 23 import android.content.Context; 24 import android.content.Intent; 25 import android.graphics.ImageFormat; 26 import android.graphics.Rect; 27 import android.hardware.camera2.CameraCaptureSession; 28 import android.hardware.camera2.CameraAccessException; 29 import android.hardware.camera2.CameraCharacteristics; 30 import android.hardware.camera2.CameraDevice; 31 import android.hardware.camera2.CameraManager; 32 import android.hardware.camera2.CaptureFailure; 33 import android.hardware.camera2.CaptureRequest; 34 import android.hardware.camera2.CaptureResult; 35 import android.hardware.camera2.DngCreator; 36 import android.hardware.camera2.TotalCaptureResult; 37 import android.hardware.camera2.params.InputConfiguration; 38 import android.hardware.camera2.params.MeteringRectangle; 39 import android.hardware.camera2.params.OutputConfiguration; 40 import android.hardware.camera2.params.SessionConfiguration; 41 import android.hardware.Sensor; 42 import android.hardware.SensorEvent; 43 import android.hardware.SensorEventListener; 44 import android.hardware.SensorManager; 45 import android.media.Image; 46 import android.media.ImageReader; 47 import android.media.ImageWriter; 48 import android.media.Image.Plane; 49 import android.net.Uri; 50 import android.os.ConditionVariable; 51 import android.os.Handler; 52 import android.os.HandlerThread; 53 import android.os.IBinder; 54 import android.os.Message; 55 import android.os.SystemClock; 56 import android.os.Vibrator; 57 import android.util.Log; 58 import android.util.Rational; 59 import android.util.Size; 60 import android.util.SparseArray; 61 import android.view.Surface; 62 63 import com.android.ex.camera2.blocking.BlockingCameraManager; 64 import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException; 65 import com.android.ex.camera2.blocking.BlockingStateCallback; 66 import com.android.ex.camera2.blocking.BlockingSessionCallback; 67 68 import com.android.cts.verifier.camera.its.StatsImage; 69 import com.android.cts.verifier.R; 70 71 import org.json.JSONArray; 72 import org.json.JSONObject; 73 74 import java.io.BufferedReader; 75 import java.io.BufferedWriter; 76 import java.io.ByteArrayOutputStream; 77 import java.io.IOException; 78 import java.io.InputStreamReader; 79 import java.io.OutputStreamWriter; 80 import java.io.PrintWriter; 81 import java.math.BigInteger; 82 import java.net.ServerSocket; 83 import java.net.Socket; 84 import java.nio.ByteBuffer; 85 import java.nio.ByteOrder; 86 import java.nio.FloatBuffer; 87 import java.nio.charset.Charset; 88 import java.security.MessageDigest; 89 import java.util.ArrayList; 90 import java.util.Arrays; 91 import java.util.HashMap; 92 import java.util.LinkedList; 93 import java.util.List; 94 import java.util.Map; 95 import java.util.Set; 96 import java.util.concurrent.BlockingQueue; 97 import java.util.concurrent.CountDownLatch; 98 import java.util.concurrent.Executor; 99 import java.util.concurrent.LinkedBlockingDeque; 100 import java.util.concurrent.LinkedBlockingQueue; 101 import java.util.concurrent.Semaphore; 102 import java.util.concurrent.TimeUnit; 103 import java.util.concurrent.atomic.AtomicInteger; 104 105 public class ItsService extends Service implements SensorEventListener { 106 public static final String TAG = ItsService.class.getSimpleName(); 107 108 // Version number to keep host/server communication in sync 109 // This string must be in sync with python side device.py 110 // Updated when interface between script and ItsService is changed 111 private final String ITS_SERVICE_VERSION = "1.0"; 112 113 private final int SERVICE_NOTIFICATION_ID = 37; // random int that is unique within app 114 private NotificationChannel mChannel; 115 116 // Timeouts, in seconds. 117 private static final int TIMEOUT_CALLBACK = 20; 118 private static final int TIMEOUT_3A = 10; 119 120 // Time given for background requests to warm up pipeline 121 private static final long PIPELINE_WARMUP_TIME_MS = 2000; 122 123 // State transition timeouts, in ms. 124 private static final long TIMEOUT_IDLE_MS = 2000; 125 private static final long TIMEOUT_STATE_MS = 500; 126 private static final long TIMEOUT_SESSION_CLOSE = 3000; 127 128 // Timeout to wait for a capture result after the capture buffer has arrived, in ms. 129 private static final long TIMEOUT_CAP_RES = 2000; 130 131 private static final int MAX_CONCURRENT_READER_BUFFERS = 10; 132 133 // Supports at most RAW+YUV+JPEG, one surface each, plus optional background stream 134 private static final int MAX_NUM_OUTPUT_SURFACES = 4; 135 136 public static final int SERVERPORT = 6000; 137 138 public static final String REGION_KEY = "regions"; 139 public static final String REGION_AE_KEY = "ae"; 140 public static final String REGION_AWB_KEY = "awb"; 141 public static final String REGION_AF_KEY = "af"; 142 public static final String LOCK_AE_KEY = "aeLock"; 143 public static final String LOCK_AWB_KEY = "awbLock"; 144 public static final String TRIGGER_KEY = "triggers"; 145 public static final String PHYSICAL_ID_KEY = "physicalId"; 146 public static final String TRIGGER_AE_KEY = "ae"; 147 public static final String TRIGGER_AF_KEY = "af"; 148 public static final String VIB_PATTERN_KEY = "pattern"; 149 public static final String EVCOMP_KEY = "evComp"; 150 151 private CameraManager mCameraManager = null; 152 private HandlerThread mCameraThread = null; 153 private Handler mCameraHandler = null; 154 private BlockingCameraManager mBlockingCameraManager = null; 155 private BlockingStateCallback mCameraListener = null; 156 private CameraDevice mCamera = null; 157 private CameraCaptureSession mSession = null; 158 private ImageReader[] mOutputImageReaders = null; 159 private SparseArray<String> mPhysicalStreamMap = new SparseArray<String>(); 160 private ImageReader mInputImageReader = null; 161 private CameraCharacteristics mCameraCharacteristics = null; 162 private HashMap<String, CameraCharacteristics> mPhysicalCameraChars = 163 new HashMap<String, CameraCharacteristics>(); 164 private ItsUtils.ItsCameraIdList mItsCameraIdList = null; 165 166 private Vibrator mVibrator = null; 167 168 private HandlerThread mSaveThreads[] = new HandlerThread[MAX_NUM_OUTPUT_SURFACES]; 169 private Handler mSaveHandlers[] = new Handler[MAX_NUM_OUTPUT_SURFACES]; 170 private HandlerThread mResultThread = null; 171 private Handler mResultHandler = null; 172 173 private volatile boolean mThreadExitFlag = false; 174 175 private volatile ServerSocket mSocket = null; 176 private volatile SocketRunnable mSocketRunnableObj = null; 177 private Semaphore mSocketQueueQuota = null; 178 private int mMemoryQuota = -1; 179 private LinkedList<Integer> mInflightImageSizes = new LinkedList<>(); 180 private volatile BlockingQueue<ByteBuffer> mSocketWriteQueue = 181 new LinkedBlockingDeque<ByteBuffer>(); 182 private final Object mSocketWriteEnqueueLock = new Object(); 183 private final Object mSocketWriteDrainLock = new Object(); 184 185 private volatile BlockingQueue<Object[]> mSerializerQueue = 186 new LinkedBlockingDeque<Object[]>(); 187 188 private AtomicInteger mCountCallbacksRemaining = new AtomicInteger(); 189 private AtomicInteger mCountRawOrDng = new AtomicInteger(); 190 private AtomicInteger mCountRaw10 = new AtomicInteger(); 191 private AtomicInteger mCountRaw12 = new AtomicInteger(); 192 private AtomicInteger mCountJpg = new AtomicInteger(); 193 private AtomicInteger mCountYuv = new AtomicInteger(); 194 private AtomicInteger mCountCapRes = new AtomicInteger(); 195 private boolean mCaptureRawIsDng; 196 private boolean mCaptureRawIsStats; 197 private int mCaptureStatsGridWidth; 198 private int mCaptureStatsGridHeight; 199 private CaptureResult mCaptureResults[] = null; 200 201 private volatile ConditionVariable mInterlock3A = new ConditionVariable(true); 202 203 final Object m3AStateLock = new Object(); 204 private volatile boolean mConvergedAE = false; 205 private volatile boolean mConvergedAF = false; 206 private volatile boolean mConvergedAWB = false; 207 private volatile boolean mLockedAE = false; 208 private volatile boolean mLockedAWB = false; 209 private volatile boolean mNeedsLockedAE = false; 210 private volatile boolean mNeedsLockedAWB = false; 211 212 class MySensorEvent { 213 public Sensor sensor; 214 public int accuracy; 215 public long timestamp; 216 public float values[]; 217 } 218 219 // For capturing motion sensor traces. 220 private SensorManager mSensorManager = null; 221 private Sensor mAccelSensor = null; 222 private Sensor mMagSensor = null; 223 private Sensor mGyroSensor = null; 224 private volatile LinkedList<MySensorEvent> mEvents = null; 225 private volatile Object mEventLock = new Object(); 226 private volatile boolean mEventsEnabled = false; 227 private HandlerThread mSensorThread = null; 228 private Handler mSensorHandler = null; 229 230 private static final int SERIALIZER_SURFACES_ID = 2; 231 private static final int SERIALIZER_PHYSICAL_METADATA_ID = 3; 232 233 public interface CaptureCallback { 234 void onCaptureAvailable(Image capture, String physicalCameraId); 235 } 236 237 public abstract class CaptureResultListener extends CameraCaptureSession.CaptureCallback {} 238 239 @Override 240 public IBinder onBind(Intent intent) { 241 return null; 242 } 243 244 @Override 245 public void onCreate() { 246 try { 247 mThreadExitFlag = false; 248 249 // Get handle to camera manager. 250 mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE); 251 if (mCameraManager == null) { 252 throw new ItsException("Failed to connect to camera manager"); 253 } 254 mBlockingCameraManager = new BlockingCameraManager(mCameraManager); 255 mCameraListener = new BlockingStateCallback(); 256 257 // Register for motion events. 258 mEvents = new LinkedList<MySensorEvent>(); 259 mSensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE); 260 mAccelSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER); 261 mMagSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD); 262 mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE); 263 mSensorThread = new HandlerThread("SensorThread"); 264 mSensorThread.start(); 265 mSensorHandler = new Handler(mSensorThread.getLooper()); 266 mSensorManager.registerListener(this, mAccelSensor, 267 SensorManager.SENSOR_DELAY_NORMAL, mSensorHandler); 268 mSensorManager.registerListener(this, mMagSensor, 269 SensorManager.SENSOR_DELAY_NORMAL, mSensorHandler); 270 mSensorManager.registerListener(this, mGyroSensor, 271 /*200hz*/5000, mSensorHandler); 272 273 // Get a handle to the system vibrator. 274 mVibrator = (Vibrator)getSystemService(Context.VIBRATOR_SERVICE); 275 276 // Create threads to receive images and save them. 277 for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) { 278 mSaveThreads[i] = new HandlerThread("SaveThread" + i); 279 mSaveThreads[i].start(); 280 mSaveHandlers[i] = new Handler(mSaveThreads[i].getLooper()); 281 } 282 283 // Create a thread to handle object serialization. 284 (new Thread(new SerializerRunnable())).start();; 285 286 // Create a thread to receive capture results and process them. 287 mResultThread = new HandlerThread("ResultThread"); 288 mResultThread.start(); 289 mResultHandler = new Handler(mResultThread.getLooper()); 290 291 // Create a thread for the camera device. 292 mCameraThread = new HandlerThread("ItsCameraThread"); 293 mCameraThread.start(); 294 mCameraHandler = new Handler(mCameraThread.getLooper()); 295 296 // Create a thread to process commands, listening on a TCP socket. 297 mSocketRunnableObj = new SocketRunnable(); 298 (new Thread(mSocketRunnableObj)).start(); 299 } catch (ItsException e) { 300 Logt.e(TAG, "Service failed to start: ", e); 301 } 302 303 NotificationManager notificationManager = 304 (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); 305 mChannel = new NotificationChannel( 306 "ItsServiceChannel", "ItsService", NotificationManager.IMPORTANCE_LOW); 307 // Configure the notification channel. 308 mChannel.setDescription("ItsServiceChannel"); 309 mChannel.enableVibration(false); 310 notificationManager.createNotificationChannel(mChannel); 311 } 312 313 @Override 314 public int onStartCommand(Intent intent, int flags, int startId) { 315 try { 316 // Just log a message indicating that the service is running and is able to accept 317 // socket connections. 318 while (!mThreadExitFlag && mSocket==null) { 319 Thread.sleep(1); 320 } 321 if (!mThreadExitFlag){ 322 Logt.i(TAG, "ItsService ready"); 323 } else { 324 Logt.e(TAG, "Starting ItsService in bad state"); 325 } 326 327 Notification notification = new Notification.Builder(this, mChannel.getId()) 328 .setContentTitle("CameraITS Service") 329 .setContentText("CameraITS Service is running") 330 .setSmallIcon(R.drawable.icon) 331 .setOngoing(true).build(); 332 startForeground(SERVICE_NOTIFICATION_ID, notification); 333 } catch (java.lang.InterruptedException e) { 334 Logt.e(TAG, "Error starting ItsService (interrupted)", e); 335 } 336 return START_STICKY; 337 } 338 339 @Override 340 public void onDestroy() { 341 mThreadExitFlag = true; 342 for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) { 343 if (mSaveThreads[i] != null) { 344 mSaveThreads[i].quit(); 345 mSaveThreads[i] = null; 346 } 347 } 348 if (mSensorThread != null) { 349 mSensorThread.quitSafely(); 350 mSensorThread = null; 351 } 352 if (mResultThread != null) { 353 mResultThread.quitSafely(); 354 mResultThread = null; 355 } 356 if (mCameraThread != null) { 357 mCameraThread.quitSafely(); 358 mCameraThread = null; 359 } 360 } 361 362 public void openCameraDevice(String cameraId) throws ItsException { 363 Logt.i(TAG, String.format("Opening camera %s", cameraId)); 364 365 try { 366 if (mMemoryQuota == -1) { 367 // Initialize memory quota on this device 368 if (mItsCameraIdList == null) { 369 mItsCameraIdList = ItsUtils.getItsCompatibleCameraIds(mCameraManager); 370 } 371 if (mItsCameraIdList.mCameraIds.size() == 0) { 372 throw new ItsException("No camera devices"); 373 } 374 for (String camId : mItsCameraIdList.mCameraIds) { 375 CameraCharacteristics chars = mCameraManager.getCameraCharacteristics(camId); 376 Size maxYuvSize = ItsUtils.getMaxOutputSize( 377 chars, ImageFormat.YUV_420_888); 378 // 4 bytes per pixel for RGBA8888 Bitmap and at least 3 Bitmaps per CDD 379 int quota = maxYuvSize.getWidth() * maxYuvSize.getHeight() * 4 * 3; 380 if (quota > mMemoryQuota) { 381 mMemoryQuota = quota; 382 } 383 } 384 } 385 } catch (CameraAccessException e) { 386 throw new ItsException("Failed to get device ID list", e); 387 } 388 389 try { 390 mCamera = mBlockingCameraManager.openCamera(cameraId, mCameraListener, mCameraHandler); 391 mCameraCharacteristics = mCameraManager.getCameraCharacteristics(cameraId); 392 393 boolean isLogicalCamera = hasCapability( 394 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA); 395 if (isLogicalCamera) { 396 Set<String> physicalCameraIds = mCameraCharacteristics.getPhysicalCameraIds(); 397 for (String id : physicalCameraIds) { 398 mPhysicalCameraChars.put(id, mCameraManager.getCameraCharacteristics(id)); 399 } 400 } 401 mSocketQueueQuota = new Semaphore(mMemoryQuota, true); 402 } catch (CameraAccessException e) { 403 throw new ItsException("Failed to open camera", e); 404 } catch (BlockingOpenException e) { 405 throw new ItsException("Failed to open camera (after blocking)", e); 406 } 407 mSocketRunnableObj.sendResponse("cameraOpened", ""); 408 } 409 410 public void closeCameraDevice() throws ItsException { 411 try { 412 if (mCamera != null) { 413 Logt.i(TAG, "Closing camera"); 414 mCamera.close(); 415 mCamera = null; 416 } 417 } catch (Exception e) { 418 throw new ItsException("Failed to close device"); 419 } 420 mSocketRunnableObj.sendResponse("cameraClosed", ""); 421 } 422 423 class SerializerRunnable implements Runnable { 424 // Use a separate thread to perform JSON serialization (since this can be slow due to 425 // the reflection). 426 @Override 427 public void run() { 428 Logt.i(TAG, "Serializer thread starting"); 429 while (! mThreadExitFlag) { 430 try { 431 Object objs[] = mSerializerQueue.take(); 432 JSONObject jsonObj = new JSONObject(); 433 String tag = null; 434 for (int i = 0; i < objs.length; i++) { 435 Object obj = objs[i]; 436 if (obj instanceof String) { 437 if (tag != null) { 438 throw new ItsException("Multiple tags for socket response"); 439 } 440 tag = (String)obj; 441 } else if (obj instanceof CameraCharacteristics) { 442 jsonObj.put("cameraProperties", ItsSerializer.serialize( 443 (CameraCharacteristics)obj)); 444 } else if (obj instanceof CaptureRequest) { 445 jsonObj.put("captureRequest", ItsSerializer.serialize( 446 (CaptureRequest)obj)); 447 } else if (obj instanceof CaptureResult) { 448 jsonObj.put("captureResult", ItsSerializer.serialize( 449 (CaptureResult)obj)); 450 } else if (obj instanceof JSONArray) { 451 if (tag == "captureResults") { 452 if (i == SERIALIZER_SURFACES_ID) { 453 jsonObj.put("outputs", (JSONArray)obj); 454 } else if (i == SERIALIZER_PHYSICAL_METADATA_ID) { 455 jsonObj.put("physicalResults", (JSONArray)obj); 456 } else { 457 throw new ItsException( 458 "Unsupported JSONArray for captureResults"); 459 } 460 } else { 461 jsonObj.put("outputs", (JSONArray)obj); 462 } 463 } else { 464 throw new ItsException("Invalid object received for serialization"); 465 } 466 } 467 if (tag == null) { 468 throw new ItsException("No tag provided for socket response"); 469 } 470 mSocketRunnableObj.sendResponse(tag, null, jsonObj, null); 471 Logt.i(TAG, String.format("Serialized %s", tag)); 472 } catch (org.json.JSONException e) { 473 Logt.e(TAG, "Error serializing object", e); 474 break; 475 } catch (ItsException e) { 476 Logt.e(TAG, "Error serializing object", e); 477 break; 478 } catch (java.lang.InterruptedException e) { 479 Logt.e(TAG, "Error serializing object (interrupted)", e); 480 break; 481 } 482 } 483 Logt.i(TAG, "Serializer thread terminated"); 484 } 485 } 486 487 class SocketWriteRunnable implements Runnable { 488 489 // Use a separate thread to service a queue of objects to be written to the socket, 490 // writing each sequentially in order. This is needed since different handler functions 491 // (called on different threads) will need to send data back to the host script. 492 493 public Socket mOpenSocket = null; 494 private Thread mThread = null; 495 496 public SocketWriteRunnable(Socket openSocket) { 497 mOpenSocket = openSocket; 498 } 499 500 public void setOpenSocket(Socket openSocket) { 501 mOpenSocket = openSocket; 502 } 503 504 @Override 505 public void run() { 506 Logt.i(TAG, "Socket writer thread starting"); 507 while (true) { 508 try { 509 ByteBuffer b = mSocketWriteQueue.take(); 510 synchronized(mSocketWriteDrainLock) { 511 if (mOpenSocket == null) { 512 Logt.e(TAG, "No open socket connection!"); 513 continue; 514 } 515 if (b.hasArray()) { 516 mOpenSocket.getOutputStream().write(b.array(), 0, b.capacity()); 517 } else { 518 byte[] barray = new byte[b.capacity()]; 519 b.get(barray); 520 mOpenSocket.getOutputStream().write(barray); 521 } 522 mOpenSocket.getOutputStream().flush(); 523 Logt.i(TAG, String.format("Wrote to socket: %d bytes", b.capacity())); 524 Integer imgBufSize = mInflightImageSizes.peek(); 525 if (imgBufSize != null && imgBufSize == b.capacity()) { 526 mInflightImageSizes.removeFirst(); 527 if (mSocketQueueQuota != null) { 528 mSocketQueueQuota.release(imgBufSize); 529 } 530 } 531 } 532 } catch (IOException e) { 533 Logt.e(TAG, "Error writing to socket", e); 534 mOpenSocket = null; 535 break; 536 } catch (java.lang.InterruptedException e) { 537 Logt.e(TAG, "Error writing to socket (interrupted)", e); 538 mOpenSocket = null; 539 break; 540 } 541 } 542 Logt.i(TAG, "Socket writer thread terminated"); 543 } 544 545 public synchronized void checkAndStartThread() { 546 if (mThread == null || mThread.getState() == Thread.State.TERMINATED) { 547 mThread = new Thread(this); 548 } 549 if (mThread.getState() == Thread.State.NEW) { 550 mThread.start(); 551 } 552 } 553 554 } 555 556 class SocketRunnable implements Runnable { 557 558 // Format of sent messages (over the socket): 559 // * Serialized JSON object on a single line (newline-terminated) 560 // * For byte buffers, the binary data then follows 561 // 562 // Format of received messages (from the socket): 563 // * Serialized JSON object on a single line (newline-terminated) 564 565 private Socket mOpenSocket = null; 566 private SocketWriteRunnable mSocketWriteRunnable = null; 567 568 @Override 569 public void run() { 570 Logt.i(TAG, "Socket thread starting"); 571 try { 572 mSocket = new ServerSocket(SERVERPORT); 573 } catch (IOException e) { 574 Logt.e(TAG, "Failed to create socket", e); 575 } 576 577 // Create a new thread to handle writes to this socket. 578 mSocketWriteRunnable = new SocketWriteRunnable(null); 579 580 while (!mThreadExitFlag) { 581 // Receive the socket-open request from the host. 582 try { 583 Logt.i(TAG, "Waiting for client to connect to socket"); 584 mOpenSocket = mSocket.accept(); 585 if (mOpenSocket == null) { 586 Logt.e(TAG, "Socket connection error"); 587 break; 588 } 589 mSocketWriteQueue.clear(); 590 mInflightImageSizes.clear(); 591 mSocketWriteRunnable.setOpenSocket(mOpenSocket); 592 mSocketWriteRunnable.checkAndStartThread(); 593 Logt.i(TAG, "Socket connected"); 594 } catch (IOException e) { 595 Logt.e(TAG, "Socket open error: ", e); 596 break; 597 } 598 599 // Process commands over the open socket. 600 while (!mThreadExitFlag) { 601 try { 602 BufferedReader input = new BufferedReader( 603 new InputStreamReader(mOpenSocket.getInputStream())); 604 if (input == null) { 605 Logt.e(TAG, "Failed to get socket input stream"); 606 break; 607 } 608 String line = input.readLine(); 609 if (line == null) { 610 Logt.i(TAG, "Socket readline retuned null (host disconnected)"); 611 break; 612 } 613 processSocketCommand(line); 614 } catch (IOException e) { 615 Logt.e(TAG, "Socket read error: ", e); 616 break; 617 } catch (ItsException e) { 618 Logt.e(TAG, "Script error: ", e); 619 break; 620 } 621 } 622 623 // Close socket and go back to waiting for a new connection. 624 try { 625 synchronized(mSocketWriteDrainLock) { 626 mSocketWriteQueue.clear(); 627 mInflightImageSizes.clear(); 628 mOpenSocket.close(); 629 mOpenSocket = null; 630 mSocketWriteRunnable.setOpenSocket(null); 631 Logt.i(TAG, "Socket disconnected"); 632 } 633 } catch (java.io.IOException e) { 634 Logt.e(TAG, "Exception closing socket"); 635 } 636 } 637 638 // It's an overall error state if the code gets here; no recevery. 639 // Try to do some cleanup, but the service probably needs to be restarted. 640 Logt.i(TAG, "Socket server loop exited"); 641 mThreadExitFlag = true; 642 try { 643 synchronized(mSocketWriteDrainLock) { 644 if (mOpenSocket != null) { 645 mOpenSocket.close(); 646 mOpenSocket = null; 647 mSocketWriteRunnable.setOpenSocket(null); 648 } 649 } 650 } catch (java.io.IOException e) { 651 Logt.w(TAG, "Exception closing socket"); 652 } 653 try { 654 if (mSocket != null) { 655 mSocket.close(); 656 mSocket = null; 657 } 658 } catch (java.io.IOException e) { 659 Logt.w(TAG, "Exception closing socket"); 660 } 661 } 662 663 public void processSocketCommand(String cmd) 664 throws ItsException { 665 // Each command is a serialized JSON object. 666 try { 667 JSONObject cmdObj = new JSONObject(cmd); 668 Logt.i(TAG, "Start processing command" + cmdObj.getString("cmdName")); 669 if ("open".equals(cmdObj.getString("cmdName"))) { 670 String cameraId = cmdObj.getString("cameraId"); 671 openCameraDevice(cameraId); 672 } else if ("close".equals(cmdObj.getString("cmdName"))) { 673 closeCameraDevice(); 674 } else if ("getCameraProperties".equals(cmdObj.getString("cmdName"))) { 675 doGetProps(); 676 } else if ("getCameraPropertiesById".equals(cmdObj.getString("cmdName"))) { 677 doGetPropsById(cmdObj); 678 } else if ("startSensorEvents".equals(cmdObj.getString("cmdName"))) { 679 doStartSensorEvents(); 680 } else if ("checkSensorExistence".equals(cmdObj.getString("cmdName"))) { 681 doCheckSensorExistence(); 682 } else if ("getSensorEvents".equals(cmdObj.getString("cmdName"))) { 683 doGetSensorEvents(); 684 } else if ("do3A".equals(cmdObj.getString("cmdName"))) { 685 do3A(cmdObj); 686 } else if ("doCapture".equals(cmdObj.getString("cmdName"))) { 687 doCapture(cmdObj); 688 } else if ("doVibrate".equals(cmdObj.getString("cmdName"))) { 689 doVibrate(cmdObj); 690 } else if ("getCameraIds".equals(cmdObj.getString("cmdName"))) { 691 doGetCameraIds(); 692 } else if ("doReprocessCapture".equals(cmdObj.getString("cmdName"))) { 693 doReprocessCapture(cmdObj); 694 } else if ("getItsVersion".equals(cmdObj.getString("cmdName"))) { 695 mSocketRunnableObj.sendResponse("ItsVersion", ITS_SERVICE_VERSION); 696 } else if ("isStreamCombinationSupported".equals(cmdObj.getString("cmdName"))) { 697 doCheckStreamCombination(cmdObj); 698 } else { 699 throw new ItsException("Unknown command: " + cmd); 700 } 701 Logt.i(TAG, "Finish processing command" + cmdObj.getString("cmdName")); 702 } catch (org.json.JSONException e) { 703 Logt.e(TAG, "Invalid command: ", e); 704 } 705 } 706 707 public void sendResponse(String tag, String str, JSONObject obj, ByteBuffer bbuf) 708 throws ItsException { 709 try { 710 JSONObject jsonObj = new JSONObject(); 711 jsonObj.put("tag", tag); 712 if (str != null) { 713 jsonObj.put("strValue", str); 714 } 715 if (obj != null) { 716 jsonObj.put("objValue", obj); 717 } 718 if (bbuf != null) { 719 jsonObj.put("bufValueSize", bbuf.capacity()); 720 } 721 ByteBuffer bstr = ByteBuffer.wrap( 722 (jsonObj.toString()+"\n").getBytes(Charset.defaultCharset())); 723 synchronized(mSocketWriteEnqueueLock) { 724 if (bstr != null) { 725 mSocketWriteQueue.put(bstr); 726 } 727 if (bbuf != null) { 728 mInflightImageSizes.add(bbuf.capacity()); 729 mSocketWriteQueue.put(bbuf); 730 } 731 } 732 } catch (org.json.JSONException e) { 733 throw new ItsException("JSON error: ", e); 734 } catch (java.lang.InterruptedException e) { 735 throw new ItsException("Socket error: ", e); 736 } 737 } 738 739 public void sendResponse(String tag, String str) 740 throws ItsException { 741 sendResponse(tag, str, null, null); 742 } 743 744 public void sendResponse(String tag, JSONObject obj) 745 throws ItsException { 746 sendResponse(tag, null, obj, null); 747 } 748 749 public void sendResponseCaptureBuffer(String tag, ByteBuffer bbuf) 750 throws ItsException { 751 sendResponse(tag, null, null, bbuf); 752 } 753 754 public void sendResponse(LinkedList<MySensorEvent> events) 755 throws ItsException { 756 Logt.i(TAG, "Sending " + events.size() + " sensor events"); 757 try { 758 JSONArray accels = new JSONArray(); 759 JSONArray mags = new JSONArray(); 760 JSONArray gyros = new JSONArray(); 761 for (MySensorEvent event : events) { 762 JSONObject obj = new JSONObject(); 763 obj.put("time", event.timestamp); 764 obj.put("x", event.values[0]); 765 obj.put("y", event.values[1]); 766 obj.put("z", event.values[2]); 767 if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) { 768 accels.put(obj); 769 } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) { 770 mags.put(obj); 771 } else if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) { 772 gyros.put(obj); 773 } 774 } 775 JSONObject obj = new JSONObject(); 776 obj.put("accel", accels); 777 obj.put("mag", mags); 778 obj.put("gyro", gyros); 779 sendResponse("sensorEvents", null, obj, null); 780 } catch (org.json.JSONException e) { 781 throw new ItsException("JSON error: ", e); 782 } 783 Logt.i(TAG, "Sent sensor events"); 784 } 785 786 public void sendResponse(CameraCharacteristics props) 787 throws ItsException { 788 try { 789 Object objs[] = new Object[2]; 790 objs[0] = "cameraProperties"; 791 objs[1] = props; 792 mSerializerQueue.put(objs); 793 } catch (InterruptedException e) { 794 throw new ItsException("Interrupted: ", e); 795 } 796 } 797 798 public void sendResponseCaptureResult(CameraCharacteristics props, 799 CaptureRequest request, 800 TotalCaptureResult result, 801 ImageReader[] readers) 802 throws ItsException { 803 try { 804 JSONArray jsonSurfaces = new JSONArray(); 805 for (int i = 0; i < readers.length; i++) { 806 JSONObject jsonSurface = new JSONObject(); 807 jsonSurface.put("width", readers[i].getWidth()); 808 jsonSurface.put("height", readers[i].getHeight()); 809 int format = readers[i].getImageFormat(); 810 if (format == ImageFormat.RAW_SENSOR) { 811 if (mCaptureRawIsStats) { 812 int aaw = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 813 .width(); 814 int aah = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 815 .height(); 816 jsonSurface.put("format", "rawStats"); 817 jsonSurface.put("width", aaw/mCaptureStatsGridWidth); 818 jsonSurface.put("height", aah/mCaptureStatsGridHeight); 819 } else if (mCaptureRawIsDng) { 820 jsonSurface.put("format", "dng"); 821 } else { 822 jsonSurface.put("format", "raw"); 823 } 824 } else if (format == ImageFormat.RAW10) { 825 jsonSurface.put("format", "raw10"); 826 } else if (format == ImageFormat.RAW12) { 827 jsonSurface.put("format", "raw12"); 828 } else if (format == ImageFormat.JPEG) { 829 jsonSurface.put("format", "jpeg"); 830 } else if (format == ImageFormat.YUV_420_888) { 831 jsonSurface.put("format", "yuv"); 832 } else if (format == ImageFormat.Y8) { 833 jsonSurface.put("format", "y8"); 834 } else { 835 throw new ItsException("Invalid format"); 836 } 837 jsonSurfaces.put(jsonSurface); 838 } 839 840 Map<String, CaptureResult> physicalMetadata = 841 result.getPhysicalCameraResults(); 842 JSONArray jsonPhysicalMetadata = new JSONArray(); 843 for (Map.Entry<String, CaptureResult> pair : physicalMetadata.entrySet()) { 844 JSONObject jsonOneMetadata = new JSONObject(); 845 jsonOneMetadata.put(pair.getKey(), ItsSerializer.serialize(pair.getValue())); 846 jsonPhysicalMetadata.put(jsonOneMetadata); 847 } 848 Object objs[] = new Object[4]; 849 objs[0] = "captureResults"; 850 objs[1] = result; 851 objs[SERIALIZER_SURFACES_ID] = jsonSurfaces; 852 objs[SERIALIZER_PHYSICAL_METADATA_ID] = jsonPhysicalMetadata; 853 mSerializerQueue.put(objs); 854 } catch (org.json.JSONException e) { 855 throw new ItsException("JSON error: ", e); 856 } catch (InterruptedException e) { 857 throw new ItsException("Interrupted: ", e); 858 } 859 } 860 } 861 862 public ImageReader.OnImageAvailableListener 863 createAvailableListener(final CaptureCallback listener) { 864 return new ImageReader.OnImageAvailableListener() { 865 @Override 866 public void onImageAvailable(ImageReader reader) { 867 Image i = null; 868 try { 869 i = reader.acquireNextImage(); 870 String physicalCameraId = new String(); 871 for (int idx = 0; idx < mOutputImageReaders.length; idx++) { 872 if (mOutputImageReaders[idx] == reader) { 873 physicalCameraId = mPhysicalStreamMap.get(idx); 874 } 875 } 876 listener.onCaptureAvailable(i, physicalCameraId); 877 } finally { 878 if (i != null) { 879 i.close(); 880 } 881 } 882 } 883 }; 884 } 885 886 private ImageReader.OnImageAvailableListener 887 createAvailableListenerDropper() { 888 return new ImageReader.OnImageAvailableListener() { 889 @Override 890 public void onImageAvailable(ImageReader reader) { 891 Image i = reader.acquireNextImage(); 892 i.close(); 893 } 894 }; 895 } 896 897 private void doStartSensorEvents() throws ItsException { 898 synchronized(mEventLock) { 899 mEventsEnabled = true; 900 } 901 mSocketRunnableObj.sendResponse("sensorEventsStarted", ""); 902 } 903 904 private void doCheckSensorExistence() throws ItsException { 905 try { 906 JSONObject obj = new JSONObject(); 907 obj.put("accel", mAccelSensor != null); 908 obj.put("mag", mMagSensor != null); 909 obj.put("gyro", mGyroSensor != null); 910 mSocketRunnableObj.sendResponse("sensorExistence", null, obj, null); 911 } catch (org.json.JSONException e) { 912 throw new ItsException("JSON error: ", e); 913 } 914 } 915 916 private void doGetSensorEvents() throws ItsException { 917 synchronized(mEventLock) { 918 mSocketRunnableObj.sendResponse(mEvents); 919 mEvents.clear(); 920 mEventsEnabled = false; 921 } 922 } 923 924 private void doGetProps() throws ItsException { 925 mSocketRunnableObj.sendResponse(mCameraCharacteristics); 926 } 927 928 private void doGetPropsById(JSONObject params) throws ItsException { 929 String[] devices; 930 try { 931 // Intentionally not using ItsUtils.getItsCompatibleCameraIds here so it's possible to 932 // write some simple script to query camera characteristics even for devices exempted 933 // from ITS today. 934 devices = mCameraManager.getCameraIdList(); 935 if (devices == null || devices.length == 0) { 936 throw new ItsException("No camera devices"); 937 } 938 } catch (CameraAccessException e) { 939 throw new ItsException("Failed to get device ID list", e); 940 } 941 942 try { 943 String cameraId = params.getString("cameraId"); 944 CameraCharacteristics characteristics = 945 mCameraManager.getCameraCharacteristics(cameraId); 946 mSocketRunnableObj.sendResponse(characteristics); 947 } catch (org.json.JSONException e) { 948 throw new ItsException("JSON error: ", e); 949 } catch (IllegalArgumentException e) { 950 throw new ItsException("Illegal argument error:", e); 951 } catch (CameraAccessException e) { 952 throw new ItsException("Access error: ", e); 953 } 954 } 955 956 private void doGetCameraIds() throws ItsException { 957 if (mItsCameraIdList == null) { 958 mItsCameraIdList = ItsUtils.getItsCompatibleCameraIds(mCameraManager); 959 } 960 if (mItsCameraIdList.mCameraIdCombos.size() == 0) { 961 throw new ItsException("No camera devices"); 962 } 963 964 try { 965 JSONObject obj = new JSONObject(); 966 JSONArray array = new JSONArray(); 967 for (String id : mItsCameraIdList.mCameraIdCombos) { 968 array.put(id); 969 } 970 obj.put("cameraIdArray", array); 971 mSocketRunnableObj.sendResponse("cameraIds", obj); 972 } catch (org.json.JSONException e) { 973 throw new ItsException("JSON error: ", e); 974 } 975 } 976 977 private static class HandlerExecutor implements Executor { 978 private final Handler mHandler; 979 980 public HandlerExecutor(Handler handler) { 981 mHandler = handler; 982 } 983 984 @Override 985 public void execute(Runnable runCmd) { 986 mHandler.post(runCmd); 987 } 988 } 989 990 private void doCheckStreamCombination(JSONObject params) throws ItsException { 991 try { 992 JSONObject obj = new JSONObject(); 993 JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params); 994 prepareImageReadersWithOutputSpecs(jsonOutputSpecs, /*inputSize*/null, 995 /*inputFormat*/0, /*maxInputBuffers*/0, /*backgroundRequest*/false); 996 int numSurfaces = mOutputImageReaders.length; 997 List<OutputConfiguration> outputConfigs = 998 new ArrayList<OutputConfiguration>(numSurfaces); 999 for (int i = 0; i < numSurfaces; i++) { 1000 OutputConfiguration config = new OutputConfiguration( 1001 mOutputImageReaders[i].getSurface()); 1002 if (mPhysicalStreamMap.get(i) != null) { 1003 config.setPhysicalCameraId(mPhysicalStreamMap.get(i)); 1004 } 1005 outputConfigs.add(config); 1006 } 1007 1008 BlockingSessionCallback sessionListener = new BlockingSessionCallback(); 1009 SessionConfiguration sessionConfig = new SessionConfiguration( 1010 SessionConfiguration.SESSION_REGULAR, outputConfigs, 1011 new HandlerExecutor(mCameraHandler), sessionListener); 1012 boolean supported = mCamera.isSessionConfigurationSupported(sessionConfig); 1013 1014 String supportString = supported ? "supportedCombination" : "unsupportedCombination"; 1015 mSocketRunnableObj.sendResponse("streamCombinationSupport", supportString); 1016 1017 } catch (UnsupportedOperationException e) { 1018 mSocketRunnableObj.sendResponse("streamCombinationSupport", "unsupportedOperation"); 1019 } catch (IllegalArgumentException e) { 1020 throw new ItsException("Error checking stream combination", e); 1021 } catch (CameraAccessException e) { 1022 throw new ItsException("Error checking stream combination", e); 1023 } 1024 } 1025 1026 private void prepareImageReaders(Size[] outputSizes, int[] outputFormats, Size inputSize, 1027 int inputFormat, int maxInputBuffers) { 1028 closeImageReaders(); 1029 mOutputImageReaders = new ImageReader[outputSizes.length]; 1030 for (int i = 0; i < outputSizes.length; i++) { 1031 // Check if the output image reader can be shared with the input image reader. 1032 if (outputSizes[i].equals(inputSize) && outputFormats[i] == inputFormat) { 1033 mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(), 1034 outputSizes[i].getHeight(), outputFormats[i], 1035 MAX_CONCURRENT_READER_BUFFERS + maxInputBuffers); 1036 mInputImageReader = mOutputImageReaders[i]; 1037 } else { 1038 mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(), 1039 outputSizes[i].getHeight(), outputFormats[i], 1040 MAX_CONCURRENT_READER_BUFFERS); 1041 } 1042 } 1043 1044 if (inputSize != null && mInputImageReader == null) { 1045 mInputImageReader = ImageReader.newInstance(inputSize.getWidth(), inputSize.getHeight(), 1046 inputFormat, maxInputBuffers); 1047 } 1048 } 1049 1050 private void closeImageReaders() { 1051 if (mOutputImageReaders != null) { 1052 for (int i = 0; i < mOutputImageReaders.length; i++) { 1053 if (mOutputImageReaders[i] != null) { 1054 mOutputImageReaders[i].close(); 1055 mOutputImageReaders[i] = null; 1056 } 1057 } 1058 } 1059 if (mInputImageReader != null) { 1060 mInputImageReader.close(); 1061 mInputImageReader = null; 1062 } 1063 } 1064 1065 private void do3A(JSONObject params) throws ItsException { 1066 ThreeAResultListener threeAListener = new ThreeAResultListener(); 1067 try { 1068 // Start a 3A action, and wait for it to converge. 1069 // Get the converged values for each "A", and package into JSON result for caller. 1070 1071 // Configure streams on physical sub-camera if PHYSICAL_ID_KEY is specified. 1072 String physicalId = null; 1073 CameraCharacteristics c = mCameraCharacteristics; 1074 if (params.has(PHYSICAL_ID_KEY)) { 1075 physicalId = params.getString(PHYSICAL_ID_KEY); 1076 c = mPhysicalCameraChars.get(physicalId); 1077 } 1078 1079 // 3A happens on full-res frames. 1080 Size sizes[] = ItsUtils.getYuvOutputSizes(c); 1081 int outputFormats[] = new int[1]; 1082 outputFormats[0] = ImageFormat.YUV_420_888; 1083 Size[] outputSizes = new Size[1]; 1084 outputSizes[0] = sizes[0]; 1085 int width = outputSizes[0].getWidth(); 1086 int height = outputSizes[0].getHeight(); 1087 1088 prepareImageReaders(outputSizes, outputFormats, /*inputSize*/null, /*inputFormat*/0, 1089 /*maxInputBuffers*/0); 1090 1091 List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>(1); 1092 OutputConfiguration config = 1093 new OutputConfiguration(mOutputImageReaders[0].getSurface()); 1094 if (physicalId != null) { 1095 config.setPhysicalCameraId(physicalId); 1096 } 1097 outputConfigs.add(config); 1098 BlockingSessionCallback sessionListener = new BlockingSessionCallback(); 1099 mCamera.createCaptureSessionByOutputConfigurations( 1100 outputConfigs, sessionListener, mCameraHandler); 1101 mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS); 1102 1103 // Add a listener that just recycles buffers; they aren't saved anywhere. 1104 ImageReader.OnImageAvailableListener readerListener = 1105 createAvailableListenerDropper(); 1106 mOutputImageReaders[0].setOnImageAvailableListener(readerListener, mSaveHandlers[0]); 1107 1108 // Get the user-specified regions for AE, AWB, AF. 1109 // Note that the user specifies normalized [x,y,w,h], which is converted below 1110 // to an [x0,y0,x1,y1] region in sensor coords. The capture request region 1111 // also has a fifth "weight" element: [x0,y0,x1,y1,w]. 1112 // Use logical camera's active array size for 3A regions. 1113 Rect activeArray = mCameraCharacteristics.get( 1114 CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); 1115 int aaWidth = activeArray.right - activeArray.left; 1116 int aaHeight = activeArray.bottom - activeArray.top; 1117 MeteringRectangle[] regionAE = new MeteringRectangle[]{ 1118 new MeteringRectangle(0,0,aaWidth,aaHeight,1)}; 1119 MeteringRectangle[] regionAF = new MeteringRectangle[]{ 1120 new MeteringRectangle(0,0,aaWidth,aaHeight,1)}; 1121 MeteringRectangle[] regionAWB = new MeteringRectangle[]{ 1122 new MeteringRectangle(0,0,aaWidth,aaHeight,1)}; 1123 if (params.has(REGION_KEY)) { 1124 JSONObject regions = params.getJSONObject(REGION_KEY); 1125 if (regions.has(REGION_AE_KEY)) { 1126 regionAE = ItsUtils.getJsonWeightedRectsFromArray( 1127 regions.getJSONArray(REGION_AE_KEY), true, aaWidth, aaHeight); 1128 } 1129 if (regions.has(REGION_AF_KEY)) { 1130 regionAF = ItsUtils.getJsonWeightedRectsFromArray( 1131 regions.getJSONArray(REGION_AF_KEY), true, aaWidth, aaHeight); 1132 } 1133 if (regions.has(REGION_AWB_KEY)) { 1134 regionAWB = ItsUtils.getJsonWeightedRectsFromArray( 1135 regions.getJSONArray(REGION_AWB_KEY), true, aaWidth, aaHeight); 1136 } 1137 } 1138 1139 // An EV compensation can be specified as part of AE convergence. 1140 int evComp = params.optInt(EVCOMP_KEY, 0); 1141 if (evComp != 0) { 1142 Logt.i(TAG, String.format("Running 3A with AE exposure compensation value: %d", evComp)); 1143 } 1144 1145 // By default, AE and AF both get triggered, but the user can optionally override this. 1146 // Also, AF won't get triggered if the lens is fixed-focus. 1147 boolean doAE = true; 1148 boolean doAF = true; 1149 if (params.has(TRIGGER_KEY)) { 1150 JSONObject triggers = params.getJSONObject(TRIGGER_KEY); 1151 if (triggers.has(TRIGGER_AE_KEY)) { 1152 doAE = triggers.getBoolean(TRIGGER_AE_KEY); 1153 } 1154 if (triggers.has(TRIGGER_AF_KEY)) { 1155 doAF = triggers.getBoolean(TRIGGER_AF_KEY); 1156 } 1157 } 1158 Float minFocusDistance = c.get( 1159 CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE); 1160 boolean isFixedFocusLens = minFocusDistance != null && minFocusDistance == 0.0; 1161 if (doAF && isFixedFocusLens) { 1162 // Send a dummy result back for the code that is waiting for this message to see 1163 // that AF has converged. 1164 Logt.i(TAG, "Ignoring request for AF on fixed-focus camera"); 1165 mSocketRunnableObj.sendResponse("afResult", "0.0"); 1166 doAF = false; 1167 } 1168 1169 mInterlock3A.open(); 1170 synchronized(m3AStateLock) { 1171 // If AE or AWB lock is specified, then the 3A will converge first and then lock these 1172 // values, waiting until the HAL has reported that the lock was successful. 1173 mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false); 1174 mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false); 1175 mConvergedAE = false; 1176 mConvergedAWB = false; 1177 mConvergedAF = false; 1178 mLockedAE = false; 1179 mLockedAWB = false; 1180 } 1181 long tstart = System.currentTimeMillis(); 1182 boolean triggeredAE = false; 1183 boolean triggeredAF = false; 1184 1185 Logt.i(TAG, String.format("Initiating 3A: AE:%d, AF:%d, AWB:1, AELOCK:%d, AWBLOCK:%d", 1186 doAE?1:0, doAF?1:0, mNeedsLockedAE?1:0, mNeedsLockedAWB?1:0)); 1187 1188 // Keep issuing capture requests until 3A has converged. 1189 while (true) { 1190 1191 // Block until can take the next 3A frame. Only want one outstanding frame 1192 // at a time, to simplify the logic here. 1193 if (!mInterlock3A.block(TIMEOUT_3A * 1000) || 1194 System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) { 1195 throw new ItsException( 1196 "3A failed to converge after " + TIMEOUT_3A + " seconds.\n" + 1197 "AE converge state: " + mConvergedAE + ", \n" + 1198 "AF convergence state: " + mConvergedAF + ", \n" + 1199 "AWB convergence state: " + mConvergedAWB + "."); 1200 } 1201 mInterlock3A.close(); 1202 1203 synchronized(m3AStateLock) { 1204 // If not converged yet, issue another capture request. 1205 if ( (doAE && (!triggeredAE || !mConvergedAE)) 1206 || !mConvergedAWB 1207 || (doAF && (!triggeredAF || !mConvergedAF)) 1208 || (doAE && mNeedsLockedAE && !mLockedAE) 1209 || (mNeedsLockedAWB && !mLockedAWB)) { 1210 1211 // Baseline capture request for 3A. 1212 CaptureRequest.Builder req = mCamera.createCaptureRequest( 1213 CameraDevice.TEMPLATE_PREVIEW); 1214 req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 1215 req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 1216 req.set(CaptureRequest.CONTROL_CAPTURE_INTENT, 1217 CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW); 1218 req.set(CaptureRequest.CONTROL_AE_MODE, 1219 CaptureRequest.CONTROL_AE_MODE_ON); 1220 req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0); 1221 req.set(CaptureRequest.CONTROL_AE_LOCK, false); 1222 req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE); 1223 req.set(CaptureRequest.CONTROL_AF_MODE, 1224 CaptureRequest.CONTROL_AF_MODE_AUTO); 1225 req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF); 1226 req.set(CaptureRequest.CONTROL_AWB_MODE, 1227 CaptureRequest.CONTROL_AWB_MODE_AUTO); 1228 req.set(CaptureRequest.CONTROL_AWB_LOCK, false); 1229 req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB); 1230 // ITS only turns OIS on when it's explicitly requested 1231 req.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, 1232 CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF); 1233 1234 if (evComp != 0) { 1235 req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, evComp); 1236 } 1237 1238 if (mConvergedAE && mNeedsLockedAE) { 1239 req.set(CaptureRequest.CONTROL_AE_LOCK, true); 1240 } 1241 if (mConvergedAWB && mNeedsLockedAWB) { 1242 req.set(CaptureRequest.CONTROL_AWB_LOCK, true); 1243 } 1244 1245 boolean triggering = false; 1246 // Trigger AE first. 1247 if (doAE && !triggeredAE) { 1248 Logt.i(TAG, "Triggering AE"); 1249 req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, 1250 CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START); 1251 triggeredAE = true; 1252 triggering = true; 1253 } 1254 1255 // After AE has converged, trigger AF. 1256 if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) { 1257 Logt.i(TAG, "Triggering AF"); 1258 req.set(CaptureRequest.CONTROL_AF_TRIGGER, 1259 CaptureRequest.CONTROL_AF_TRIGGER_START); 1260 triggeredAF = true; 1261 triggering = true; 1262 } 1263 1264 req.addTarget(mOutputImageReaders[0].getSurface()); 1265 1266 if (triggering) { 1267 // Send single request for AE/AF trigger 1268 mSession.capture(req.build(), 1269 threeAListener, mResultHandler); 1270 } else { 1271 // Use repeating request for non-trigger requests 1272 mSession.setRepeatingRequest(req.build(), 1273 threeAListener, mResultHandler); 1274 } 1275 } else { 1276 mSocketRunnableObj.sendResponse("3aConverged", ""); 1277 Logt.i(TAG, "3A converged"); 1278 break; 1279 } 1280 } 1281 } 1282 } catch (android.hardware.camera2.CameraAccessException e) { 1283 throw new ItsException("Access error: ", e); 1284 } catch (org.json.JSONException e) { 1285 throw new ItsException("JSON error: ", e); 1286 } finally { 1287 mSocketRunnableObj.sendResponse("3aDone", ""); 1288 // stop listener from updating 3A states 1289 threeAListener.stop(); 1290 if (mSession != null) { 1291 mSession.close(); 1292 } 1293 } 1294 } 1295 1296 private void doVibrate(JSONObject params) throws ItsException { 1297 try { 1298 if (mVibrator == null) { 1299 throw new ItsException("Unable to start vibrator"); 1300 } 1301 JSONArray patternArray = params.getJSONArray(VIB_PATTERN_KEY); 1302 int len = patternArray.length(); 1303 long pattern[] = new long[len]; 1304 for (int i = 0; i < len; i++) { 1305 pattern[i] = patternArray.getLong(i); 1306 } 1307 Logt.i(TAG, String.format("Starting vibrator, pattern length %d",len)); 1308 mVibrator.vibrate(pattern, -1); 1309 mSocketRunnableObj.sendResponse("vibrationStarted", ""); 1310 } catch (org.json.JSONException e) { 1311 throw new ItsException("JSON error: ", e); 1312 } 1313 } 1314 1315 /** 1316 * Parse jsonOutputSpecs to get output surface sizes and formats. Create input and output 1317 * image readers for the parsed output surface sizes, output formats, and the given input 1318 * size and format. 1319 */ 1320 private void prepareImageReadersWithOutputSpecs(JSONArray jsonOutputSpecs, Size inputSize, 1321 int inputFormat, int maxInputBuffers, boolean backgroundRequest) throws ItsException { 1322 Size outputSizes[]; 1323 int outputFormats[]; 1324 int numSurfaces = 0; 1325 mPhysicalStreamMap.clear(); 1326 1327 if (jsonOutputSpecs != null) { 1328 try { 1329 numSurfaces = jsonOutputSpecs.length(); 1330 if (backgroundRequest) { 1331 numSurfaces += 1; 1332 } 1333 if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) { 1334 throw new ItsException("Too many output surfaces"); 1335 } 1336 1337 outputSizes = new Size[numSurfaces]; 1338 outputFormats = new int[numSurfaces]; 1339 for (int i = 0; i < numSurfaces; i++) { 1340 // Append optional background stream at the end 1341 if (backgroundRequest && i == numSurfaces - 1) { 1342 outputFormats[i] = ImageFormat.YUV_420_888; 1343 outputSizes[i] = new Size(640, 480); 1344 continue; 1345 } 1346 // Get the specified surface. 1347 JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i); 1348 String physicalCameraId = surfaceObj.optString("physicalCamera"); 1349 CameraCharacteristics cameraCharacteristics = mCameraCharacteristics; 1350 mPhysicalStreamMap.put(i, physicalCameraId); 1351 if (!physicalCameraId.isEmpty()) { 1352 cameraCharacteristics = mPhysicalCameraChars.get(physicalCameraId); 1353 } 1354 1355 String sformat = surfaceObj.optString("format"); 1356 Size sizes[]; 1357 if ("yuv".equals(sformat) || "".equals(sformat)) { 1358 // Default to YUV if no format is specified. 1359 outputFormats[i] = ImageFormat.YUV_420_888; 1360 sizes = ItsUtils.getYuvOutputSizes(cameraCharacteristics); 1361 } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) { 1362 outputFormats[i] = ImageFormat.JPEG; 1363 sizes = ItsUtils.getJpegOutputSizes(cameraCharacteristics); 1364 } else if ("raw".equals(sformat)) { 1365 outputFormats[i] = ImageFormat.RAW_SENSOR; 1366 sizes = ItsUtils.getRaw16OutputSizes(cameraCharacteristics); 1367 } else if ("raw10".equals(sformat)) { 1368 outputFormats[i] = ImageFormat.RAW10; 1369 sizes = ItsUtils.getRaw10OutputSizes(cameraCharacteristics); 1370 } else if ("raw12".equals(sformat)) { 1371 outputFormats[i] = ImageFormat.RAW12; 1372 sizes = ItsUtils.getRaw12OutputSizes(cameraCharacteristics); 1373 } else if ("dng".equals(sformat)) { 1374 outputFormats[i] = ImageFormat.RAW_SENSOR; 1375 sizes = ItsUtils.getRaw16OutputSizes(cameraCharacteristics); 1376 mCaptureRawIsDng = true; 1377 } else if ("rawStats".equals(sformat)) { 1378 outputFormats[i] = ImageFormat.RAW_SENSOR; 1379 sizes = ItsUtils.getRaw16OutputSizes(cameraCharacteristics); 1380 mCaptureRawIsStats = true; 1381 mCaptureStatsGridWidth = surfaceObj.optInt("gridWidth"); 1382 mCaptureStatsGridHeight = surfaceObj.optInt("gridHeight"); 1383 } else if ("y8".equals(sformat)) { 1384 outputFormats[i] = ImageFormat.Y8; 1385 sizes = ItsUtils.getY8OutputSizes(cameraCharacteristics); 1386 } else { 1387 throw new ItsException("Unsupported format: " + sformat); 1388 } 1389 // If the size is omitted, then default to the largest allowed size for the 1390 // format. 1391 int width = surfaceObj.optInt("width"); 1392 int height = surfaceObj.optInt("height"); 1393 if (width <= 0) { 1394 if (sizes == null || sizes.length == 0) { 1395 throw new ItsException(String.format( 1396 "Zero stream configs available for requested format: %s", 1397 sformat)); 1398 } 1399 width = ItsUtils.getMaxSize(sizes).getWidth(); 1400 } 1401 if (height <= 0) { 1402 height = ItsUtils.getMaxSize(sizes).getHeight(); 1403 } 1404 // The stats computation only applies to the active array region. 1405 int aaw = ItsUtils.getActiveArrayCropRegion(cameraCharacteristics).width(); 1406 int aah = ItsUtils.getActiveArrayCropRegion(cameraCharacteristics).height(); 1407 if (mCaptureStatsGridWidth <= 0 || mCaptureStatsGridWidth > aaw) { 1408 mCaptureStatsGridWidth = aaw; 1409 } 1410 if (mCaptureStatsGridHeight <= 0 || mCaptureStatsGridHeight > aah) { 1411 mCaptureStatsGridHeight = aah; 1412 } 1413 1414 outputSizes[i] = new Size(width, height); 1415 } 1416 } catch (org.json.JSONException e) { 1417 throw new ItsException("JSON error", e); 1418 } 1419 } else { 1420 // No surface(s) specified at all. 1421 // Default: a single output surface which is full-res YUV. 1422 Size maxYuvSize = ItsUtils.getMaxOutputSize( 1423 mCameraCharacteristics, ImageFormat.YUV_420_888); 1424 numSurfaces = backgroundRequest ? 2 : 1; 1425 1426 outputSizes = new Size[numSurfaces]; 1427 outputFormats = new int[numSurfaces]; 1428 outputSizes[0] = maxYuvSize; 1429 outputFormats[0] = ImageFormat.YUV_420_888; 1430 if (backgroundRequest) { 1431 outputSizes[1] = new Size(640, 480); 1432 outputFormats[1] = ImageFormat.YUV_420_888; 1433 } 1434 } 1435 1436 prepareImageReaders(outputSizes, outputFormats, inputSize, inputFormat, maxInputBuffers); 1437 } 1438 1439 /** 1440 * Wait until mCountCallbacksRemaining is 0 or a specified amount of time has elapsed between 1441 * each callback. 1442 */ 1443 private void waitForCallbacks(long timeoutMs) throws ItsException { 1444 synchronized(mCountCallbacksRemaining) { 1445 int currentCount = mCountCallbacksRemaining.get(); 1446 while (currentCount > 0) { 1447 try { 1448 mCountCallbacksRemaining.wait(timeoutMs); 1449 } catch (InterruptedException e) { 1450 throw new ItsException("Waiting for callbacks was interrupted.", e); 1451 } 1452 1453 int newCount = mCountCallbacksRemaining.get(); 1454 if (newCount == currentCount) { 1455 throw new ItsException("No callback received within timeout " + 1456 timeoutMs + "ms"); 1457 } 1458 currentCount = newCount; 1459 } 1460 } 1461 } 1462 1463 private void doCapture(JSONObject params) throws ItsException { 1464 try { 1465 // Parse the JSON to get the list of capture requests. 1466 List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList( 1467 mCamera, params, "captureRequests"); 1468 1469 // optional background preview requests 1470 List<CaptureRequest.Builder> backgroundRequests = ItsSerializer.deserializeRequestList( 1471 mCamera, params, "repeatRequests"); 1472 boolean backgroundRequest = backgroundRequests.size() > 0; 1473 1474 int numSurfaces = 0; 1475 int numCaptureSurfaces = 0; 1476 BlockingSessionCallback sessionListener = new BlockingSessionCallback(); 1477 try { 1478 mCountRawOrDng.set(0); 1479 mCountJpg.set(0); 1480 mCountYuv.set(0); 1481 mCountRaw10.set(0); 1482 mCountRaw12.set(0); 1483 mCountCapRes.set(0); 1484 mCaptureRawIsDng = false; 1485 mCaptureRawIsStats = false; 1486 mCaptureResults = new CaptureResult[requests.size()]; 1487 1488 JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params); 1489 1490 prepareImageReadersWithOutputSpecs(jsonOutputSpecs, /*inputSize*/null, 1491 /*inputFormat*/0, /*maxInputBuffers*/0, backgroundRequest); 1492 numSurfaces = mOutputImageReaders.length; 1493 numCaptureSurfaces = numSurfaces - (backgroundRequest ? 1 : 0); 1494 1495 List<OutputConfiguration> outputConfigs = 1496 new ArrayList<OutputConfiguration>(numSurfaces); 1497 for (int i = 0; i < numSurfaces; i++) { 1498 OutputConfiguration config = new OutputConfiguration( 1499 mOutputImageReaders[i].getSurface()); 1500 if (mPhysicalStreamMap.get(i) != null) { 1501 config.setPhysicalCameraId(mPhysicalStreamMap.get(i)); 1502 } 1503 outputConfigs.add(config); 1504 } 1505 mCamera.createCaptureSessionByOutputConfigurations(outputConfigs, 1506 sessionListener, mCameraHandler); 1507 mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS); 1508 1509 for (int i = 0; i < numSurfaces; i++) { 1510 ImageReader.OnImageAvailableListener readerListener; 1511 if (backgroundRequest && i == numSurfaces - 1) { 1512 readerListener = createAvailableListenerDropper(); 1513 } else { 1514 readerListener = createAvailableListener(mCaptureCallback); 1515 } 1516 mOutputImageReaders[i].setOnImageAvailableListener(readerListener, 1517 mSaveHandlers[i]); 1518 } 1519 1520 // Plan for how many callbacks need to be received throughout the duration of this 1521 // sequence of capture requests. There is one callback per image surface, and one 1522 // callback for the CaptureResult, for each capture. 1523 int numCaptures = requests.size(); 1524 mCountCallbacksRemaining.set(numCaptures * (numCaptureSurfaces + 1)); 1525 1526 } catch (CameraAccessException e) { 1527 throw new ItsException("Error configuring outputs", e); 1528 } 1529 1530 // Start background requests and let it warm up pipeline 1531 if (backgroundRequest) { 1532 List<CaptureRequest> bgRequestList = 1533 new ArrayList<CaptureRequest>(backgroundRequests.size()); 1534 for (int i = 0; i < backgroundRequests.size(); i++) { 1535 CaptureRequest.Builder req = backgroundRequests.get(i); 1536 req.addTarget(mOutputImageReaders[numCaptureSurfaces].getSurface()); 1537 bgRequestList.add(req.build()); 1538 } 1539 mSession.setRepeatingBurst(bgRequestList, null, null); 1540 // warm up the pipeline 1541 Thread.sleep(PIPELINE_WARMUP_TIME_MS); 1542 } 1543 1544 // Initiate the captures. 1545 long maxExpTimeNs = -1; 1546 List<CaptureRequest> requestList = 1547 new ArrayList<>(requests.size()); 1548 for (int i = 0; i < requests.size(); i++) { 1549 CaptureRequest.Builder req = requests.get(i); 1550 // For DNG captures, need the LSC map to be available. 1551 if (mCaptureRawIsDng) { 1552 req.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1); 1553 } 1554 Long expTimeNs = req.get(CaptureRequest.SENSOR_EXPOSURE_TIME); 1555 if (expTimeNs != null && expTimeNs > maxExpTimeNs) { 1556 maxExpTimeNs = expTimeNs; 1557 } 1558 1559 for (int j = 0; j < numCaptureSurfaces; j++) { 1560 req.addTarget(mOutputImageReaders[j].getSurface()); 1561 } 1562 requestList.add(req.build()); 1563 } 1564 mSession.captureBurst(requestList, mCaptureResultListener, mResultHandler); 1565 1566 long timeout = TIMEOUT_CALLBACK * 1000; 1567 if (maxExpTimeNs > 0) { 1568 timeout += maxExpTimeNs / 1000000; // ns to ms 1569 } 1570 // Make sure all callbacks have been hit (wait until captures are done). 1571 // If no timeouts are received after a timeout, then fail. 1572 waitForCallbacks(timeout); 1573 1574 // Close session and wait until session is fully closed 1575 mSession.close(); 1576 sessionListener.getStateWaiter().waitForState( 1577 BlockingSessionCallback.SESSION_CLOSED, TIMEOUT_SESSION_CLOSE); 1578 1579 } catch (android.hardware.camera2.CameraAccessException e) { 1580 throw new ItsException("Access error: ", e); 1581 } catch (InterruptedException e) { 1582 throw new ItsException("Unexpected InterruptedException: ", e); 1583 } 1584 } 1585 1586 /** 1587 * Perform reprocess captures. 1588 * 1589 * It takes captureRequests in a JSON object and perform capture requests in two steps: 1590 * regular capture request to get reprocess input and reprocess capture request to get 1591 * reprocess outputs. 1592 * 1593 * Regular capture requests: 1594 * 1. For each capture request in the JSON object, create a full-size capture request with 1595 * the settings in the JSON object. 1596 * 2. Remember and clear noise reduction, edge enhancement, and effective exposure factor 1597 * from the regular capture requests. (Those settings will be used for reprocess requests.) 1598 * 3. Submit the regular capture requests. 1599 * 1600 * Reprocess capture requests: 1601 * 4. Wait for the regular capture results and use them to create reprocess capture requests. 1602 * 5. Wait for the regular capture output images and queue them to the image writer. 1603 * 6. Set the noise reduction, edge enhancement, and effective exposure factor from #2. 1604 * 7. Submit the reprocess capture requests. 1605 * 1606 * The output images and results for the regular capture requests won't be written to socket. 1607 * The output images and results for the reprocess capture requests will be written to socket. 1608 */ 1609 private void doReprocessCapture(JSONObject params) throws ItsException { 1610 ImageWriter imageWriter = null; 1611 ArrayList<Integer> noiseReductionModes = new ArrayList<>(); 1612 ArrayList<Integer> edgeModes = new ArrayList<>(); 1613 ArrayList<Float> effectiveExposureFactors = new ArrayList<>(); 1614 1615 mCountRawOrDng.set(0); 1616 mCountJpg.set(0); 1617 mCountYuv.set(0); 1618 mCountRaw10.set(0); 1619 mCountRaw12.set(0); 1620 mCountCapRes.set(0); 1621 mCaptureRawIsDng = false; 1622 mCaptureRawIsStats = false; 1623 1624 try { 1625 // Parse the JSON to get the list of capture requests. 1626 List<CaptureRequest.Builder> inputRequests = 1627 ItsSerializer.deserializeRequestList(mCamera, params, "captureRequests"); 1628 1629 // Prepare the image readers for reprocess input and reprocess outputs. 1630 int inputFormat = getReprocessInputFormat(params); 1631 Size inputSize = ItsUtils.getMaxOutputSize(mCameraCharacteristics, inputFormat); 1632 JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params); 1633 prepareImageReadersWithOutputSpecs(jsonOutputSpecs, inputSize, inputFormat, 1634 inputRequests.size(), /*backgroundRequest*/false); 1635 1636 // Prepare a reprocessable session. 1637 int numOutputSurfaces = mOutputImageReaders.length; 1638 InputConfiguration inputConfig = new InputConfiguration(inputSize.getWidth(), 1639 inputSize.getHeight(), inputFormat); 1640 List<Surface> outputSurfaces = new ArrayList<Surface>(); 1641 boolean addSurfaceForInput = true; 1642 for (int i = 0; i < numOutputSurfaces; i++) { 1643 outputSurfaces.add(mOutputImageReaders[i].getSurface()); 1644 if (mOutputImageReaders[i] == mInputImageReader) { 1645 // If input and one of the outputs share the same image reader, avoid 1646 // adding the same surfaces twice. 1647 addSurfaceForInput = false; 1648 } 1649 } 1650 1651 if (addSurfaceForInput) { 1652 // Besides the output surfaces specified in JSON object, add an additional one 1653 // for reprocess input. 1654 outputSurfaces.add(mInputImageReader.getSurface()); 1655 } 1656 1657 BlockingSessionCallback sessionListener = new BlockingSessionCallback(); 1658 mCamera.createReprocessableCaptureSession(inputConfig, outputSurfaces, sessionListener, 1659 mCameraHandler); 1660 mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS); 1661 1662 // Create an image writer for reprocess input. 1663 Surface inputSurface = mSession.getInputSurface(); 1664 imageWriter = ImageWriter.newInstance(inputSurface, inputRequests.size()); 1665 1666 // Set up input reader listener and capture callback listener to get 1667 // reprocess input buffers and the results in order to create reprocess capture 1668 // requests. 1669 ImageReaderListenerWaiter inputReaderListener = new ImageReaderListenerWaiter(); 1670 mInputImageReader.setOnImageAvailableListener(inputReaderListener, mSaveHandlers[0]); 1671 1672 CaptureCallbackWaiter captureCallbackWaiter = new CaptureCallbackWaiter(); 1673 // Prepare the reprocess input request 1674 for (CaptureRequest.Builder inputReqest : inputRequests) { 1675 // Remember and clear noise reduction, edge enhancement, and effective exposure 1676 // factors. 1677 noiseReductionModes.add(inputReqest.get(CaptureRequest.NOISE_REDUCTION_MODE)); 1678 edgeModes.add(inputReqest.get(CaptureRequest.EDGE_MODE)); 1679 effectiveExposureFactors.add(inputReqest.get( 1680 CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)); 1681 1682 inputReqest.set(CaptureRequest.NOISE_REDUCTION_MODE, 1683 CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG); 1684 inputReqest.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_ZERO_SHUTTER_LAG); 1685 inputReqest.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, null); 1686 inputReqest.addTarget(mInputImageReader.getSurface()); 1687 mSession.capture(inputReqest.build(), captureCallbackWaiter, mResultHandler); 1688 } 1689 1690 // Wait for reprocess input images 1691 ArrayList<CaptureRequest.Builder> reprocessOutputRequests = new ArrayList<>(); 1692 for (int i = 0; i < inputRequests.size(); i++) { 1693 TotalCaptureResult result = 1694 captureCallbackWaiter.getResult(TIMEOUT_CALLBACK * 1000); 1695 reprocessOutputRequests.add(mCamera.createReprocessCaptureRequest(result)); 1696 imageWriter.queueInputImage(inputReaderListener.getImage(TIMEOUT_CALLBACK * 1000)); 1697 } 1698 1699 // Start performing reprocess captures. 1700 1701 mCaptureResults = new CaptureResult[inputRequests.size()]; 1702 1703 // Prepare reprocess capture requests. 1704 for (int i = 0; i < numOutputSurfaces; i++) { 1705 ImageReader.OnImageAvailableListener outputReaderListener = 1706 createAvailableListener(mCaptureCallback); 1707 mOutputImageReaders[i].setOnImageAvailableListener(outputReaderListener, 1708 mSaveHandlers[i]); 1709 } 1710 1711 // Plan for how many callbacks need to be received throughout the duration of this 1712 // sequence of capture requests. There is one callback per image surface, and one 1713 // callback for the CaptureResult, for each capture. 1714 int numCaptures = reprocessOutputRequests.size(); 1715 mCountCallbacksRemaining.set(numCaptures * (numOutputSurfaces + 1)); 1716 1717 // Initiate the captures. 1718 for (int i = 0; i < reprocessOutputRequests.size(); i++) { 1719 CaptureRequest.Builder req = reprocessOutputRequests.get(i); 1720 for (ImageReader outputImageReader : mOutputImageReaders) { 1721 req.addTarget(outputImageReader.getSurface()); 1722 } 1723 1724 req.set(CaptureRequest.NOISE_REDUCTION_MODE, noiseReductionModes.get(i)); 1725 req.set(CaptureRequest.EDGE_MODE, edgeModes.get(i)); 1726 req.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, 1727 effectiveExposureFactors.get(i)); 1728 1729 mSession.capture(req.build(), mCaptureResultListener, mResultHandler); 1730 } 1731 1732 // Make sure all callbacks have been hit (wait until captures are done). 1733 // If no timeouts are received after a timeout, then fail. 1734 waitForCallbacks(TIMEOUT_CALLBACK * 1000); 1735 } catch (android.hardware.camera2.CameraAccessException e) { 1736 throw new ItsException("Access error: ", e); 1737 } finally { 1738 closeImageReaders(); 1739 if (mSession != null) { 1740 mSession.close(); 1741 mSession = null; 1742 } 1743 if (imageWriter != null) { 1744 imageWriter.close(); 1745 } 1746 } 1747 } 1748 1749 @Override 1750 public final void onAccuracyChanged(Sensor sensor, int accuracy) { 1751 Logt.i(TAG, "Sensor " + sensor.getName() + " accuracy changed to " + accuracy); 1752 } 1753 1754 @Override 1755 public final void onSensorChanged(SensorEvent event) { 1756 synchronized(mEventLock) { 1757 if (mEventsEnabled) { 1758 MySensorEvent ev2 = new MySensorEvent(); 1759 ev2.sensor = event.sensor; 1760 ev2.accuracy = event.accuracy; 1761 ev2.timestamp = event.timestamp; 1762 ev2.values = new float[event.values.length]; 1763 System.arraycopy(event.values, 0, ev2.values, 0, event.values.length); 1764 mEvents.add(ev2); 1765 } 1766 } 1767 } 1768 1769 private final CaptureCallback mCaptureCallback = new CaptureCallback() { 1770 @Override 1771 public void onCaptureAvailable(Image capture, String physicalCameraId) { 1772 try { 1773 int format = capture.getFormat(); 1774 if (format == ImageFormat.JPEG) { 1775 Logt.i(TAG, "Received JPEG capture"); 1776 byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota); 1777 ByteBuffer buf = ByteBuffer.wrap(img); 1778 int count = mCountJpg.getAndIncrement(); 1779 mSocketRunnableObj.sendResponseCaptureBuffer("jpegImage"+physicalCameraId, buf); 1780 } else if (format == ImageFormat.YUV_420_888) { 1781 Logt.i(TAG, "Received YUV capture"); 1782 byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota); 1783 ByteBuffer buf = ByteBuffer.wrap(img); 1784 mSocketRunnableObj.sendResponseCaptureBuffer( 1785 "yuvImage"+physicalCameraId, buf); 1786 } else if (format == ImageFormat.RAW10) { 1787 Logt.i(TAG, "Received RAW10 capture"); 1788 byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota); 1789 ByteBuffer buf = ByteBuffer.wrap(img); 1790 int count = mCountRaw10.getAndIncrement(); 1791 mSocketRunnableObj.sendResponseCaptureBuffer( 1792 "raw10Image"+physicalCameraId, buf); 1793 } else if (format == ImageFormat.RAW12) { 1794 Logt.i(TAG, "Received RAW12 capture"); 1795 byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota); 1796 ByteBuffer buf = ByteBuffer.wrap(img); 1797 int count = mCountRaw12.getAndIncrement(); 1798 mSocketRunnableObj.sendResponseCaptureBuffer("raw12Image"+physicalCameraId, buf); 1799 } else if (format == ImageFormat.RAW_SENSOR) { 1800 Logt.i(TAG, "Received RAW16 capture"); 1801 int count = mCountRawOrDng.getAndIncrement(); 1802 if (! mCaptureRawIsDng) { 1803 byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota); 1804 if (! mCaptureRawIsStats) { 1805 ByteBuffer buf = ByteBuffer.wrap(img); 1806 mSocketRunnableObj.sendResponseCaptureBuffer( 1807 "rawImage" + physicalCameraId, buf); 1808 } else { 1809 // Compute the requested stats on the raw frame, and return the results 1810 // in a new "stats image". 1811 long startTimeMs = SystemClock.elapsedRealtime(); 1812 int w = capture.getWidth(); 1813 int h = capture.getHeight(); 1814 int aaw = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 1815 .width(); 1816 int aah = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 1817 .height(); 1818 int aax = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 1819 .left; 1820 int aay = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 1821 .top; 1822 1823 if (w == aaw) { 1824 aax = 0; 1825 } 1826 if (h == aah) { 1827 aay = 0; 1828 } 1829 1830 int gw = mCaptureStatsGridWidth; 1831 int gh = mCaptureStatsGridHeight; 1832 float[] stats = StatsImage.computeStatsImage( 1833 img, w, h, aax, aay, aaw, aah, gw, gh); 1834 long endTimeMs = SystemClock.elapsedRealtime(); 1835 Log.e(TAG, "Raw stats computation takes " + (endTimeMs - startTimeMs) + " ms"); 1836 int statsImgSize = stats.length * 4; 1837 if (mSocketQueueQuota != null) { 1838 mSocketQueueQuota.release(img.length); 1839 mSocketQueueQuota.acquire(statsImgSize); 1840 } 1841 ByteBuffer bBuf = ByteBuffer.allocate(statsImgSize); 1842 bBuf.order(ByteOrder.nativeOrder()); 1843 FloatBuffer fBuf = bBuf.asFloatBuffer(); 1844 fBuf.put(stats); 1845 fBuf.position(0); 1846 mSocketRunnableObj.sendResponseCaptureBuffer( 1847 "rawStatsImage"+physicalCameraId, bBuf); 1848 } 1849 } else { 1850 // Wait until the corresponding capture result is ready, up to a timeout. 1851 long t0 = android.os.SystemClock.elapsedRealtime(); 1852 while (! mThreadExitFlag 1853 && android.os.SystemClock.elapsedRealtime()-t0 < TIMEOUT_CAP_RES) { 1854 if (mCaptureResults[count] != null) { 1855 Logt.i(TAG, "Writing capture as DNG"); 1856 DngCreator dngCreator = new DngCreator( 1857 mCameraCharacteristics, mCaptureResults[count]); 1858 ByteArrayOutputStream dngStream = new ByteArrayOutputStream(); 1859 dngCreator.writeImage(dngStream, capture); 1860 byte[] dngArray = dngStream.toByteArray(); 1861 if (mSocketQueueQuota != null) { 1862 // Ideally we should acquire before allocating memory, but 1863 // here the DNG size is unknown before toByteArray call, so 1864 // we have to register the size afterward. This should still 1865 // works most of the time since all DNG images are handled by 1866 // the same handler thread, so we are at most one buffer over 1867 // the quota. 1868 mSocketQueueQuota.acquire(dngArray.length); 1869 } 1870 ByteBuffer dngBuf = ByteBuffer.wrap(dngArray); 1871 mSocketRunnableObj.sendResponseCaptureBuffer("dngImage", dngBuf); 1872 break; 1873 } else { 1874 Thread.sleep(1); 1875 } 1876 } 1877 } 1878 } else if (format == ImageFormat.Y8) { 1879 Logt.i(TAG, "Received Y8 capture"); 1880 byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota); 1881 ByteBuffer buf = ByteBuffer.wrap(img); 1882 mSocketRunnableObj.sendResponseCaptureBuffer( 1883 "y8Image"+physicalCameraId, buf); 1884 } else { 1885 throw new ItsException("Unsupported image format: " + format); 1886 } 1887 1888 synchronized(mCountCallbacksRemaining) { 1889 mCountCallbacksRemaining.decrementAndGet(); 1890 mCountCallbacksRemaining.notify(); 1891 } 1892 } catch (IOException e) { 1893 Logt.e(TAG, "Script error: ", e); 1894 } catch (InterruptedException e) { 1895 Logt.e(TAG, "Script error: ", e); 1896 } catch (ItsException e) { 1897 Logt.e(TAG, "Script error: ", e); 1898 } 1899 } 1900 }; 1901 1902 private static float r2f(Rational r) { 1903 return (float)r.getNumerator() / (float)r.getDenominator(); 1904 } 1905 1906 private boolean hasCapability(int capability) throws ItsException { 1907 int[] capabilities = mCameraCharacteristics.get( 1908 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1909 if (capabilities == null) { 1910 throw new ItsException("Failed to get capabilities"); 1911 } 1912 for (int c : capabilities) { 1913 if (c == capability) { 1914 return true; 1915 } 1916 } 1917 return false; 1918 } 1919 1920 private String buildLogString(CaptureResult result) throws ItsException { 1921 StringBuilder logMsg = new StringBuilder(); 1922 logMsg.append(String.format( 1923 "Capt result: AE=%d, AF=%d, AWB=%d, ", 1924 result.get(CaptureResult.CONTROL_AE_STATE), 1925 result.get(CaptureResult.CONTROL_AF_STATE), 1926 result.get(CaptureResult.CONTROL_AWB_STATE))); 1927 1928 boolean readSensorSettings = hasCapability( 1929 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS); 1930 1931 if (readSensorSettings) { 1932 logMsg.append(String.format( 1933 "sens=%d, exp=%.1fms, dur=%.1fms, ", 1934 result.get(CaptureResult.SENSOR_SENSITIVITY), 1935 result.get(CaptureResult.SENSOR_EXPOSURE_TIME).longValue() / 1000000.0f, 1936 result.get(CaptureResult.SENSOR_FRAME_DURATION).longValue() / 1937 1000000.0f)); 1938 } 1939 if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) { 1940 logMsg.append(String.format( 1941 "gains=[%.1f, %.1f, %.1f, %.1f], ", 1942 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(), 1943 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(), 1944 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(), 1945 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue())); 1946 } else { 1947 logMsg.append("gains=[], "); 1948 } 1949 if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) { 1950 logMsg.append(String.format( 1951 "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ", 1952 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)), 1953 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)), 1954 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)), 1955 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)), 1956 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)), 1957 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)), 1958 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)), 1959 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)), 1960 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2)))); 1961 } else { 1962 logMsg.append("xform=[], "); 1963 } 1964 logMsg.append(String.format( 1965 "foc=%.1f", 1966 result.get(CaptureResult.LENS_FOCUS_DISTANCE))); 1967 return logMsg.toString(); 1968 } 1969 1970 private class ThreeAResultListener extends CaptureResultListener { 1971 private volatile boolean stopped = false; 1972 private boolean aeResultSent = false; 1973 private boolean awbResultSent = false; 1974 private boolean afResultSent = false; 1975 1976 @Override 1977 public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, 1978 long timestamp, long frameNumber) { 1979 } 1980 1981 @Override 1982 public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, 1983 TotalCaptureResult result) { 1984 try { 1985 if (stopped) { 1986 return; 1987 } 1988 1989 if (request == null || result == null) { 1990 throw new ItsException("Request/result is invalid"); 1991 } 1992 1993 Logt.i(TAG, buildLogString(result)); 1994 1995 synchronized(m3AStateLock) { 1996 if (result.get(CaptureResult.CONTROL_AE_STATE) != null) { 1997 mConvergedAE = result.get(CaptureResult.CONTROL_AE_STATE) == 1998 CaptureResult.CONTROL_AE_STATE_CONVERGED || 1999 result.get(CaptureResult.CONTROL_AE_STATE) == 2000 CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED || 2001 result.get(CaptureResult.CONTROL_AE_STATE) == 2002 CaptureResult.CONTROL_AE_STATE_LOCKED; 2003 mLockedAE = result.get(CaptureResult.CONTROL_AE_STATE) == 2004 CaptureResult.CONTROL_AE_STATE_LOCKED; 2005 } 2006 if (result.get(CaptureResult.CONTROL_AF_STATE) != null) { 2007 mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE) == 2008 CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED; 2009 } 2010 if (result.get(CaptureResult.CONTROL_AWB_STATE) != null) { 2011 mConvergedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) == 2012 CaptureResult.CONTROL_AWB_STATE_CONVERGED || 2013 result.get(CaptureResult.CONTROL_AWB_STATE) == 2014 CaptureResult.CONTROL_AWB_STATE_LOCKED; 2015 mLockedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) == 2016 CaptureResult.CONTROL_AWB_STATE_LOCKED; 2017 } 2018 2019 if (mConvergedAE && (!mNeedsLockedAE || mLockedAE) && !aeResultSent) { 2020 aeResultSent = true; 2021 if (result.get(CaptureResult.SENSOR_SENSITIVITY) != null 2022 && result.get(CaptureResult.SENSOR_EXPOSURE_TIME) != null) { 2023 mSocketRunnableObj.sendResponse("aeResult", String.format("%d %d", 2024 result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(), 2025 result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue() 2026 )); 2027 } else { 2028 Logt.i(TAG, String.format( 2029 "AE converged but NULL exposure values, sensitivity:%b, expTime:%b", 2030 result.get(CaptureResult.SENSOR_SENSITIVITY) == null, 2031 result.get(CaptureResult.SENSOR_EXPOSURE_TIME) == null)); 2032 } 2033 } 2034 2035 if (mConvergedAF && !afResultSent) { 2036 afResultSent = true; 2037 if (result.get(CaptureResult.LENS_FOCUS_DISTANCE) != null) { 2038 mSocketRunnableObj.sendResponse("afResult", String.format("%f", 2039 result.get(CaptureResult.LENS_FOCUS_DISTANCE) 2040 )); 2041 } else { 2042 Logt.i(TAG, "AF converged but NULL focus distance values"); 2043 } 2044 } 2045 2046 if (mConvergedAWB && (!mNeedsLockedAWB || mLockedAWB) && !awbResultSent) { 2047 awbResultSent = true; 2048 if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null 2049 && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) { 2050 mSocketRunnableObj.sendResponse("awbResult", String.format( 2051 "%f %f %f %f %f %f %f %f %f %f %f %f %f", 2052 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(), 2053 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(), 2054 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(), 2055 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue(), 2056 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM). 2057 getElement(0,0)), 2058 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM). 2059 getElement(1,0)), 2060 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM). 2061 getElement(2,0)), 2062 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM). 2063 getElement(0,1)), 2064 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM). 2065 getElement(1,1)), 2066 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM). 2067 getElement(2,1)), 2068 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM). 2069 getElement(0,2)), 2070 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM). 2071 getElement(1,2)), 2072 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM). 2073 getElement(2,2)))); 2074 } else { 2075 Logt.i(TAG, String.format( 2076 "AWB converged but NULL color correction values, gains:%b, ccm:%b", 2077 result.get(CaptureResult.COLOR_CORRECTION_GAINS) == null, 2078 result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) == null)); 2079 } 2080 } 2081 } 2082 2083 mInterlock3A.open(); 2084 } catch (ItsException e) { 2085 Logt.e(TAG, "Script error: ", e); 2086 } catch (Exception e) { 2087 Logt.e(TAG, "Script error: ", e); 2088 } 2089 } 2090 2091 @Override 2092 public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, 2093 CaptureFailure failure) { 2094 Logt.e(TAG, "Script error: capture failed"); 2095 } 2096 2097 public void stop() { 2098 stopped = true; 2099 } 2100 } 2101 2102 private final CaptureResultListener mCaptureResultListener = new CaptureResultListener() { 2103 @Override 2104 public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, 2105 long timestamp, long frameNumber) { 2106 } 2107 2108 @Override 2109 public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, 2110 TotalCaptureResult result) { 2111 try { 2112 if (request == null || result == null) { 2113 throw new ItsException("Request/result is invalid"); 2114 } 2115 2116 Logt.i(TAG, buildLogString(result)); 2117 2118 int count = mCountCapRes.getAndIncrement(); 2119 mCaptureResults[count] = result; 2120 mSocketRunnableObj.sendResponseCaptureResult(mCameraCharacteristics, 2121 request, result, mOutputImageReaders); 2122 synchronized(mCountCallbacksRemaining) { 2123 mCountCallbacksRemaining.decrementAndGet(); 2124 mCountCallbacksRemaining.notify(); 2125 } 2126 } catch (ItsException e) { 2127 Logt.e(TAG, "Script error: ", e); 2128 } catch (Exception e) { 2129 Logt.e(TAG, "Script error: ", e); 2130 } 2131 } 2132 2133 @Override 2134 public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, 2135 CaptureFailure failure) { 2136 Logt.e(TAG, "Script error: capture failed"); 2137 } 2138 }; 2139 2140 private class CaptureCallbackWaiter extends CameraCaptureSession.CaptureCallback { 2141 private final LinkedBlockingQueue<TotalCaptureResult> mResultQueue = 2142 new LinkedBlockingQueue<>(); 2143 2144 @Override 2145 public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, 2146 long timestamp, long frameNumber) { 2147 } 2148 2149 @Override 2150 public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, 2151 TotalCaptureResult result) { 2152 try { 2153 mResultQueue.put(result); 2154 } catch (InterruptedException e) { 2155 throw new UnsupportedOperationException( 2156 "Can't handle InterruptedException in onImageAvailable"); 2157 } 2158 } 2159 2160 @Override 2161 public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, 2162 CaptureFailure failure) { 2163 Logt.e(TAG, "Script error: capture failed"); 2164 } 2165 2166 public TotalCaptureResult getResult(long timeoutMs) throws ItsException { 2167 TotalCaptureResult result; 2168 try { 2169 result = mResultQueue.poll(timeoutMs, TimeUnit.MILLISECONDS); 2170 } catch (InterruptedException e) { 2171 throw new ItsException(e); 2172 } 2173 2174 if (result == null) { 2175 throw new ItsException("Getting an image timed out after " + timeoutMs + 2176 "ms"); 2177 } 2178 2179 return result; 2180 } 2181 } 2182 2183 private static class ImageReaderListenerWaiter implements ImageReader.OnImageAvailableListener { 2184 private final LinkedBlockingQueue<Image> mImageQueue = new LinkedBlockingQueue<>(); 2185 2186 @Override 2187 public void onImageAvailable(ImageReader reader) { 2188 try { 2189 mImageQueue.put(reader.acquireNextImage()); 2190 } catch (InterruptedException e) { 2191 throw new UnsupportedOperationException( 2192 "Can't handle InterruptedException in onImageAvailable"); 2193 } 2194 } 2195 2196 public Image getImage(long timeoutMs) throws ItsException { 2197 Image image; 2198 try { 2199 image = mImageQueue.poll(timeoutMs, TimeUnit.MILLISECONDS); 2200 } catch (InterruptedException e) { 2201 throw new ItsException(e); 2202 } 2203 2204 if (image == null) { 2205 throw new ItsException("Getting an image timed out after " + timeoutMs + 2206 "ms"); 2207 } 2208 return image; 2209 } 2210 } 2211 2212 private int getReprocessInputFormat(JSONObject params) throws ItsException { 2213 String reprocessFormat; 2214 try { 2215 reprocessFormat = params.getString("reprocessFormat"); 2216 } catch (org.json.JSONException e) { 2217 throw new ItsException("Error parsing reprocess format: " + e); 2218 } 2219 2220 if (reprocessFormat.equals("yuv")) { 2221 return ImageFormat.YUV_420_888; 2222 } else if (reprocessFormat.equals("private")) { 2223 return ImageFormat.PRIVATE; 2224 } 2225 2226 throw new ItsException("Uknown reprocess format: " + reprocessFormat); 2227 } 2228 } 2229