1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package com.android.cts.verifier.camera.its; 18 19 import android.app.Service; 20 import android.content.Context; 21 import android.content.Intent; 22 import android.graphics.ImageFormat; 23 import android.hardware.camera2.CameraCaptureSession; 24 import android.hardware.camera2.CameraAccessException; 25 import android.hardware.camera2.CameraCharacteristics; 26 import android.hardware.camera2.CameraDevice; 27 import android.hardware.camera2.CameraManager; 28 import android.hardware.camera2.CaptureFailure; 29 import android.hardware.camera2.CaptureRequest; 30 import android.hardware.camera2.CaptureResult; 31 import android.hardware.camera2.DngCreator; 32 import android.hardware.camera2.TotalCaptureResult; 33 import android.hardware.camera2.params.InputConfiguration; 34 import android.hardware.camera2.params.MeteringRectangle; 35 import android.hardware.Sensor; 36 import android.hardware.SensorEvent; 37 import android.hardware.SensorEventListener; 38 import android.hardware.SensorManager; 39 import android.media.Image; 40 import android.media.ImageReader; 41 import android.media.ImageWriter; 42 import android.media.Image.Plane; 43 import android.net.Uri; 44 import android.os.ConditionVariable; 45 import android.os.Handler; 46 import android.os.HandlerThread; 47 import android.os.IBinder; 48 import android.os.Message; 49 import android.os.SystemClock; 50 import android.os.Vibrator; 51 import android.util.Log; 52 import android.util.Rational; 53 import android.util.Size; 54 import android.view.Surface; 55 56 import com.android.ex.camera2.blocking.BlockingCameraManager; 57 import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException; 58 import com.android.ex.camera2.blocking.BlockingStateCallback; 59 import com.android.ex.camera2.blocking.BlockingSessionCallback; 60 61 import com.android.cts.verifier.camera.its.StatsImage; 62 63 import org.json.JSONArray; 64 import org.json.JSONObject; 65 66 import java.io.BufferedReader; 67 import java.io.BufferedWriter; 68 import java.io.ByteArrayOutputStream; 69 import java.io.IOException; 70 import java.io.InputStreamReader; 71 import java.io.OutputStreamWriter; 72 import java.io.PrintWriter; 73 import java.math.BigInteger; 74 import java.net.ServerSocket; 75 import java.net.Socket; 76 import java.nio.ByteBuffer; 77 import java.nio.ByteOrder; 78 import java.nio.FloatBuffer; 79 import java.nio.charset.Charset; 80 import java.security.MessageDigest; 81 import java.util.ArrayList; 82 import java.util.Arrays; 83 import java.util.LinkedList; 84 import java.util.List; 85 import java.util.concurrent.BlockingQueue; 86 import java.util.concurrent.CountDownLatch; 87 import java.util.concurrent.LinkedBlockingDeque; 88 import java.util.concurrent.LinkedBlockingQueue; 89 import java.util.concurrent.Semaphore; 90 import java.util.concurrent.TimeUnit; 91 import java.util.concurrent.atomic.AtomicInteger; 92 93 public class ItsService extends Service implements SensorEventListener { 94 public static final String TAG = ItsService.class.getSimpleName(); 95 96 // Timeouts, in seconds. 97 private static final int TIMEOUT_CALLBACK = 20; 98 private static final int TIMEOUT_3A = 10; 99 100 // Time given for background requests to warm up pipeline 101 private static final long PIPELINE_WARMUP_TIME_MS = 2000; 102 103 // State transition timeouts, in ms. 104 private static final long TIMEOUT_IDLE_MS = 2000; 105 private static final long TIMEOUT_STATE_MS = 500; 106 private static final long TIMEOUT_SESSION_CLOSE = 3000; 107 108 // Timeout to wait for a capture result after the capture buffer has arrived, in ms. 109 private static final long TIMEOUT_CAP_RES = 2000; 110 111 private static final int MAX_CONCURRENT_READER_BUFFERS = 10; 112 113 // Supports at most RAW+YUV+JPEG, one surface each, plus optional background stream 114 private static final int MAX_NUM_OUTPUT_SURFACES = 4; 115 116 public static final int SERVERPORT = 6000; 117 118 public static final String REGION_KEY = "regions"; 119 public static final String REGION_AE_KEY = "ae"; 120 public static final String REGION_AWB_KEY = "awb"; 121 public static final String REGION_AF_KEY = "af"; 122 public static final String LOCK_AE_KEY = "aeLock"; 123 public static final String LOCK_AWB_KEY = "awbLock"; 124 public static final String TRIGGER_KEY = "triggers"; 125 public static final String TRIGGER_AE_KEY = "ae"; 126 public static final String TRIGGER_AF_KEY = "af"; 127 public static final String VIB_PATTERN_KEY = "pattern"; 128 public static final String EVCOMP_KEY = "evComp"; 129 130 private CameraManager mCameraManager = null; 131 private HandlerThread mCameraThread = null; 132 private Handler mCameraHandler = null; 133 private BlockingCameraManager mBlockingCameraManager = null; 134 private BlockingStateCallback mCameraListener = null; 135 private CameraDevice mCamera = null; 136 private CameraCaptureSession mSession = null; 137 private ImageReader[] mOutputImageReaders = null; 138 private ImageReader mInputImageReader = null; 139 private CameraCharacteristics mCameraCharacteristics = null; 140 141 private Vibrator mVibrator = null; 142 143 private HandlerThread mSaveThreads[] = new HandlerThread[MAX_NUM_OUTPUT_SURFACES]; 144 private Handler mSaveHandlers[] = new Handler[MAX_NUM_OUTPUT_SURFACES]; 145 private HandlerThread mResultThread = null; 146 private Handler mResultHandler = null; 147 148 private volatile boolean mThreadExitFlag = false; 149 150 private volatile ServerSocket mSocket = null; 151 private volatile SocketRunnable mSocketRunnableObj = null; 152 private Semaphore mSocketQueueQuota = null; 153 private LinkedList<Integer> mInflightImageSizes = new LinkedList<>(); 154 private volatile BlockingQueue<ByteBuffer> mSocketWriteQueue = 155 new LinkedBlockingDeque<ByteBuffer>(); 156 private final Object mSocketWriteEnqueueLock = new Object(); 157 private final Object mSocketWriteDrainLock = new Object(); 158 159 private volatile BlockingQueue<Object[]> mSerializerQueue = 160 new LinkedBlockingDeque<Object[]>(); 161 162 private AtomicInteger mCountCallbacksRemaining = new AtomicInteger(); 163 private AtomicInteger mCountRawOrDng = new AtomicInteger(); 164 private AtomicInteger mCountRaw10 = new AtomicInteger(); 165 private AtomicInteger mCountRaw12 = new AtomicInteger(); 166 private AtomicInteger mCountJpg = new AtomicInteger(); 167 private AtomicInteger mCountYuv = new AtomicInteger(); 168 private AtomicInteger mCountCapRes = new AtomicInteger(); 169 private boolean mCaptureRawIsDng; 170 private boolean mCaptureRawIsStats; 171 private int mCaptureStatsGridWidth; 172 private int mCaptureStatsGridHeight; 173 private CaptureResult mCaptureResults[] = null; 174 175 private volatile ConditionVariable mInterlock3A = new ConditionVariable(true); 176 private volatile boolean mIssuedRequest3A = false; 177 private volatile boolean mConvergedAE = false; 178 private volatile boolean mConvergedAF = false; 179 private volatile boolean mConvergedAWB = false; 180 private volatile boolean mLockedAE = false; 181 private volatile boolean mLockedAWB = false; 182 private volatile boolean mNeedsLockedAE = false; 183 private volatile boolean mNeedsLockedAWB = false; 184 185 class MySensorEvent { 186 public Sensor sensor; 187 public int accuracy; 188 public long timestamp; 189 public float values[]; 190 } 191 192 // For capturing motion sensor traces. 193 private SensorManager mSensorManager = null; 194 private Sensor mAccelSensor = null; 195 private Sensor mMagSensor = null; 196 private Sensor mGyroSensor = null; 197 private volatile LinkedList<MySensorEvent> mEvents = null; 198 private volatile Object mEventLock = new Object(); 199 private volatile boolean mEventsEnabled = false; 200 private HandlerThread mSensorThread = null; 201 private Handler mSensorHandler = null; 202 203 public interface CaptureCallback { 204 void onCaptureAvailable(Image capture); 205 } 206 207 public abstract class CaptureResultListener extends CameraCaptureSession.CaptureCallback {} 208 209 @Override 210 public IBinder onBind(Intent intent) { 211 return null; 212 } 213 214 @Override 215 public void onCreate() { 216 try { 217 mThreadExitFlag = false; 218 219 // Get handle to camera manager. 220 mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE); 221 if (mCameraManager == null) { 222 throw new ItsException("Failed to connect to camera manager"); 223 } 224 mBlockingCameraManager = new BlockingCameraManager(mCameraManager); 225 mCameraListener = new BlockingStateCallback(); 226 227 // Register for motion events. 228 mEvents = new LinkedList<MySensorEvent>(); 229 mSensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE); 230 mAccelSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER); 231 mMagSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD); 232 mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE); 233 mSensorThread = new HandlerThread("SensorThread"); 234 mSensorThread.start(); 235 mSensorHandler = new Handler(mSensorThread.getLooper()); 236 mSensorManager.registerListener(this, mAccelSensor, 237 SensorManager.SENSOR_DELAY_NORMAL, mSensorHandler); 238 mSensorManager.registerListener(this, mMagSensor, 239 SensorManager.SENSOR_DELAY_NORMAL, mSensorHandler); 240 mSensorManager.registerListener(this, mGyroSensor, 241 /*200hz*/5000, mSensorHandler); 242 243 // Get a handle to the system vibrator. 244 mVibrator = (Vibrator)getSystemService(Context.VIBRATOR_SERVICE); 245 246 // Create threads to receive images and save them. 247 for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) { 248 mSaveThreads[i] = new HandlerThread("SaveThread" + i); 249 mSaveThreads[i].start(); 250 mSaveHandlers[i] = new Handler(mSaveThreads[i].getLooper()); 251 } 252 253 // Create a thread to handle object serialization. 254 (new Thread(new SerializerRunnable())).start();; 255 256 // Create a thread to receive capture results and process them. 257 mResultThread = new HandlerThread("ResultThread"); 258 mResultThread.start(); 259 mResultHandler = new Handler(mResultThread.getLooper()); 260 261 // Create a thread for the camera device. 262 mCameraThread = new HandlerThread("ItsCameraThread"); 263 mCameraThread.start(); 264 mCameraHandler = new Handler(mCameraThread.getLooper()); 265 266 // Create a thread to process commands, listening on a TCP socket. 267 mSocketRunnableObj = new SocketRunnable(); 268 (new Thread(mSocketRunnableObj)).start(); 269 } catch (ItsException e) { 270 Logt.e(TAG, "Service failed to start: ", e); 271 } 272 } 273 274 @Override 275 public int onStartCommand(Intent intent, int flags, int startId) { 276 try { 277 // Just log a message indicating that the service is running and is able to accept 278 // socket connections. 279 while (!mThreadExitFlag && mSocket==null) { 280 Thread.sleep(1); 281 } 282 if (!mThreadExitFlag){ 283 Logt.i(TAG, "ItsService ready"); 284 } else { 285 Logt.e(TAG, "Starting ItsService in bad state"); 286 } 287 } catch (java.lang.InterruptedException e) { 288 Logt.e(TAG, "Error starting ItsService (interrupted)", e); 289 } 290 return START_STICKY; 291 } 292 293 @Override 294 public void onDestroy() { 295 mThreadExitFlag = true; 296 for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) { 297 if (mSaveThreads[i] != null) { 298 mSaveThreads[i].quit(); 299 mSaveThreads[i] = null; 300 } 301 } 302 if (mSensorThread != null) { 303 mSensorThread.quitSafely(); 304 mSensorThread = null; 305 } 306 if (mResultThread != null) { 307 mResultThread.quitSafely(); 308 mResultThread = null; 309 } 310 if (mCameraThread != null) { 311 mCameraThread.quitSafely(); 312 mCameraThread = null; 313 } 314 } 315 316 public void openCameraDevice(int cameraId) throws ItsException { 317 Logt.i(TAG, String.format("Opening camera %d", cameraId)); 318 319 String[] devices; 320 try { 321 devices = mCameraManager.getCameraIdList(); 322 if (devices == null || devices.length == 0) { 323 throw new ItsException("No camera devices"); 324 } 325 } catch (CameraAccessException e) { 326 throw new ItsException("Failed to get device ID list", e); 327 } 328 329 try { 330 mCamera = mBlockingCameraManager.openCamera(devices[cameraId], 331 mCameraListener, mCameraHandler); 332 mCameraCharacteristics = mCameraManager.getCameraCharacteristics( 333 devices[cameraId]); 334 Size maxYuvSize = ItsUtils.getYuvOutputSizes(mCameraCharacteristics)[0]; 335 // 2 bytes per pixel for RGBA Bitmap and at least 3 Bitmaps per CDD 336 int quota = maxYuvSize.getWidth() * maxYuvSize.getHeight() * 2 * 3; 337 mSocketQueueQuota = new Semaphore(quota, true); 338 } catch (CameraAccessException e) { 339 throw new ItsException("Failed to open camera", e); 340 } catch (BlockingOpenException e) { 341 throw new ItsException("Failed to open camera (after blocking)", e); 342 } 343 mSocketRunnableObj.sendResponse("cameraOpened", ""); 344 } 345 346 public void closeCameraDevice() throws ItsException { 347 try { 348 if (mCamera != null) { 349 Logt.i(TAG, "Closing camera"); 350 mCamera.close(); 351 mCamera = null; 352 } 353 } catch (Exception e) { 354 throw new ItsException("Failed to close device"); 355 } 356 mSocketRunnableObj.sendResponse("cameraClosed", ""); 357 } 358 359 class SerializerRunnable implements Runnable { 360 // Use a separate thread to perform JSON serialization (since this can be slow due to 361 // the reflection). 362 @Override 363 public void run() { 364 Logt.i(TAG, "Serializer thread starting"); 365 while (! mThreadExitFlag) { 366 try { 367 Object objs[] = mSerializerQueue.take(); 368 JSONObject jsonObj = new JSONObject(); 369 String tag = null; 370 for (int i = 0; i < objs.length; i++) { 371 Object obj = objs[i]; 372 if (obj instanceof String) { 373 if (tag != null) { 374 throw new ItsException("Multiple tags for socket response"); 375 } 376 tag = (String)obj; 377 } else if (obj instanceof CameraCharacteristics) { 378 jsonObj.put("cameraProperties", ItsSerializer.serialize( 379 (CameraCharacteristics)obj)); 380 } else if (obj instanceof CaptureRequest) { 381 jsonObj.put("captureRequest", ItsSerializer.serialize( 382 (CaptureRequest)obj)); 383 } else if (obj instanceof CaptureResult) { 384 jsonObj.put("captureResult", ItsSerializer.serialize( 385 (CaptureResult)obj)); 386 } else if (obj instanceof JSONArray) { 387 jsonObj.put("outputs", (JSONArray)obj); 388 } else { 389 throw new ItsException("Invalid object received for serialiation"); 390 } 391 } 392 if (tag == null) { 393 throw new ItsException("No tag provided for socket response"); 394 } 395 mSocketRunnableObj.sendResponse(tag, null, jsonObj, null); 396 Logt.i(TAG, String.format("Serialized %s", tag)); 397 } catch (org.json.JSONException e) { 398 Logt.e(TAG, "Error serializing object", e); 399 break; 400 } catch (ItsException e) { 401 Logt.e(TAG, "Error serializing object", e); 402 break; 403 } catch (java.lang.InterruptedException e) { 404 Logt.e(TAG, "Error serializing object (interrupted)", e); 405 break; 406 } 407 } 408 Logt.i(TAG, "Serializer thread terminated"); 409 } 410 } 411 412 class SocketWriteRunnable implements Runnable { 413 414 // Use a separate thread to service a queue of objects to be written to the socket, 415 // writing each sequentially in order. This is needed since different handler functions 416 // (called on different threads) will need to send data back to the host script. 417 418 public Socket mOpenSocket = null; 419 private Thread mThread = null; 420 421 public SocketWriteRunnable(Socket openSocket) { 422 mOpenSocket = openSocket; 423 } 424 425 public void setOpenSocket(Socket openSocket) { 426 mOpenSocket = openSocket; 427 } 428 429 @Override 430 public void run() { 431 Logt.i(TAG, "Socket writer thread starting"); 432 while (true) { 433 try { 434 ByteBuffer b = mSocketWriteQueue.take(); 435 synchronized(mSocketWriteDrainLock) { 436 if (mOpenSocket == null) { 437 Logt.e(TAG, "No open socket connection!"); 438 continue; 439 } 440 if (b.hasArray()) { 441 mOpenSocket.getOutputStream().write(b.array(), 0, b.capacity()); 442 } else { 443 byte[] barray = new byte[b.capacity()]; 444 b.get(barray); 445 mOpenSocket.getOutputStream().write(barray); 446 } 447 mOpenSocket.getOutputStream().flush(); 448 Logt.i(TAG, String.format("Wrote to socket: %d bytes", b.capacity())); 449 Integer imgBufSize = mInflightImageSizes.peek(); 450 if (imgBufSize != null && imgBufSize == b.capacity()) { 451 mInflightImageSizes.removeFirst(); 452 if (mSocketQueueQuota != null) { 453 mSocketQueueQuota.release(imgBufSize); 454 } 455 } 456 } 457 } catch (IOException e) { 458 Logt.e(TAG, "Error writing to socket", e); 459 mOpenSocket = null; 460 break; 461 } catch (java.lang.InterruptedException e) { 462 Logt.e(TAG, "Error writing to socket (interrupted)", e); 463 mOpenSocket = null; 464 break; 465 } 466 } 467 Logt.i(TAG, "Socket writer thread terminated"); 468 } 469 470 public synchronized void checkAndStartThread() { 471 if (mThread == null || mThread.getState() == Thread.State.TERMINATED) { 472 mThread = new Thread(this); 473 } 474 if (mThread.getState() == Thread.State.NEW) { 475 mThread.start(); 476 } 477 } 478 479 } 480 481 class SocketRunnable implements Runnable { 482 483 // Format of sent messages (over the socket): 484 // * Serialized JSON object on a single line (newline-terminated) 485 // * For byte buffers, the binary data then follows 486 // 487 // Format of received messages (from the socket): 488 // * Serialized JSON object on a single line (newline-terminated) 489 490 private Socket mOpenSocket = null; 491 private SocketWriteRunnable mSocketWriteRunnable = null; 492 493 @Override 494 public void run() { 495 Logt.i(TAG, "Socket thread starting"); 496 try { 497 mSocket = new ServerSocket(SERVERPORT); 498 } catch (IOException e) { 499 Logt.e(TAG, "Failed to create socket", e); 500 } 501 502 // Create a new thread to handle writes to this socket. 503 mSocketWriteRunnable = new SocketWriteRunnable(null); 504 505 while (!mThreadExitFlag) { 506 // Receive the socket-open request from the host. 507 try { 508 Logt.i(TAG, "Waiting for client to connect to socket"); 509 mOpenSocket = mSocket.accept(); 510 if (mOpenSocket == null) { 511 Logt.e(TAG, "Socket connection error"); 512 break; 513 } 514 mSocketWriteQueue.clear(); 515 mInflightImageSizes.clear(); 516 mSocketWriteRunnable.setOpenSocket(mOpenSocket); 517 mSocketWriteRunnable.checkAndStartThread(); 518 Logt.i(TAG, "Socket connected"); 519 } catch (IOException e) { 520 Logt.e(TAG, "Socket open error: ", e); 521 break; 522 } 523 524 // Process commands over the open socket. 525 while (!mThreadExitFlag) { 526 try { 527 BufferedReader input = new BufferedReader( 528 new InputStreamReader(mOpenSocket.getInputStream())); 529 if (input == null) { 530 Logt.e(TAG, "Failed to get socket input stream"); 531 break; 532 } 533 String line = input.readLine(); 534 if (line == null) { 535 Logt.i(TAG, "Socket readline retuned null (host disconnected)"); 536 break; 537 } 538 processSocketCommand(line); 539 } catch (IOException e) { 540 Logt.e(TAG, "Socket read error: ", e); 541 break; 542 } catch (ItsException e) { 543 Logt.e(TAG, "Script error: ", e); 544 break; 545 } 546 } 547 548 // Close socket and go back to waiting for a new connection. 549 try { 550 synchronized(mSocketWriteDrainLock) { 551 mSocketWriteQueue.clear(); 552 mInflightImageSizes.clear(); 553 mOpenSocket.close(); 554 mOpenSocket = null; 555 mSocketWriteRunnable.setOpenSocket(null); 556 Logt.i(TAG, "Socket disconnected"); 557 } 558 } catch (java.io.IOException e) { 559 Logt.e(TAG, "Exception closing socket"); 560 } 561 } 562 563 // It's an overall error state if the code gets here; no recevery. 564 // Try to do some cleanup, but the service probably needs to be restarted. 565 Logt.i(TAG, "Socket server loop exited"); 566 mThreadExitFlag = true; 567 try { 568 synchronized(mSocketWriteDrainLock) { 569 if (mOpenSocket != null) { 570 mOpenSocket.close(); 571 mOpenSocket = null; 572 mSocketWriteRunnable.setOpenSocket(null); 573 } 574 } 575 } catch (java.io.IOException e) { 576 Logt.w(TAG, "Exception closing socket"); 577 } 578 try { 579 if (mSocket != null) { 580 mSocket.close(); 581 mSocket = null; 582 } 583 } catch (java.io.IOException e) { 584 Logt.w(TAG, "Exception closing socket"); 585 } 586 } 587 588 public void processSocketCommand(String cmd) 589 throws ItsException { 590 // Each command is a serialized JSON object. 591 try { 592 JSONObject cmdObj = new JSONObject(cmd); 593 Logt.i(TAG, "Start processing command" + cmdObj.getString("cmdName")); 594 if ("open".equals(cmdObj.getString("cmdName"))) { 595 int cameraId = cmdObj.getInt("cameraId"); 596 openCameraDevice(cameraId); 597 } else if ("close".equals(cmdObj.getString("cmdName"))) { 598 closeCameraDevice(); 599 } else if ("getCameraProperties".equals(cmdObj.getString("cmdName"))) { 600 doGetProps(); 601 } else if ("startSensorEvents".equals(cmdObj.getString("cmdName"))) { 602 doStartSensorEvents(); 603 } else if ("getSensorEvents".equals(cmdObj.getString("cmdName"))) { 604 doGetSensorEvents(); 605 } else if ("do3A".equals(cmdObj.getString("cmdName"))) { 606 do3A(cmdObj); 607 } else if ("doCapture".equals(cmdObj.getString("cmdName"))) { 608 doCapture(cmdObj); 609 } else if ("doVibrate".equals(cmdObj.getString("cmdName"))) { 610 doVibrate(cmdObj); 611 } else if ("getCameraIds".equals(cmdObj.getString("cmdName"))) { 612 doGetCameraIds(); 613 } else if ("doReprocessCapture".equals(cmdObj.getString("cmdName"))) { 614 doReprocessCapture(cmdObj); 615 } else { 616 throw new ItsException("Unknown command: " + cmd); 617 } 618 Logt.i(TAG, "Finish processing command" + cmdObj.getString("cmdName")); 619 } catch (org.json.JSONException e) { 620 Logt.e(TAG, "Invalid command: ", e); 621 } 622 } 623 624 public void sendResponse(String tag, String str, JSONObject obj, ByteBuffer bbuf) 625 throws ItsException { 626 try { 627 JSONObject jsonObj = new JSONObject(); 628 jsonObj.put("tag", tag); 629 if (str != null) { 630 jsonObj.put("strValue", str); 631 } 632 if (obj != null) { 633 jsonObj.put("objValue", obj); 634 } 635 if (bbuf != null) { 636 jsonObj.put("bufValueSize", bbuf.capacity()); 637 } 638 ByteBuffer bstr = ByteBuffer.wrap( 639 (jsonObj.toString()+"\n").getBytes(Charset.defaultCharset())); 640 synchronized(mSocketWriteEnqueueLock) { 641 if (bstr != null) { 642 mSocketWriteQueue.put(bstr); 643 } 644 if (bbuf != null) { 645 mInflightImageSizes.add(bbuf.capacity()); 646 mSocketWriteQueue.put(bbuf); 647 } 648 } 649 } catch (org.json.JSONException e) { 650 throw new ItsException("JSON error: ", e); 651 } catch (java.lang.InterruptedException e) { 652 throw new ItsException("Socket error: ", e); 653 } 654 } 655 656 public void sendResponse(String tag, String str) 657 throws ItsException { 658 sendResponse(tag, str, null, null); 659 } 660 661 public void sendResponse(String tag, JSONObject obj) 662 throws ItsException { 663 sendResponse(tag, null, obj, null); 664 } 665 666 public void sendResponseCaptureBuffer(String tag, ByteBuffer bbuf) 667 throws ItsException { 668 sendResponse(tag, null, null, bbuf); 669 } 670 671 public void sendResponse(LinkedList<MySensorEvent> events) 672 throws ItsException { 673 Logt.i(TAG, "Sending " + events.size() + " sensor events"); 674 try { 675 JSONArray accels = new JSONArray(); 676 JSONArray mags = new JSONArray(); 677 JSONArray gyros = new JSONArray(); 678 for (MySensorEvent event : events) { 679 JSONObject obj = new JSONObject(); 680 obj.put("time", event.timestamp); 681 obj.put("x", event.values[0]); 682 obj.put("y", event.values[1]); 683 obj.put("z", event.values[2]); 684 if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) { 685 accels.put(obj); 686 } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) { 687 mags.put(obj); 688 } else if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) { 689 gyros.put(obj); 690 } 691 } 692 JSONObject obj = new JSONObject(); 693 obj.put("accel", accels); 694 obj.put("mag", mags); 695 obj.put("gyro", gyros); 696 sendResponse("sensorEvents", null, obj, null); 697 } catch (org.json.JSONException e) { 698 throw new ItsException("JSON error: ", e); 699 } 700 Logt.i(TAG, "Sent sensor events"); 701 } 702 703 public void sendResponse(CameraCharacteristics props) 704 throws ItsException { 705 try { 706 Object objs[] = new Object[2]; 707 objs[0] = "cameraProperties"; 708 objs[1] = props; 709 mSerializerQueue.put(objs); 710 } catch (InterruptedException e) { 711 throw new ItsException("Interrupted: ", e); 712 } 713 } 714 715 public void sendResponseCaptureResult(CameraCharacteristics props, 716 CaptureRequest request, 717 CaptureResult result, 718 ImageReader[] readers) 719 throws ItsException { 720 try { 721 JSONArray jsonSurfaces = new JSONArray(); 722 for (int i = 0; i < readers.length; i++) { 723 JSONObject jsonSurface = new JSONObject(); 724 jsonSurface.put("width", readers[i].getWidth()); 725 jsonSurface.put("height", readers[i].getHeight()); 726 int format = readers[i].getImageFormat(); 727 if (format == ImageFormat.RAW_SENSOR) { 728 if (mCaptureRawIsStats) { 729 int aaw = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 730 .width(); 731 int aah = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 732 .height(); 733 jsonSurface.put("format", "rawStats"); 734 jsonSurface.put("width", aaw/mCaptureStatsGridWidth); 735 jsonSurface.put("height", aah/mCaptureStatsGridHeight); 736 } else if (mCaptureRawIsDng) { 737 jsonSurface.put("format", "dng"); 738 } else { 739 jsonSurface.put("format", "raw"); 740 } 741 } else if (format == ImageFormat.RAW10) { 742 jsonSurface.put("format", "raw10"); 743 } else if (format == ImageFormat.RAW12) { 744 jsonSurface.put("format", "raw12"); 745 } else if (format == ImageFormat.JPEG) { 746 jsonSurface.put("format", "jpeg"); 747 } else if (format == ImageFormat.YUV_420_888) { 748 jsonSurface.put("format", "yuv"); 749 } else { 750 throw new ItsException("Invalid format"); 751 } 752 jsonSurfaces.put(jsonSurface); 753 } 754 755 Object objs[] = new Object[5]; 756 objs[0] = "captureResults"; 757 objs[1] = props; 758 objs[2] = request; 759 objs[3] = result; 760 objs[4] = jsonSurfaces; 761 mSerializerQueue.put(objs); 762 } catch (org.json.JSONException e) { 763 throw new ItsException("JSON error: ", e); 764 } catch (InterruptedException e) { 765 throw new ItsException("Interrupted: ", e); 766 } 767 } 768 } 769 770 public ImageReader.OnImageAvailableListener 771 createAvailableListener(final CaptureCallback listener) { 772 return new ImageReader.OnImageAvailableListener() { 773 @Override 774 public void onImageAvailable(ImageReader reader) { 775 Image i = null; 776 try { 777 i = reader.acquireNextImage(); 778 listener.onCaptureAvailable(i); 779 } finally { 780 if (i != null) { 781 i.close(); 782 } 783 } 784 } 785 }; 786 } 787 788 private ImageReader.OnImageAvailableListener 789 createAvailableListenerDropper() { 790 return new ImageReader.OnImageAvailableListener() { 791 @Override 792 public void onImageAvailable(ImageReader reader) { 793 Image i = reader.acquireNextImage(); 794 i.close(); 795 } 796 }; 797 } 798 799 private void doStartSensorEvents() throws ItsException { 800 synchronized(mEventLock) { 801 mEventsEnabled = true; 802 } 803 mSocketRunnableObj.sendResponse("sensorEventsStarted", ""); 804 } 805 806 private void doGetSensorEvents() throws ItsException { 807 synchronized(mEventLock) { 808 mSocketRunnableObj.sendResponse(mEvents); 809 mEvents.clear(); 810 mEventsEnabled = false; 811 } 812 } 813 814 private void doGetProps() throws ItsException { 815 mSocketRunnableObj.sendResponse(mCameraCharacteristics); 816 } 817 818 private void doGetCameraIds() throws ItsException { 819 String[] devices; 820 try { 821 devices = mCameraManager.getCameraIdList(); 822 if (devices == null || devices.length == 0) { 823 throw new ItsException("No camera devices"); 824 } 825 } catch (CameraAccessException e) { 826 throw new ItsException("Failed to get device ID list", e); 827 } 828 829 try { 830 JSONObject obj = new JSONObject(); 831 JSONArray array = new JSONArray(); 832 for (String id : devices) { 833 CameraCharacteristics characteristics = mCameraManager.getCameraCharacteristics(id); 834 // Only supply camera Id for non-legacy cameras since legacy camera does not 835 // support ITS 836 if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) != 837 CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) { 838 array.put(id); 839 } 840 } 841 obj.put("cameraIdArray", array); 842 mSocketRunnableObj.sendResponse("cameraIds", obj); 843 } catch (org.json.JSONException e) { 844 throw new ItsException("JSON error: ", e); 845 } catch (android.hardware.camera2.CameraAccessException e) { 846 throw new ItsException("Access error: ", e); 847 } 848 } 849 850 private void prepareImageReaders(Size[] outputSizes, int[] outputFormats, Size inputSize, 851 int inputFormat, int maxInputBuffers) { 852 closeImageReaders(); 853 mOutputImageReaders = new ImageReader[outputSizes.length]; 854 for (int i = 0; i < outputSizes.length; i++) { 855 // Check if the output image reader can be shared with the input image reader. 856 if (outputSizes[i].equals(inputSize) && outputFormats[i] == inputFormat) { 857 mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(), 858 outputSizes[i].getHeight(), outputFormats[i], 859 MAX_CONCURRENT_READER_BUFFERS + maxInputBuffers); 860 mInputImageReader = mOutputImageReaders[i]; 861 } else { 862 mOutputImageReaders[i] = ImageReader.newInstance(outputSizes[i].getWidth(), 863 outputSizes[i].getHeight(), outputFormats[i], 864 MAX_CONCURRENT_READER_BUFFERS); 865 } 866 } 867 868 if (inputSize != null && mInputImageReader == null) { 869 mInputImageReader = ImageReader.newInstance(inputSize.getWidth(), inputSize.getHeight(), 870 inputFormat, maxInputBuffers); 871 } 872 } 873 874 private void closeImageReaders() { 875 if (mOutputImageReaders != null) { 876 for (int i = 0; i < mOutputImageReaders.length; i++) { 877 if (mOutputImageReaders[i] != null) { 878 mOutputImageReaders[i].close(); 879 mOutputImageReaders[i] = null; 880 } 881 } 882 } 883 if (mInputImageReader != null) { 884 mInputImageReader.close(); 885 mInputImageReader = null; 886 } 887 } 888 889 private void do3A(JSONObject params) throws ItsException { 890 try { 891 // Start a 3A action, and wait for it to converge. 892 // Get the converged values for each "A", and package into JSON result for caller. 893 894 // 3A happens on full-res frames. 895 Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics); 896 int outputFormats[] = new int[1]; 897 outputFormats[0] = ImageFormat.YUV_420_888; 898 Size[] outputSizes = new Size[1]; 899 outputSizes[0] = sizes[0]; 900 int width = outputSizes[0].getWidth(); 901 int height = outputSizes[0].getHeight(); 902 903 prepareImageReaders(outputSizes, outputFormats, /*inputSize*/null, /*inputFormat*/0, 904 /*maxInputBuffers*/0); 905 List<Surface> outputSurfaces = new ArrayList<Surface>(1); 906 outputSurfaces.add(mOutputImageReaders[0].getSurface()); 907 BlockingSessionCallback sessionListener = new BlockingSessionCallback(); 908 mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler); 909 mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS); 910 911 // Add a listener that just recycles buffers; they aren't saved anywhere. 912 ImageReader.OnImageAvailableListener readerListener = 913 createAvailableListenerDropper(); 914 mOutputImageReaders[0].setOnImageAvailableListener(readerListener, mSaveHandlers[0]); 915 916 // Get the user-specified regions for AE, AWB, AF. 917 // Note that the user specifies normalized [x,y,w,h], which is converted below 918 // to an [x0,y0,x1,y1] region in sensor coords. The capture request region 919 // also has a fifth "weight" element: [x0,y0,x1,y1,w]. 920 MeteringRectangle[] regionAE = new MeteringRectangle[]{ 921 new MeteringRectangle(0,0,width,height,1)}; 922 MeteringRectangle[] regionAF = new MeteringRectangle[]{ 923 new MeteringRectangle(0,0,width,height,1)}; 924 MeteringRectangle[] regionAWB = new MeteringRectangle[]{ 925 new MeteringRectangle(0,0,width,height,1)}; 926 if (params.has(REGION_KEY)) { 927 JSONObject regions = params.getJSONObject(REGION_KEY); 928 if (regions.has(REGION_AE_KEY)) { 929 regionAE = ItsUtils.getJsonWeightedRectsFromArray( 930 regions.getJSONArray(REGION_AE_KEY), true, width, height); 931 } 932 if (regions.has(REGION_AF_KEY)) { 933 regionAF = ItsUtils.getJsonWeightedRectsFromArray( 934 regions.getJSONArray(REGION_AF_KEY), true, width, height); 935 } 936 if (regions.has(REGION_AWB_KEY)) { 937 regionAWB = ItsUtils.getJsonWeightedRectsFromArray( 938 regions.getJSONArray(REGION_AWB_KEY), true, width, height); 939 } 940 } 941 942 // If AE or AWB lock is specified, then the 3A will converge first and then lock these 943 // values, waiting until the HAL has reported that the lock was successful. 944 mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false); 945 mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false); 946 947 // An EV compensation can be specified as part of AE convergence. 948 int evComp = params.optInt(EVCOMP_KEY, 0); 949 if (evComp != 0) { 950 Logt.i(TAG, String.format("Running 3A with AE exposure compensation value: %d", evComp)); 951 } 952 953 // By default, AE and AF both get triggered, but the user can optionally override this. 954 // Also, AF won't get triggered if the lens is fixed-focus. 955 boolean doAE = true; 956 boolean doAF = true; 957 if (params.has(TRIGGER_KEY)) { 958 JSONObject triggers = params.getJSONObject(TRIGGER_KEY); 959 if (triggers.has(TRIGGER_AE_KEY)) { 960 doAE = triggers.getBoolean(TRIGGER_AE_KEY); 961 } 962 if (triggers.has(TRIGGER_AF_KEY)) { 963 doAF = triggers.getBoolean(TRIGGER_AF_KEY); 964 } 965 } 966 Float minFocusDistance = mCameraCharacteristics.get( 967 CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE); 968 boolean isFixedFocusLens = minFocusDistance != null && minFocusDistance == 0.0; 969 if (doAF && isFixedFocusLens) { 970 // Send a dummy result back for the code that is waiting for this message to see 971 // that AF has converged. 972 Logt.i(TAG, "Ignoring request for AF on fixed-focus camera"); 973 mSocketRunnableObj.sendResponse("afResult", "0.0"); 974 doAF = false; 975 } 976 977 mInterlock3A.open(); 978 mIssuedRequest3A = false; 979 mConvergedAE = false; 980 mConvergedAWB = false; 981 mConvergedAF = false; 982 mLockedAE = false; 983 mLockedAWB = false; 984 long tstart = System.currentTimeMillis(); 985 boolean triggeredAE = false; 986 boolean triggeredAF = false; 987 988 Logt.i(TAG, String.format("Initiating 3A: AE:%d, AF:%d, AWB:1, AELOCK:%d, AWBLOCK:%d", 989 doAE?1:0, doAF?1:0, mNeedsLockedAE?1:0, mNeedsLockedAWB?1:0)); 990 991 // Keep issuing capture requests until 3A has converged. 992 while (true) { 993 994 // Block until can take the next 3A frame. Only want one outstanding frame 995 // at a time, to simplify the logic here. 996 if (!mInterlock3A.block(TIMEOUT_3A * 1000) || 997 System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) { 998 throw new ItsException( 999 "3A failed to converge after " + TIMEOUT_3A + " seconds.\n" + 1000 "AE converge state: " + mConvergedAE + ", \n" + 1001 "AF convergence state: " + mConvergedAF + ", \n" + 1002 "AWB convergence state: " + mConvergedAWB + "."); 1003 } 1004 mInterlock3A.close(); 1005 1006 // If not converged yet, issue another capture request. 1007 if ( (doAE && (!triggeredAE || !mConvergedAE)) 1008 || !mConvergedAWB 1009 || (doAF && (!triggeredAF || !mConvergedAF)) 1010 || (doAE && mNeedsLockedAE && !mLockedAE) 1011 || (mNeedsLockedAWB && !mLockedAWB)) { 1012 1013 // Baseline capture request for 3A. 1014 CaptureRequest.Builder req = mCamera.createCaptureRequest( 1015 CameraDevice.TEMPLATE_PREVIEW); 1016 req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF); 1017 req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); 1018 req.set(CaptureRequest.CONTROL_CAPTURE_INTENT, 1019 CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW); 1020 req.set(CaptureRequest.CONTROL_AE_MODE, 1021 CaptureRequest.CONTROL_AE_MODE_ON); 1022 req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0); 1023 req.set(CaptureRequest.CONTROL_AE_LOCK, false); 1024 req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE); 1025 req.set(CaptureRequest.CONTROL_AF_MODE, 1026 CaptureRequest.CONTROL_AF_MODE_AUTO); 1027 req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF); 1028 req.set(CaptureRequest.CONTROL_AWB_MODE, 1029 CaptureRequest.CONTROL_AWB_MODE_AUTO); 1030 req.set(CaptureRequest.CONTROL_AWB_LOCK, false); 1031 req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB); 1032 // ITS only turns OIS on when it's explicitly requested 1033 req.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, 1034 CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF); 1035 1036 if (evComp != 0) { 1037 req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, evComp); 1038 } 1039 1040 if (mConvergedAE && mNeedsLockedAE) { 1041 req.set(CaptureRequest.CONTROL_AE_LOCK, true); 1042 } 1043 if (mConvergedAWB && mNeedsLockedAWB) { 1044 req.set(CaptureRequest.CONTROL_AWB_LOCK, true); 1045 } 1046 1047 // Trigger AE first. 1048 if (doAE && !triggeredAE) { 1049 Logt.i(TAG, "Triggering AE"); 1050 req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, 1051 CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START); 1052 triggeredAE = true; 1053 } 1054 1055 // After AE has converged, trigger AF. 1056 if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) { 1057 Logt.i(TAG, "Triggering AF"); 1058 req.set(CaptureRequest.CONTROL_AF_TRIGGER, 1059 CaptureRequest.CONTROL_AF_TRIGGER_START); 1060 triggeredAF = true; 1061 } 1062 1063 req.addTarget(mOutputImageReaders[0].getSurface()); 1064 1065 mIssuedRequest3A = true; 1066 mSession.capture(req.build(), mCaptureResultListener, mResultHandler); 1067 } else { 1068 mSocketRunnableObj.sendResponse("3aConverged", ""); 1069 Logt.i(TAG, "3A converged"); 1070 break; 1071 } 1072 } 1073 } catch (android.hardware.camera2.CameraAccessException e) { 1074 throw new ItsException("Access error: ", e); 1075 } catch (org.json.JSONException e) { 1076 throw new ItsException("JSON error: ", e); 1077 } finally { 1078 mSocketRunnableObj.sendResponse("3aDone", ""); 1079 } 1080 } 1081 1082 private void doVibrate(JSONObject params) throws ItsException { 1083 try { 1084 if (mVibrator == null) { 1085 throw new ItsException("Unable to start vibrator"); 1086 } 1087 JSONArray patternArray = params.getJSONArray(VIB_PATTERN_KEY); 1088 int len = patternArray.length(); 1089 long pattern[] = new long[len]; 1090 for (int i = 0; i < len; i++) { 1091 pattern[i] = patternArray.getLong(i); 1092 } 1093 Logt.i(TAG, String.format("Starting vibrator, pattern length %d",len)); 1094 mVibrator.vibrate(pattern, -1); 1095 mSocketRunnableObj.sendResponse("vibrationStarted", ""); 1096 } catch (org.json.JSONException e) { 1097 throw new ItsException("JSON error: ", e); 1098 } 1099 } 1100 1101 /** 1102 * Parse jsonOutputSpecs to get output surface sizes and formats. Create input and output 1103 * image readers for the parsed output surface sizes, output formats, and the given input 1104 * size and format. 1105 */ 1106 private void prepareImageReadersWithOutputSpecs(JSONArray jsonOutputSpecs, Size inputSize, 1107 int inputFormat, int maxInputBuffers, boolean backgroundRequest) throws ItsException { 1108 Size outputSizes[]; 1109 int outputFormats[]; 1110 int numSurfaces = 0; 1111 1112 if (jsonOutputSpecs != null) { 1113 try { 1114 numSurfaces = jsonOutputSpecs.length(); 1115 if (backgroundRequest) { 1116 numSurfaces += 1; 1117 } 1118 if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) { 1119 throw new ItsException("Too many output surfaces"); 1120 } 1121 1122 outputSizes = new Size[numSurfaces]; 1123 outputFormats = new int[numSurfaces]; 1124 for (int i = 0; i < numSurfaces; i++) { 1125 // Append optional background stream at the end 1126 if (backgroundRequest && i == numSurfaces - 1) { 1127 outputFormats[i] = ImageFormat.YUV_420_888; 1128 outputSizes[i] = new Size(640, 480); 1129 continue; 1130 } 1131 // Get the specified surface. 1132 JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i); 1133 String sformat = surfaceObj.optString("format"); 1134 Size sizes[]; 1135 if ("yuv".equals(sformat) || "".equals(sformat)) { 1136 // Default to YUV if no format is specified. 1137 outputFormats[i] = ImageFormat.YUV_420_888; 1138 sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics); 1139 } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) { 1140 outputFormats[i] = ImageFormat.JPEG; 1141 sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics); 1142 } else if ("raw".equals(sformat)) { 1143 outputFormats[i] = ImageFormat.RAW_SENSOR; 1144 sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics); 1145 } else if ("raw10".equals(sformat)) { 1146 outputFormats[i] = ImageFormat.RAW10; 1147 sizes = ItsUtils.getRaw10OutputSizes(mCameraCharacteristics); 1148 } else if ("raw12".equals(sformat)) { 1149 outputFormats[i] = ImageFormat.RAW12; 1150 sizes = ItsUtils.getRaw12OutputSizes(mCameraCharacteristics); 1151 } else if ("dng".equals(sformat)) { 1152 outputFormats[i] = ImageFormat.RAW_SENSOR; 1153 sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics); 1154 mCaptureRawIsDng = true; 1155 } else if ("rawStats".equals(sformat)) { 1156 outputFormats[i] = ImageFormat.RAW_SENSOR; 1157 sizes = ItsUtils.getRaw16OutputSizes(mCameraCharacteristics); 1158 mCaptureRawIsStats = true; 1159 mCaptureStatsGridWidth = surfaceObj.optInt("gridWidth"); 1160 mCaptureStatsGridHeight = surfaceObj.optInt("gridHeight"); 1161 } else { 1162 throw new ItsException("Unsupported format: " + sformat); 1163 } 1164 // If the size is omitted, then default to the largest allowed size for the 1165 // format. 1166 int width = surfaceObj.optInt("width"); 1167 int height = surfaceObj.optInt("height"); 1168 if (width <= 0) { 1169 if (sizes == null || sizes.length == 0) { 1170 throw new ItsException(String.format( 1171 "Zero stream configs available for requested format: %s", 1172 sformat)); 1173 } 1174 width = ItsUtils.getMaxSize(sizes).getWidth(); 1175 } 1176 if (height <= 0) { 1177 height = ItsUtils.getMaxSize(sizes).getHeight(); 1178 } 1179 1180 // The stats computation only applies to the active array region. 1181 int aaw = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics).width(); 1182 int aah = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics).height(); 1183 if (mCaptureStatsGridWidth <= 0 || mCaptureStatsGridWidth > aaw) { 1184 mCaptureStatsGridWidth = aaw; 1185 } 1186 if (mCaptureStatsGridHeight <= 0 || mCaptureStatsGridHeight > aah) { 1187 mCaptureStatsGridHeight = aah; 1188 } 1189 1190 outputSizes[i] = new Size(width, height); 1191 } 1192 } catch (org.json.JSONException e) { 1193 throw new ItsException("JSON error", e); 1194 } 1195 } else { 1196 // No surface(s) specified at all. 1197 // Default: a single output surface which is full-res YUV. 1198 Size maxYuvSize = ItsUtils.getMaxOutputSize( 1199 mCameraCharacteristics, ImageFormat.YUV_420_888); 1200 numSurfaces = backgroundRequest ? 2 : 1; 1201 1202 outputSizes = new Size[numSurfaces]; 1203 outputFormats = new int[numSurfaces]; 1204 outputSizes[0] = maxYuvSize; 1205 outputFormats[0] = ImageFormat.YUV_420_888; 1206 if (backgroundRequest) { 1207 outputSizes[1] = new Size(640, 480); 1208 outputFormats[1] = ImageFormat.YUV_420_888; 1209 } 1210 } 1211 1212 prepareImageReaders(outputSizes, outputFormats, inputSize, inputFormat, maxInputBuffers); 1213 } 1214 1215 /** 1216 * Wait until mCountCallbacksRemaining is 0 or a specified amount of time has elapsed between 1217 * each callback. 1218 */ 1219 private void waitForCallbacks(long timeoutMs) throws ItsException { 1220 synchronized(mCountCallbacksRemaining) { 1221 int currentCount = mCountCallbacksRemaining.get(); 1222 while (currentCount > 0) { 1223 try { 1224 mCountCallbacksRemaining.wait(timeoutMs); 1225 } catch (InterruptedException e) { 1226 throw new ItsException("Waiting for callbacks was interrupted.", e); 1227 } 1228 1229 int newCount = mCountCallbacksRemaining.get(); 1230 if (newCount == currentCount) { 1231 throw new ItsException("No callback received within timeout"); 1232 } 1233 currentCount = newCount; 1234 } 1235 } 1236 } 1237 1238 private void doCapture(JSONObject params) throws ItsException { 1239 try { 1240 // Parse the JSON to get the list of capture requests. 1241 List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList( 1242 mCamera, params, "captureRequests"); 1243 1244 // optional background preview requests 1245 List<CaptureRequest.Builder> backgroundRequests = ItsSerializer.deserializeRequestList( 1246 mCamera, params, "repeatRequests"); 1247 boolean backgroundRequest = backgroundRequests.size() > 0; 1248 1249 int numSurfaces = 0; 1250 int numCaptureSurfaces = 0; 1251 BlockingSessionCallback sessionListener = new BlockingSessionCallback(); 1252 try { 1253 mCountRawOrDng.set(0); 1254 mCountJpg.set(0); 1255 mCountYuv.set(0); 1256 mCountRaw10.set(0); 1257 mCountRaw12.set(0); 1258 mCountCapRes.set(0); 1259 mCaptureRawIsDng = false; 1260 mCaptureRawIsStats = false; 1261 mCaptureResults = new CaptureResult[requests.size()]; 1262 1263 JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params); 1264 1265 prepareImageReadersWithOutputSpecs(jsonOutputSpecs, /*inputSize*/null, 1266 /*inputFormat*/0, /*maxInputBuffers*/0, backgroundRequest); 1267 numSurfaces = mOutputImageReaders.length; 1268 numCaptureSurfaces = numSurfaces - (backgroundRequest ? 1 : 0); 1269 1270 List<Surface> outputSurfaces = new ArrayList<Surface>(numSurfaces); 1271 for (int i = 0; i < numSurfaces; i++) { 1272 outputSurfaces.add(mOutputImageReaders[i].getSurface()); 1273 } 1274 mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler); 1275 mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS); 1276 1277 for (int i = 0; i < numSurfaces; i++) { 1278 ImageReader.OnImageAvailableListener readerListener; 1279 if (backgroundRequest && i == numSurfaces - 1) { 1280 readerListener = createAvailableListenerDropper(); 1281 } else { 1282 readerListener = createAvailableListener(mCaptureCallback); 1283 } 1284 mOutputImageReaders[i].setOnImageAvailableListener(readerListener, 1285 mSaveHandlers[i]); 1286 } 1287 1288 // Plan for how many callbacks need to be received throughout the duration of this 1289 // sequence of capture requests. There is one callback per image surface, and one 1290 // callback for the CaptureResult, for each capture. 1291 int numCaptures = requests.size(); 1292 mCountCallbacksRemaining.set(numCaptures * (numCaptureSurfaces + 1)); 1293 1294 } catch (CameraAccessException e) { 1295 throw new ItsException("Error configuring outputs", e); 1296 } 1297 1298 // Start background requests and let it warm up pipeline 1299 if (backgroundRequest) { 1300 List<CaptureRequest> bgRequestList = 1301 new ArrayList<CaptureRequest>(backgroundRequests.size()); 1302 for (int i = 0; i < backgroundRequests.size(); i++) { 1303 CaptureRequest.Builder req = backgroundRequests.get(i); 1304 req.addTarget(mOutputImageReaders[numCaptureSurfaces].getSurface()); 1305 bgRequestList.add(req.build()); 1306 } 1307 mSession.setRepeatingBurst(bgRequestList, null, null); 1308 // warm up the pipeline 1309 Thread.sleep(PIPELINE_WARMUP_TIME_MS); 1310 } 1311 1312 // Initiate the captures. 1313 long maxExpTimeNs = -1; 1314 List<CaptureRequest> requestList = 1315 new ArrayList<>(requests.size()); 1316 for (int i = 0; i < requests.size(); i++) { 1317 CaptureRequest.Builder req = requests.get(i); 1318 // For DNG captures, need the LSC map to be available. 1319 if (mCaptureRawIsDng) { 1320 req.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1); 1321 } 1322 Long expTimeNs = req.get(CaptureRequest.SENSOR_EXPOSURE_TIME); 1323 if (expTimeNs != null && expTimeNs > maxExpTimeNs) { 1324 maxExpTimeNs = expTimeNs; 1325 } 1326 1327 for (int j = 0; j < numCaptureSurfaces; j++) { 1328 req.addTarget(mOutputImageReaders[j].getSurface()); 1329 } 1330 requestList.add(req.build()); 1331 } 1332 mSession.captureBurst(requestList, mCaptureResultListener, mResultHandler); 1333 1334 long timeout = TIMEOUT_CALLBACK * 1000; 1335 if (maxExpTimeNs > 0) { 1336 timeout += maxExpTimeNs / 1000000; // ns to ms 1337 } 1338 // Make sure all callbacks have been hit (wait until captures are done). 1339 // If no timeouts are received after a timeout, then fail. 1340 waitForCallbacks(timeout); 1341 1342 // Close session and wait until session is fully closed 1343 mSession.close(); 1344 sessionListener.getStateWaiter().waitForState( 1345 BlockingSessionCallback.SESSION_CLOSED, TIMEOUT_SESSION_CLOSE); 1346 1347 } catch (android.hardware.camera2.CameraAccessException e) { 1348 throw new ItsException("Access error: ", e); 1349 } catch (InterruptedException e) { 1350 throw new ItsException("Unexpected InterruptedException: ", e); 1351 } 1352 } 1353 1354 /** 1355 * Perform reprocess captures. 1356 * 1357 * It takes captureRequests in a JSON object and perform capture requests in two steps: 1358 * regular capture request to get reprocess input and reprocess capture request to get 1359 * reprocess outputs. 1360 * 1361 * Regular capture requests: 1362 * 1. For each capture request in the JSON object, create a full-size capture request with 1363 * the settings in the JSON object. 1364 * 2. Remember and clear noise reduction, edge enhancement, and effective exposure factor 1365 * from the regular capture requests. (Those settings will be used for reprocess requests.) 1366 * 3. Submit the regular capture requests. 1367 * 1368 * Reprocess capture requests: 1369 * 4. Wait for the regular capture results and use them to create reprocess capture requests. 1370 * 5. Wait for the regular capture output images and queue them to the image writer. 1371 * 6. Set the noise reduction, edge enhancement, and effective exposure factor from #2. 1372 * 7. Submit the reprocess capture requests. 1373 * 1374 * The output images and results for the regular capture requests won't be written to socket. 1375 * The output images and results for the reprocess capture requests will be written to socket. 1376 */ 1377 private void doReprocessCapture(JSONObject params) throws ItsException { 1378 ImageWriter imageWriter = null; 1379 ArrayList<Integer> noiseReductionModes = new ArrayList<>(); 1380 ArrayList<Integer> edgeModes = new ArrayList<>(); 1381 ArrayList<Float> effectiveExposureFactors = new ArrayList<>(); 1382 1383 mCountRawOrDng.set(0); 1384 mCountJpg.set(0); 1385 mCountYuv.set(0); 1386 mCountRaw10.set(0); 1387 mCountRaw12.set(0); 1388 mCountCapRes.set(0); 1389 mCaptureRawIsDng = false; 1390 mCaptureRawIsStats = false; 1391 1392 try { 1393 // Parse the JSON to get the list of capture requests. 1394 List<CaptureRequest.Builder> inputRequests = 1395 ItsSerializer.deserializeRequestList(mCamera, params, "captureRequests"); 1396 1397 // Prepare the image readers for reprocess input and reprocess outputs. 1398 int inputFormat = getReprocessInputFormat(params); 1399 Size inputSize = ItsUtils.getMaxOutputSize(mCameraCharacteristics, inputFormat); 1400 JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params); 1401 prepareImageReadersWithOutputSpecs(jsonOutputSpecs, inputSize, inputFormat, 1402 inputRequests.size(), /*backgroundRequest*/false); 1403 1404 // Prepare a reprocessable session. 1405 int numOutputSurfaces = mOutputImageReaders.length; 1406 InputConfiguration inputConfig = new InputConfiguration(inputSize.getWidth(), 1407 inputSize.getHeight(), inputFormat); 1408 List<Surface> outputSurfaces = new ArrayList<Surface>(); 1409 boolean addSurfaceForInput = true; 1410 for (int i = 0; i < numOutputSurfaces; i++) { 1411 outputSurfaces.add(mOutputImageReaders[i].getSurface()); 1412 if (mOutputImageReaders[i] == mInputImageReader) { 1413 // If input and one of the outputs share the same image reader, avoid 1414 // adding the same surfaces twice. 1415 addSurfaceForInput = false; 1416 } 1417 } 1418 1419 if (addSurfaceForInput) { 1420 // Besides the output surfaces specified in JSON object, add an additional one 1421 // for reprocess input. 1422 outputSurfaces.add(mInputImageReader.getSurface()); 1423 } 1424 1425 BlockingSessionCallback sessionListener = new BlockingSessionCallback(); 1426 mCamera.createReprocessableCaptureSession(inputConfig, outputSurfaces, sessionListener, 1427 mCameraHandler); 1428 mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS); 1429 1430 // Create an image writer for reprocess input. 1431 Surface inputSurface = mSession.getInputSurface(); 1432 imageWriter = ImageWriter.newInstance(inputSurface, inputRequests.size()); 1433 1434 // Set up input reader listener and capture callback listener to get 1435 // reprocess input buffers and the results in order to create reprocess capture 1436 // requests. 1437 ImageReaderListenerWaiter inputReaderListener = new ImageReaderListenerWaiter(); 1438 mInputImageReader.setOnImageAvailableListener(inputReaderListener, mSaveHandlers[0]); 1439 1440 CaptureCallbackWaiter captureCallbackWaiter = new CaptureCallbackWaiter(); 1441 // Prepare the reprocess input request 1442 for (CaptureRequest.Builder inputReqest : inputRequests) { 1443 // Remember and clear noise reduction, edge enhancement, and effective exposure 1444 // factors. 1445 noiseReductionModes.add(inputReqest.get(CaptureRequest.NOISE_REDUCTION_MODE)); 1446 edgeModes.add(inputReqest.get(CaptureRequest.EDGE_MODE)); 1447 effectiveExposureFactors.add(inputReqest.get( 1448 CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)); 1449 1450 inputReqest.set(CaptureRequest.NOISE_REDUCTION_MODE, 1451 CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG); 1452 inputReqest.set(CaptureRequest.EDGE_MODE, CaptureRequest.EDGE_MODE_ZERO_SHUTTER_LAG); 1453 inputReqest.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, null); 1454 inputReqest.addTarget(mInputImageReader.getSurface()); 1455 mSession.capture(inputReqest.build(), captureCallbackWaiter, mResultHandler); 1456 } 1457 1458 // Wait for reprocess input images 1459 ArrayList<CaptureRequest.Builder> reprocessOutputRequests = new ArrayList<>(); 1460 for (int i = 0; i < inputRequests.size(); i++) { 1461 TotalCaptureResult result = 1462 captureCallbackWaiter.getResult(TIMEOUT_CALLBACK * 1000); 1463 reprocessOutputRequests.add(mCamera.createReprocessCaptureRequest(result)); 1464 imageWriter.queueInputImage(inputReaderListener.getImage(TIMEOUT_CALLBACK * 1000)); 1465 } 1466 1467 // Start performing reprocess captures. 1468 1469 mCaptureResults = new CaptureResult[inputRequests.size()]; 1470 1471 // Prepare reprocess capture requests. 1472 for (int i = 0; i < numOutputSurfaces; i++) { 1473 ImageReader.OnImageAvailableListener outputReaderListener = 1474 createAvailableListener(mCaptureCallback); 1475 mOutputImageReaders[i].setOnImageAvailableListener(outputReaderListener, 1476 mSaveHandlers[i]); 1477 } 1478 1479 // Plan for how many callbacks need to be received throughout the duration of this 1480 // sequence of capture requests. There is one callback per image surface, and one 1481 // callback for the CaptureResult, for each capture. 1482 int numCaptures = reprocessOutputRequests.size(); 1483 mCountCallbacksRemaining.set(numCaptures * (numOutputSurfaces + 1)); 1484 1485 // Initiate the captures. 1486 for (int i = 0; i < reprocessOutputRequests.size(); i++) { 1487 CaptureRequest.Builder req = reprocessOutputRequests.get(i); 1488 for (ImageReader outputImageReader : mOutputImageReaders) { 1489 req.addTarget(outputImageReader.getSurface()); 1490 } 1491 1492 req.set(CaptureRequest.NOISE_REDUCTION_MODE, noiseReductionModes.get(i)); 1493 req.set(CaptureRequest.EDGE_MODE, edgeModes.get(i)); 1494 req.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, 1495 effectiveExposureFactors.get(i)); 1496 1497 mSession.capture(req.build(), mCaptureResultListener, mResultHandler); 1498 } 1499 1500 // Make sure all callbacks have been hit (wait until captures are done). 1501 // If no timeouts are received after a timeout, then fail. 1502 waitForCallbacks(TIMEOUT_CALLBACK * 1000); 1503 } catch (android.hardware.camera2.CameraAccessException e) { 1504 throw new ItsException("Access error: ", e); 1505 } finally { 1506 closeImageReaders(); 1507 if (mSession != null) { 1508 mSession.close(); 1509 mSession = null; 1510 } 1511 if (imageWriter != null) { 1512 imageWriter.close(); 1513 } 1514 } 1515 } 1516 1517 @Override 1518 public final void onAccuracyChanged(Sensor sensor, int accuracy) { 1519 Logt.i(TAG, "Sensor " + sensor.getName() + " accuracy changed to " + accuracy); 1520 } 1521 1522 @Override 1523 public final void onSensorChanged(SensorEvent event) { 1524 synchronized(mEventLock) { 1525 if (mEventsEnabled) { 1526 MySensorEvent ev2 = new MySensorEvent(); 1527 ev2.sensor = event.sensor; 1528 ev2.accuracy = event.accuracy; 1529 ev2.timestamp = event.timestamp; 1530 ev2.values = new float[event.values.length]; 1531 System.arraycopy(event.values, 0, ev2.values, 0, event.values.length); 1532 mEvents.add(ev2); 1533 } 1534 } 1535 } 1536 1537 private final CaptureCallback mCaptureCallback = new CaptureCallback() { 1538 @Override 1539 public void onCaptureAvailable(Image capture) { 1540 try { 1541 int format = capture.getFormat(); 1542 if (format == ImageFormat.JPEG) { 1543 Logt.i(TAG, "Received JPEG capture"); 1544 byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota); 1545 ByteBuffer buf = ByteBuffer.wrap(img); 1546 int count = mCountJpg.getAndIncrement(); 1547 mSocketRunnableObj.sendResponseCaptureBuffer("jpegImage", buf); 1548 } else if (format == ImageFormat.YUV_420_888) { 1549 Logt.i(TAG, "Received YUV capture"); 1550 byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota); 1551 ByteBuffer buf = ByteBuffer.wrap(img); 1552 int count = mCountYuv.getAndIncrement(); 1553 mSocketRunnableObj.sendResponseCaptureBuffer("yuvImage", buf); 1554 } else if (format == ImageFormat.RAW10) { 1555 Logt.i(TAG, "Received RAW10 capture"); 1556 byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota); 1557 ByteBuffer buf = ByteBuffer.wrap(img); 1558 int count = mCountRaw10.getAndIncrement(); 1559 mSocketRunnableObj.sendResponseCaptureBuffer("raw10Image", buf); 1560 } else if (format == ImageFormat.RAW12) { 1561 Logt.i(TAG, "Received RAW12 capture"); 1562 byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota); 1563 ByteBuffer buf = ByteBuffer.wrap(img); 1564 int count = mCountRaw12.getAndIncrement(); 1565 mSocketRunnableObj.sendResponseCaptureBuffer("raw12Image", buf); 1566 } else if (format == ImageFormat.RAW_SENSOR) { 1567 Logt.i(TAG, "Received RAW16 capture"); 1568 int count = mCountRawOrDng.getAndIncrement(); 1569 if (! mCaptureRawIsDng) { 1570 byte[] img = ItsUtils.getDataFromImage(capture, mSocketQueueQuota); 1571 if (! mCaptureRawIsStats) { 1572 ByteBuffer buf = ByteBuffer.wrap(img); 1573 mSocketRunnableObj.sendResponseCaptureBuffer("rawImage", buf); 1574 } else { 1575 // Compute the requested stats on the raw frame, and return the results 1576 // in a new "stats image". 1577 long startTimeMs = SystemClock.elapsedRealtime(); 1578 int w = capture.getWidth(); 1579 int h = capture.getHeight(); 1580 int aaw = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 1581 .width(); 1582 int aah = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 1583 .height(); 1584 int aax = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 1585 .left; 1586 int aay = ItsUtils.getActiveArrayCropRegion(mCameraCharacteristics) 1587 .top; 1588 int gw = mCaptureStatsGridWidth; 1589 int gh = mCaptureStatsGridHeight; 1590 float[] stats = StatsImage.computeStatsImage( 1591 img, w, h, aax, aay, aaw, aah, gw, gh); 1592 long endTimeMs = SystemClock.elapsedRealtime(); 1593 Log.e(TAG, "Raw stats computation takes " + (endTimeMs - startTimeMs) + " ms"); 1594 int statsImgSize = stats.length * 4; 1595 if (mSocketQueueQuota != null) { 1596 mSocketQueueQuota.release(img.length); 1597 mSocketQueueQuota.acquire(statsImgSize); 1598 } 1599 ByteBuffer bBuf = ByteBuffer.allocate(statsImgSize); 1600 bBuf.order(ByteOrder.nativeOrder()); 1601 FloatBuffer fBuf = bBuf.asFloatBuffer(); 1602 fBuf.put(stats); 1603 fBuf.position(0); 1604 mSocketRunnableObj.sendResponseCaptureBuffer("rawStatsImage", bBuf); 1605 } 1606 } else { 1607 // Wait until the corresponding capture result is ready, up to a timeout. 1608 long t0 = android.os.SystemClock.elapsedRealtime(); 1609 while (! mThreadExitFlag 1610 && android.os.SystemClock.elapsedRealtime()-t0 < TIMEOUT_CAP_RES) { 1611 if (mCaptureResults[count] != null) { 1612 Logt.i(TAG, "Writing capture as DNG"); 1613 DngCreator dngCreator = new DngCreator( 1614 mCameraCharacteristics, mCaptureResults[count]); 1615 ByteArrayOutputStream dngStream = new ByteArrayOutputStream(); 1616 dngCreator.writeImage(dngStream, capture); 1617 byte[] dngArray = dngStream.toByteArray(); 1618 if (mSocketQueueQuota != null) { 1619 // Ideally we should acquire before allocating memory, but 1620 // here the DNG size is unknown before toByteArray call, so 1621 // we have to register the size afterward. This should still 1622 // works most of the time since all DNG images are handled by 1623 // the same handler thread, so we are at most one buffer over 1624 // the quota. 1625 mSocketQueueQuota.acquire(dngArray.length); 1626 } 1627 ByteBuffer dngBuf = ByteBuffer.wrap(dngArray); 1628 mSocketRunnableObj.sendResponseCaptureBuffer("dngImage", dngBuf); 1629 break; 1630 } else { 1631 Thread.sleep(1); 1632 } 1633 } 1634 } 1635 } else { 1636 throw new ItsException("Unsupported image format: " + format); 1637 } 1638 1639 synchronized(mCountCallbacksRemaining) { 1640 mCountCallbacksRemaining.decrementAndGet(); 1641 mCountCallbacksRemaining.notify(); 1642 } 1643 } catch (IOException e) { 1644 Logt.e(TAG, "Script error: ", e); 1645 } catch (InterruptedException e) { 1646 Logt.e(TAG, "Script error: ", e); 1647 } catch (ItsException e) { 1648 Logt.e(TAG, "Script error: ", e); 1649 } 1650 } 1651 }; 1652 1653 private static float r2f(Rational r) { 1654 return (float)r.getNumerator() / (float)r.getDenominator(); 1655 } 1656 1657 private final CaptureResultListener mCaptureResultListener = new CaptureResultListener() { 1658 @Override 1659 public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, 1660 long timestamp, long frameNumber) { 1661 } 1662 1663 @Override 1664 public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, 1665 TotalCaptureResult result) { 1666 try { 1667 // Currently result has all 0 values. 1668 if (request == null || result == null) { 1669 throw new ItsException("Request/result is invalid"); 1670 } 1671 1672 StringBuilder logMsg = new StringBuilder(); 1673 logMsg.append(String.format( 1674 "Capt result: AE=%d, AF=%d, AWB=%d, ", 1675 result.get(CaptureResult.CONTROL_AE_STATE), 1676 result.get(CaptureResult.CONTROL_AF_STATE), 1677 result.get(CaptureResult.CONTROL_AWB_STATE))); 1678 int[] capabilities = mCameraCharacteristics.get( 1679 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1680 if (capabilities == null) { 1681 throw new ItsException("Failed to get capabilities"); 1682 } 1683 boolean readSensorSettings = false; 1684 for (int capability : capabilities) { 1685 if (capability == 1686 CameraCharacteristics. 1687 REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS) { 1688 readSensorSettings = true; 1689 break; 1690 } 1691 } 1692 if (readSensorSettings) { 1693 logMsg.append(String.format( 1694 "sens=%d, exp=%.1fms, dur=%.1fms, ", 1695 result.get(CaptureResult.SENSOR_SENSITIVITY), 1696 result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue() / 1000000.0f, 1697 result.get(CaptureResult.SENSOR_FRAME_DURATION).intValue() / 1698 1000000.0f)); 1699 } 1700 if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) { 1701 logMsg.append(String.format( 1702 "gains=[%.1f, %.1f, %.1f, %.1f], ", 1703 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(), 1704 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(), 1705 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(), 1706 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue())); 1707 } else { 1708 logMsg.append("gains=[], "); 1709 } 1710 if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) { 1711 logMsg.append(String.format( 1712 "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ", 1713 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)), 1714 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)), 1715 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)), 1716 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)), 1717 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)), 1718 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)), 1719 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)), 1720 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)), 1721 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2)))); 1722 } else { 1723 logMsg.append("xform=[], "); 1724 } 1725 logMsg.append(String.format( 1726 "foc=%.1f", 1727 result.get(CaptureResult.LENS_FOCUS_DISTANCE))); 1728 Logt.i(TAG, logMsg.toString()); 1729 1730 if (result.get(CaptureResult.CONTROL_AE_STATE) != null) { 1731 mConvergedAE = result.get(CaptureResult.CONTROL_AE_STATE) == 1732 CaptureResult.CONTROL_AE_STATE_CONVERGED || 1733 result.get(CaptureResult.CONTROL_AE_STATE) == 1734 CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED || 1735 result.get(CaptureResult.CONTROL_AE_STATE) == 1736 CaptureResult.CONTROL_AE_STATE_LOCKED; 1737 mLockedAE = result.get(CaptureResult.CONTROL_AE_STATE) == 1738 CaptureResult.CONTROL_AE_STATE_LOCKED; 1739 } 1740 if (result.get(CaptureResult.CONTROL_AF_STATE) != null) { 1741 mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE) == 1742 CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED; 1743 } 1744 if (result.get(CaptureResult.CONTROL_AWB_STATE) != null) { 1745 mConvergedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) == 1746 CaptureResult.CONTROL_AWB_STATE_CONVERGED || 1747 result.get(CaptureResult.CONTROL_AWB_STATE) == 1748 CaptureResult.CONTROL_AWB_STATE_LOCKED; 1749 mLockedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) == 1750 CaptureResult.CONTROL_AWB_STATE_LOCKED; 1751 } 1752 1753 if (mConvergedAE && (!mNeedsLockedAE || mLockedAE)) { 1754 if (result.get(CaptureResult.SENSOR_SENSITIVITY) != null 1755 && result.get(CaptureResult.SENSOR_EXPOSURE_TIME) != null) { 1756 mSocketRunnableObj.sendResponse("aeResult", String.format("%d %d", 1757 result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(), 1758 result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue() 1759 )); 1760 } else { 1761 Logt.i(TAG, String.format( 1762 "AE converged but NULL exposure values, sensitivity:%b, expTime:%b", 1763 result.get(CaptureResult.SENSOR_SENSITIVITY) == null, 1764 result.get(CaptureResult.SENSOR_EXPOSURE_TIME) == null)); 1765 } 1766 } 1767 1768 if (mConvergedAF) { 1769 if (result.get(CaptureResult.LENS_FOCUS_DISTANCE) != null) { 1770 mSocketRunnableObj.sendResponse("afResult", String.format("%f", 1771 result.get(CaptureResult.LENS_FOCUS_DISTANCE) 1772 )); 1773 } else { 1774 Logt.i(TAG, "AF converged but NULL focus distance values"); 1775 } 1776 } 1777 1778 if (mConvergedAWB && (!mNeedsLockedAWB || mLockedAWB)) { 1779 if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null 1780 && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) { 1781 mSocketRunnableObj.sendResponse("awbResult", String.format( 1782 "%f %f %f %f %f %f %f %f %f %f %f %f %f", 1783 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(), 1784 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(), 1785 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(), 1786 result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue(), 1787 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)), 1788 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)), 1789 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)), 1790 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)), 1791 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)), 1792 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)), 1793 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)), 1794 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)), 1795 r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2)) 1796 )); 1797 } else { 1798 Logt.i(TAG, String.format( 1799 "AWB converged but NULL color correction values, gains:%b, ccm:%b", 1800 result.get(CaptureResult.COLOR_CORRECTION_GAINS) == null, 1801 result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) == null)); 1802 } 1803 } 1804 1805 if (mIssuedRequest3A) { 1806 mIssuedRequest3A = false; 1807 mInterlock3A.open(); 1808 } else { 1809 int count = mCountCapRes.getAndIncrement(); 1810 mCaptureResults[count] = result; 1811 mSocketRunnableObj.sendResponseCaptureResult(mCameraCharacteristics, 1812 request, result, mOutputImageReaders); 1813 synchronized(mCountCallbacksRemaining) { 1814 mCountCallbacksRemaining.decrementAndGet(); 1815 mCountCallbacksRemaining.notify(); 1816 } 1817 } 1818 } catch (ItsException e) { 1819 Logt.e(TAG, "Script error: ", e); 1820 } catch (Exception e) { 1821 Logt.e(TAG, "Script error: ", e); 1822 } 1823 } 1824 1825 @Override 1826 public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, 1827 CaptureFailure failure) { 1828 Logt.e(TAG, "Script error: capture failed"); 1829 } 1830 }; 1831 1832 private class CaptureCallbackWaiter extends CameraCaptureSession.CaptureCallback { 1833 private final LinkedBlockingQueue<TotalCaptureResult> mResultQueue = 1834 new LinkedBlockingQueue<>(); 1835 1836 @Override 1837 public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, 1838 long timestamp, long frameNumber) { 1839 } 1840 1841 @Override 1842 public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, 1843 TotalCaptureResult result) { 1844 try { 1845 mResultQueue.put(result); 1846 } catch (InterruptedException e) { 1847 throw new UnsupportedOperationException( 1848 "Can't handle InterruptedException in onImageAvailable"); 1849 } 1850 } 1851 1852 @Override 1853 public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, 1854 CaptureFailure failure) { 1855 Logt.e(TAG, "Script error: capture failed"); 1856 } 1857 1858 public TotalCaptureResult getResult(long timeoutMs) throws ItsException { 1859 TotalCaptureResult result; 1860 try { 1861 result = mResultQueue.poll(timeoutMs, TimeUnit.MILLISECONDS); 1862 } catch (InterruptedException e) { 1863 throw new ItsException(e); 1864 } 1865 1866 if (result == null) { 1867 throw new ItsException("Getting an image timed out after " + timeoutMs + 1868 "ms"); 1869 } 1870 1871 return result; 1872 } 1873 } 1874 1875 private static class ImageReaderListenerWaiter implements ImageReader.OnImageAvailableListener { 1876 private final LinkedBlockingQueue<Image> mImageQueue = new LinkedBlockingQueue<>(); 1877 1878 @Override 1879 public void onImageAvailable(ImageReader reader) { 1880 try { 1881 mImageQueue.put(reader.acquireNextImage()); 1882 } catch (InterruptedException e) { 1883 throw new UnsupportedOperationException( 1884 "Can't handle InterruptedException in onImageAvailable"); 1885 } 1886 } 1887 1888 public Image getImage(long timeoutMs) throws ItsException { 1889 Image image; 1890 try { 1891 image = mImageQueue.poll(timeoutMs, TimeUnit.MILLISECONDS); 1892 } catch (InterruptedException e) { 1893 throw new ItsException(e); 1894 } 1895 1896 if (image == null) { 1897 throw new ItsException("Getting an image timed out after " + timeoutMs + 1898 "ms"); 1899 } 1900 return image; 1901 } 1902 } 1903 1904 private int getReprocessInputFormat(JSONObject params) throws ItsException { 1905 String reprocessFormat; 1906 try { 1907 reprocessFormat = params.getString("reprocessFormat"); 1908 } catch (org.json.JSONException e) { 1909 throw new ItsException("Error parsing reprocess format: " + e); 1910 } 1911 1912 if (reprocessFormat.equals("yuv")) { 1913 return ImageFormat.YUV_420_888; 1914 } else if (reprocessFormat.equals("private")) { 1915 return ImageFormat.PRIVATE; 1916 } 1917 1918 throw new ItsException("Uknown reprocess format: " + reprocessFormat); 1919 } 1920 } 1921