1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package com.android.cts.verifier.camera.its; 18 19 import android.app.Service; 20 import android.content.Context; 21 import android.content.Intent; 22 import android.graphics.ImageFormat; 23 import android.hardware.camera2.CameraCaptureSession; 24 import android.hardware.camera2.CameraAccessException; 25 import android.hardware.camera2.CameraCharacteristics; 26 import android.hardware.camera2.CameraDevice; 27 import android.hardware.camera2.CameraManager; 28 import android.hardware.camera2.CaptureFailure; 29 import android.hardware.camera2.CaptureRequest; 30 import android.hardware.camera2.CaptureResult; 31 import android.hardware.camera2.DngCreator; 32 import android.hardware.camera2.TotalCaptureResult; 33 import android.hardware.camera2.params.MeteringRectangle; 34 import android.hardware.Sensor; 35 import android.hardware.SensorEvent; 36 import android.hardware.SensorEventListener; 37 import android.hardware.SensorManager; 38 import android.media.Image; 39 import android.media.ImageReader; 40 import android.net.Uri; 41 import android.os.ConditionVariable; 42 import android.os.Handler; 43 import android.os.HandlerThread; 44 import android.os.IBinder; 45 import android.os.Message; 46 import android.os.Vibrator; 47 import android.util.Log; 48 import android.util.Rational; 49 import android.util.Size; 50 import android.view.Surface; 51 
import com.android.ex.camera2.blocking.BlockingCameraManager;
import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
import com.android.ex.camera2.blocking.BlockingStateCallback;
import com.android.ex.camera2.blocking.BlockingSessionCallback;

import org.json.JSONArray;
import org.json.JSONObject;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.math.BigInteger;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Camera ITS (Image Test Suite) service.
 *
 * Listens on a TCP socket for newline-terminated JSON commands from a host-side
 * test script, drives the camera2 API on the script's behalf (open/close camera,
 * 3A convergence, captures, vibration), and streams JSON responses plus raw
 * capture buffers back over the same socket. Also records motion-sensor traces
 * (it registers itself as a SensorEventListener).
 */
public class ItsService extends Service implements SensorEventListener {
    public static final String TAG = ItsService.class.getSimpleName();

    // Timeouts, in seconds.
    public static final int TIMEOUT_CALLBACK = 3;
    public static final int TIMEOUT_3A = 10;

    // State transition timeouts, in ms.
    private static final long TIMEOUT_IDLE_MS = 2000;
    private static final long TIMEOUT_STATE_MS = 500;

    // Timeout to wait for a capture result after the capture buffer has arrived, in ms.
    private static final long TIMEOUT_CAP_RES = 2000;

    // Max number of buffers each ImageReader may hold concurrently.
    private static final int MAX_CONCURRENT_READER_BUFFERS = 10;

    // Supports at most RAW+YUV+JPEG, one surface each.
    private static final int MAX_NUM_OUTPUT_SURFACES = 3;

    // TCP port the host-side script connects to.
    public static final int SERVERPORT = 6000;

    // JSON keys used in the command objects received from the host script.
    public static final String REGION_KEY = "regions";
    public static final String REGION_AE_KEY = "ae";
    public static final String REGION_AWB_KEY = "awb";
    public static final String REGION_AF_KEY = "af";
    public static final String LOCK_AE_KEY = "aeLock";
    public static final String LOCK_AWB_KEY = "awbLock";
    public static final String TRIGGER_KEY = "triggers";
    public static final String TRIGGER_AE_KEY = "ae";
    public static final String TRIGGER_AF_KEY = "af";
    public static final String VIB_PATTERN_KEY = "pattern";
    public static final String EVCOMP_KEY = "evComp";

    // Camera2 plumbing; the blocking wrappers turn async open/session callbacks
    // into synchronous waits.
    private CameraManager mCameraManager = null;
    private HandlerThread mCameraThread = null;
    private Handler mCameraHandler = null;
    private BlockingCameraManager mBlockingCameraManager = null;
    private BlockingStateCallback mCameraListener = null;
    private CameraDevice mCamera = null;
    private CameraCaptureSession mSession = null;
    private ImageReader[] mCaptureReaders = null;
    private CameraCharacteristics mCameraCharacteristics = null;

    private Vibrator mVibrator = null;

    // One save thread/handler per output surface, so image saving for different
    // formats can proceed in parallel.
    private HandlerThread mSaveThreads[] = new HandlerThread[MAX_NUM_OUTPUT_SURFACES];
    private Handler mSaveHandlers[] = new Handler[MAX_NUM_OUTPUT_SURFACES];
    private HandlerThread mResultThread = null;
    private Handler mResultHandler = null;

    // Signals all worker loops (serializer, socket) to exit.
    private volatile boolean mThreadExitFlag = false;

    // Socket state. mSocketWriteQueue is drained by SocketWriteRunnable; the two
    // lock objects separate enqueue ordering from drain/close synchronization.
    private volatile ServerSocket mSocket = null;
    private volatile SocketRunnable mSocketRunnableObj = null;
    private volatile BlockingQueue<ByteBuffer> mSocketWriteQueue =
            new LinkedBlockingDeque<ByteBuffer>();
    private final Object mSocketWriteEnqueueLock = new Object();
    private final Object mSocketWriteDrainLock = new Object();

    // Work queue consumed by SerializerRunnable (JSON serialization is slow due
    // to reflection, so it runs off the result thread).
    private volatile BlockingQueue<Object[]> mSerializerQueue =
            new LinkedBlockingDeque<Object[]>();

    // Per-capture-sequence counters, reset at the start of each doCapture.
    private AtomicInteger mCountCallbacksRemaining = new AtomicInteger();
    private AtomicInteger mCountRawOrDng = new AtomicInteger();
    private AtomicInteger mCountRaw10 = new AtomicInteger();
    private AtomicInteger mCountJpg = new AtomicInteger();
    private AtomicInteger mCountYuv = new AtomicInteger();
    private AtomicInteger mCountCapRes = new AtomicInteger();
    private boolean mCaptureRawIsDng;
    private CaptureResult mCaptureResults[] = null;

    // 3A state machine flags; mInterlock3A gates one outstanding 3A frame at a time.
    private volatile ConditionVariable mInterlock3A = new ConditionVariable(true);
    private volatile boolean mIssuedRequest3A = false;
    private volatile boolean mConvergedAE = false;
    private volatile boolean mConvergedAF = false;
    private volatile boolean mConvergedAWB = false;
    private volatile boolean mLockedAE = false;
    private volatile boolean mLockedAWB = false;
    private volatile boolean mNeedsLockedAE = false;
    private volatile boolean mNeedsLockedAWB = false;

    // Plain-data copy of a SensorEvent (the framework recycles SensorEvent objects,
    // so fields are copied out for later serialization).
    class MySensorEvent {
        public Sensor sensor;
        public int accuracy;
        public long timestamp;
        public float values[];
    }

    // For capturing motion sensor traces.
    private SensorManager mSensorManager = null;
    private Sensor mAccelSensor = null;
    private Sensor mMagSensor = null;
    private Sensor mGyroSensor = null;
    private volatile LinkedList<MySensorEvent> mEvents = null;
    private volatile Object mEventLock = new Object();
    private volatile boolean mEventsEnabled = false;

    // Callback invoked with each captured Image (see createAvailableListener).
    public interface CaptureCallback {
        void onCaptureAvailable(Image capture);
    }

    public abstract class CaptureResultListener extends CameraCaptureSession.CaptureCallback {}

    // This service is started, not bound; binding is unsupported.
    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    /**
     * Service startup: grabs camera/sensor/vibrator system services, registers
     * motion-sensor listeners, and spins up all worker threads (save threads,
     * serializer, result thread, camera thread, socket thread).
     */
    @Override
    public void onCreate() {
        try {
            mThreadExitFlag = false;

            // Get handle to camera manager.
            mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
            if (mCameraManager == null) {
                throw new ItsException("Failed to connect to camera manager");
            }
            mBlockingCameraManager = new BlockingCameraManager(mCameraManager);
            mCameraListener = new BlockingStateCallback();

            // Register for motion events. Events are delivered at the fastest rate;
            // they are only recorded while mEventsEnabled is set (see onSensorChanged,
            // defined elsewhere in this file).
            mEvents = new LinkedList<MySensorEvent>();
            mSensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE);
            mAccelSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
            mMagSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
            mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE);
            mSensorManager.registerListener(this, mAccelSensor, SensorManager.SENSOR_DELAY_FASTEST);
            mSensorManager.registerListener(this, mMagSensor, SensorManager.SENSOR_DELAY_FASTEST);
            mSensorManager.registerListener(this, mGyroSensor, SensorManager.SENSOR_DELAY_FASTEST);

            // Get a handle to the system vibrator.
            mVibrator = (Vibrator)getSystemService(Context.VIBRATOR_SERVICE);

            // Create threads to receive images and save them: one per output surface.
            for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) {
                mSaveThreads[i] = new HandlerThread("SaveThread" + i);
                mSaveThreads[i].start();
                mSaveHandlers[i] = new Handler(mSaveThreads[i].getLooper());
            }

            // Create a thread to handle object serialization.
            // NOTE(review): stray second ';' below is a harmless empty statement.
            (new Thread(new SerializerRunnable())).start();;

            // Create a thread to receive capture results and process them.
            mResultThread = new HandlerThread("ResultThread");
            mResultThread.start();
            mResultHandler = new Handler(mResultThread.getLooper());

            // Create a thread for the camera device.
            mCameraThread = new HandlerThread("ItsCameraThread");
            mCameraThread.start();
            mCameraHandler = new Handler(mCameraThread.getLooper());

            // Create a thread to process commands, listening on a TCP socket.
            mSocketRunnableObj = new SocketRunnable();
            (new Thread(mSocketRunnableObj)).start();
        } catch (ItsException e) {
            Logt.e(TAG, "Service failed to start: ", e);
        }
    }

    /**
     * Waits (busy-sleep) until the socket thread has created the server socket,
     * then logs readiness. Returns START_STICKY so the system restarts the
     * service if it is killed.
     */
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        try {
            // Just log a message indicating that the service is running and is able to accept
            // socket connections.
            while (!mThreadExitFlag && mSocket==null) {
                Thread.sleep(1);
            }
            if (!mThreadExitFlag){
                Logt.i(TAG, "ItsService ready");
            } else {
                Logt.e(TAG, "Starting ItsService in bad state");
            }
        } catch (java.lang.InterruptedException e) {
            Logt.e(TAG, "Error starting ItsService (interrupted)", e);
        }
        return START_STICKY;
    }

    // Tears down all worker threads. Save threads use quit() (pending saves are
    // dropped); result/camera threads use quitSafely() (pending messages drain).
    @Override
    public void onDestroy() {
        mThreadExitFlag = true;
        for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) {
            if (mSaveThreads[i] != null) {
                mSaveThreads[i].quit();
                mSaveThreads[i] = null;
            }
        }
        if (mResultThread != null) {
            mResultThread.quitSafely();
            mResultThread = null;
        }
        if (mCameraThread != null) {
            mCameraThread.quitSafely();
            mCameraThread = null;
        }
    }

    /**
     * Opens the camera and caches its characteristics, then acks over the socket.
     *
     * @param cameraId index into the camera ID list returned by getCameraIdList()
     *        — note this is an array index, not necessarily the camera's own ID
     *        string; an out-of-range value throws ArrayIndexOutOfBoundsException
     *        rather than ItsException (NOTE(review): no bounds check here).
     * @throws ItsException on enumeration or open failure
     */
    public void openCameraDevice(int cameraId) throws ItsException {
        Logt.i(TAG, String.format("Opening camera %d", cameraId));

        String[] devices;
        try {
            devices = mCameraManager.getCameraIdList();
            if (devices == null || devices.length == 0) {
                throw new ItsException("No camera devices");
            }
        } catch (CameraAccessException e) {
            throw new ItsException("Failed to get device ID list", e);
        }

        try {
            // Blocking open: returns only once the device is usable (or throws).
            mCamera = mBlockingCameraManager.openCamera(devices[cameraId],
                    mCameraListener, mCameraHandler);
            mCameraCharacteristics = mCameraManager.getCameraCharacteristics(
                    devices[cameraId]);
        } catch (CameraAccessException e) {
            throw new ItsException("Failed to open camera", e);
        } catch (BlockingOpenException e) {
            throw new
                    ItsException("Failed to open camera (after blocking)", e);
        }
        mSocketRunnableObj.sendResponse("cameraOpened", "");
    }

    /**
     * Closes the camera device (if open) and acks over the socket.
     * NOTE(review): the caught exception is not chained into the ItsException
     * below, so the original failure cause is lost.
     */
    public void closeCameraDevice() throws ItsException {
        try {
            if (mCamera != null) {
                Logt.i(TAG, "Closing camera");
                mCamera.close();
                mCamera = null;
            }
        } catch (Exception e) {
            throw new ItsException("Failed to close device");
        }
        mSocketRunnableObj.sendResponse("cameraClosed", "");
    }

    class SerializerRunnable implements Runnable {
        // Use a separate thread to perform JSON serialization (since this can be slow due to
        // the reflection).
        //
        // Each queue entry is an Object[] containing exactly one String tag plus
        // any mix of CameraCharacteristics / CaptureRequest / CaptureResult /
        // JSONArray payloads; the assembled JSON is sent as one socket response.
        // Any error (or interruption) terminates the loop — and with it the thread.
        @Override
        public void run() {
            Logt.i(TAG, "Serializer thread starting");
            while (! mThreadExitFlag) {
                try {
                    Object objs[] = mSerializerQueue.take();
                    JSONObject jsonObj = new JSONObject();
                    String tag = null;
                    for (int i = 0; i < objs.length; i++) {
                        Object obj = objs[i];
                        if (obj instanceof String) {
                            if (tag != null) {
                                throw new ItsException("Multiple tags for socket response");
                            }
                            tag = (String)obj;
                        } else if (obj instanceof CameraCharacteristics) {
                            jsonObj.put("cameraProperties", ItsSerializer.serialize(
                                    (CameraCharacteristics)obj));
                        } else if (obj instanceof CaptureRequest) {
                            jsonObj.put("captureRequest", ItsSerializer.serialize(
                                    (CaptureRequest)obj));
                        } else if (obj instanceof CaptureResult) {
                            jsonObj.put("captureResult", ItsSerializer.serialize(
                                    (CaptureResult)obj));
                        } else if (obj instanceof JSONArray) {
                            jsonObj.put("outputs", (JSONArray)obj);
                        } else {
                            // (Typo "serialiation" is in the runtime string; left as-is.)
                            throw new ItsException("Invalid object received for serialiation");
                        }
                    }
                    if (tag == null) {
                        throw new ItsException("No tag provided for socket response");
                    }
                    mSocketRunnableObj.sendResponse(tag, null, jsonObj, null);
                    Logt.i(TAG, String.format("Serialized %s", tag));
                } catch (org.json.JSONException e) {
                    Logt.e(TAG, "Error serializing object", e);
                    break;
                } catch (ItsException e) {
                    Logt.e(TAG, "Error serializing object", e);
                    break;
                } catch (java.lang.InterruptedException e) {
                    Logt.e(TAG, "Error serializing object (interrupted)", e);
                    break;
                }
            }
            Logt.i(TAG, "Serializer thread terminated");
        }
    }

    class SocketWriteRunnable implements Runnable {

        // Use a separate thread to service a queue of objects to be written to the socket,
        // writing each sequentially in order. This is needed since different handler functions
        // (called on different threads) will need to send data back to the host script.

        public Socket mOpenSocket = null;

        public SocketWriteRunnable(Socket openSocket) {
            mOpenSocket = openSocket;
        }

        // Called by SocketRunnable whenever the client (re)connects or disconnects.
        public void setOpenSocket(Socket openSocket) {
            mOpenSocket = openSocket;
        }

        /**
         * Drains mSocketWriteQueue, writing each ByteBuffer to the open socket in
         * FIFO order. Holds mSocketWriteDrainLock while writing so SocketRunnable
         * can close the socket without racing an in-flight write. Buffers taken
         * while no socket is open are silently dropped. Exits on I/O error or
         * interruption.
         */
        @Override
        public void run() {
            Logt.i(TAG, "Socket writer thread starting");
            while (true) {
                try {
                    ByteBuffer b = mSocketWriteQueue.take();
                    synchronized(mSocketWriteDrainLock) {
                        if (mOpenSocket == null) {
                            continue;
                        }
                        if (b.hasArray()) {
                            mOpenSocket.getOutputStream().write(b.array());
                        } else {
                            // Direct buffer: copy out to a heap array first.
                            byte[] barray = new byte[b.capacity()];
                            b.get(barray);
                            mOpenSocket.getOutputStream().write(barray);
                        }
                        mOpenSocket.getOutputStream().flush();
                        Logt.i(TAG, String.format("Wrote to socket: %d bytes", b.capacity()));
                    }
                } catch (IOException e) {
                    Logt.e(TAG, "Error writing to socket", e);
                    break;
                } catch (java.lang.InterruptedException e) {
                    Logt.e(TAG, "Error writing to socket (interrupted)", e);
                    break;
                }
            }
            Logt.i(TAG, "Socket writer thread terminated");
        }
    }

    class SocketRunnable implements Runnable {

        // Format of sent messages (over the socket):
        // * Serialized JSON object on a single line (newline-terminated)
        // * For byte buffers, the binary data then follows
        //
        // Format of received messages (from the
        // socket):
        // * Serialized JSON object on a single line (newline-terminated)

        private Socket mOpenSocket = null;
        private SocketWriteRunnable mSocketWriteRunnable = null;

        /**
         * Server loop: binds the TCP server socket, starts the writer thread,
         * then repeatedly accepts one client connection at a time and processes
         * newline-terminated JSON commands until the host disconnects or an
         * error occurs.
         */
        @Override
        public void run() {
            Logt.i(TAG, "Socket thread starting");
            try {
                mSocket = new ServerSocket(SERVERPORT);
            } catch (IOException e) {
                // NOTE(review): on bind failure mSocket stays null, yet the loop
                // below still calls mSocket.accept() — that would NPE. Confirm
                // whether this path should bail out instead.
                Logt.e(TAG, "Failed to create socket", e);
            }

            // Create a new thread to handle writes to this socket.
            mSocketWriteRunnable = new SocketWriteRunnable(null);
            (new Thread(mSocketWriteRunnable)).start();

            while (!mThreadExitFlag) {
                // Receive the socket-open request from the host.
                try {
                    Logt.i(TAG, "Waiting for client to connect to socket");
                    mOpenSocket = mSocket.accept();
                    if (mOpenSocket == null) {
                        Logt.e(TAG, "Socket connection error");
                        break;
                    }
                    // Drop any responses queued for a previous connection.
                    mSocketWriteQueue.clear();
                    mSocketWriteRunnable.setOpenSocket(mOpenSocket);
                    Logt.i(TAG, "Socket connected");
                } catch (IOException e) {
                    Logt.e(TAG, "Socket open error: ", e);
                    break;
                }

                // Process commands over the open socket.
                while (!mThreadExitFlag) {
                    try {
                        BufferedReader input = new BufferedReader(
                                new InputStreamReader(mOpenSocket.getInputStream()));
                        // NOTE(review): 'new' never returns null — this check is dead.
                        if (input == null) {
                            Logt.e(TAG, "Failed to get socket input stream");
                            break;
                        }
                        String line = input.readLine();
                        if (line == null) {
                            // (Typo "retuned" is in the runtime log string; left as-is.)
                            Logt.i(TAG, "Socket readline retuned null (host disconnected)");
                            break;
                        }
                        processSocketCommand(line);
                    } catch (IOException e) {
                        Logt.e(TAG, "Socket read error: ", e);
                        break;
                    } catch (ItsException e) {
                        Logt.e(TAG, "Script error: ", e);
                        break;
                    }
                }

                // Close socket and go back to waiting for a new connection.
                try {
                    // Hold the drain lock so the writer thread isn't mid-write on
                    // a socket we are closing.
                    synchronized(mSocketWriteDrainLock) {
                        mSocketWriteQueue.clear();
                        mOpenSocket.close();
                        mOpenSocket = null;
                        Logt.i(TAG, "Socket disconnected");
                    }
                } catch (java.io.IOException e) {
                    Logt.e(TAG, "Exception closing socket");
                }
            }

            // It's an overall error state if the code gets here; no recovery.
            // Try to do some cleanup, but the service probably needs to be restarted.
            Logt.i(TAG, "Socket server loop exited");
            mThreadExitFlag = true;
            try {
                if (mOpenSocket != null) {
                    mOpenSocket.close();
                    mOpenSocket = null;
                }
            } catch (java.io.IOException e) {
                Logt.w(TAG, "Exception closing socket");
            }
            try {
                if (mSocket != null) {
                    mSocket.close();
                    mSocket = null;
                }
            } catch (java.io.IOException e) {
                Logt.w(TAG, "Exception closing socket");
            }
        }

        /**
         * Dispatches one JSON command from the host to the matching handler.
         * Malformed JSON is logged and swallowed (the connection stays up);
         * handler failures propagate as ItsException.
         *
         * @param cmd one line of serialized JSON with a "cmdName" field
         */
        public void processSocketCommand(String cmd)
                throws ItsException {
            // Each command is a serialized JSON object.
            try {
                JSONObject cmdObj = new JSONObject(cmd);
                if ("open".equals(cmdObj.getString("cmdName"))) {
                    int cameraId = cmdObj.getInt("cameraId");
                    openCameraDevice(cameraId);
                } else if ("close".equals(cmdObj.getString("cmdName"))) {
                    closeCameraDevice();
                } else if ("getCameraProperties".equals(cmdObj.getString("cmdName"))) {
                    doGetProps();
                } else if ("startSensorEvents".equals(cmdObj.getString("cmdName"))) {
                    doStartSensorEvents();
                } else if ("getSensorEvents".equals(cmdObj.getString("cmdName"))) {
                    doGetSensorEvents();
                } else if ("do3A".equals(cmdObj.getString("cmdName"))) {
                    do3A(cmdObj);
                } else if ("doCapture".equals(cmdObj.getString("cmdName"))) {
                    doCapture(cmdObj);
                } else if ("doVibrate".equals(cmdObj.getString("cmdName"))) {
                    doVibrate(cmdObj);
                } else {
                    throw new ItsException("Unknown command: " + cmd);
                }
            } catch (org.json.JSONException e) {
                Logt.e(TAG, "Invalid command: ", e);
            }
        }

        public
        /**
         * Core response sender: builds a one-line JSON envelope {tag, strValue?,
         * objValue?, bufValueSize?} and enqueues it (followed by the raw buffer,
         * if any) for the writer thread. The enqueue lock keeps the JSON header
         * and its binary payload adjacent in the queue.
         *
         * @param tag  response tag the host script keys on
         * @param str  optional string payload
         * @param obj  optional JSON payload
         * @param bbuf optional binary payload, sent immediately after the JSON line
         */
        void sendResponse(String tag, String str, JSONObject obj, ByteBuffer bbuf)
                throws ItsException {
            try {
                JSONObject jsonObj = new JSONObject();
                jsonObj.put("tag", tag);
                if (str != null) {
                    jsonObj.put("strValue", str);
                }
                if (obj != null) {
                    jsonObj.put("objValue", obj);
                }
                if (bbuf != null) {
                    jsonObj.put("bufValueSize", bbuf.capacity());
                }
                ByteBuffer bstr = ByteBuffer.wrap(
                        (jsonObj.toString()+"\n").getBytes(Charset.defaultCharset()));
                synchronized(mSocketWriteEnqueueLock) {
                    if (bstr != null) {
                        mSocketWriteQueue.put(bstr);
                    }
                    if (bbuf != null) {
                        mSocketWriteQueue.put(bbuf);
                    }
                }
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error: ", e);
            } catch (java.lang.InterruptedException e) {
                throw new ItsException("Socket error: ", e);
            }
        }

        // Convenience overload: tag + string payload only.
        public void sendResponse(String tag, String str)
                throws ItsException {
            sendResponse(tag, str, null, null);
        }

        // Convenience overload: tag + JSON payload only.
        public void sendResponse(String tag, JSONObject obj)
                throws ItsException {
            sendResponse(tag, null, obj, null);
        }

        // Convenience overload: tag + binary capture buffer only.
        public void sendResponseCaptureBuffer(String tag, ByteBuffer bbuf)
                throws ItsException {
            sendResponse(tag, null, null, bbuf);
        }

        /**
         * Sends recorded motion-sensor events, grouped by sensor type into
         * "accel"/"mag"/"gyro" arrays of {time,x,y,z} objects.
         */
        public void sendResponse(LinkedList<MySensorEvent> events)
                throws ItsException {
            try {
                JSONArray accels = new JSONArray();
                JSONArray mags = new JSONArray();
                JSONArray gyros = new JSONArray();
                for (MySensorEvent event : events) {
                    JSONObject obj = new JSONObject();
                    obj.put("time", event.timestamp);
                    obj.put("x", event.values[0]);
                    obj.put("y", event.values[1]);
                    obj.put("z", event.values[2]);
                    if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
                        accels.put(obj);
                    } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) {
                        mags.put(obj);
                    } else if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) {
                        gyros.put(obj);
                    }
                }
                JSONObject obj = new JSONObject();
                obj.put("accel", accels);
                obj.put("mag", mags);
                obj.put("gyro", gyros);
                sendResponse("sensorEvents", null, obj, null);
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error: ", e);
            }
        }

        /**
         * Queues the camera characteristics for serialization on the serializer
         * thread (tagged "cameraProperties").
         */
        public void sendResponse(CameraCharacteristics props)
                throws ItsException {
            try {
                Object objs[] = new Object[2];
                objs[0] = "cameraProperties";
                objs[1] = props;
                mSerializerQueue.put(objs);
            } catch (InterruptedException e) {
                throw new ItsException("Interrupted: ", e);
            }
        }

        /**
         * Queues a full capture result bundle (tagged "captureResults") for
         * serialization: characteristics, request, result, plus a JSON summary
         * of each output surface's width/height/format.
         *
         * @throws ItsException if a reader has an unrecognized image format
         */
        public void sendResponseCaptureResult(CameraCharacteristics props,
                                              CaptureRequest request,
                                              CaptureResult result,
                                              ImageReader[] readers)
                throws ItsException {
            try {
                JSONArray jsonSurfaces = new JSONArray();
                for (int i = 0; i < readers.length; i++) {
                    JSONObject jsonSurface = new JSONObject();
                    jsonSurface.put("width", readers[i].getWidth());
                    jsonSurface.put("height", readers[i].getHeight());
                    int format = readers[i].getImageFormat();
                    if (format == ImageFormat.RAW_SENSOR) {
                        jsonSurface.put("format", "raw");
                    } else if (format == ImageFormat.RAW10) {
                        jsonSurface.put("format", "raw10");
                    } else if (format == ImageFormat.JPEG) {
                        jsonSurface.put("format", "jpeg");
                    } else if (format == ImageFormat.YUV_420_888) {
                        jsonSurface.put("format", "yuv");
                    } else {
                        throw new ItsException("Invalid format");
                    }
                    jsonSurfaces.put(jsonSurface);
                }

                Object objs[] = new Object[5];
                objs[0] = "captureResults";
                objs[1] = props;
                objs[2] = request;
                objs[3] = result;
                objs[4] = jsonSurfaces;
                mSerializerQueue.put(objs);
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error: ", e);
            } catch (InterruptedException e) {
                throw new ItsException("Interrupted: ", e);
            }
        }
    }

    /**
     * Returns an ImageReader listener that hands each acquired Image to the
     * given callback, then always closes the Image so the reader buffer is
     * recycled.
     */
    public ImageReader.OnImageAvailableListener
            createAvailableListener(final
            CaptureCallback listener) {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = null;
                try {
                    i = reader.acquireNextImage();
                    listener.onCaptureAvailable(i);
                } finally {
                    // Always recycle the buffer, even if the callback throws.
                    if (i != null) {
                        i.close();
                    }
                }
            }
        };
    }

    /**
     * Returns a listener that immediately discards each image (used during 3A,
     * where only the capture results matter, not the frames).
     * NOTE(review): acquireNextImage() can return null; i.close() would then NPE
     * — confirm whether that can occur on this path.
     */
    private ImageReader.OnImageAvailableListener
            createAvailableListenerDropper(final CaptureCallback listener) {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = reader.acquireNextImage();
                i.close();
            }
        };
    }

    // Enables motion-sensor event recording and acks the host.
    private void doStartSensorEvents() throws ItsException {
        synchronized(mEventLock) {
            mEventsEnabled = true;
        }
        mSocketRunnableObj.sendResponse("sensorEventsStarted", "");
    }

    // Sends all recorded sensor events to the host, then clears the trace and
    // stops recording.
    private void doGetSensorEvents() throws ItsException {
        synchronized(mEventLock) {
            mSocketRunnableObj.sendResponse(mEvents);
            mEvents.clear();
            mEventsEnabled = false;
        }
    }

    // Sends the cached camera characteristics (requires an open camera).
    private void doGetProps() throws ItsException {
        mSocketRunnableObj.sendResponse(mCameraCharacteristics);
    }

    /**
     * (Re)allocates the capture ImageReaders: closes any existing readers, then
     * creates one reader per requested surface with the given size and format.
     */
    private void prepareCaptureReader(int[] widths, int[] heights, int formats[], int numSurfaces) {
        if (mCaptureReaders != null) {
            for (int i = 0; i < mCaptureReaders.length; i++) {
                if (mCaptureReaders[i] != null) {
                    mCaptureReaders[i].close();
                }
            }
        }
        mCaptureReaders = new ImageReader[numSurfaces];
        for (int i = 0; i < numSurfaces; i++) {
            mCaptureReaders[i] = ImageReader.newInstance(widths[i], heights[i], formats[i],
                    MAX_CONCURRENT_READER_BUFFERS);
        }
    }

    /**
     * Runs a 3A (AE/AF/AWB) convergence loop per the host's parameters
     * (metering regions, AE/AWB lock, EV compensation, trigger overrides),
     * issuing one capture request at a time until all requested states
     * converge or TIMEOUT_3A elapses.
     */
    private void do3A(JSONObject params) throws ItsException {
        try {
            // Start a 3A action, and wait for it to converge.
            // Get the converged values for each "A", and package into JSON result for caller.

            // 3A happens on full-res frames.
            Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
            int widths[] = new int[1];
            int heights[] = new int[1];
            int formats[] = new int[1];
            widths[0] = sizes[0].getWidth();
            heights[0] = sizes[0].getHeight();
            formats[0] = ImageFormat.YUV_420_888;
            int width = widths[0];
            int height = heights[0];

            // Single YUV output surface for the 3A preview stream.
            prepareCaptureReader(widths, heights, formats, 1);
            List<Surface> outputSurfaces = new ArrayList<Surface>(1);
            outputSurfaces.add(mCaptureReaders[0].getSurface());
            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
            mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
            mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

            // Add a listener that just recycles buffers; they aren't saved anywhere.
            ImageReader.OnImageAvailableListener readerListener =
                    createAvailableListenerDropper(mCaptureCallback);
            mCaptureReaders[0].setOnImageAvailableListener(readerListener, mSaveHandlers[0]);

            // Get the user-specified regions for AE, AWB, AF.
            // Note that the user specifies normalized [x,y,w,h], which is converted below
            // to an [x0,y0,x1,y1] region in sensor coords. The capture request region
            // also has a fifth "weight" element: [x0,y0,x1,y1,w].
            // Default: whole frame, weight 1.
            MeteringRectangle[] regionAE = new MeteringRectangle[]{
                new MeteringRectangle(0,0,width,height,1)};
            MeteringRectangle[] regionAF = new MeteringRectangle[]{
                new MeteringRectangle(0,0,width,height,1)};
            MeteringRectangle[] regionAWB = new MeteringRectangle[]{
                new MeteringRectangle(0,0,width,height,1)};
            if (params.has(REGION_KEY)) {
                JSONObject regions = params.getJSONObject(REGION_KEY);
                if (regions.has(REGION_AE_KEY)) {
                    regionAE = ItsUtils.getJsonWeightedRectsFromArray(
                            regions.getJSONArray(REGION_AE_KEY), true, width, height);
                }
                if (regions.has(REGION_AF_KEY)) {
                    regionAF = ItsUtils.getJsonWeightedRectsFromArray(
                            regions.getJSONArray(REGION_AF_KEY), true, width, height);
                }
                if (regions.has(REGION_AWB_KEY)) {
                    regionAWB = ItsUtils.getJsonWeightedRectsFromArray(
                            regions.getJSONArray(REGION_AWB_KEY), true, width, height);
                }
            }

            // If AE or AWB lock is specified, then the 3A will converge first and then lock these
            // values, waiting until the HAL has reported that the lock was successful.
            mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false);
            mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false);

            // An EV compensation can be specified as part of AE convergence.
            int evComp = params.optInt(EVCOMP_KEY, 0);
            if (evComp != 0) {
                Logt.i(TAG, String.format("Running 3A with AE exposure compensation value: %d", evComp));
            }

            // By default, AE and AF both get triggered, but the user can optionally override this.
            // Also, AF won't get triggered if the lens is fixed-focus.
            boolean doAE = true;
            boolean doAF = true;
            if (params.has(TRIGGER_KEY)) {
                JSONObject triggers = params.getJSONObject(TRIGGER_KEY);
                if (triggers.has(TRIGGER_AE_KEY)) {
                    doAE = triggers.getBoolean(TRIGGER_AE_KEY);
                }
                if (triggers.has(TRIGGER_AF_KEY)) {
                    doAF = triggers.getBoolean(TRIGGER_AF_KEY);
                }
            }
            // NOTE(review): get() returns a boxed Float that is unboxed for the
            // '== 0' comparison; if the key is absent (null) this NPEs — confirm
            // the key is guaranteed present on all tested devices.
            if (doAF && mCameraCharacteristics.get(
                            CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) == 0) {
                // Send a dummy result back for the code that is waiting for this message to see
                // that AF has converged.
                Logt.i(TAG, "Ignoring request for AF on fixed-focus camera");
                mSocketRunnableObj.sendResponse("afResult", "0.0");
                doAF = false;
            }

            // Reset all shared 3A state (written by the capture-result listener
            // on the result thread, read here).
            mInterlock3A.open();
            mIssuedRequest3A = false;
            mConvergedAE = false;
            mConvergedAWB = false;
            mConvergedAF = false;
            mLockedAE = false;
            mLockedAWB = false;
            long tstart = System.currentTimeMillis();
            boolean triggeredAE = false;
            boolean triggeredAF = false;

            Logt.i(TAG, String.format("Initiating 3A: AE:%d, AF:%d, AWB:1, AELOCK:%d, AWBLOCK:%d",
                    doAE?1:0, doAF?1:0, mNeedsLockedAE?1:0, mNeedsLockedAWB?1:0));

            // Keep issuing capture requests until 3A has converged.
            while (true) {

                // Block until can take the next 3A frame. Only want one outstanding frame
                // at a time, to simplify the logic here.
                if (!mInterlock3A.block(TIMEOUT_3A * 1000) ||
                        System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException(
                            "3A failed to converge after " + TIMEOUT_3A + " seconds.\n" +
                            "AE converge state: " + mConvergedAE + ", \n" +
                            "AF convergence state: " + mConvergedAF + ", \n" +
                            "AWB convergence state: " + mConvergedAWB + ".");
                }
                mInterlock3A.close();

                // If not converged yet, issue another capture request.
                if ( (doAE && (!triggeredAE || !mConvergedAE))
                        || !mConvergedAWB
                        || (doAF && (!triggeredAF || !mConvergedAF))
                        || (doAE && mNeedsLockedAE && !mLockedAE)
                        || (mNeedsLockedAWB && !mLockedAWB)) {

                    // Baseline capture request for 3A.
                    CaptureRequest.Builder req = mCamera.createCaptureRequest(
                            CameraDevice.TEMPLATE_PREVIEW);
                    req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                    req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                            CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
                    req.set(CaptureRequest.CONTROL_AE_MODE,
                            CaptureRequest.CONTROL_AE_MODE_ON);
                    req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
                    req.set(CaptureRequest.CONTROL_AE_LOCK, false);
                    req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
                    req.set(CaptureRequest.CONTROL_AF_MODE,
                            CaptureRequest.CONTROL_AF_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
                    req.set(CaptureRequest.CONTROL_AWB_MODE,
                            CaptureRequest.CONTROL_AWB_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
                    req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);

                    // Overrides the 0 set above when an EV compensation was requested.
                    if (evComp != 0) {
                        req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, evComp);
                    }

                    // Engage the locks only after the corresponding "A" has converged.
                    if (mConvergedAE && mNeedsLockedAE) {
                        req.set(CaptureRequest.CONTROL_AE_LOCK, true);
                    }
                    if (mConvergedAWB && mNeedsLockedAWB) {
                        req.set(CaptureRequest.CONTROL_AWB_LOCK, true);
                    }

                    // Trigger AE first.
                    if (doAE && !triggeredAE) {
                        Logt.i(TAG, "Triggering AE");
                        req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                        triggeredAE = true;
                    }

                    // After AE has converged, trigger AF.
910 if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) { 911 Logt.i(TAG, "Triggering AF"); 912 req.set(CaptureRequest.CONTROL_AF_TRIGGER, 913 CaptureRequest.CONTROL_AF_TRIGGER_START); 914 triggeredAF = true; 915 } 916 917 req.addTarget(mCaptureReaders[0].getSurface()); 918 919 mIssuedRequest3A = true; 920 mSession.capture(req.build(), mCaptureResultListener, mResultHandler); 921 } else { 922 mSocketRunnableObj.sendResponse("3aConverged", ""); 923 Logt.i(TAG, "3A converged"); 924 break; 925 } 926 } 927 } catch (android.hardware.camera2.CameraAccessException e) { 928 throw new ItsException("Access error: ", e); 929 } catch (org.json.JSONException e) { 930 throw new ItsException("JSON error: ", e); 931 } finally { 932 mSocketRunnableObj.sendResponse("3aDone", ""); 933 } 934 } 935 936 private void doVibrate(JSONObject params) throws ItsException { 937 try { 938 if (mVibrator == null) { 939 throw new ItsException("Unable to start vibrator"); 940 } 941 JSONArray patternArray = params.getJSONArray(VIB_PATTERN_KEY); 942 int len = patternArray.length(); 943 long pattern[] = new long[len]; 944 for (int i = 0; i < len; i++) { 945 pattern[i] = patternArray.getLong(i); 946 } 947 Logt.i(TAG, String.format("Starting vibrator, pattern length %d",len)); 948 mVibrator.vibrate(pattern, -1); 949 mSocketRunnableObj.sendResponse("vibrationStarted", ""); 950 } catch (org.json.JSONException e) { 951 throw new ItsException("JSON error: ", e); 952 } 953 } 954 955 private void doCapture(JSONObject params) throws ItsException { 956 try { 957 // Parse the JSON to get the list of capture requests. 958 List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList( 959 mCamera, params); 960 961 // Set the output surface(s) and listeners. 
            // Per-surface configuration; only the first numSurfaces entries are used.
            int widths[] = new int[MAX_NUM_OUTPUT_SURFACES];
            int heights[] = new int[MAX_NUM_OUTPUT_SURFACES];
            int formats[] = new int[MAX_NUM_OUTPUT_SURFACES];
            int numSurfaces = 0;
            try {
                // Reset the per-capture-sequence counters and the results array that
                // the image/result callbacks fill in on their own threads.
                mCountRawOrDng.set(0);
                mCountJpg.set(0);
                mCountYuv.set(0);
                mCountRaw10.set(0);
                mCountCapRes.set(0);
                mCaptureRawIsDng = false;
                mCaptureResults = new CaptureResult[requests.size()];

                JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
                if (jsonOutputSpecs != null) {
                    numSurfaces = jsonOutputSpecs.length();
                    if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) {
                        throw new ItsException("Too many output surfaces");
                    }
                    for (int i = 0; i < numSurfaces; i++) {
                        // Get the specified surface.
                        JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
                        String sformat = surfaceObj.optString("format");
                        Size sizes[];
                        if ("yuv".equals(sformat) || "".equals(sformat)) {
                            // Default to YUV if no format is specified.
                            formats[i] = ImageFormat.YUV_420_888;
                            sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                        } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
                            formats[i] = ImageFormat.JPEG;
                            sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
                        } else if ("raw".equals(sformat)) {
                            formats[i] = ImageFormat.RAW_SENSOR;
                            sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                        } else if ("raw10".equals(sformat)) {
                            formats[i] = ImageFormat.RAW10;
                            sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                        } else if ("dng".equals(sformat)) {
                            // DNG is captured as RAW_SENSOR; the flag tells the image
                            // callback to convert it on receipt.
                            formats[i] = ImageFormat.RAW_SENSOR;
                            sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                            mCaptureRawIsDng = true;
                        } else {
                            throw new ItsException("Unsupported format: " + sformat);
                        }
                        // If the size is omitted, then default to the largest allowed size for the
                        // format.
                        widths[i] = surfaceObj.optInt("width");
                        heights[i] = surfaceObj.optInt("height");
                        if (widths[i] <= 0) {
                            if (sizes == null || sizes.length == 0) {
                                throw new ItsException(String.format(
                                        "Zero stream configs available for requested format: %s",
                                        sformat));
                            }
                            widths[i] = sizes[0].getWidth();
                        }
                        if (heights[i] <= 0) {
                            heights[i] = sizes[0].getHeight();
                        }
                    }
                } else {
                    // No surface(s) specified at all.
                    // Default: a single output surface which is full-res YUV.
                    Size sizes[] =
                            ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                    numSurfaces = 1;
                    widths[0] = sizes[0].getWidth();
                    heights[0] = sizes[0].getHeight();
                    formats[0] = ImageFormat.YUV_420_888;
                }

                // (Re)build the ImageReaders, create the session on them, and hook up
                // one image-available listener per surface.
                prepareCaptureReader(widths, heights, formats, numSurfaces);
                List<Surface> outputSurfaces = new ArrayList<Surface>(numSurfaces);
                for (int i = 0; i < numSurfaces; i++) {
                    outputSurfaces.add(mCaptureReaders[i].getSurface());
                }
                BlockingSessionCallback sessionListener = new BlockingSessionCallback();
                mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
                mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

                for (int i = 0; i < numSurfaces; i++) {
                    ImageReader.OnImageAvailableListener readerListener =
                            createAvailableListener(mCaptureCallback);
                    mCaptureReaders[i].setOnImageAvailableListener(readerListener,mSaveHandlers[i]);
                }

                // Plan for how many callbacks need to be received throughout the duration of this
                // sequence of capture requests. There is one callback per image surface, and one
                // callback for the CaptureResult, for each capture.
                int numCaptures = requests.size();
                mCountCallbacksRemaining.set(numCaptures * (numSurfaces + 1));

            } catch (CameraAccessException e) {
                throw new ItsException("Error configuring outputs", e);
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error", e);
            }

            // Initiate the captures.
            for (int i = 0; i < requests.size(); i++) {
                // For DNG captures, need the LSC map to be available.
                if (mCaptureRawIsDng) {
                    requests.get(i).set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1);
                }

                CaptureRequest.Builder req = requests.get(i);
                for (int j = 0; j < numSurfaces; j++) {
                    req.addTarget(mCaptureReaders[j].getSurface());
                }
                mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
            }

            // Make sure all callbacks have been hit (wait until captures are done).
            // If no timeouts are received after a timeout, then fail.
            int currentCount = mCountCallbacksRemaining.get();
            while (currentCount > 0) {
                try {
                    Thread.sleep(TIMEOUT_CALLBACK*1000);
                } catch (InterruptedException e) {
                    throw new ItsException("Timeout failure", e);
                }
                // Progress check: if the remaining-callback count did not decrease
                // during the sleep window, the sequence is considered stalled.
                int newCount = mCountCallbacksRemaining.get();
                if (newCount == currentCount) {
                    throw new ItsException(
                            "No callback received within timeout");
                }
                currentCount = newCount;
            }
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        }
    }

    /**
     * SensorEventListener callback. While event collection is enabled, stores a deep
     * copy of each incoming sensor event (the framework may reuse the original
     * SensorEvent object, so the values array must be copied).
     */
    @Override
    public final void onSensorChanged(SensorEvent event) {
        synchronized(mEventLock) {
            if (mEventsEnabled) {
                MySensorEvent ev2 = new MySensorEvent();
                ev2.sensor = event.sensor;
                ev2.accuracy = event.accuracy;
                ev2.timestamp = event.timestamp;
                ev2.values = new float[event.values.length];
                System.arraycopy(event.values, 0, ev2.values, 0, event.values.length);
                mEvents.add(ev2);
            }
        }
    }

    /** SensorEventListener callback; accuracy changes are not used by ITS. */
    @Override
    public final void onAccuracyChanged(Sensor sensor, int accuracy) {
    }

    // Receives each captured Image from the ImageReaders and streams it back to the
    // host-side script over the socket, tagged by its format. Runs on the per-surface
    // save handler threads.
    private final CaptureCallback mCaptureCallback = new CaptureCallback() {
        @Override
        public void onCaptureAvailable(Image capture) {
            try {
                int format = capture.getFormat();
                if (format == ImageFormat.JPEG) {
                    Logt.i(TAG, "Received JPEG capture");
                    byte[] img = ItsUtils.getDataFromImage(capture);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    int count = mCountJpg.getAndIncrement();
                    mSocketRunnableObj.sendResponseCaptureBuffer("jpegImage", buf);
                } else if (format == ImageFormat.YUV_420_888) {
                    Logt.i(TAG, "Received YUV capture");
                    byte[] img = ItsUtils.getDataFromImage(capture);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    int count = mCountYuv.getAndIncrement();
                    mSocketRunnableObj.sendResponseCaptureBuffer("yuvImage", buf);
                } else if (format == ImageFormat.RAW10) {
                    Logt.i(TAG, "Received RAW10 capture");
                    byte[] img = ItsUtils.getDataFromImage(capture);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    int count = mCountRaw10.getAndIncrement();
                    mSocketRunnableObj.sendResponseCaptureBuffer("raw10Image", buf);
                } else if (format == ImageFormat.RAW_SENSOR) {
                    Logt.i(TAG, "Received RAW16 capture");
                    // count indexes into mCaptureResults, which is filled by
                    // onCaptureCompleted on the result handler thread.
                    int count = mCountRawOrDng.getAndIncrement();
                    if (! mCaptureRawIsDng) {
                        byte[] img = ItsUtils.getDataFromImage(capture);
                        ByteBuffer buf = ByteBuffer.wrap(img);
                        mSocketRunnableObj.sendResponseCaptureBuffer("rawImage", buf);
                    } else {
                        // Wait until the corresponding capture result is ready, up to a timeout.
                        long t0 = android.os.SystemClock.elapsedRealtime();
                        while (! mThreadExitFlag
                                && android.os.SystemClock.elapsedRealtime()-t0 < TIMEOUT_CAP_RES) {
                            if (mCaptureResults[count] != null) {
                                Logt.i(TAG, "Writing capture as DNG");
                                DngCreator dngCreator = new DngCreator(
                                        mCameraCharacteristics, mCaptureResults[count]);
                                ByteArrayOutputStream dngStream = new ByteArrayOutputStream();
                                dngCreator.writeImage(dngStream, capture);
                                byte[] dngArray = dngStream.toByteArray();
                                ByteBuffer dngBuf = ByteBuffer.wrap(dngArray);
                                mSocketRunnableObj.sendResponseCaptureBuffer("dngImage", dngBuf);
                                break;
                            } else {
                                Thread.sleep(1);
                            }
                        }
                    }
                } else {
                    throw new ItsException("Unsupported image format: " + format);
                }
                // One fewer callback outstanding; doCapture polls this counter.
                mCountCallbacksRemaining.decrementAndGet();
            } catch (IOException e) {
                Logt.e(TAG, "Script error: ", e);
            } catch (InterruptedException e) {
                Logt.e(TAG, "Script error: ", e);
            } catch (ItsException e) {
                Logt.e(TAG, "Script error: ", e);
            }
        }
    };

    /** Converts a Rational to its float value. */
    private static float r2f(Rational r) {
        return (float)r.getNumerator() / (float)r.getDenominator();
    }

    // Receives capture-result metadata for both 3A frames and normal captures, on the
    // result handler thread.
    private final CaptureResultListener mCaptureResultListener = new CaptureResultListener() {
        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
                long timestamp, long frameNumber) {
        }

        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                TotalCaptureResult result) {
            try {
                // Currently result has all 0 values.
                // Validate the callback arguments before dereferencing them.
                if (request == null || result == null) {
                    throw new ItsException("Request/result is invalid");
                }

                // Build one log line summarizing 3A state, exposure, gains, color
                // transform, and focus distance from this result.
                StringBuilder logMsg = new StringBuilder();
                logMsg.append(String.format(
                        "Capt result: AE=%d, AF=%d, AWB=%d, sens=%d, exp=%.1fms, dur=%.1fms, ",
                        result.get(CaptureResult.CONTROL_AE_STATE),
                        result.get(CaptureResult.CONTROL_AF_STATE),
                        result.get(CaptureResult.CONTROL_AWB_STATE),
                        result.get(CaptureResult.SENSOR_SENSITIVITY),
                        result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue() / 1000000.0f,
                        result.get(CaptureResult.SENSOR_FRAME_DURATION).intValue() / 1000000.0f));
                if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) {
                    logMsg.append(String.format(
                            "gains=[%.1f, %.1f, %.1f, %.1f], ",
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(),
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(),
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(),
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue()));
                } else {
                    logMsg.append("gains=[], ");
                }
                if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
                    logMsg.append(String.format(
                            "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ",
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2))));
                } else {
                    logMsg.append("xform=[], ");
                }
                logMsg.append(String.format(
                        "foc=%.1f",
                        result.get(CaptureResult.LENS_FOCUS_DISTANCE)));
                Logt.i(TAG, logMsg.toString());

                // Update the shared convergence/lock flags read by the 3A loop.
                // AE counts as converged when CONVERGED, FLASH_REQUIRED, or LOCKED.
                if (result.get(CaptureResult.CONTROL_AE_STATE) != null) {
                    mConvergedAE = result.get(CaptureResult.CONTROL_AE_STATE) ==
                                                  CaptureResult.CONTROL_AE_STATE_CONVERGED ||
                                   result.get(CaptureResult.CONTROL_AE_STATE) ==
                                                  CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED ||
                                   result.get(CaptureResult.CONTROL_AE_STATE) ==
                                                  CaptureResult.CONTROL_AE_STATE_LOCKED;
                    mLockedAE = result.get(CaptureResult.CONTROL_AE_STATE) ==
                                CaptureResult.CONTROL_AE_STATE_LOCKED;
                }
                if (result.get(CaptureResult.CONTROL_AF_STATE) != null) {
                    mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE) ==
                                   CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED;
                }
                if (result.get(CaptureResult.CONTROL_AWB_STATE) != null) {
                    mConvergedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) ==
                                                   CaptureResult.CONTROL_AWB_STATE_CONVERGED ||
                                    result.get(CaptureResult.CONTROL_AWB_STATE) ==
                                                   CaptureResult.CONTROL_AWB_STATE_LOCKED;
                    mLockedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) ==
                                 CaptureResult.CONTROL_AWB_STATE_LOCKED;
                }

                // Once AE has converged (and locked, if a lock was requested), report
                // the final sensitivity and exposure time to the host script.
                if (mConvergedAE && (!mNeedsLockedAE || mLockedAE)) {
                    if (result.get(CaptureResult.SENSOR_SENSITIVITY) != null
                            && result.get(CaptureResult.SENSOR_EXPOSURE_TIME) != null) {
                        mSocketRunnableObj.sendResponse("aeResult", String.format("%d %d",
                                result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(),
                                result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue()
                                ));
                    } else {
                        Logt.i(TAG, String.format(
                                "AE converged but NULL exposure values, sensitivity:%b, expTime:%b",
                                result.get(CaptureResult.SENSOR_SENSITIVITY) == null,
                                result.get(CaptureResult.SENSOR_EXPOSURE_TIME) == null));
                    }
                }

                // Report the converged focus distance.
                if (mConvergedAF) {
                    if (result.get(CaptureResult.LENS_FOCUS_DISTANCE) != null) {
                        mSocketRunnableObj.sendResponse("afResult", String.format("%f",
                                result.get(CaptureResult.LENS_FOCUS_DISTANCE)
                                ));
                    } else {
                        Logt.i(TAG, "AF converged but NULL focus distance values");
                    }
                }

                // Report the converged AWB gains and 3x3 color-correction transform.
                if (mConvergedAWB && (!mNeedsLockedAWB || mLockedAWB)) {
                    if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null
                            && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
                        mSocketRunnableObj.sendResponse("awbResult", String.format(
                                "%f %f %f %f %f %f %f %f %f %f %f %f %f",
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(),
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(),
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(),
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue(),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2))
                                ));
                    } else {
                        Logt.i(TAG, String.format(
                                "AWB converged but NULL color correction values, gains:%b, ccm:%b",
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS) == null,
                                result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) == null));
                    }
                }

                if (mIssuedRequest3A) {
                    // This result was for a 3A frame: reopen the interlock so the 3A
                    // loop can issue its next request.
                    mIssuedRequest3A = false;
                    mInterlock3A.open();
                } else {
                    // Normal capture: store the result (the DNG image path may be
                    // waiting on it), forward the metadata to the host, and count down
                    // the outstanding-callback counter polled by doCapture.
                    int count = mCountCapRes.getAndIncrement();
                    mCaptureResults[count] = result;
                    mSocketRunnableObj.sendResponseCaptureResult(mCameraCharacteristics,
                            request, result, mCaptureReaders);
                    mCountCallbacksRemaining.decrementAndGet();
                }
            } catch (ItsException e) {
                Logt.e(TAG, "Script error: ", e);
            } catch (Exception e) {
                Logt.e(TAG, "Script error: ", e);
            }
        }

        @Override
        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
                CaptureFailure failure) {
            Logt.e(TAG, "Script error: capture failed");
        }
    };
}