1 /* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.hardware.camera2.legacy; 18 19 import android.graphics.SurfaceTexture; 20 import android.hardware.Camera; 21 import android.hardware.camera2.CameraCharacteristics; 22 import android.hardware.camera2.CaptureRequest; 23 import android.hardware.camera2.impl.CameraDeviceImpl; 24 import android.hardware.camera2.utils.SubmitInfo; 25 import android.hardware.camera2.utils.SizeAreaComparator; 26 import android.hardware.camera2.impl.CameraMetadataNative; 27 import android.os.ConditionVariable; 28 import android.os.Handler; 29 import android.os.Message; 30 import android.os.SystemClock; 31 import android.util.Log; 32 import android.util.MutableLong; 33 import android.util.Pair; 34 import android.util.Size; 35 import android.view.Surface; 36 37 import java.io.IOException; 38 import java.util.ArrayList; 39 import java.util.Collection; 40 import java.util.Collections; 41 import java.util.Iterator; 42 import java.util.List; 43 import java.util.concurrent.TimeUnit; 44 import java.util.concurrent.atomic.AtomicBoolean; 45 46 import static com.android.internal.util.Preconditions.*; 47 48 /** 49 * This class executes requests to the {@link Camera}. 50 * 51 * <p> 52 * The main components of this class are: 53 * - A message queue of requests to the {@link Camera}. 
 * - A thread that consumes requests to the {@link Camera} and executes them.
 * - A {@link GLThreadManager} that draws to the configured output {@link Surface}s.
 * - An {@link CameraDeviceState} state machine that manages the callbacks for various operations.
 * </p>
 */
@SuppressWarnings("deprecation")
public class RequestThreadManager {
    private final String TAG;
    private final int mCameraId;
    private final RequestHandlerThread mRequestThread;

    private static final boolean DEBUG = false;
    // For slightly more spammy messages that will get repeated every frame
    private static final boolean VERBOSE = false;

    // The underlying Camera1 device. Owned by this class; released during MSG_CLEANUP.
    private Camera mCamera;
    private final CameraCharacteristics mCharacteristics;

    private final CameraDeviceState mDeviceState;
    private final CaptureCollector mCaptureCollector;
    private final LegacyFocusStateMapper mFocusStateMapper;
    private final LegacyFaceDetectMapper mFaceDetectMapper;

    // Messages handled on the request thread (see mRequestHandlerCb below).
    private static final int MSG_CONFIGURE_OUTPUTS = 1;
    private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2;
    private static final int MSG_CLEANUP = 3;

    // Maximum number of requests the CaptureCollector allows in flight at once.
    private static final int MAX_IN_FLIGHT_REQUESTS = 2;

    private static final int PREVIEW_FRAME_TIMEOUT = 1000; // ms
    private static final int JPEG_FRAME_TIMEOUT = 4000; // ms (same as CTS for API2)
    private static final int REQUEST_COMPLETE_TIMEOUT = JPEG_FRAME_TIMEOUT;

    // Two sizes are considered the same aspect ratio if their ratios differ by less than this.
    private static final float ASPECT_RATIO_TOLERANCE = 0.01f;
    private boolean mPreviewRunning = false;

    private final List<Surface> mPreviewOutputs = new ArrayList<>();
    private final List<Surface> mCallbackOutputs = new ArrayList<>();
    private GLThreadManager mGLThreadManager;
    private SurfaceTexture mPreviewTexture;
    private Camera.Parameters mParams;

    // Surface IDs of the configured JPEG outputs; used to distinguish JPEG targets
    // from other callback outputs.
    private final List<Long> mJpegSurfaceIds = new ArrayList<>();

    // Preview buffer size chosen in configureOutputs(); null when no preview outputs exist.
    private Size mIntermediateBufferSize;

    private final RequestQueue mRequestQueue = new RequestQueue(mJpegSurfaceIds);
    // Most recently applied request; used to skip redundant Camera1 parameter updates.
    private LegacyRequest mLastRequest = null;
    // Fake preview output used when a JPEG capture arrives with no active preview.
    private SurfaceTexture mDummyTexture;
    private Surface mDummySurface;

    private final Object mIdleLock = new Object();
    private final FpsCounter mPrevCounter = new FpsCounter("Incoming Preview");
    private final FpsCounter mRequestCounter = new FpsCounter("Incoming Requests");

    private final AtomicBoolean mQuit = new AtomicBoolean(false);

    // Stuff JPEGs into HAL_PIXEL_FORMAT_RGBA_8888 gralloc buffers to get around SW write
    // limitations for (b/17379185).
    private static final boolean USE_BLOB_FORMAT_OVERRIDE = true;

    /**
     * Container object for Configure messages.
     */
    private static class ConfigureHolder {
        public final ConditionVariable condition;
        public final Collection<Pair<Surface, Size>> surfaces;

        public ConfigureHolder(ConditionVariable condition, Collection<Pair<Surface,
                Size>> surfaces) {
            this.condition = condition;
            this.surfaces = surfaces;
        }
    }

    /**
     * Counter class used to calculate and log the current FPS of frame production.
     */
    public static class FpsCounter {
        //TODO: Hook this up to SysTrace?
        private static final String TAG = "FpsCounter";
        private int mFrameCount = 0;
        private long mLastTime = 0;
        private long mLastPrintTime = 0;
        private double mLastFps = 0;
        private final String mStreamType;
        private static final long NANO_PER_SECOND = 1000000000; //ns

        public FpsCounter(String streamType) {
            mStreamType = streamType;
        }

        /** Count one frame; recompute the FPS estimate roughly once per second. */
        public synchronized void countFrame() {
            mFrameCount++;
            long nextTime = SystemClock.elapsedRealtimeNanos();
            if (mLastTime == 0) {
                mLastTime = nextTime;
            }
            if (nextTime > mLastTime + NANO_PER_SECOND) {
                long elapsed = nextTime - mLastTime;
                mLastFps = mFrameCount * (NANO_PER_SECOND / (double) elapsed);
                mFrameCount = 0;
                mLastTime = nextTime;
            }
        }

        /** Return the most recently computed FPS value. */
        public synchronized double checkFps() {
            return mLastFps;
        }

        /** Log the current FPS, rate-limited to once every 5 seconds. */
        public synchronized void staggeredLog() {
            if (mLastTime > mLastPrintTime + 5 * NANO_PER_SECOND) {
                mLastPrintTime = mLastTime;
                Log.d(TAG, "FPS for " + mStreamType + " stream: " + mLastFps);
            }
        }

        public synchronized void countAndLog() {
            countFrame();
            staggeredLog();
        }
    }

    /**
     * Fake preview for jpeg captures when there is no active preview
     */
    private void createDummySurface() {
        if (mDummyTexture == null || mDummySurface == null) {
            mDummyTexture = new SurfaceTexture(/*ignored*/0);
            // TODO: use smallest default sizes
            mDummyTexture.setDefaultBufferSize(640, 480);
            mDummySurface = new Surface(mDummyTexture);
        }
    }

    // Maps Camera1 error callbacks onto CameraDeviceState errors. Eviction and
    // disable also flush any pending requests before reporting the error.
    private final Camera.ErrorCallback mErrorCallback = new Camera.ErrorCallback() {
        @Override
        public void onError(int i, Camera camera) {
            switch (i) {
                case Camera.CAMERA_ERROR_EVICTED: {
                    flush();
                    mDeviceState.setError(
                            CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DISCONNECTED);
                } break;
                case Camera.CAMERA_ERROR_DISABLED: {
                    flush();
                    mDeviceState.setError(
                            CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DISABLED);
                } break;
                default: {
                    Log.e(TAG, "Received error " + i + " from the Camera1 ErrorCallback");
                    mDeviceState.setError(
                            CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                } break;
            }
        }
    };

    // Opened by mJpegCallback once the JPEG data has been produced to its output surface;
    // the request thread blocks on this after calling takePicture().
    private final ConditionVariable mReceivedJpeg = new ConditionVariable(false);

    private final Camera.PictureCallback mJpegCallback = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            Log.i(TAG, "Received jpeg.");
            Pair<RequestHolder, Long> captureInfo = mCaptureCollector.jpegProduced();
            if (captureInfo == null || captureInfo.first == null) {
                Log.e(TAG, "Dropping jpeg frame.");
                return;
            }
            RequestHolder holder = captureInfo.first;
            long timestamp = captureInfo.second;
            for (Surface s : holder.getHolderTargets()) {
                try {
                    if (LegacyCameraDevice.containsSurfaceId(s, mJpegSurfaceIds)) {
                        Log.i(TAG, "Producing jpeg buffer...");

                        int totalSize = data.length + LegacyCameraDevice.nativeGetJpegFooterSize();
                        totalSize = (totalSize + 3) & ~0x3; // round up to nearest multiple of 4
                        LegacyCameraDevice.setNextTimestamp(s, timestamp);

                        if (USE_BLOB_FORMAT_OVERRIDE) {
                            // Override to RGBA_8888 format.
                            LegacyCameraDevice.setSurfaceFormat(s,
                                    LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888);

                            // Pack the JPEG bytes into a square RGBA_8888 buffer large
                            // enough to hold totalSize bytes (see b/17379185 note above).
                            int dimen = (int) Math.ceil(Math.sqrt(totalSize));
                            dimen = (dimen + 0xf) & ~0xf; // round up to nearest multiple of 16
                            LegacyCameraDevice.setSurfaceDimens(s, dimen, dimen);
                            LegacyCameraDevice.produceFrame(s, data, dimen, dimen,
                                    CameraMetadataNative.NATIVE_JPEG_FORMAT);
                        } else {
                            LegacyCameraDevice.setSurfaceDimens(s, totalSize, /*height*/1);
                            LegacyCameraDevice.produceFrame(s, data, totalSize, /*height*/1,
                                    CameraMetadataNative.NATIVE_JPEG_FORMAT);
                        }
                    }
                } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                    Log.w(TAG, "Surface abandoned, dropping frame. ", e);
                }
            }

            // Unblock the request thread waiting for this JPEG.
            mReceivedJpeg.open();
        }
    };

    // Records the shutter timestamp for the in-flight JPEG capture.
    private final Camera.ShutterCallback mJpegShutterCallback = new Camera.ShutterCallback() {
        @Override
        public void onShutter() {
            mCaptureCollector.jpegCaptured(SystemClock.elapsedRealtimeNanos());
        }
    };

    // Forwards new preview frames from the preview SurfaceTexture to the GL thread.
    private final SurfaceTexture.OnFrameAvailableListener mPreviewCallback =
            new SurfaceTexture.OnFrameAvailableListener() {
                @Override
                public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                    if (DEBUG) {
                        mPrevCounter.countAndLog();
                    }
                    mGLThreadManager.queueNewFrame();
                }
            };

    /** Stop the Camera1 preview if it is running. */
    private void stopPreview() {
        if (VERBOSE) {
            Log.v(TAG, "stopPreview - preview running? " + mPreviewRunning);
        }
        if (mPreviewRunning) {
            mCamera.stopPreview();
            mPreviewRunning = false;
        }
    }

    /** Start the Camera1 preview if it is not already running. */
    private void startPreview() {
        if (VERBOSE) {
            Log.v(TAG, "startPreview - preview running? " + mPreviewRunning);
        }
        if (!mPreviewRunning) {
            // XX: CameraClient::startPreview is not getting called after a stop
            mCamera.startPreview();
            mPreviewRunning = true;
        }
    }

    /**
     * Ensure a preview is running before a JPEG capture, using the fake preview
     * surface when no real preview output is configured.
     */
    private void doJpegCapturePrepare(RequestHolder request) throws IOException {
        if (DEBUG) Log.d(TAG, "doJpegCapturePrepare - preview running? " + mPreviewRunning);

        if (!mPreviewRunning) {
            if (DEBUG) Log.d(TAG, "doJpegCapture - create fake surface");

            createDummySurface();
            mCamera.setPreviewTexture(mDummyTexture);
            startPreview();
        }
    }

    /** Issue the Camera1 takePicture call; the preview stops as a side effect. */
    private void doJpegCapture(RequestHolder request) {
        // NOTE(review): log message says "doJpegCapturePrepare" but this is doJpegCapture;
        // the string cannot be corrected in a comment-only change.
        if (DEBUG) Log.d(TAG, "doJpegCapturePrepare");

        mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback);
        mPreviewRunning = false;
    }

    /** Start the preview for a preview-capture request, if it is not already running. */
    private void doPreviewCapture(RequestHolder request) throws IOException {
        if (VERBOSE) {
            Log.v(TAG, "doPreviewCapture - preview running? " + mPreviewRunning);
        }

        if (mPreviewRunning) {
            return; // Already running
        }

        if (mPreviewTexture == null) {
            throw new IllegalStateException(
                    "Preview capture called with no preview surfaces configured.");
        }

        mPreviewTexture.setDefaultBufferSize(mIntermediateBufferSize.getWidth(),
                mIntermediateBufferSize.getHeight());
        mCamera.setPreviewTexture(mPreviewTexture);

        startPreview();
    }

    /**
     * Configure the given output surfaces: tear down the previous configuration,
     * classify each output as JPEG vs. preview, then select the Camera1 preview
     * and picture sizes and hand the preview outputs to the GL thread.
     */
    private void configureOutputs(Collection<Pair<Surface, Size>> outputs) {
        if (DEBUG) {
            String outputsStr = outputs == null ?
                    "null" : (outputs.size() + " surfaces");
            Log.d(TAG, "configureOutputs with " + outputsStr);
        }

        try {
            stopPreview();
        } catch (RuntimeException e) {
            Log.e(TAG, "Received device exception in configure call: ", e);
            mDeviceState.setError(
                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
            return;
        }

        /*
         * Try to release the previous preview's surface texture earlier if we end up
         * using a different one; this also reduces the likelihood of getting into a deadlock
         * when disconnecting from the old previous texture at a later time.
         */
        try {
            mCamera.setPreviewTexture(/*surfaceTexture*/null);
        } catch (IOException e) {
            Log.w(TAG, "Failed to clear prior SurfaceTexture, may cause GL deadlock: ", e);
        } catch (RuntimeException e) {
            Log.e(TAG, "Received device exception in configure call: ", e);
            mDeviceState.setError(
                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
            return;
        }

        // Quiesce the GL thread before reconfiguring its outputs.
        if (mGLThreadManager != null) {
            mGLThreadManager.waitUntilStarted();
            mGLThreadManager.ignoreNewFrames();
            mGLThreadManager.waitUntilIdle();
        }
        resetJpegSurfaceFormats(mCallbackOutputs);

        for (Surface s : mCallbackOutputs) {
            try {
                LegacyCameraDevice.disconnectSurface(s);
            } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                Log.w(TAG, "Surface abandoned, skipping...", e);
            }
        }
        mPreviewOutputs.clear();
        mCallbackOutputs.clear();
        mJpegSurfaceIds.clear();
        mPreviewTexture = null;

        List<Size> previewOutputSizes = new ArrayList<>();
        List<Size> callbackOutputSizes = new ArrayList<>();

        int facing = mCharacteristics.get(CameraCharacteristics.LENS_FACING);
        int orientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        if (outputs != null) {
            // Partition outputs: JPEG-format surfaces become callback outputs
            // (produced by LegacyCameraDevice); everything else is a preview output
            // (produced by the GL thread).
            for (Pair<Surface, Size> outPair : outputs) {
                Surface s = outPair.first;
                Size outSize = outPair.second;
                try {
                    int format = LegacyCameraDevice.detectSurfaceType(s);
                    LegacyCameraDevice.setSurfaceOrientation(s, facing, orientation);
                    switch (format) {
                        case CameraMetadataNative.NATIVE_JPEG_FORMAT:
                            if (USE_BLOB_FORMAT_OVERRIDE) {
                                // Override to RGBA_8888 format.
                                LegacyCameraDevice.setSurfaceFormat(s,
                                        LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888);
                            }
                            mJpegSurfaceIds.add(LegacyCameraDevice.getSurfaceId(s));
                            mCallbackOutputs.add(s);
                            callbackOutputSizes.add(outSize);

                            // LegacyCameraDevice is the producer of JPEG output surfaces
                            // so LegacyCameraDevice needs to connect to the surfaces.
                            LegacyCameraDevice.connectSurface(s);
                            break;
                        default:
                            LegacyCameraDevice.setScalingMode(s,
                                    LegacyCameraDevice.NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
                            mPreviewOutputs.add(s);
                            previewOutputSizes.add(outSize);
                            break;
                    }
                } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                    Log.w(TAG, "Surface abandoned, skipping...", e);
                }
            }
        }
        try {
            mParams = mCamera.getParameters();
        } catch (RuntimeException e) {
            Log.e(TAG, "Received device exception: ", e);
            mDeviceState.setError(
                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
            return;
        }

        List<int[]> supportedFpsRanges = mParams.getSupportedPreviewFpsRange();
        // NOTE(review): getPhotoPreviewFpsRange returns null when the HAL reports no
        // FPS ranges; the indexing below would then NPE — TODO confirm this cannot happen.
        int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
        if (DEBUG) {
            Log.d(TAG, "doPreviewCapture - Selected range [" +
                    bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," +
                    bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
        }
        mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
                bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);

        Size smallestSupportedJpegSize = calculatePictureSize(mCallbackOutputs,
                callbackOutputSizes, mParams);

        if (previewOutputSizes.size() > 0) {

            Size largestOutput = SizeAreaComparator.findLargestByArea(previewOutputSizes);

            // Find largest jpeg dimension - assume to have the same aspect ratio as sensor.
            Size largestJpegDimen = ParameterUtils.getLargestSupportedJpegSizeByArea(mParams);

            Size chosenJpegDimen = (smallestSupportedJpegSize != null) ? smallestSupportedJpegSize
                    : largestJpegDimen;

            List<Size> supportedPreviewSizes = ParameterUtils.convertSizeList(
                    mParams.getSupportedPreviewSizes());

            // Use smallest preview dimension with same aspect ratio as sensor that is >= than all
            // of the configured output dimensions. If none exists, fall back to using the largest
            // supported preview size.
            long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
            Size bestPreviewDimen = SizeAreaComparator.findLargestByArea(supportedPreviewSizes);
            for (Size s : supportedPreviewSizes) {
                // NOTE(review): these two products are computed in int before widening to
                // long (unlike largestOutputArea above); fine for realistic camera sizes,
                // but inconsistent and could overflow for pathological dimensions.
                long currArea = s.getWidth() * s.getHeight();
                long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight();
                if (checkAspectRatiosMatch(chosenJpegDimen, s) && (currArea < bestArea &&
                        currArea >= largestOutputArea)) {
                    bestPreviewDimen = s;
                }
            }

            mIntermediateBufferSize = bestPreviewDimen;
            mParams.setPreviewSize(mIntermediateBufferSize.getWidth(),
                    mIntermediateBufferSize.getHeight());

            if (DEBUG) {
                Log.d(TAG, "Intermediate buffer selected with dimens: " +
                        bestPreviewDimen.toString());
            }
        } else {
            mIntermediateBufferSize = null;
            if (DEBUG) {
                Log.d(TAG, "No Intermediate buffer selected, no preview outputs were configured");
            }
        }

        if (smallestSupportedJpegSize != null) {
            /*
             * Set takePicture size to the smallest supported JPEG size large enough
             * to scale/crop out of for the bounding rectangle of the configured JPEG sizes.
             */

            Log.i(TAG, "configureOutputs - set take picture size to " + smallestSupportedJpegSize);
            mParams.setPictureSize(
                    smallestSupportedJpegSize.getWidth(), smallestSupportedJpegSize.getHeight());
        }

        // TODO: Detect and optimize single-output paths here to skip stream teeing.
        if (mGLThreadManager == null) {
            mGLThreadManager = new GLThreadManager(mCameraId, facing, mDeviceState);
            mGLThreadManager.start();
        }
        mGLThreadManager.waitUntilStarted();
        List<Pair<Surface, Size>> previews = new ArrayList<>();
        Iterator<Size> previewSizeIter = previewOutputSizes.iterator();
        for (Surface p : mPreviewOutputs) {
            previews.add(new Pair<>(p, previewSizeIter.next()));
        }
        mGLThreadManager.setConfigurationAndWait(previews, mCaptureCollector);

        for (Surface p : mPreviewOutputs) {
            try {
                LegacyCameraDevice.setSurfaceOrientation(p, facing, orientation);
            } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                Log.e(TAG, "Surface abandoned, skipping setSurfaceOrientation()", e);
            }
        }

        mGLThreadManager.allowNewFrames();
        mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
        if (mPreviewTexture != null) {
            mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
        }

        try {
            mCamera.setParameters(mParams);
        } catch (RuntimeException e) {
            Log.e(TAG, "Received device exception while configuring: ", e);
            mDeviceState.setError(
                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);

        }
    }

    /** Restore the BLOB format on JPEG surfaces that were overridden to RGBA_8888. */
    private void resetJpegSurfaceFormats(Collection<Surface> surfaces) {
        if (!USE_BLOB_FORMAT_OVERRIDE || surfaces == null) {
            return;
        }
        for (Surface s : surfaces) {
            if (s == null || !s.isValid()) {
                Log.w(TAG, "Jpeg surface is invalid, skipping...");
                continue;
            }
            try {
                LegacyCameraDevice.setSurfaceFormat(s, LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB);
            } catch
                    (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                Log.w(TAG, "Surface abandoned, skipping...", e);
            }
        }
    }

    /**
     * Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger
     * than all of the configured {@code JPEG} outputs (by both width and height).
     *
     * <p>If multiple supported JPEG sizes are larger, select the smallest of them which
     * still satisfies the above constraint.</p>
     *
     * <p>As a result, the returned size is guaranteed to be usable without needing
     * to upscale any of the outputs. If only one {@code JPEG} surface is used,
     * then no scaling/cropping is necessary between the taken picture and
     * the {@code JPEG} output surface.</p>
     *
     * @param callbackOutputs a non-{@code null} list of {@code Surface}s with any image formats
     * @param callbackSizes the configured size for each entry of {@code callbackOutputs}
     * @param params api1 parameters (used for reading only)
     *
     * @return a size large enough to fit all of the configured {@code JPEG} outputs, or
     *         {@code null} if the {@code callbackOutputs} did not have any {@code JPEG}
     *         surfaces.
     * @throws IllegalStateException if the two input lists differ in length
     * @throws AssertionError if no supported picture size can fit the configured outputs
     */
    private Size calculatePictureSize( List<Surface> callbackOutputs,
            List<Size> callbackSizes, Camera.Parameters params) {
        /*
         * Find the largest JPEG size (if any), from the configured outputs:
         * - the api1 picture size should be set to the smallest legal size that's at least as large
         *   as the largest configured JPEG size
         */
        if (callbackOutputs.size() != callbackSizes.size()) {
            throw new IllegalStateException("Input collections must be same length");
        }
        List<Size> configuredJpegSizes = new ArrayList<>();
        Iterator<Size> sizeIterator = callbackSizes.iterator();
        for (Surface callbackSurface : callbackOutputs) {
            Size jpegSize = sizeIterator.next();
            if (!LegacyCameraDevice.containsSurfaceId(callbackSurface, mJpegSurfaceIds)) {
                continue; // Ignore non-JPEG callback formats
            }

            configuredJpegSizes.add(jpegSize);
        }
        if (!configuredJpegSizes.isEmpty()) {
            /*
             * Find the largest configured JPEG width, and height, independently
             * of the rest.
             *
             * The rest of the JPEG streams can be cropped out of this smallest bounding
             * rectangle.
             */
            int maxConfiguredJpegWidth = -1;
            int maxConfiguredJpegHeight = -1;
            for (Size jpegSize : configuredJpegSizes) {
                maxConfiguredJpegWidth = jpegSize.getWidth() > maxConfiguredJpegWidth ?
                        jpegSize.getWidth() : maxConfiguredJpegWidth;
                maxConfiguredJpegHeight = jpegSize.getHeight() > maxConfiguredJpegHeight ?
                        jpegSize.getHeight() : maxConfiguredJpegHeight;
            }
            Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight);

            List<Size> supportedJpegSizes = ParameterUtils.convertSizeList(
                    params.getSupportedPictureSizes());

            /*
             * Find the smallest supported JPEG size that can fit the smallest bounding
             * rectangle for the configured JPEG sizes.
             */
            List<Size> candidateSupportedJpegSizes = new ArrayList<>();
            for (Size supportedJpegSize : supportedJpegSizes) {
                if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth &&
                        supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) {
                    candidateSupportedJpegSizes.add(supportedJpegSize);
                }
            }

            if (candidateSupportedJpegSizes.isEmpty()) {
                throw new AssertionError(
                        "Could not find any supported JPEG sizes large enough to fit " +
                        smallestBoundJpegSize);
            }

            Size smallestSupportedJpegSize = Collections.min(candidateSupportedJpegSizes,
                    new SizeAreaComparator());

            if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) {
                Log.w(TAG,
                        String.format(
                                "configureOutputs - Will need to crop picture %s into "
                                + "smallest bound size %s",
                                smallestSupportedJpegSize, smallestBoundJpegSize));
            }

            return smallestSupportedJpegSize;
        }

        return null;
    }

    /** Return whether two sizes have the same aspect ratio, within ASPECT_RATIO_TOLERANCE. */
    private static boolean checkAspectRatiosMatch(Size a, Size b) {
        float aAspect = a.getWidth() / (float) a.getHeight();
        float bAspect = b.getWidth() / (float) b.getHeight();

        return Math.abs(aAspect - bAspect) < ASPECT_RATIO_TOLERANCE;
    }

    // Calculate the highest FPS range supported: prefer the highest max FPS, breaking
    // ties by the highest min FPS. Returns null when the input list is empty — callers
    // must not index into the result in that case.
    private int[] getPhotoPreviewFpsRange(List<int[]> frameRates) {
        if (frameRates.size() == 0) {
            Log.e(TAG, "No supported frame rates returned!");
            return null;
        }

        int bestMin = 0;
        int bestMax = 0;
        int bestIndex = 0;
        int index = 0;
        for (int[] rate : frameRates) {
            int minFps = rate[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
            int maxFps = rate[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
            if (maxFps > bestMax || (maxFps == bestMax && minFps > bestMin)) {
                bestMin = minFps;
                bestMax = maxFps;
                bestIndex = index;
            }
            index++;
        }

        return frameRates.get(bestIndex);
    }

    // Handles all request-thread messages; runs on mRequestThread's looper.
    private final Handler.Callback mRequestHandlerCb = new
            Handler.Callback() {
        // Set once MSG_CLEANUP has run; all later messages are ignored.
        private boolean mCleanup = false;
        private final LegacyResultMapper mMapper = new LegacyResultMapper();

        @Override
        public boolean handleMessage(Message msg) {
            if (mCleanup) {
                return true;
            }

            if (DEBUG) {
                Log.d(TAG, "Request thread handling message:" + msg.what);
            }
            long startTime = 0;
            if (DEBUG) {
                startTime = SystemClock.elapsedRealtimeNanos();
            }
            switch (msg.what) {
                case MSG_CONFIGURE_OUTPUTS:
                    ConfigureHolder config = (ConfigureHolder) msg.obj;
                    int sizes = config.surfaces != null ? config.surfaces.size() : 0;
                    Log.i(TAG, "Configure outputs: " + sizes + " surfaces configured.");

                    try {
                        // Drain in-flight captures before reconfiguring.
                        boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                TimeUnit.MILLISECONDS);
                        if (!success) {
                            Log.e(TAG, "Timed out while queueing configure request.");
                            mCaptureCollector.failAll();
                        }
                    } catch (InterruptedException e) {
                        Log.e(TAG, "Interrupted while waiting for requests to complete.");
                        mDeviceState.setError(
                                CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                        break;
                    }

                    configureOutputs(config.surfaces);
                    // Unblock the thread that posted this configure message.
                    config.condition.open();
                    if (DEBUG) {
                        long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
                        Log.d(TAG, "Configure took " + totalTime + " ns");
                    }
                    break;
                case MSG_SUBMIT_CAPTURE_REQUEST:
                    Handler handler = RequestThreadManager.this.mRequestThread.getHandler();
                    boolean anyRequestOutputAbandoned = false;

                    // Get the next burst from the request queue.
                    RequestQueue.RequestQueueEntry nextBurst = mRequestQueue.getNext();

                    if (nextBurst == null) {
                        // If there are no further requests queued, wait for any currently executing
                        // requests to complete, then switch to idle state.
                        try {
                            boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                    TimeUnit.MILLISECONDS);
                            if (!success) {
                                Log.e(TAG,
                                        "Timed out while waiting for prior requests to complete.");
                                mCaptureCollector.failAll();
                            }
                        } catch (InterruptedException e) {
                            Log.e(TAG, "Interrupted while waiting for requests to complete: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        }

                        synchronized (mIdleLock) {
                            // Retry the request queue.
                            nextBurst = mRequestQueue.getNext();

                            // If we still have no queued requests, go idle.
                            if (nextBurst == null) {
                                mDeviceState.setIdle();
                                break;
                            }
                        }
                    }

                    if (nextBurst != null) {
                        // Queue another capture if we did not get the last burst.
                        handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);

                        // Check whether capture queue becomes empty
                        if (nextBurst.isQueueEmpty()) {
                            mDeviceState.setRequestQueueEmpty();
                        }
                    }

                    // Complete each request in the burst
                    BurstHolder burstHolder = nextBurst.getBurstHolder();
                    List<RequestHolder> requests =
                            burstHolder.produceRequestHolders(nextBurst.getFrameNumber());
                    for (RequestHolder holder : requests) {
                        CaptureRequest request = holder.getRequest();

                        boolean paramsChanged = false;

                        // Only update parameters if the request has changed
                        if (mLastRequest == null || mLastRequest.captureRequest != request) {

                            // The intermediate buffer is sometimes null, but we always need
                            // the Camera1 API configured preview size
                            Size previewSize = ParameterUtils.convertSize(mParams.getPreviewSize());

                            LegacyRequest legacyRequest = new LegacyRequest(mCharacteristics,
                                    request, previewSize, mParams); // params are copied

                            // Parameters are mutated as a side-effect
                            LegacyMetadataMapper.convertRequestMetadata(/*inout*/legacyRequest);

                            // If the parameters have changed, set them in the Camera1 API.
                            if (!mParams.same(legacyRequest.parameters)) {
                                try {
                                    mCamera.setParameters(legacyRequest.parameters);
                                } catch (RuntimeException e) {
                                    // If setting the parameters failed, report a request error to
                                    // the camera client, and skip any further work for this request
                                    Log.e(TAG, "Exception while setting camera parameters: ", e);
                                    holder.failRequest();
                                    mDeviceState.setCaptureStart(holder, /*timestamp*/0,
                                            CameraDeviceImpl.CameraDeviceCallbacks.
                                                    ERROR_CAMERA_REQUEST);
                                    continue;
                                }
                                paramsChanged = true;
                                mParams = legacyRequest.parameters;
                            }

                            mLastRequest = legacyRequest;
                        }

                        try {
                            boolean success = mCaptureCollector.queueRequest(holder,
                                    mLastRequest, JPEG_FRAME_TIMEOUT, TimeUnit.MILLISECONDS);

                            if (!success) {
                                // Report a request error if we timed out while queuing this.
                                Log.e(TAG, "Timed out while queueing capture request.");
                                holder.failRequest();
                                mDeviceState.setCaptureStart(holder, /*timestamp*/0,
                                        CameraDeviceImpl.CameraDeviceCallbacks.
                                                ERROR_CAMERA_REQUEST);
                                continue;
                            }

                            // Starting the preview needs to happen before enabling
                            // face detection or auto focus
                            if (holder.hasPreviewTargets()) {
                                doPreviewCapture(holder);
                            }
                            if (holder.hasJpegTargets()) {
                                // Wait for pending preview frames to drain before taking
                                // the picture, failing stuck previews along the way.
                                while (!mCaptureCollector.waitForPreviewsEmpty(
                                        PREVIEW_FRAME_TIMEOUT, TimeUnit.MILLISECONDS)) {
                                    // Fail preview requests until the queue is empty.
                                    Log.e(TAG, "Timed out while waiting for preview requests to " +
                                            "complete.");
                                    mCaptureCollector.failNextPreview();
                                }
                                mReceivedJpeg.close();
                                doJpegCapturePrepare(holder);
                            }

                            /*
                             * Do all the actions that require a preview to have been started
                             */

                            // Toggle face detection on/off
                            // - do this before AF to give AF a chance to use faces
                            mFaceDetectMapper.processFaceDetectMode(request, /*in*/mParams);

                            // Unconditionally process AF triggers, since they're non-idempotent
                            // - must be done after setting the most-up-to-date AF mode
                            mFocusStateMapper.processRequestTriggers(request, mParams);

                            if (holder.hasJpegTargets()) {
                                doJpegCapture(holder);
                                // Block until mJpegCallback opens mReceivedJpeg or we time out.
                                if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
                                    Log.e(TAG, "Hit timeout for jpeg callback!");
                                    mCaptureCollector.failNextJpeg();
                                }
                            }

                        } catch (IOException e) {
                            Log.e(TAG, "Received device exception during capture call: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        } catch (InterruptedException e) {
                            Log.e(TAG, "Interrupted during capture: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        } catch (RuntimeException e) {
                            Log.e(TAG, "Received device exception during capture call: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        }

                        if (paramsChanged) {
                            if (DEBUG) {
                                Log.d(TAG, "Params changed -- getting new Parameters from HAL.");
                            }
                            try {
                                mParams = mCamera.getParameters();
                            } catch (RuntimeException e) {
                                Log.e(TAG, "Received device exception: ", e);
                                mDeviceState.setError(
                                        CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                                break;
                            }

                            // Update parameters to the latest that we think the camera is using
                            mLastRequest.setParameters(mParams);
                        }

                        MutableLong timestampMutable = new MutableLong(/*value*/0L);
                        try {
                            boolean success = mCaptureCollector.waitForRequestCompleted(holder,
                                    REQUEST_COMPLETE_TIMEOUT, TimeUnit.MILLISECONDS,
                                    /*out*/timestampMutable);

                            if (!success) {
                                Log.e(TAG, "Timed out while waiting for request to complete.");
                                mCaptureCollector.failAll();
                            }
                        } catch (InterruptedException e) {
                            Log.e(TAG, "Interrupted waiting for request completion: ", e);
                            mDeviceState.setError(
                                    CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                            break;
                        }

                        CameraMetadataNative result = mMapper.cachedConvertResultMetadata(
                                mLastRequest, timestampMutable.value);
                        /*
                         * Order matters: The default result mapper is state-less; the
                         * other mappers carry state and may override keys set by the default
                         * mapper with their own values.
                         */

                        // Update AF state
                        mFocusStateMapper.mapResultTriggers(result);
                        // Update face-related results
                        mFaceDetectMapper.mapResultFaces(result, mLastRequest);

                        if (!holder.requestFailed()) {
                            mDeviceState.setCaptureResult(holder, result);
                        }

                        if (holder.isOutputAbandoned()) {
                            anyRequestOutputAbandoned = true;
                        }
                    }

                    // Stop the repeating request if any of its output surfaces is abandoned.
                    if (anyRequestOutputAbandoned && burstHolder.isRepeating()) {
                        long lastFrameNumber = cancelRepeating(burstHolder.getRequestId());
                        if (DEBUG) {
                            Log.d(TAG, "Stopped repeating request. Last frame number is " +
                                    lastFrameNumber);
                        }
                        mDeviceState.setRepeatingRequestError(lastFrameNumber,
                                burstHolder.getRequestId());
                    }

                    if (DEBUG) {
                        long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
                        Log.d(TAG, "Capture request took " + totalTime + " ns");
                        mRequestCounter.countAndLog();
                    }
                    break;
                case MSG_CLEANUP:
                    mCleanup = true;
                    try {
                        boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
                                TimeUnit.MILLISECONDS);
                        if (!success) {
                            Log.e(TAG, "Timed out while queueing cleanup request.");
                            mCaptureCollector.failAll();
                        }
                    } catch (InterruptedException e) {
                        Log.e(TAG, "Interrupted while waiting for requests to complete: ", e);
                        mDeviceState.setError(
                                CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
                    }
                    if (mGLThreadManager != null) {
                        mGLThreadManager.quit();
                        mGLThreadManager = null;
                    }
                    if (mCamera != null) {
                        mCamera.release();
                        mCamera = null;
                    }
                    resetJpegSurfaceFormats(mCallbackOutputs);
                    break;
                case RequestHandlerThread.MSG_POKE_IDLE_HANDLER:
                    // OK: Ignore message.
                    break;
                default:
                    throw new AssertionError("Unhandled message " + msg.what +
                            " on RequestThread.");
            }
            return true;
        }
    };

    /**
     * Create a new RequestThreadManager.
     *
     * @param cameraId the id of the camera to use.
     * @param camera an open camera object.  The RequestThreadManager takes ownership of this camera
     *               object, and is responsible for closing it.
     * @param characteristics the static camera characteristics corresponding to this camera device
     * @param deviceState a {@link CameraDeviceState} state machine.
1000 */ 1001 public RequestThreadManager(int cameraId, Camera camera, CameraCharacteristics characteristics, 1002 CameraDeviceState deviceState) { 1003 mCamera = checkNotNull(camera, "camera must not be null"); 1004 mCameraId = cameraId; 1005 mCharacteristics = checkNotNull(characteristics, "characteristics must not be null"); 1006 String name = String.format("RequestThread-%d", cameraId); 1007 TAG = name; 1008 mDeviceState = checkNotNull(deviceState, "deviceState must not be null"); 1009 mFocusStateMapper = new LegacyFocusStateMapper(mCamera); 1010 mFaceDetectMapper = new LegacyFaceDetectMapper(mCamera, mCharacteristics); 1011 mCaptureCollector = new CaptureCollector(MAX_IN_FLIGHT_REQUESTS, mDeviceState); 1012 mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb); 1013 mCamera.setDetailedErrorCallback(mErrorCallback); 1014 } 1015 1016 /** 1017 * Start the request thread. 1018 */ 1019 public void start() { 1020 mRequestThread.start(); 1021 } 1022 1023 /** 1024 * Flush any pending requests. 1025 * 1026 * @return the last frame number. 1027 */ 1028 public long flush() { 1029 Log.i(TAG, "Flushing all pending requests."); 1030 long lastFrame = mRequestQueue.stopRepeating(); 1031 mCaptureCollector.failAll(); 1032 return lastFrame; 1033 } 1034 1035 /** 1036 * Quit the request thread, and clean up everything. 1037 */ 1038 public void quit() { 1039 if (!mQuit.getAndSet(true)) { // Avoid sending messages on dead thread's handler. 1040 Handler handler = mRequestThread.waitAndGetHandler(); 1041 handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP)); 1042 mRequestThread.quitSafely(); 1043 try { 1044 mRequestThread.join(); 1045 } catch (InterruptedException e) { 1046 Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.", 1047 mRequestThread.getName(), mRequestThread.getId())); 1048 } 1049 } 1050 } 1051 1052 /** 1053 * Submit the given burst of requests to be captured. 
1054 * 1055 * <p>If the burst is repeating, replace the current repeating burst.</p> 1056 * 1057 * @param requests the burst of requests to add to the queue. 1058 * @param repeating true if the burst is repeating. 1059 * @return the submission info, including the new request id, and the last frame number, which 1060 * contains either the frame number of the last frame that will be returned for this request, 1061 * or the frame number of the last frame that will be returned for the current repeating 1062 * request if this burst is set to be repeating. 1063 */ 1064 public SubmitInfo submitCaptureRequests(CaptureRequest[] requests, boolean repeating) { 1065 Handler handler = mRequestThread.waitAndGetHandler(); 1066 SubmitInfo info; 1067 synchronized (mIdleLock) { 1068 info = mRequestQueue.submit(requests, repeating); 1069 handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST); 1070 } 1071 return info; 1072 } 1073 1074 /** 1075 * Cancel a repeating request. 1076 * 1077 * @param requestId the id of the repeating request to cancel. 1078 * @return the last frame to be returned from the HAL for the given repeating request, or 1079 * {@code INVALID_FRAME} if none exists. 1080 */ 1081 public long cancelRepeating(int requestId) { 1082 return mRequestQueue.stopRepeating(requestId); 1083 } 1084 1085 /** 1086 * Configure with the current list of output Surfaces. 1087 * 1088 * <p> 1089 * This operation blocks until the configuration is complete. 1090 * </p> 1091 * 1092 * <p>Using a {@code null} or empty {@code outputs} list is the equivalent of unconfiguring.</p> 1093 * 1094 * @param outputs a {@link java.util.Collection} of outputs to configure. 
1095 */ 1096 public void configure(Collection<Pair<Surface, Size>> outputs) { 1097 Handler handler = mRequestThread.waitAndGetHandler(); 1098 final ConditionVariable condition = new ConditionVariable(/*closed*/false); 1099 ConfigureHolder holder = new ConfigureHolder(condition, outputs); 1100 handler.sendMessage(handler.obtainMessage(MSG_CONFIGURE_OUTPUTS, 0, 0, holder)); 1101 condition.block(); 1102 } 1103 } 1104