/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "CameraMultiStreamTest"
//#define LOG_NDEBUG 0
#include "CameraStreamFixture.h"
#include "TestExtensions.h"

#include <gtest/gtest.h>
#include <utils/Log.h>
#include <utils/StrongPointer.h>
#include <common/CameraDeviceBase.h>
#include <hardware/hardware.h>
#include <hardware/camera2.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/Surface.h>

#define DEFAULT_FRAME_DURATION 33000000LL // 33ms
#define CAMERA_HEAP_COUNT 1
#define CAMERA_EXPOSURE_FORMAT CAMERA_STREAM_AUTO_CPU_FORMAT
#define CAMERA_DISPLAY_FORMAT HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
#define CAMERA_MULTI_STREAM_DEBUGGING 0
#define CAMERA_FRAME_TIMEOUT 1000000000LL // nsecs (1 sec)
#define PREVIEW_RENDERING_TIME_INTERVAL 200000 // in units of us, 200ms
#define TOLERANCE_MARGIN 0.01 // 1% tolerance margin for exposure sanity check.
/* Constants for display */
#define DISPLAY_BUFFER_HEIGHT 1024
#define DISPLAY_BUFFER_WIDTH 1024
#define DISPLAY_BUFFER_FORMAT PIXEL_FORMAT_RGB_888

// This test intends to test a large preview size, no bigger than 1080p.
#define PREVIEW_WIDTH_CAP 1920
#define PREVIEW_HEIGHT_CAP 1080
// This test intends to test a small metering burst size, no bigger than 640x480.
#define METERING_WIDTH_CAP 640
#define METERING_HEIGHT_CAP 480

#define EXP_WAIT_MULTIPLIER 2

namespace android {
namespace camera2 {
namespace tests {

static const CameraStreamParams DEFAULT_STREAM_PARAMETERS = {
    /*mFormat*/    CAMERA_EXPOSURE_FORMAT,
    /*mHeapCount*/ CAMERA_HEAP_COUNT
};

static const CameraStreamParams DISPLAY_STREAM_PARAMETERS = {
    /*mFormat*/    CAMERA_DISPLAY_FORMAT,
    /*mHeapCount*/ CAMERA_HEAP_COUNT
};

class CameraMultiStreamTest
    : public ::testing::Test,
      public CameraStreamFixture {

public:
    CameraMultiStreamTest() : CameraStreamFixture(DEFAULT_STREAM_PARAMETERS) {
        TEST_EXTENSION_FORKING_CONSTRUCTOR;

        if (HasFatalFailure()) {
            return;
        }
        /**
         * Don't create a default stream; each test is in charge of creating
         * its own streams.
         */
    }

    ~CameraMultiStreamTest() {
        TEST_EXTENSION_FORKING_DESTRUCTOR;
    }

    sp<SurfaceComposerClient> mComposerClient;
    sp<SurfaceControl> mSurfaceControl;
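    // Create a surface backed by the system compositor and bring it to the
    // top of the display, so the preview stream is visible on screen.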
    void CreateOnScreenSurface(sp<ANativeWindow>& surface) {
        mComposerClient = new SurfaceComposerClient;
        ASSERT_EQ(NO_ERROR, mComposerClient->initCheck());

        mSurfaceControl = mComposerClient->createSurface(
                String8("CameraMultiStreamTest StreamingImage Surface"),
                DISPLAY_BUFFER_HEIGHT, DISPLAY_BUFFER_WIDTH,
                DISPLAY_BUFFER_FORMAT, 0);

        ASSERT_NE((void*)NULL, mSurfaceControl.get());
        ASSERT_TRUE(mSurfaceControl->isValid());

        SurfaceComposerClient::openGlobalTransaction();
        ASSERT_EQ(NO_ERROR, mSurfaceControl->setLayer(0x7FFFFFFF));
        ASSERT_EQ(NO_ERROR, mSurfaceControl->show());
        SurfaceComposerClient::closeGlobalTransaction();

        surface = mSurfaceControl->getSurface();

        ASSERT_NE((void*)NULL, surface.get());
    }

    struct Size {
        int32_t width;
        int32_t height;
    };

    // Select the minimal size by number of pixels.
    void GetMinSize(const int32_t* data, size_t count,
            Size* min, int32_t* idx) {
        ASSERT_NE((int32_t*)NULL, data);
        int32_t minIdx = 0;
        int32_t minSize = INT_MAX, tempSize;
        for (size_t i = 0; i < count; i += 2) {
            tempSize = data[i] * data[i+1];
            if (minSize > tempSize) {
                minSize = tempSize;
                minIdx = i;
            }
        }
        min->width = data[minIdx];
        min->height = data[minIdx + 1];
        *idx = minIdx;
    }

    // Select the maximal size by number of pixels.
    void GetMaxSize(const int32_t* data, size_t count,
            Size* max, int32_t* idx) {
        ASSERT_NE((int32_t*)NULL, data);
        int32_t maxIdx = 0;
        int32_t maxSize = INT_MIN, tempSize;
        for (size_t i = 0; i < count; i += 2) {
            tempSize = data[i] * data[i+1];
            if (maxSize < tempSize) {
                maxSize = tempSize;
                maxIdx = i;
            }
        }
        max->width = data[maxIdx];
        max->height = data[maxIdx + 1];
        *idx = maxIdx;
    }

    // Cap size by number of pixels.
    Size CapSize(Size cap, Size input) {
        if (input.width * input.height > cap.width * cap.height) {
            return cap;
        }
        return input;
    }

    struct CameraStream : public RefBase {

    public:
        /**
         * Only initialize the variables here; do the ASSERT checks in the
         * SetUp function. To make this stream useful, SetUp must be called
         * before using it.
         */
        CameraStream(
                int width,
                int height,
                const sp<CameraDeviceBase>& device,
                CameraStreamParams param, sp<ANativeWindow> surface,
                bool useCpuConsumer)
            : mDevice(device),
              mWidth(width),
              mHeight(height) {
            mFormat = param.mFormat;
            if (useCpuConsumer) {
                sp<BufferQueue> bq = new BufferQueue();
                mCpuConsumer = new CpuConsumer(bq, param.mHeapCount);
                mCpuConsumer->setName(String8(
                        "CameraMultiStreamTest::mCpuConsumer"));
                mNativeWindow = new Surface(bq);
            } else {
                // Render the stream to the screen.
                mCpuConsumer = NULL;
                mNativeWindow = surface;
            }

            mFrameListener = new FrameListener();
            if (mCpuConsumer != 0) {
                mCpuConsumer->setFrameAvailableListener(mFrameListener);
            }
        }

        /**
         * Finally create the camera stream, and do the ASSERT checks, since
         * we cannot do them in the constructor.
         */
        void SetUp() {
            ASSERT_EQ(OK,
                mDevice->createStream(mNativeWindow,
                    mWidth, mHeight, mFormat, /*size (for JPEGs)*/0,
                    &mStreamId));

            ASSERT_NE(-1, mStreamId);
        }

        int GetStreamId() { return mStreamId; }
        sp<CpuConsumer> GetConsumer() { return mCpuConsumer; }
        sp<FrameListener> GetFrameListener() { return mFrameListener; }

    protected:
        ~CameraStream() {
            if (mDevice.get()) {
                mDevice->waitUntilDrained();
                mDevice->deleteStream(mStreamId);
            }
            // Clear the producer before the consumer.
            mNativeWindow.clear();
            mCpuConsumer.clear();
        }

    private:
        sp<FrameListener> mFrameListener;
        sp<CpuConsumer> mCpuConsumer;
        sp<ANativeWindow> mNativeWindow;
        sp<CameraDeviceBase> mDevice;
        int mStreamId;
        int mWidth;
        int mHeight;
        int mFormat;
    };
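    // Helpers to read back a single sensor setting from a capture result.
    // Each flags a test failure and returns -1 if the tag is missing.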
    int64_t GetExposureValue(const CameraMetadata& metaData) {
        camera_metadata_ro_entry_t entry =
                metaData.find(ANDROID_SENSOR_EXPOSURE_TIME);
        EXPECT_EQ(1u, entry.count);
        if (entry.count == 1) {
            return entry.data.i64[0];
        }
        return -1;
    }

    int32_t GetSensitivity(const CameraMetadata& metaData) {
        camera_metadata_ro_entry_t entry =
                metaData.find(ANDROID_SENSOR_SENSITIVITY);
        EXPECT_EQ(1u, entry.count);
        if (entry.count == 1) {
            return entry.data.i32[0];
        }
        return -1;
    }

    int64_t GetFrameDuration(const CameraMetadata& metaData) {
        camera_metadata_ro_entry_t entry =
                metaData.find(ANDROID_SENSOR_FRAME_DURATION);
        EXPECT_EQ(1u, entry.count);
        if (entry.count == 1) {
            return entry.data.i64[0];
        }
        return -1;
    }
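    // Build one request per stream: an auto-exposure preview request (id 0),
    // plus manual metering and capture requests with 3A, noise reduction,
    // and edge enhancement disabled.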
    void CreateRequests(CameraMetadata& previewRequest,
            CameraMetadata& meteringRequest,
            CameraMetadata& captureRequest,
            int previewStreamId,
            int meteringStreamId,
            int captureStreamId) {
        int32_t requestId = 0;
        Vector<int32_t> previewStreamIds;
        previewStreamIds.push(previewStreamId);
        ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                &previewRequest));
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                previewStreamIds));
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
                &requestId, 1));

        // Create metering request, manual settings.
        // Manual control: disable 3A, noise reduction, and edge enhancement.
        uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
        uint8_t nrOff = static_cast<uint8_t>(ANDROID_NOISE_REDUCTION_MODE_OFF);
        uint8_t sharpOff = static_cast<uint8_t>(ANDROID_EDGE_MODE_OFF);
        Vector<int32_t> meteringStreamIds;
        meteringStreamIds.push(meteringStreamId);
        ASSERT_EQ(OK, mDevice->createDefaultRequest(
                CAMERA2_TEMPLATE_PREVIEW,
                &meteringRequest));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_REQUEST_OUTPUT_STREAMS,
                meteringStreamIds));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_CONTROL_MODE,
                &cmOff, 1));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_NOISE_REDUCTION_MODE,
                &nrOff, 1));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_EDGE_MODE,
                &sharpOff, 1));

        // Create capture request, manual settings.
        Vector<int32_t> captureStreamIds;
        captureStreamIds.push(captureStreamId);
        ASSERT_EQ(OK, mDevice->createDefaultRequest(
                CAMERA2_TEMPLATE_PREVIEW,
                &captureRequest));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_REQUEST_OUTPUT_STREAMS,
                captureStreamIds));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_CONTROL_MODE,
                &cmOff, 1));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_NOISE_REDUCTION_MODE,
                &nrOff, 1));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_EDGE_MODE,
                &sharpOff, 1));
    }

    sp<CameraStream> CreateStream(
            int width,
            int height,
            const sp<CameraDeviceBase>& device,
            CameraStreamParams param = DEFAULT_STREAM_PARAMETERS,
            sp<ANativeWindow> surface = NULL,
            bool useCpuConsumer = true) {
        param.mFormat = MapAutoFormat(param.mFormat);
        return new CameraStream(width, height, device,
                param, surface, useCpuConsumer);
    }
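    // Submit a burst of manual requests with the given per-frame exposure
    // and sensitivity values, then verify that each capture result echoes
    // the requested settings (within TOLERANCE_MARGIN) and that a buffer
    // arrives for every request.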
    void CaptureBurst(CameraMetadata& request, size_t requestCount,
            const Vector<int64_t>& exposures,
            const Vector<int32_t>& sensitivities,
            const sp<CameraStream>& stream,
            int64_t minFrameDuration,
            int32_t* requestIdStart) {
        ASSERT_EQ(OK, request.update(ANDROID_SENSOR_FRAME_DURATION,
                &minFrameDuration, 1));
        // Submit a series of requests with the specified exposure/gain values.
        int32_t targetRequestId = *requestIdStart;
        for (size_t i = 0; i < requestCount; i++) {
            ASSERT_EQ(OK, request.update(ANDROID_REQUEST_ID, requestIdStart, 1));
            ASSERT_EQ(OK, request.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposures[i], 1));
            ASSERT_EQ(OK, request.update(ANDROID_SENSOR_SENSITIVITY, &sensitivities[i], 1));
            ASSERT_EQ(OK, mDevice->capture(request));
            ALOGV("Submitting request id %d with exposure %lld, sensitivity %d",
                    *requestIdStart, exposures[i], sensitivities[i]);
            if (CAMERA_MULTI_STREAM_DEBUGGING) {
                request.dump(STDOUT_FILENO);
            }
            (*requestIdStart)++;
        }
        // Get capture burst results.
        Vector<nsecs_t> captureBurstTimes;
        sp<CpuConsumer> consumer = stream->GetConsumer();
        sp<FrameListener> listener = stream->GetFrameListener();

        // Set the wait limit based on the expected frame duration.
        int64_t waitLimit = CAMERA_FRAME_TIMEOUT;
        for (size_t i = 0; i < requestCount; i++) {
            ALOGV("Reading request result %zu", i);

            /**
             * Raise the timeout to be at least twice as long as the exposure
             * time, to avoid a false positive when the timeout is too short.
             */
            if ((exposures[i] * EXP_WAIT_MULTIPLIER) > waitLimit) {
                waitLimit = exposures[i] * EXP_WAIT_MULTIPLIER;
            }

            CameraMetadata frameMetadata;
            int32_t resultRequestId;
            do {
                ASSERT_EQ(OK, mDevice->waitForNextFrame(waitLimit));
                ASSERT_EQ(OK, mDevice->getNextFrame(&frameMetadata));

                camera_metadata_entry_t resultEntry = frameMetadata.find(ANDROID_REQUEST_ID);
                ASSERT_EQ(1u, resultEntry.count);
                resultRequestId = resultEntry.data.i32[0];
                if (CAMERA_MULTI_STREAM_DEBUGGING) {
                    std::cout << "capture result req id: " << resultRequestId << std::endl;
                }
            } while (resultRequestId != targetRequestId);
            targetRequestId++;
            ALOGV("Got capture burst result for request %zu", i);

            // Validate the capture result.
            if (CAMERA_MULTI_STREAM_DEBUGGING) {
                frameMetadata.dump(STDOUT_FILENO);
            }

            // TODO: Revisit this to figure out an accurate margin.
            int64_t resultExposure = GetExposureValue(frameMetadata);
            int32_t resultSensitivity = GetSensitivity(frameMetadata);
            EXPECT_LE(sensitivities[i] * (1.0 - TOLERANCE_MARGIN), resultSensitivity);
            EXPECT_GE(sensitivities[i] * (1.0 + TOLERANCE_MARGIN), resultSensitivity);
            EXPECT_LE(exposures[i] * (1.0 - TOLERANCE_MARGIN), resultExposure);
            EXPECT_GE(exposures[i] * (1.0 + TOLERANCE_MARGIN), resultExposure);

            ASSERT_EQ(OK, listener->waitForFrame(waitLimit));
            captureBurstTimes.push_back(systemTime());
            CpuConsumer::LockedBuffer imgBuffer;
            ASSERT_EQ(OK, consumer->lockNextBuffer(&imgBuffer));
            ALOGV("Got capture buffer for request %zu", i);

            /**
             * TODO: Validate the capture buffer. The current brightness
             * calculation is too slow, and it doesn't account for saturation
             * effects, which are quite common since we are sweeping over a
             * significant range of EVs. We need to figure out a reliable way
             * to validate the buffer data.
             */

            ASSERT_EQ(OK, consumer->unlockBuffer(imgBuffer));
            if (i > 0) {
                nsecs_t timeDelta =
                        captureBurstTimes[i] - captureBurstTimes[i-1];
                EXPECT_GE(timeDelta, exposures[i]);
            }
        }
    }

    /**
     * Intentionally shadow the default CreateStream function from the base
     * class, because we don't want any test in this class to use the default
     * stream creation function.
     */
    void CreateStream() {
    }
};

/**
 * This test adds a multiple-stream use case; basically, it tests 3 streams:
 *
 * 1. Preview stream, with a large size that is no bigger than 1080p.
 *    We render this stream to the display and vary the exposure time for
 *    a certain amount of time for visualization purposes.
 *
 * 2. Metering stream, with a small size that is no bigger than VGA.
 *    A burst is issued for different exposure times and analog gains
 *    (or analog-gain-implemented sensitivities), then we check whether
 *    the capture result metadata matches the request.
 *
 * 3. Capture stream; this is basically similar to the metering stream,
 *    but has a large size, which is the largest supported JPEG capture
 *    size.
 *
 * This multiple-stream test checks whether the HAL supports:
 *
 * 1. Multiple streams like the above. The HAL should support at least 3
 *    streams concurrently: one preview stream and 2 other YUV streams.
 *
 * 2. Manual control (gain/exposure) of a multiple-frame burst capture.
 */
TEST_F(CameraMultiStreamTest, MultiBurst) {

    TEST_EXTENSION_FORKING_INIT;
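    // Sanity check the static metadata before picking stream sizes: the
    // available size lists must hold (width, height) pairs, and the test
    // only runs on a FULL-level HAL.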
    camera_metadata_ro_entry availableProcessedSizes =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
    ASSERT_EQ(0u, availableProcessedSizes.count % 2);
    ASSERT_GE(availableProcessedSizes.count, 2u);
    camera_metadata_ro_entry availableProcessedMinFrameDurations =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);
    EXPECT_EQ(availableProcessedSizes.count,
            availableProcessedMinFrameDurations.count * 2);

    camera_metadata_ro_entry availableJpegSizes =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
    ASSERT_EQ(0u, availableJpegSizes.count % 2);
    ASSERT_GE(availableJpegSizes.count, 2u);

    camera_metadata_ro_entry hardwareLevel =
            GetStaticEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
    ASSERT_EQ(1u, hardwareLevel.count);
    uint8_t level = hardwareLevel.data.u8[0];
    ASSERT_GE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED);
    ASSERT_LE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
    if (level == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED) {
        const ::testing::TestInfo* const test_info =
                ::testing::UnitTest::GetInstance()->current_test_info();
        std::cerr << "Skipping test "
                  << test_info->test_case_name() << "."
                  << test_info->name()
                  << " because the HAL supported hardware level is LIMITED"
                  << std::endl;
        return;
    }

    // Find the right sizes for the preview, metering, and capture streams.
    // Assumes at least 2 entries in availableProcessedSizes.
    int64_t minFrameDuration = DEFAULT_FRAME_DURATION;
    Size processedMinSize, processedMaxSize, jpegMaxSize;
    const int32_t* data = availableProcessedSizes.data.i32;
    size_t count = availableProcessedSizes.count;

    int32_t minIdx, maxIdx;
    GetMinSize(data, count, &processedMinSize, &minIdx);
    GetMaxSize(data, count, &processedMaxSize, &maxIdx);
    ALOGV("Found processed max size: %dx%d, min size = %dx%d",
            processedMaxSize.width, processedMaxSize.height,
            processedMinSize.width, processedMinSize.height);

    if (availableProcessedSizes.count ==
            availableProcessedMinFrameDurations.count * 2) {
        minFrameDuration =
                availableProcessedMinFrameDurations.data.i64[maxIdx / 2];
    }

    EXPECT_GT(minFrameDuration, 0);

    if (minFrameDuration <= 0) {
        minFrameDuration = DEFAULT_FRAME_DURATION;
    }

    ALOGV("Targeted minimal frame duration is: %lldns", minFrameDuration);

    data = &(availableJpegSizes.data.i32[0]);
    count = availableJpegSizes.count;
    GetMaxSize(data, count, &jpegMaxSize, &maxIdx);
    ALOGV("Found JPEG size max idx = %d", maxIdx);

    // The max JPEG size should be available in the processed sizes; use it
    // for the YUV capture anyway.
    EXPECT_EQ(processedMaxSize.width, jpegMaxSize.width);
    EXPECT_EQ(processedMaxSize.height, jpegMaxSize.height);

    // Cap the preview size.
    Size previewLimit = { PREVIEW_WIDTH_CAP, PREVIEW_HEIGHT_CAP };
    // FIXME: need to make sure previewLimit is supported by the HAL.
    Size previewSize = CapSize(previewLimit, processedMaxSize);
    // Cap the metering size to VGA (VGA is mandated by the CDD).
    Size meteringLimit = { METERING_WIDTH_CAP, METERING_HEIGHT_CAP };
    Size meteringSize = CapSize(meteringLimit, processedMinSize);
    // The capture stream should be the max size of the JPEG sizes.
    ALOGV("Preview size: %dx%d, metering size: %dx%d, capture size: %dx%d",
            previewSize.width, previewSize.height,
            meteringSize.width, meteringSize.height,
            jpegMaxSize.width, jpegMaxSize.height);

    // Create streams.
    // Preview stream: large resolution, rendered on the screen.
    sp<CameraStream> previewStream;
    {
        sp<ANativeWindow> surface;
        ASSERT_NO_FATAL_FAILURE(CreateOnScreenSurface(/*out*/surface));
        previewStream = CreateStream(
                previewSize.width,
                previewSize.height,
                mDevice,
                DISPLAY_STREAM_PARAMETERS,
                surface,
                false);
        ASSERT_NE((void*)NULL, previewStream.get());
        ASSERT_NO_FATAL_FAILURE(previewStream->SetUp());
    }
    // Metering burst stream: small-resolution YUV stream.
    sp<CameraStream> meteringStream =
            CreateStream(
                meteringSize.width,
                meteringSize.height,
                mDevice);
    ASSERT_NE((void*)NULL, meteringStream.get());
    ASSERT_NO_FATAL_FAILURE(meteringStream->SetUp());
    // Capture burst stream: full-resolution YUV stream.
    sp<CameraStream> captureStream =
            CreateStream(
                jpegMaxSize.width,
                jpegMaxSize.height,
                mDevice);
    ASSERT_NE((void*)NULL, captureStream.get());
    ASSERT_NO_FATAL_FAILURE(captureStream->SetUp());

    // Create the preview, metering, and capture requests.
    CameraMetadata previewRequest, meteringRequest, captureRequest;
    ASSERT_NO_FATAL_FAILURE(CreateRequests(previewRequest, meteringRequest,
            captureRequest, previewStream->GetStreamId(),
            meteringStream->GetStreamId(), captureStream->GetStreamId()));

    // Start the preview.
    if (CAMERA_MULTI_STREAM_DEBUGGING) {
        previewRequest.dump(STDOUT_FILENO);
    }

    // Generate the exposure and sensitivity lists.
    camera_metadata_ro_entry exposureTimeRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);
    ASSERT_EQ(2u, exposureTimeRange.count);
    int64_t minExp = exposureTimeRange.data.i64[0];
    int64_t maxExp = exposureTimeRange.data.i64[1];
    ASSERT_GT(maxExp, minExp);

    camera_metadata_ro_entry sensitivityRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
    ASSERT_EQ(2u, sensitivityRange.count);
    int32_t minSensitivity = sensitivityRange.data.i32[0];
    int32_t maxSensitivity = sensitivityRange.data.i32[1];
    camera_metadata_ro_entry maxAnalogSenEntry =
            GetStaticEntry(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY);
    EXPECT_EQ(1u, maxAnalogSenEntry.count);
    int32_t maxAnalogSensitivity = maxAnalogSenEntry.data.i32[0];
    EXPECT_LE(maxAnalogSensitivity, maxSensitivity);
    // Only test the sensitivities implemented by analog gain.
    if (maxAnalogSensitivity > maxSensitivity) {
        // Fall back to maxSensitivity.
        maxAnalogSensitivity = maxSensitivity;
    }

    // Sensitivity list: only include the sensitivities that are implemented
    // purely by analog gain, if possible.
    Vector<int32_t> sensitivities;
    Vector<int64_t> exposures;
    count = (maxAnalogSensitivity - minSensitivity + 99) / 100;
    sensitivities.push_back(minSensitivity);
    for (size_t i = 1; i < count; i++) {
        sensitivities.push_back(minSensitivity + i * 100);
    }
    sensitivities.push_back(maxAnalogSensitivity);
    ALOGV("Sensitivity range: min=%d, max=%d", minSensitivity,
            maxAnalogSensitivity);
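    // Exposure list: sweep from the minimum supported exposure time,
    // doubling each step, up to the maximum.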
    int64_t exp = minExp;
    while (exp < maxExp) {
        exposures.push_back(exp);
        exp *= 2;
    }

    // Sweep the exposure value for the preview, just for visual inspection.
    uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
    for (size_t i = 0; i < exposures.size(); i++) {
        ASSERT_EQ(OK, previewRequest.update(
                ANDROID_CONTROL_MODE,
                &cmOff, 1));
        ASSERT_EQ(OK, previewRequest.update(
                ANDROID_SENSOR_EXPOSURE_TIME,
                &exposures[i], 1));
        ALOGV("Submitting preview request %zu with exposure %lld",
                i, exposures[i]);

        ASSERT_EQ(OK, mDevice->setStreamingRequest(previewRequest));

        // Let the preview run 200ms on screen for each exposure time.
        usleep(PREVIEW_RENDERING_TIME_INTERVAL);
    }
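    // Use the same number of requests for both bursts: the smaller of the
    // exposure and sensitivity list sizes, so every request has both values.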
    size_t requestCount = sensitivities.size();
    if (requestCount > exposures.size()) {
        requestCount = exposures.size();
    }

    // To maintain request id uniqueness (the preview request id is 0), make
    // the burst capture request ids start at 1.
    int32_t requestIdStart = 1;
    /**
     * Submit the metering burst, and set the default frame duration to the
     * minimal possible value; we want the capture to run as fast as
     * possible. The HAL should adjust the frame duration to the minimal
     * necessary value to support the requested exposure value if the
     * exposure is larger than the frame duration.
     */
    CaptureBurst(meteringRequest, requestCount, exposures, sensitivities,
            meteringStream, minFrameDuration, &requestIdStart);

    /**
     * Submit the capture burst, and set the default frame duration to the
     * minimal possible value; we want the capture to run as fast as
     * possible. The HAL should adjust the frame duration to the minimal
     * necessary value to support the requested exposure value if the
     * exposure is larger than the frame duration.
     */
    CaptureBurst(captureRequest, requestCount, exposures, sensitivities,
            captureStream, minFrameDuration, &requestIdStart);

    ASSERT_EQ(OK, mDevice->clearStreamingRequest());
}

} // namespace tests
} // namespace camera2
} // namespace android