1 /* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 //#define LOG_NDEBUG 0 18 #define LOG_TAG "mediafilterTest" 19 20 #include <inttypes.h> 21 22 #include <binder/ProcessState.h> 23 #include <filters/ColorConvert.h> 24 #include <gui/ISurfaceComposer.h> 25 #include <gui/SurfaceComposerClient.h> 26 #include <gui/Surface.h> 27 #include <media/ICrypto.h> 28 #include <media/IMediaHTTPService.h> 29 #include <media/MediaCodecBuffer.h> 30 #include <media/stagefright/foundation/ABuffer.h> 31 #include <media/stagefright/foundation/ADebug.h> 32 #include <media/stagefright/foundation/AMessage.h> 33 #include <media/stagefright/MediaCodec.h> 34 #include <media/stagefright/NuMediaExtractor.h> 35 #include <media/stagefright/RenderScriptWrapper.h> 36 #include <OMX_IVCommon.h> 37 #include <ui/DisplayInfo.h> 38 39 #include "RenderScript.h" 40 #include "ScriptC_argbtorgba.h" 41 #include "ScriptC_nightvision.h" 42 #include "ScriptC_saturation.h" 43 44 // test parameters 45 static const bool kTestFlush = true; // Note: true will drop 1 out of 46 static const int kFlushAfterFrames = 25; // kFlushAfterFrames output frames 47 static const int64_t kTimeout = 500ll; 48 49 // built-in filter parameters 50 static const int32_t kInvert = false; // ZeroFilter param 51 static const float kBlurRadius = 15.0f; // IntrinsicBlurFilter param 52 static const float kSaturation = 0.0f; // SaturationFilter param 53 54 static 
// Print the command-line usage string to stderr and terminate.
// NOTE(review): the format string prints "[flags]" before the program
// name; conventional order would be "usage: %s [flags]" — confirm intent.
void usage(const char *me) {
    fprintf(stderr, "usage: [flags] %s\n"
            "\t[-b] use IntrinsicBlurFilter\n"
            "\t[-c] use argb to rgba conversion RSFilter\n"
            "\t[-n] use night vision RSFilter\n"
            "\t[-r] use saturation RSFilter\n"
            "\t[-s] use SaturationFilter\n"
            "\t[-z] use ZeroFilter (copy filter)\n"
            "\t[-R] render output to surface (enables -S)\n"
            "\t[-S] allocate buffers from a surface\n"
            "\t[-T] use render timestamps (enables -R)\n",
            me);
    exit(1);
}

namespace android {

// RenderScript filter callback that applies a saturation kernel
// (ScriptC_saturation) with a fixed gain of 3.0 to each buffer.
struct SaturationRSFilter : RenderScriptWrapper::RSFilterCallback {
    // Create the script against the given RS context and set its
    // saturation parameter once, up front.
    void init(const RSC::sp<RSC::RS> &context) {
        mScript = new ScriptC_saturation(context);
        mScript->set_gSaturation(3.f);
    }

    // Run the script's root kernel over inBuffer, writing to outBuffer.
    virtual status_t processBuffers(
            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
        mScript->forEach_root(inBuffer, outBuffer);

        return OK;
    }

    // This filter takes no runtime parameters; accept and ignore them.
    status_t handleSetParameters(const sp<AMessage> &msg __unused) {
        return OK;
    }

private:
    RSC::sp<ScriptC_saturation> mScript;
};

// RenderScript filter callback that applies the night-vision kernel
// (ScriptC_nightvision) to each buffer.
struct NightVisionRSFilter : RenderScriptWrapper::RSFilterCallback {
    void init(const RSC::sp<RSC::RS> &context) {
        mScript = new ScriptC_nightvision(context);
    }

    // Run the script's root kernel over inBuffer, writing to outBuffer.
    virtual status_t processBuffers(
            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
        mScript->forEach_root(inBuffer, outBuffer);

        return OK;
    }

    // This filter takes no runtime parameters; accept and ignore them.
    status_t handleSetParameters(const sp<AMessage> &msg __unused) {
        return OK;
    }

private:
    RSC::sp<ScriptC_nightvision> mScript;
};

// RenderScript filter callback that converts ARGB pixels to RGBA
// (ScriptC_argbtorgba) for each buffer.
struct ARGBToRGBARSFilter : RenderScriptWrapper::RSFilterCallback {
    void init(const RSC::sp<RSC::RS> &context) {
        mScript = new ScriptC_argbtorgba(context);
    }

    // Run the script's root kernel over inBuffer, writing to outBuffer.
    virtual status_t processBuffers(
            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
        mScript->forEach_root(inBuffer, outBuffer);

        return OK;
    }

    // This filter takes no runtime parameters; accept and ignore them.
    status_t handleSetParameters(const sp<AMessage> &msg __unused) {
        return OK;
    }

private:
    RSC::sp<ScriptC_argbtorgba> mScript;
};

// Per-codec bookkeeping for one MediaCodec instance (decoder or filter).
struct CodecState {
    sp<MediaCodec> mCodec;                       // the codec itself
    Vector<sp<MediaCodecBuffer> > mInBuffers;    // its input buffer set
    Vector<sp<MediaCodecBuffer> > mOutBuffers;   // its output buffer set
    bool mSignalledInputEOS;                     // EOS queued on input?
    bool mSawOutputEOS;                          // EOS seen on output?
    int64_t mNumBuffersDecoded;                  // frames produced so far
};

// One dequeued decoder output buffer, recorded so it can later be copied
// into the filter's input. Mirrors MediaCodec::dequeueOutputBuffer's
// out-parameters.
struct DecodedFrame {
    size_t index;
    size_t offset;
    size_t size;
    int64_t presentationTimeUs;
    uint32_t flags;
};

// Which filter component (built-in or RenderScript-backed) to run.
enum FilterType {
    FILTERTYPE_ZERO,
    FILTERTYPE_INTRINSIC_BLUR,
    FILTERTYPE_SATURATION,
    FILTERTYPE_RS_SATURATION,
    FILTERTYPE_RS_NIGHT_VISION,
    FILTERTYPE_RS_ARGB_TO_RGBA,
};

// Number of filter input buffers queued since the last test flush; used
// to trigger the periodic flush test every kFlushAfterFrames frames.
size_t inputFramesSinceFlush = 0;

// Move one decoded frame from the video decoder's output into the
// filter's input: dequeue a filter input buffer, color-convert the
// decoder's YUV output into it, copy the timestamp, and queue it.
// When kTestFlush is set, every kFlushAfterFrames-th frame is instead
// used to verify that queueing a pre-flush buffer fails with -EACCES
// (that frame is intentionally dropped from the output).
void tryCopyDecodedBuffer(
        List<DecodedFrame> *decodedFrameIndices,
        CodecState *filterState,
        CodecState *vidState) {
    // nothing decoded yet
    if (decodedFrameIndices->empty()) {
        return;
    }

    // grab a free filter input buffer; bail out quietly on timeout
    size_t filterIndex;
    status_t err = filterState->mCodec->dequeueInputBuffer(
            &filterIndex, kTimeout);
    if (err != OK) {
        return;
    }

    ++inputFramesSinceFlush;

    DecodedFrame frame = *decodedFrameIndices->begin();

    // only consume a buffer if we are not going to flush, since we expect
    // the dequeue -> flush -> queue operation to cause an error and
    // not produce an output frame
    if (!kTestFlush || inputFramesSinceFlush < kFlushAfterFrames) {
        decodedFrameIndices->erase(decodedFrameIndices->begin());
    }
    size_t outIndex = frame.index;

    const sp<MediaCodecBuffer> &srcBuffer =
        vidState->mOutBuffers.itemAt(outIndex);
    const sp<MediaCodecBuffer> &destBuffer =
        filterState->mInBuffers.itemAt(filterIndex);

    // fetch both formats so the geometry of the copy can be validated
    sp<AMessage> srcFormat, destFormat;
    vidState->mCodec->getOutputFormat(&srcFormat);
    filterState->mCodec->getInputFormat(&destFormat);

    int32_t srcWidth, srcHeight, srcStride, srcSliceHeight;
    int32_t srcColorFormat, destColorFormat;
    int32_t destWidth, destHeight, destStride, destSliceHeight;
    // both formats must fully describe their buffer geometry
    CHECK(srcFormat->findInt32("stride", &srcStride)
            && srcFormat->findInt32("slice-height", &srcSliceHeight)
            && srcFormat->findInt32("width", &srcWidth)
            && srcFormat->findInt32("height", &srcHeight)
            && srcFormat->findInt32("color-format", &srcColorFormat));
    CHECK(destFormat->findInt32("stride", &destStride)
            && destFormat->findInt32("slice-height", &destSliceHeight)
            && destFormat->findInt32("width", &destWidth)
            && destFormat->findInt32("height", &destHeight)
            && destFormat->findInt32("color-format", &destColorFormat));

    // the source frame must fit within the destination buffer's layout
    CHECK(srcWidth <= destStride && srcHeight <= destSliceHeight);

    // convert the decoder's YUV420sp output into the filter's ARGB input;
    // the chroma plane starts stride * slice-height bytes into the buffer
    convertYUV420spToARGB(
            srcBuffer->data(),
            srcBuffer->data() + srcStride * srcSliceHeight,
            srcWidth,
            srcHeight,
            destBuffer->data());

    // copy timestamp
    int64_t timeUs;
    CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs));
    destBuffer->meta()->setInt64("timeUs", timeUs);

    if (kTestFlush && inputFramesSinceFlush >= kFlushAfterFrames) {
        inputFramesSinceFlush = 0;

        // check that queueing a buffer that was dequeued before flush
        // fails with expected error EACCES
        filterState->mCodec->flush();

        err = filterState->mCodec->queueInputBuffer(
                filterIndex, 0 /* offset */, destBuffer->size(),
                timeUs, frame.flags);

        if (err == OK) {
            ALOGE("FAIL: queue after flush returned OK");
        } else if (err != -EACCES) {
            ALOGE("queueInputBuffer after flush returned %d, "
                    "expected -EACCES (-13)", err);
        }
    } else {
        // normal path: hand the converted frame to the filter and give
        // the decoder its output buffer back
        err = filterState->mCodec->queueInputBuffer(
                filterIndex, 0 /* offset */, destBuffer->size(),
                timeUs, frame.flags);
        CHECK(err == OK);

        err = vidState->mCodec->releaseOutputBuffer(outIndex);
        CHECK(err == OK);
    }
}

// Number of filter output buffers dequeued since the last test flush;
// drives the flush-then-release error-path test below.
size_t outputFramesSinceFlush = 0;

// Dequeue one output buffer from the filter and either release it or
// render it to the surface (optionally with an explicit timestamp).
// When kTestFlush is set, every kFlushAfterFrames-th output is flushed
// first to verify that releasing a pre-flush buffer fails with -EACCES.
void tryDrainOutputBuffer(
        CodecState *filterState,
        const sp<Surface> &surface, bool renderSurface,
        bool useTimestamp, int64_t *startTimeRender) {
    size_t index;
    size_t offset;
    size_t size;
    int64_t presentationTimeUs;
    uint32_t flags;
    status_t err = filterState->mCodec->dequeueOutputBuffer(
            &index, &offset, &size, &presentationTimeUs, &flags,
            kTimeout);

    // nothing ready yet (timeout or transient status) — try again later
    if (err != OK) {
        return;
    }

    ++outputFramesSinceFlush;

    // flush BEFORE releasing/rendering so the release below exercises
    // the expected-failure path checked further down
    if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) {
        filterState->mCodec->flush();
    }

    if (surface == NULL || !renderSurface) {
        // no rendering requested: just return the buffer to the codec
        err = filterState->mCodec->releaseOutputBuffer(index);
    } else if (useTimestamp) {
        if (*startTimeRender == -1) {
            // begin rendering 2 vsyncs after first decode
            *startTimeRender = systemTime(SYSTEM_TIME_MONOTONIC)
                    + 33000000 - (presentationTimeUs * 1000);
        }
        // convert the media timestamp (us) to an absolute render time (ns)
        presentationTimeUs =
                (presentationTimeUs * 1000) + *startTimeRender;
        err = filterState->mCodec->renderOutputBufferAndRelease(
                index, presentationTimeUs);
    } else {
        err = filterState->mCodec->renderOutputBufferAndRelease(index);
    }

    if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) {
        outputFramesSinceFlush = 0;

        // releasing the buffer dequeued before flush should cause an error
        // if so, the frame will also be skipped in output stream
        if (err == OK) {
            ALOGE("FAIL: release after flush returned OK");
        } else if (err != -EACCES) {
            ALOGE("releaseOutputBuffer after flush returned %d, "
                    "expected -EACCES (-13)", err);
        }
    } else {
        CHECK(err == OK);
    }

    if (flags & MediaCodec::BUFFER_FLAG_EOS) {
        ALOGV("reached EOS on output.");
        filterState->mSawOutputEOS = true;
    }
}

// Decode the video track of the file at 'path', pipe every decoded frame
// through the selected filter codec, and optionally render the filtered
// output to 'surface'. Returns 0 on success, 1 if the extractor cannot
// be set up. Runs until both decoder and filter reach EOS.
static int decode(
        const sp<ALooper> &looper,
        const char *path,
        const sp<Surface> &surface,
        bool renderSurface,
        bool useTimestamp,
        FilterType filterType) {

    // NOTE(review): shadows the file-scope kTimeout (same value, 500us);
    // the local could be removed without changing behavior.
    static int64_t kTimeout = 500ll;

    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
    if (extractor->setDataSource(NULL /* httpService */,
            path) != OK) {
        fprintf(stderr, "unable to instantiate extractor.\n");
        return 1;
    }

    KeyedVector<size_t, CodecState> stateByTrack;

    // find the first video track and create a decoder for it; all other
    // tracks are ignored (the loop breaks after the first video track)
    CodecState *vidState = NULL;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<AMessage> format;
        status_t err = extractor->getTrackFormat(i, &format);
        CHECK(err == OK);

        AString mime;
        CHECK(format->findString("mime", &mime));
        bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);
        if (!isVideo) {
            continue;
        }

        ALOGV("selecting track %zu", i);

        err = extractor->selectTrack(i);
        CHECK(err == OK);

        CodecState *state =
            &stateByTrack.editValueAt(stateByTrack.add(i, CodecState()));

        vidState = state;

        state->mNumBuffersDecoded = 0;

        state->mCodec = MediaCodec::CreateByType(
                looper, mime.c_str(), false /* encoder */);

        CHECK(state->mCodec != NULL);

        // decode to codec-owned buffers (no surface, no crypto)
        err = state->mCodec->configure(
                format, NULL /* surface */, NULL /* crypto */, 0 /* flags */);

        CHECK(err == OK);

        state->mSignalledInputEOS = false;
        state->mSawOutputEOS = false;

        break;
    }
    CHECK(!stateByTrack.isEmpty());
    CHECK(vidState != NULL);

    // the filter is configured with the decoder's output format as a base
    sp<AMessage> vidFormat;
    vidState->mCodec->getOutputFormat(&vidFormat);

    // set filter to use ARGB8888
    vidFormat->setInt32("color-format", OMX_COLOR_Format32bitARGB8888);
    // set app cache directory path
    vidFormat->setString("cacheDir", "/system/bin");

    // create RenderScript context for RSFilters
    RSC::sp<RSC::RS> context = new RSC::RS();
    context->init("/system/bin");

    // keeps the RS callback alive for the duration of the decode
    sp<RenderScriptWrapper::RSFilterCallback> rsFilter;

    // create renderscript wrapper for RSFilters
    sp<RenderScriptWrapper> rsWrapper = new RenderScriptWrapper;
    rsWrapper->mContext = context.get();

    // NOTE(review): heap-allocated and never deleted; tolerable in a
    // test binary that exits shortly after, but it is a leak.
    CodecState *filterState = new CodecState();
    filterState->mNumBuffersDecoded = 0;

    // filter-specific parameters, delivered via setParameters() below
    sp<AMessage> params = new AMessage();

    switch
    (filterType) {
        // built-in pass-through filter; "invert" optionally inverts colors
        case FILTERTYPE_ZERO:
        {
            filterState->mCodec = MediaCodec::CreateByComponentName(
                    looper, "android.filter.zerofilter");
            params->setInt32("invert", kInvert);
            break;
        }
        // built-in blur filter driven by the "blur-radius" parameter
        case FILTERTYPE_INTRINSIC_BLUR:
        {
            filterState->mCodec = MediaCodec::CreateByComponentName(
                    looper, "android.filter.intrinsicblur");
            params->setFloat("blur-radius", kBlurRadius);
            break;
        }
        // built-in saturation filter driven by the "saturation" parameter
        case FILTERTYPE_SATURATION:
        {
            filterState->mCodec = MediaCodec::CreateByComponentName(
                    looper, "android.filter.saturation");
            params->setFloat("saturation", kSaturation);
            break;
        }
        // the three RS-backed cases all use the generic RenderScript
        // filter component and attach their callback via "rs-wrapper"
        case FILTERTYPE_RS_SATURATION:
        {
            SaturationRSFilter *satFilter = new SaturationRSFilter;
            satFilter->init(context);
            rsFilter = satFilter;
            rsWrapper->mCallback = rsFilter;
            vidFormat->setObject("rs-wrapper", rsWrapper);

            filterState->mCodec = MediaCodec::CreateByComponentName(
                    looper, "android.filter.RenderScript");
            break;
        }
        case FILTERTYPE_RS_NIGHT_VISION:
        {
            NightVisionRSFilter *nightVisionFilter = new NightVisionRSFilter;
            nightVisionFilter->init(context);
            rsFilter = nightVisionFilter;
            rsWrapper->mCallback = rsFilter;
            vidFormat->setObject("rs-wrapper", rsWrapper);

            filterState->mCodec = MediaCodec::CreateByComponentName(
                    looper, "android.filter.RenderScript");
            break;
        }
        case FILTERTYPE_RS_ARGB_TO_RGBA:
        {
            ARGBToRGBARSFilter *argbToRgbaFilter = new ARGBToRGBARSFilter;
            argbToRgbaFilter->init(context);
            rsFilter = argbToRgbaFilter;
            rsWrapper->mCallback = rsFilter;
            vidFormat->setObject("rs-wrapper", rsWrapper);

            filterState->mCodec = MediaCodec::CreateByComponentName(
                    looper, "android.filter.RenderScript");
            break;
        }
        default:
        {
            LOG_ALWAYS_FATAL("mediacodec.cpp error: unrecognized FilterType");
            break;
        }
    }
    CHECK(filterState->mCodec != NULL);

    status_t err =
            filterState->mCodec->configure(
            vidFormat /* format */, surface, NULL /* crypto */, 0 /* flags */);
    CHECK(err == OK);

    filterState->mSignalledInputEOS = false;
    filterState->mSawOutputEOS = false;

    int64_t startTimeUs = ALooper::GetNowUs();
    int64_t startTimeRender = -1;

    // start the decoder(s) and cache their buffer sets
    for (size_t i = 0; i < stateByTrack.size(); ++i) {
        CodecState *state = &stateByTrack.editValueAt(i);

        sp<MediaCodec> codec = state->mCodec;

        CHECK_EQ((status_t)OK, codec->start());

        CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
        CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));

        ALOGV("got %zu input and %zu output buffers",
                state->mInBuffers.size(), state->mOutBuffers.size());
    }

    CHECK_EQ((status_t)OK, filterState->mCodec->setParameters(params));

    // flushing a codec that has not been started must fail
    if (kTestFlush) {
        status_t flushErr = filterState->mCodec->flush();
        if (flushErr == OK) {
            ALOGE("FAIL: Flush before start returned OK");
        } else {
            ALOGV("Flush before start returned status %d, usually ENOSYS (-38)",
                    flushErr);
        }
    }

    CHECK_EQ((status_t)OK, filterState->mCodec->start());
    CHECK_EQ((status_t)OK, filterState->mCodec->getInputBuffers(
            &filterState->mInBuffers));
    CHECK_EQ((status_t)OK, filterState->mCodec->getOutputBuffers(
            &filterState->mOutBuffers));

    // flushing a started codec must succeed
    if (kTestFlush) {
        status_t flushErr = filterState->mCodec->flush();
        if (flushErr != OK) {
            ALOGE("FAIL: Flush after start returned %d, expect OK (0)",
                    flushErr);
        } else {
            ALOGV("Flush immediately after start OK");
        }
    }

    // decoder output buffers waiting to be fed into the filter
    List<DecodedFrame> decodedFrameIndices;

    // loop until decoder reaches EOS
    bool sawInputEOS = false;
    bool sawOutputEOSOnAllTracks = false;
    while (!sawOutputEOSOnAllTracks) {
        if (!sawInputEOS) {
            // feed one extractor sample to the owning track's decoder
            size_t trackIndex;
            status_t err = extractor->getSampleTrackIndex(&trackIndex);

            if (err != OK) {
                ALOGV("saw input eos");
                sawInputEOS = true;
            } else {
                CodecState *state = &stateByTrack.editValueFor(trackIndex);

                size_t index;
                err = state->mCodec->dequeueInputBuffer(&index, kTimeout);

                if (err == OK) {
                    ALOGV("filling input buffer %zu", index);

                    // wrap the codec buffer so the extractor can fill it
                    const sp<MediaCodecBuffer> &buffer = state->mInBuffers.itemAt(index);
                    sp<ABuffer> abuffer = new ABuffer(buffer->base(), buffer->capacity());

                    err = extractor->readSampleData(abuffer);
                    CHECK(err == OK);
                    buffer->setRange(abuffer->offset(), abuffer->size());

                    int64_t timeUs;
                    err = extractor->getSampleTime(&timeUs);
                    CHECK(err == OK);

                    uint32_t bufferFlags = 0;

                    err = state->mCodec->queueInputBuffer(
                            index, 0 /* offset */, buffer->size(),
                            timeUs, bufferFlags);

                    CHECK(err == OK);

                    extractor->advance();
                } else {
                    // only a dequeue timeout is acceptable here
                    CHECK_EQ(err, -EAGAIN);
                }
            }
        } else {
            // input exhausted: queue an empty EOS buffer on each track once
            for (size_t i = 0; i < stateByTrack.size(); ++i) {
                CodecState *state = &stateByTrack.editValueAt(i);

                if (!state->mSignalledInputEOS) {
                    size_t index;
                    status_t err =
                        state->mCodec->dequeueInputBuffer(&index, kTimeout);

                    if (err == OK) {
                        ALOGV("signalling input EOS on track %zu", i);

                        err = state->mCodec->queueInputBuffer(
                                index, 0 /* offset */, 0 /* size */,
                                0ll /* timeUs */, MediaCodec::BUFFER_FLAG_EOS);

                        CHECK(err == OK);

                        state->mSignalledInputEOS = true;
                    } else {
                        CHECK_EQ(err, -EAGAIN);
                    }
                }
            }
        }

        // drain decoder output and push it through the filter
        sawOutputEOSOnAllTracks = true;
        for (size_t i = 0; i < stateByTrack.size(); ++i) {
            CodecState *state = &stateByTrack.editValueAt(i);

            if (state->mSawOutputEOS) {
                continue;
            } else {
                sawOutputEOSOnAllTracks = false;
            }

            DecodedFrame frame;
            status_t err = state->mCodec->dequeueOutputBuffer(
                    &frame.index, &frame.offset, &frame.size,
                    &frame.presentationTimeUs, &frame.flags, kTimeout);

            if (err == OK) {
                ALOGV("draining decoded buffer %zu, time = %lld us",
                        frame.index, (long long)frame.presentationTimeUs);

                ++(state->mNumBuffersDecoded);

                // the buffer is handed to the filter asynchronously via
                // tryCopyDecodedBuffer below
                decodedFrameIndices.push_back(frame);

                if (frame.flags & MediaCodec::BUFFER_FLAG_EOS) {
                    ALOGV("reached EOS on decoder output.");
                    state->mSawOutputEOS = true;
                }

            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
                // buffer set was reallocated; re-fetch our cached copies
                ALOGV("INFO_OUTPUT_BUFFERS_CHANGED");
                CHECK_EQ((status_t)OK, state->mCodec->getOutputBuffers(
                        &state->mOutBuffers));

                ALOGV("got %zu output buffers", state->mOutBuffers.size());
            } else if (err == INFO_FORMAT_CHANGED) {
                sp<AMessage> format;
                CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format));

                ALOGV("INFO_FORMAT_CHANGED: %s",
                        format->debugString().c_str());
            } else {
                CHECK_EQ(err, -EAGAIN);
            }

            tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState);

            tryDrainOutputBuffer(
                    filterState, surface, renderSurface,
                    useTimestamp, &startTimeRender);
        }
    }

    // after EOS on decoder, let filter reach EOS
    while (!filterState->mSawOutputEOS) {
        tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState);

        tryDrainOutputBuffer(
                filterState, surface, renderSurface,
                useTimestamp, &startTimeRender);
    }

    int64_t elapsedTimeUs = ALooper::GetNowUs() - startTimeUs;

    // release the track codecs and report throughput.
    // NOTE(review): filterState->mCodec is never released here — only the
    // decoder codecs in stateByTrack are; confirm whether that is intended.
    for (size_t i = 0; i < stateByTrack.size(); ++i) {
        CodecState *state = &stateByTrack.editValueAt(i);

        CHECK_EQ((status_t)OK, state->mCodec->release());

        printf("track %zu: %" PRId64 " frames decoded and filtered, "
                "%.2f fps.\n", i, state->mNumBuffersDecoded,
                state->mNumBuffersDecoded * 1E6 / elapsedTimeUs);
    }

    return 0;
}

}  // namespace android

// Entry point: parse flags, optionally create a display surface, then
// run the decode-and-filter pipeline on the single path argument.
int main(int argc, char **argv) {
    using namespace android;

    const char *me = argv[0];

    bool useSurface = false;
    bool renderSurface = false;
    bool useTimestamp = false;
    FilterType
    filterType = FILTERTYPE_ZERO;

    int res;
    while ((res = getopt(argc, argv, "bcnrszTRSh")) >= 0) {
        switch (res) {
            case 'b':
            {
                filterType = FILTERTYPE_INTRINSIC_BLUR;
                break;
            }
            case 'c':
            {
                filterType = FILTERTYPE_RS_ARGB_TO_RGBA;
                break;
            }
            case 'n':
            {
                filterType = FILTERTYPE_RS_NIGHT_VISION;
                break;
            }
            case 'r':
            {
                filterType = FILTERTYPE_RS_SATURATION;
                break;
            }
            case 's':
            {
                filterType = FILTERTYPE_SATURATION;
                break;
            }
            case 'z':
            {
                filterType = FILTERTYPE_ZERO;
                break;
            }
            // -T implies -R implies -S, via deliberate fall-through
            case 'T':
            {
                useTimestamp = true;
            }
            // fall through
            case 'R':
            {
                renderSurface = true;
            }
            // fall through
            case 'S':
            {
                useSurface = true;
                break;
            }
            case '?':
            case 'h':
            default:
            {
                usage(me);
                break;
            }
        }
    }

    argc -= optind;
    argv += optind;

    // exactly one positional argument (the media file path) is required
    if (argc != 1) {
        usage(me);
    }

    // binder thread pool is needed for media service callbacks
    ProcessState::self()->startThreadPool();

    android::sp<ALooper> looper = new ALooper;
    looper->start();

    android::sp<SurfaceComposerClient> composerClient;
    android::sp<SurfaceControl> control;
    android::sp<Surface> surface;

    if (useSurface) {
        // create a full-screen RGBA surface on the main display
        composerClient = new SurfaceComposerClient;
        CHECK_EQ((status_t)OK, composerClient->initCheck());

        android::sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
                ISurfaceComposer::eDisplayIdMain));
        DisplayInfo info;
        SurfaceComposerClient::getDisplayInfo(display, &info);
        ssize_t displayWidth = info.w;
        ssize_t displayHeight = info.h;

        ALOGV("display is %zd x %zd", displayWidth, displayHeight);

        control = composerClient->createSurface(
                String8("A Surface"), displayWidth, displayHeight,
                PIXEL_FORMAT_RGBA_8888, 0);

        CHECK(control != NULL);
        CHECK(control->isValid());

        // put the surface on top and make it visible
        SurfaceComposerClient::Transaction{}
                .setLayer(control, INT_MAX)
                .show(control)
                .apply();

        surface = control->getSurface();
        CHECK(surface != NULL);
    }

    decode(looper, argv[0], surface, renderSurface, useTimestamp, filterType);

    if (useSurface) {
        composerClient->dispose();
    }

    looper->stop();

    return 0;
}