/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <assert.h>
#include <ctype.h>
#include <fcntl.h>
#include <inttypes.h>
#include <getopt.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/wait.h>

#include <termios.h>
#include <unistd.h>

#define LOG_TAG "ScreenRecord"
#define ATRACE_TAG ATRACE_TAG_GRAPHICS
//#define LOG_NDEBUG 0
#include <utils/Log.h>

#include <binder/IPCThreadState.h>
#include <utils/Errors.h>
#include <utils/Timers.h>
#include <utils/Trace.h>

#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/ISurfaceComposer.h>
#include <ui/DisplayInfo.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaMuxer.h>
#include <media/ICrypto.h>

#include "screenrecord.h"
#include "Overlay.h"
#include "FrameOutput.h"

using namespace android;

static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
static const uint32_t kFallbackWidth = 1280;        // 720p
static const uint32_t kFallbackHeight = 720;
static const char* kMimeTypeAvc = "video/avc";

// Command-line parameters.
static bool gVerbose = false;           // chatty on stdout
static bool gRotate = false;            // rotate 90 degrees
static enum {
    FORMAT_MP4, FORMAT_H264, FORMAT_FRAMES, FORMAT_RAW_FRAMES
} gOutputFormat = FORMAT_MP4;           // data format for output
static bool gSizeSpecified = false;     // was size explicitly requested?
static bool gWantInfoScreen = false;    // do we want initial info screen?
static bool gWantFrameTime = false;     // do we want times on each frame?
static uint32_t gVideoWidth = 0;        // default width+height
static uint32_t gVideoHeight = 0;
static uint32_t gBitRate = 4000000;     // 4Mbps
static uint32_t gTimeLimitSec = kMaxTimeLimitSec;

// Set by signal handler to stop recording.
static volatile bool gStopRequested;

// Previous signal handler state, restored after first hit.
static struct sigaction gOrigSigactionINT;
static struct sigaction gOrigSigactionHUP;
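
/*
 * Overall flow (see the functions below): main() parses arguments and then
 * calls recordScreen(), which configures the AVC encoder and its input
 * surface (prepareEncoder), points a virtual display at that surface
 * (prepareVirtualDisplay), and drains encoded output into a MediaMuxer or
 * raw file (runEncoder). On success, notifyMediaScanner() tells the media
 * scanner about the new file.
 */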

/*
 * Catch keyboard interrupt signals. On receipt, the "stop requested"
 * flag is raised, and the original handler is restored (so that, if
 * we get stuck finishing, a second Ctrl-C will kill the process).
 */
static void signalCatcher(int signum)
{
    gStopRequested = true;
    switch (signum) {
        case SIGINT:
        case SIGHUP:
            sigaction(SIGINT, &gOrigSigactionINT, NULL);
            sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
            break;
        default:
            abort();
            break;
    }
}

/*
 * Configures signal handlers. The previous handlers are saved.
 *
 * If the command is run from an interactive adb shell, we get SIGINT
 * when Ctrl-C is hit. If we're run from the host, the local adb process
 * gets the signal, and we get a SIGHUP when the terminal disconnects.
 */
static status_t configureSignals() {
    struct sigaction act;
    memset(&act, 0, sizeof(act));
    act.sa_handler = signalCatcher;
    if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
        status_t err = -errno;
        fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
                strerror(errno));
        return err;
    }
    if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
        status_t err = -errno;
        fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
                strerror(errno));
        return err;
    }
    return NO_ERROR;
}

/*
 * Returns "true" if the device is rotated 90 degrees.
 */
static bool isDeviceRotated(int orientation) {
    return orientation != DISPLAY_ORIENTATION_0 &&
            orientation != DISPLAY_ORIENTATION_180;
}

/*
 * Configures and starts the MediaCodec encoder. Obtains an input surface
 * from the codec.
 */
static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
        sp<IGraphicBufferProducer>* pBufferProducer) {
    status_t err;

    if (gVerbose) {
        printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
                gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
    }

    sp<AMessage> format = new AMessage;
    format->setInt32("width", gVideoWidth);
    format->setInt32("height", gVideoHeight);
    format->setString("mime", kMimeTypeAvc);
    format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
    format->setInt32("bitrate", gBitRate);
    format->setFloat("frame-rate", displayFps);
    format->setInt32("i-frame-interval", 10);

    sp<ALooper> looper = new ALooper;
    looper->setName("screenrecord_looper");
    looper->start();
    ALOGV("Creating codec");
    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
    if (codec == NULL) {
        fprintf(stderr, "ERROR: unable to create %s codec instance\n",
                kMimeTypeAvc);
        return UNKNOWN_ERROR;
    }

    err = codec->configure(format, NULL, NULL,
            MediaCodec::CONFIGURE_FLAG_ENCODE);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
                kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
        codec->release();
        return err;
    }

    ALOGV("Creating encoder input surface");
    sp<IGraphicBufferProducer> bufferProducer;
    err = codec->createInputSurface(&bufferProducer);
    if (err != NO_ERROR) {
        fprintf(stderr,
            "ERROR: unable to create encoder input surface (err=%d)\n", err);
        codec->release();
        return err;
    }

    ALOGV("Starting codec");
    err = codec->start();
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
        codec->release();
        return err;
    }

    ALOGV("Codec prepared");
    *pCodec = codec;
    *pBufferProducer = bufferProducer;
    return 0;
}
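
/*
 * Typical usage, mirroring recordScreen() below:
 *
 *     sp<MediaCodec> encoder;
 *     sp<IGraphicBufferProducer> encoderInputSurface;
 *     err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);
 *     // ...hand encoderInputSurface to prepareVirtualDisplay(), then drain
 *     // the encoder with runEncoder(); release() the encoder when done.
 */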

/*
 * Sets the display projection, based on the display dimensions, video size,
 * and device orientation.
 */
static status_t setDisplayProjection(const sp<IBinder>& dpy,
        const DisplayInfo& mainDpyInfo) {
    status_t err;

    // Set the region of the layer stack we're interested in, which in our
    // case is "all of it". If the app is rotated (so that the width of the
    // app is based on the height of the display), reverse width/height.
    bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
    uint32_t sourceWidth, sourceHeight;
    if (!deviceRotated) {
        sourceWidth = mainDpyInfo.w;
        sourceHeight = mainDpyInfo.h;
    } else {
        ALOGV("using rotated width/height");
        sourceHeight = mainDpyInfo.w;
        sourceWidth = mainDpyInfo.h;
    }
    Rect layerStackRect(sourceWidth, sourceHeight);

    // We need to preserve the aspect ratio of the display.
    float displayAspect = (float) sourceHeight / (float) sourceWidth;


    // Set the way we map the output onto the display surface (which will
    // be e.g. 1280x720 for a 720p video). The rect is interpreted
    // post-rotation, so if the display is rotated 90 degrees we need to
    // "pre-rotate" it by flipping width/height, so that the orientation
    // adjustment changes it back.
    //
    // We might want to encode a portrait display as landscape to use more
    // of the screen real estate. (If players respect a 90-degree rotation
    // hint, we can essentially get a 720x1280 video instead of 1280x720.)
    // In that case, we swap the configured video width/height and then
    // supply a rotation value to the display projection.
    uint32_t videoWidth, videoHeight;
    uint32_t outWidth, outHeight;
    if (!gRotate) {
        videoWidth = gVideoWidth;
        videoHeight = gVideoHeight;
    } else {
        videoWidth = gVideoHeight;
        videoHeight = gVideoWidth;
    }
    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
        // limited by narrow width; reduce height
        outWidth = videoWidth;
        outHeight = (uint32_t)(videoWidth * displayAspect);
    } else {
        // limited by short height; restrict width
        outHeight = videoHeight;
        outWidth = (uint32_t)(videoHeight / displayAspect);
    }
    uint32_t offX, offY;
    offX = (videoWidth - outWidth) / 2;
    offY = (videoHeight - outHeight) / 2;
    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);

    if (gVerbose) {
        if (gRotate) {
            printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
                    outHeight, outWidth, offY, offX);
        } else {
            printf("Content area is %ux%u at offset x=%d y=%d\n",
                    outWidth, outHeight, offX, offY);
        }
    }

    SurfaceComposerClient::setDisplayProjection(dpy,
            gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
            layerStackRect, displayRect);
    return NO_ERROR;
}
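
/*
 * Worked example of the math above (values are illustrative): recording a
 * 1080x1920 portrait display into a 1280x720 video with gRotate=false gives
 * displayAspect = 1920/1080 ~= 1.78, so the "short height" branch applies:
 * outHeight = 720, outWidth ~= 720/1.78 ~= 405, offX = (1280-405)/2 ~= 437,
 * offY = 0 (exact values depend on float truncation). The recording is
 * pillarboxed, with a roughly 405x720 content area centered horizontally in
 * the 1280x720 frame.
 */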

/*
 * Configures the virtual display. When this completes, virtual display
 * frames will start arriving from the buffer producer.
 */
static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
        const sp<IGraphicBufferProducer>& bufferProducer,
        sp<IBinder>* pDisplayHandle) {
    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
            String8("ScreenRecorder"), false /*secure*/);

    SurfaceComposerClient::openGlobalTransaction();
    SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
    setDisplayProjection(dpy, mainDpyInfo);
    SurfaceComposerClient::setDisplayLayerStack(dpy, 0);    // default stack
    SurfaceComposerClient::closeGlobalTransaction();

    *pDisplayHandle = dpy;

    return NO_ERROR;
}

/*
 * Runs the MediaCodec encoder, sending the output to the MediaMuxer. The
 * input frames are coming from the virtual display as fast as SurfaceFlinger
 * wants to send them.
 *
 * Exactly one of muxer or rawFp must be non-null.
 *
 * The muxer must *not* have been started before calling.
 */
static status_t runEncoder(const sp<MediaCodec>& encoder,
        const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
        const sp<IBinder>& virtualDpy, uint8_t orientation) {
    static int kTimeout = 250000;   // be responsive on signal
    status_t err;
    ssize_t trackIdx = -1;
    uint32_t debugNumFrames = 0;
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
    DisplayInfo mainDpyInfo;

    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));

    Vector<sp<ABuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    // This is set by the signal handler.
    gStopRequested = false;

    // Run until we're signaled.
    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            if (gVerbose) {
                printf("Time limit reached\n");
            }
            break;
        }

        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
        switch (err) {
            case NO_ERROR:
                // got a buffer
                if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                    ALOGV("Got codec config buffer (%zu bytes)", size);
                    if (muxer != NULL) {
                        // ignore this -- we passed the CSD into MediaMuxer when
                        // we got the format change notification
                        size = 0;
                    }
                }
                if (size != 0) {
                    ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
                            bufIndex, size, ptsUsec);

                    { // scope
                        ATRACE_NAME("orientation");
                        // Check orientation, update if it has changed.
                        //
                        // Polling for changes is inefficient and wrong, but the
                        // useful stuff is hard to get at without a Dalvik VM.
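                        //
                        // If the orientation did change, the virtual display's
                        // projection is recomputed below, wrapped in a global
                        // transaction so SurfaceFlinger applies the update in
                        // one step.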
                        err = SurfaceComposerClient::getDisplayInfo(mainDpy,
                                &mainDpyInfo);
                        if (err != NO_ERROR) {
                            ALOGW("getDisplayInfo(main) failed: %d", err);
                        } else if (orientation != mainDpyInfo.orientation) {
                            ALOGD("orientation changed, now %d",
                                    mainDpyInfo.orientation);
                            SurfaceComposerClient::openGlobalTransaction();
                            setDisplayProjection(virtualDpy, mainDpyInfo);
                            SurfaceComposerClient::closeGlobalTransaction();
                            orientation = mainDpyInfo.orientation;
                        }
                    }

                    // If the virtual display isn't providing us with timestamps,
                    // use the current time. This isn't great -- we could get
                    // decoded data in clusters -- but we're not expecting
                    // to hit this anyway.
                    if (ptsUsec == 0) {
                        ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                    }

                    if (muxer == NULL) {
                        fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
                        // Flush the data immediately in case we're streaming.
                        // We don't want to do this if all we've written is
                        // the SPS/PPS data because mplayer gets confused.
                        if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
                            fflush(rawFp);
                        }
                    } else {
                        // The MediaMuxer docs are unclear, but it appears that we
                        // need to pass either the full set of BufferInfo flags, or
                        // (flags & BUFFER_FLAG_SYNCFRAME).
                        //
                        // If this blocks for too long we could drop frames. We may
                        // want to queue these up and do them on a different thread.
                        ATRACE_NAME("write sample");
                        assert(trackIdx != -1);
                        err = muxer->writeSampleData(buffers[bufIndex], trackIdx,
                                ptsUsec, flags);
                        if (err != NO_ERROR) {
                            fprintf(stderr,
                                    "Failed writing data to muxer (err=%d)\n", err);
                            return err;
                        }
                    }
                    debugNumFrames++;
                }
                err = encoder->releaseOutputBuffer(bufIndex);
                if (err != NO_ERROR) {
                    fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                            err);
                    return err;
                }
                if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                    // Not expecting EOS from SurfaceFlinger. Go with it.
                    ALOGI("Received end-of-stream");
                    gStopRequested = true;
                }
                break;
            case -EAGAIN:           // INFO_TRY_AGAIN_LATER
                ALOGV("Got -EAGAIN, looping");
                break;
            case INFO_FORMAT_CHANGED:    // INFO_OUTPUT_FORMAT_CHANGED
                {
                    // Format includes CSD, which we must provide to muxer.
                    ALOGV("Encoder format changed");
                    sp<AMessage> newFormat;
                    encoder->getOutputFormat(&newFormat);
                    if (muxer != NULL) {
                        trackIdx = muxer->addTrack(newFormat);
                        ALOGV("Starting muxer");
                        err = muxer->start();
                        if (err != NO_ERROR) {
                            fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
                            return err;
                        }
                    }
                }
                break;
            case INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED
                // Not expected for an encoder; handle it anyway.
                ALOGV("Encoder buffers changed");
                err = encoder->getOutputBuffers(&buffers);
                if (err != NO_ERROR) {
                    fprintf(stderr,
                            "Unable to get new output buffers (err=%d)\n", err);
                    return err;
                }
                break;
            case INVALID_OPERATION:
                ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
                return err;
            default:
                fprintf(stderr,
                        "Got weird result %d from dequeueOutputBuffer\n", err);
                return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", gStopRequested);
    if (gVerbose) {
        printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
                debugNumFrames, nanoseconds_to_seconds(
                        systemTime(CLOCK_MONOTONIC) - startWhenNsec));
    }
    return NO_ERROR;
}

/*
 * Raw H.264 byte stream output requested. Send the output to stdout
 * if desired. If the output is a tty, reconfigure it to avoid the
 * CRLF line termination that we see with "adb shell" commands.
 */
static FILE* prepareRawOutput(const char* fileName) {
    FILE* rawFp = NULL;

    if (strcmp(fileName, "-") == 0) {
        if (gVerbose) {
            fprintf(stderr, "ERROR: verbose output and '-' not compatible\n");
            return NULL;
        }
        rawFp = stdout;
    } else {
        rawFp = fopen(fileName, "w");
        if (rawFp == NULL) {
            fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
            return NULL;
        }
    }

    int fd = fileno(rawFp);
    if (isatty(fd)) {
        // best effort -- reconfigure tty for "raw"
        ALOGD("raw video output to tty (fd=%d)", fd);
        struct termios term;
        if (tcgetattr(fd, &term) == 0) {
            cfmakeraw(&term);
            if (tcsetattr(fd, TCSANOW, &term) == 0) {
                ALOGD("tty successfully configured for raw");
            }
        }
    }

    return rawFp;
}
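
/*
 * Illustrative use of the raw output path (the exact host-side command is up
 * to the caller): something like
 *
 *     adb shell screenrecord --output-format=h264 - > screen.h264
 *
 * captures a raw H.264 elementary stream on the host, which can then be fed
 * to any player or tool that understands raw H.264.
 */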

/*
 * Main "do work" start point.
 *
 * Configures codec, muxer, and virtual display, then starts moving bits
 * around.
 */
static status_t recordScreen(const char* fileName) {
    status_t err;

    // Configure signal handler.
    err = configureSignals();
    if (err != NO_ERROR) return err;

    // Start Binder thread pool. MediaCodec needs to be able to receive
    // messages from mediaserver.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    // Get main display parameters.
    sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
            ISurfaceComposer::eDisplayIdMain);
    DisplayInfo mainDpyInfo;
    err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display characteristics\n");
        return err;
    }
    if (gVerbose) {
        printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
                mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps,
                mainDpyInfo.orientation);
    }

    bool rotated = isDeviceRotated(mainDpyInfo.orientation);
    if (gVideoWidth == 0) {
        gVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
    }
    if (gVideoHeight == 0) {
        gVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
    }

    // Configure and start the encoder.
    sp<MediaCodec> encoder;
    sp<FrameOutput> frameOutput;
    sp<IGraphicBufferProducer> encoderInputSurface;
    if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
        err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);

        if (err != NO_ERROR && !gSizeSpecified) {
            // fallback is defined for landscape; swap if we're in portrait
            bool needSwap = gVideoWidth < gVideoHeight;
            uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
            uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
            if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
                ALOGV("Retrying with 720p");
                fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                        gVideoWidth, gVideoHeight, newWidth, newHeight);
                gVideoWidth = newWidth;
                gVideoHeight = newHeight;
                err = prepareEncoder(mainDpyInfo.fps, &encoder,
                        &encoderInputSurface);
            }
        }
        if (err != NO_ERROR) return err;

        // From here on, we must explicitly release() the encoder before it goes
        // out of scope, or we will get an assertion failure from stagefright
        // later on in a different thread.
    } else {
        // We're not using an encoder at all. The "encoder input surface" we
        // hand to SurfaceFlinger will just feed directly to us.
        frameOutput = new FrameOutput();
        err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight,
                &encoderInputSurface);
        if (err != NO_ERROR) {
            return err;
        }
    }

    // Draw the "info" page by rendering a frame with GLES and sending
    // it directly to the encoder.
    // TODO: consider displaying this as a regular layer to avoid b/11697754
    if (gWantInfoScreen) {
        Overlay::drawInfoPage(encoderInputSurface);
    }

    // Configure optional overlay.
    sp<IGraphicBufferProducer> bufferProducer;
    sp<Overlay> overlay;
    if (gWantFrameTime) {
        // Send virtual display frames to an external texture.
        overlay = new Overlay();
        err = overlay->start(encoderInputSurface, &bufferProducer);
        if (err != NO_ERROR) {
            if (encoder != NULL) encoder->release();
            return err;
        }
        if (gVerbose) {
            printf("Bugreport overlay created\n");
        }
    } else {
        // Use the encoder's input surface as the virtual display surface.
        bufferProducer = encoderInputSurface;
    }

    // Configure virtual display.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
    if (err != NO_ERROR) {
        if (encoder != NULL) encoder->release();
        return err;
    }

    sp<MediaMuxer> muxer = NULL;
    FILE* rawFp = NULL;
    switch (gOutputFormat) {
        case FORMAT_MP4: {
            // Configure muxer. We have to wait for the CSD blob from the encoder
            // before we can start it.
            int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR,
                    S_IRUSR | S_IWUSR);
            if (fd < 0) {
                fprintf(stderr, "ERROR: couldn't open file\n");
                abort();
            }
            muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
            close(fd);
            if (gRotate) {
                muxer->setOrientationHint(90);  // TODO: does this do anything?
            }
            break;
        }
        case FORMAT_H264:
        case FORMAT_FRAMES:
        case FORMAT_RAW_FRAMES: {
            rawFp = prepareRawOutput(fileName);
            if (rawFp == NULL) {
                if (encoder != NULL) encoder->release();
                return -1;
            }
            break;
        }
        default:
            fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
            abort();
    }

    if (gOutputFormat == FORMAT_FRAMES || gOutputFormat == FORMAT_RAW_FRAMES) {
        // TODO: if we want to make this a proper feature, we should output
        //       an outer header with version info. Right now we never change
        //       the frame size or format, so we could conceivably just send
        //       the current frame header once and then follow it with an
        //       unbroken stream of data.

        // Make the EGL context current again. This gets unhooked if we're
        // using "--bugreport" mode.
        // TODO: figure out if we can eliminate this
        frameOutput->prepareToCopy();

        while (!gStopRequested) {
            // Poll for frames, the same way we do for MediaCodec. We do
            // all of the work on the main thread.
            //
            // Ideally we'd sleep indefinitely and wake when the
            // stop was requested, but this will do for now. (It almost
            // works because wait() wakes when a signal hits, but we
            // need to handle the edge cases.)
            bool rawFrames = gOutputFormat == FORMAT_RAW_FRAMES;
            err = frameOutput->copyFrame(rawFp, 250000, rawFrames);
            if (err == ETIMEDOUT) {
                err = NO_ERROR;
            } else if (err != NO_ERROR) {
                ALOGE("Got error %d from copyFrame()", err);
                break;
            }
        }
    } else {
        // Main encoder loop.
        err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy,
                mainDpyInfo.orientation);
        if (err != NO_ERROR) {
            fprintf(stderr, "Encoder failed (err=%d)\n", err);
            // fall through to cleanup
        }

        if (gVerbose) {
            printf("Stopping encoder and muxer\n");
        }
    }

    // Shut everything down, starting with the producer side.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    if (overlay != NULL) overlay->stop();
    if (encoder != NULL) encoder->stop();
    if (muxer != NULL) {
        // If we don't stop muxer explicitly, i.e. let the destructor run,
        // it may hang (b/11050628).
        muxer->stop();
    } else if (rawFp != stdout) {
        fclose(rawFp);
    }
    if (encoder != NULL) encoder->release();

    return err;
}
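
/*
 * The child process forked below ends up running the equivalent of (the file
 * path shown is illustrative):
 *
 *     /system/bin/am broadcast \
 *         -a android.intent.action.MEDIA_SCANNER_SCAN_FILE \
 *         -d file:///sdcard/recording.mp4
 */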

/*
 * Sends a broadcast to the media scanner to tell it about the new video.
 *
 * This is optional, but nice to have.
 */
static status_t notifyMediaScanner(const char* fileName) {
    // need to do allocations before the fork()
    String8 fileUrl("file://");
    fileUrl.append(fileName);

    const char* kCommand = "/system/bin/am";
    const char* const argv[] = {
            kCommand,
            "broadcast",
            "-a",
            "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
            "-d",
            fileUrl.string(),
            NULL
    };
    if (gVerbose) {
        printf("Executing:");
        for (int i = 0; argv[i] != NULL; i++) {
            printf(" %s", argv[i]);
        }
        putchar('\n');
    }

    pid_t pid = fork();
    if (pid < 0) {
        int err = errno;
        ALOGW("fork() failed: %s", strerror(err));
        return -err;
    } else if (pid > 0) {
        // parent; wait for the child, mostly to make the verbose-mode output
        // look right, but also to check for and log failures
        int status;
        pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
        if (actualPid != pid) {
            ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
        } else if (status != 0) {
            ALOGW("'am broadcast' exited with status=%d", status);
        } else {
            ALOGV("'am broadcast' exited successfully");
        }
    } else {
        if (!gVerbose) {
            // non-verbose, suppress 'am' output
            ALOGV("closing stdout/stderr in child");
            int fd = open("/dev/null", O_WRONLY);
            if (fd >= 0) {
                dup2(fd, STDOUT_FILENO);
                dup2(fd, STDERR_FILENO);
                close(fd);
            }
        }
        execv(kCommand, const_cast<char* const*>(argv));
        ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
        exit(1);
    }
    return NO_ERROR;
}

/*
 * Parses a string of the form "1280x720".
 *
 * Returns true on success.
 */
static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
        uint32_t* pHeight) {
    long width, height;
    char* end;

    // Must specify base 10, or "0x0" gets parsed differently.
    width = strtol(widthHeight, &end, 10);
    if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
        // invalid chars in width, or missing 'x', or missing height
        return false;
    }
    height = strtol(end + 1, &end, 10);
    if (*end != '\0') {
        // invalid chars in height
        return false;
    }

    *pWidth = width;
    *pHeight = height;
    return true;
}

/*
 * Accepts a string with a bare number ("4000000") or with a single-character
 * unit ("4m").
 *
 * Returns an error if parsing fails.
 */
static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
    long value;
    char* endptr;

    value = strtol(str, &endptr, 10);
    if (*endptr == '\0') {
        // bare number
        *pValue = value;
        return NO_ERROR;
    } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
        *pValue = value * 1000000;  // check for overflow?
        return NO_ERROR;
    } else {
        fprintf(stderr, "Unrecognized value: %s\n", str);
        return UNKNOWN_ERROR;
    }
}
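
/*
 * Examples of the parsers above: parseWidthHeight("1280x720", ...) yields
 * width=1280, height=720; parseValueWithUnit("4M", ...) and
 * parseValueWithUnit("4000000", ...) both yield 4000000. As the comment in
 * parseValueWithUnit notes, the "M" path does not guard against overflow.
 */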

/*
 * Dumps usage on stderr.
 */
static void usage() {
    fprintf(stderr,
        "Usage: screenrecord [options] <filename>\n"
        "\n"
        "Android screenrecord v%d.%d. Records the device's display to a .mp4 file.\n"
        "\n"
        "Options:\n"
        "--size WIDTHxHEIGHT\n"
        "    Set the video size, e.g. \"1280x720\". Default is the device's main\n"
        "    display resolution (if supported), 1280x720 if not. For best results,\n"
        "    use a size supported by the AVC encoder.\n"
        "--bit-rate RATE\n"
        "    Set the video bit rate, in bits per second. Value may be specified as\n"
        "    bits or megabits, e.g. '4000000' is equivalent to '4M'. Default %dMbps.\n"
        "--bugreport\n"
        "    Add additional information, such as a timestamp overlay, that is helpful\n"
        "    in videos captured to illustrate bugs.\n"
        "--time-limit TIME\n"
        "    Set the maximum recording time, in seconds. Default / maximum is %d.\n"
        "--verbose\n"
        "    Display interesting information on stdout.\n"
        "--help\n"
        "    Show this message.\n"
        "\n"
        "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
        "\n",
        kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
        );
}
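
/*
 * Typical invocation (illustrative):
 *
 *     adb shell screenrecord --verbose --bit-rate 8M --time-limit 30 \
 *         /sdcard/demo.mp4
 *
 * followed by "adb pull /sdcard/demo.mp4" to retrieve the recording.
 */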

/*
 * Parses args and kicks things off.
 */
int main(int argc, char* const argv[]) {
    static const struct option longOptions[] = {
        { "help",               no_argument,        NULL, 'h' },
        { "verbose",            no_argument,        NULL, 'v' },
        { "size",               required_argument,  NULL, 's' },
        { "bit-rate",           required_argument,  NULL, 'b' },
        { "time-limit",         required_argument,  NULL, 't' },
        { "bugreport",          no_argument,        NULL, 'u' },
        // "unofficial" options
        { "show-device-info",   no_argument,        NULL, 'i' },
        { "show-frame-time",    no_argument,        NULL, 'f' },
        { "rotate",             no_argument,        NULL, 'r' },
        { "output-format",      required_argument,  NULL, 'o' },
        { NULL,                 0,                  NULL, 0 }
    };

    while (true) {
        int optionIndex = 0;
        int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
        if (ic == -1) {
            break;
        }

        switch (ic) {
            case 'h':
                usage();
                return 0;
            case 'v':
                gVerbose = true;
                break;
            case 's':
                if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
                    fprintf(stderr, "Invalid size '%s', must be width x height\n",
                            optarg);
                    return 2;
                }
                if (gVideoWidth == 0 || gVideoHeight == 0) {
                    fprintf(stderr,
                            "Invalid size %ux%u, width and height may not be zero\n",
                            gVideoWidth, gVideoHeight);
                    return 2;
                }
                gSizeSpecified = true;
                break;
            case 'b':
                if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
                    return 2;
                }
                if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
                    fprintf(stderr,
                            "Bit rate %dbps outside acceptable range [%d,%d]\n",
                            gBitRate, kMinBitRate, kMaxBitRate);
                    return 2;
                }
                break;
            case 't':
                gTimeLimitSec = atoi(optarg);
                if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
                    fprintf(stderr,
                            "Time limit %ds outside acceptable range [1,%d]\n",
                            gTimeLimitSec, kMaxTimeLimitSec);
                    return 2;
                }
                break;
            case 'u':
                gWantInfoScreen = true;
                gWantFrameTime = true;
                break;
            case 'i':
                gWantInfoScreen = true;
                break;
            case 'f':
                gWantFrameTime = true;
                break;
            case 'r':
                // experimental feature
                gRotate = true;
                break;
            case 'o':
                if (strcmp(optarg, "mp4") == 0) {
                    gOutputFormat = FORMAT_MP4;
                } else if (strcmp(optarg, "h264") == 0) {
                    gOutputFormat = FORMAT_H264;
                } else if (strcmp(optarg, "frames") == 0) {
                    gOutputFormat = FORMAT_FRAMES;
                } else if (strcmp(optarg, "raw-frames") == 0) {
                    gOutputFormat = FORMAT_RAW_FRAMES;
                } else {
                    fprintf(stderr, "Unknown format '%s'\n", optarg);
                    return 2;
                }
                break;
            default:
                if (ic != '?') {
                    fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
                }
                return 2;
        }
    }

    if (optind != argc - 1) {
        fprintf(stderr, "Must specify output file (see --help).\n");
        return 2;
    }

    const char* fileName = argv[optind];
    if (gOutputFormat == FORMAT_MP4) {
        // MediaMuxer tries to create the file in the constructor, but we don't
        // learn about the failure until muxer.start(), which returns a generic
        // error code without logging anything. We attempt to create the file
        // now for better diagnostics.
        int fd = open(fileName, O_CREAT | O_RDWR, 0644);
        if (fd < 0) {
            fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
            return 1;
        }
        close(fd);
    }

    status_t err = recordScreen(fileName);
    if (err == NO_ERROR) {
        // Try to notify the media scanner. Not fatal if this fails.
        notifyMediaScanner(fileName);
    }
    ALOGD(err == NO_ERROR ? "success" : "failed");
    return (int) err;
}