/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <assert.h>
#include <ctype.h>
#include <fcntl.h>
#include <inttypes.h>
#include <getopt.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/wait.h>

#include <termios.h>
#include <unistd.h>

#define LOG_TAG "ScreenRecord"
#define ATRACE_TAG ATRACE_TAG_GRAPHICS
//#define LOG_NDEBUG 0
#include <utils/Log.h>

#include <binder/IPCThreadState.h>
#include <utils/Errors.h>
#include <utils/Timers.h>
#include <utils/Trace.h>

#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/ISurfaceComposer.h>
#include <ui/DisplayInfo.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaMuxer.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/ICrypto.h>
#include <media/MediaCodecBuffer.h>

#include "screenrecord.h"
#include "Overlay.h"
#include "FrameOutput.h"

using namespace android;

static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
static const uint32_t kFallbackWidth = 1280;        // 720p
static const uint32_t kFallbackHeight = 720;
static const char* kMimeTypeAvc = "video/avc";

// Command-line parameters.
static bool gVerbose = false;           // chatty on stdout
static bool gRotate = false;            // rotate 90 degrees
static bool gMonotonicTime = false;     // use system monotonic time for timestamps
static bool gPersistentSurface = false; // use persistent surface
static enum {
    FORMAT_MP4, FORMAT_H264, FORMAT_FRAMES, FORMAT_RAW_FRAMES
} gOutputFormat = FORMAT_MP4;           // data format for output
static AString gCodecName = "";         // codec name override
static bool gSizeSpecified = false;     // was size explicitly requested?
static bool gWantInfoScreen = false;    // do we want initial info screen?
static bool gWantFrameTime = false;     // do we want times on each frame?
static uint32_t gVideoWidth = 0;        // default width+height
static uint32_t gVideoHeight = 0;
static uint32_t gBitRate = 20000000;     // 20Mbps
static uint32_t gTimeLimitSec = kMaxTimeLimitSec;

// Set by signal handler to stop recording.
static volatile bool gStopRequested = false;

// Previous signal handler state, restored after first hit.
static struct sigaction gOrigSigactionINT;
static struct sigaction gOrigSigactionHUP;


/*
 * Catch keyboard interrupt signals.  On receipt, the "stop requested"
 * flag is raised, and the original handler is restored (so that, if
 * we get stuck finishing, a second Ctrl-C will kill the process).
 */
static void signalCatcher(int signum)
{
    gStopRequested = true;
    switch (signum) {
    case SIGINT:
    case SIGHUP:
        sigaction(SIGINT, &gOrigSigactionINT, NULL);
        sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
        break;
    default:
        abort();
        break;
    }
}

/*
 * Configures signal handlers.  The previous handlers are saved.
 *
 * If the command is run from an interactive adb shell, we get SIGINT
 * when Ctrl-C is hit.  If we're run from the host, the local adb process
 * gets the signal, and we get a SIGHUP when the terminal disconnects.
 */
static status_t configureSignals() {
    struct sigaction act;
    memset(&act, 0, sizeof(act));
    act.sa_handler = signalCatcher;
    if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
        status_t err = -errno;
        fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
                strerror(errno));
        return err;
    }
    if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
        status_t err = -errno;
        fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
                strerror(errno));
        return err;
    }
    signal(SIGPIPE, SIG_IGN);
    return NO_ERROR;
}

/*
 * Returns "true" if the device is rotated 90 or 270 degrees.
 */
static bool isDeviceRotated(int orientation) {
    return orientation != DISPLAY_ORIENTATION_0 &&
            orientation != DISPLAY_ORIENTATION_180;
}

/*
 * Configures and starts the MediaCodec encoder.  Obtains an input surface
 * from the codec.
 */
static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
        sp<IGraphicBufferProducer>* pBufferProducer) {
    status_t err;

    if (gVerbose) {
        printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
                gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
        fflush(stdout);
    }

    sp<AMessage> format = new AMessage;
    format->setInt32("width", gVideoWidth);
    format->setInt32("height", gVideoHeight);
    format->setString("mime", kMimeTypeAvc);
    format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
    format->setInt32("bitrate", gBitRate);
    format->setFloat("frame-rate", displayFps);
    format->setInt32("i-frame-interval", 10);

    sp<ALooper> looper = new ALooper;
    looper->setName("screenrecord_looper");
    looper->start();
    ALOGV("Creating codec");
    sp<MediaCodec> codec;
    if (gCodecName.empty()) {
        codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
        if (codec == NULL) {
            fprintf(stderr, "ERROR: unable to create %s codec instance\n",
                    kMimeTypeAvc);
            return UNKNOWN_ERROR;
        }
    } else {
        codec = MediaCodec::CreateByComponentName(looper, gCodecName);
        if (codec == NULL) {
            fprintf(stderr, "ERROR: unable to create %s codec instance\n",
                    gCodecName.c_str());
            return UNKNOWN_ERROR;
        }
    }

    err = codec->configure(format, NULL, NULL,
            MediaCodec::CONFIGURE_FLAG_ENCODE);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
                kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
        codec->release();
        return err;
    }

    ALOGV("Creating encoder input surface");
    sp<IGraphicBufferProducer> bufferProducer;
    if (gPersistentSurface) {
        sp<PersistentSurface> surface = MediaCodec::CreatePersistentInputSurface();
        bufferProducer = surface->getBufferProducer();
        err = codec->setInputSurface(surface);
    } else {
        err = codec->createInputSurface(&bufferProducer);
    }
    if (err != NO_ERROR) {
        fprintf(stderr,
            "ERROR: unable to %s encoder input surface (err=%d)\n",
            gPersistentSurface ? "set" : "create",
            err);
        codec->release();
        return err;
    }

    ALOGV("Starting codec");
    err = codec->start();
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
        codec->release();
        return err;
    }

    ALOGV("Codec prepared");
    *pCodec = codec;
    *pBufferProducer = bufferProducer;
    return 0;
}
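
// Note (illustrative): with --codec-name the encoder is selected by component
// name instead of by MIME type -- e.g. a software component such as
// "OMX.google.h264.encoder", if the device provides one -- while the
// CreateByType() path above picks the platform's default AVC encoder.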

/*
 * Sets the display projection, based on the display dimensions, video size,
 * and device orientation.
 */
static status_t setDisplayProjection(
        SurfaceComposerClient::Transaction& t,
        const sp<IBinder>& dpy,
        const DisplayInfo& mainDpyInfo) {

    // Set the region of the layer stack we're interested in, which in our
    // case is "all of it".  If the app is rotated (so that the width of the
    // app is based on the height of the display), reverse width/height.
    bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
    uint32_t sourceWidth, sourceHeight;
    if (!deviceRotated) {
        sourceWidth = mainDpyInfo.w;
        sourceHeight = mainDpyInfo.h;
    } else {
        ALOGV("using rotated width/height");
        sourceHeight = mainDpyInfo.w;
        sourceWidth = mainDpyInfo.h;
    }
    Rect layerStackRect(sourceWidth, sourceHeight);

    // We need to preserve the aspect ratio of the display.
    float displayAspect = (float) sourceHeight / (float) sourceWidth;


    // Set the way we map the output onto the display surface (which will
    // be e.g. 1280x720 for a 720p video).  The rect is interpreted
    // post-rotation, so if the display is rotated 90 degrees we need to
    // "pre-rotate" it by flipping width/height, so that the orientation
    // adjustment changes it back.
    //
    // We might want to encode a portrait display as landscape to use more
    // of the screen real estate.  (If players respect a 90-degree rotation
    // hint, we can essentially get a 720x1280 video instead of 1280x720.)
    // In that case, we swap the configured video width/height and then
    // supply a rotation value to the display projection.
    uint32_t videoWidth, videoHeight;
    uint32_t outWidth, outHeight;
    if (!gRotate) {
        videoWidth = gVideoWidth;
        videoHeight = gVideoHeight;
    } else {
        videoWidth = gVideoHeight;
        videoHeight = gVideoWidth;
    }
    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
        // limited by narrow width; reduce height
        outWidth = videoWidth;
        outHeight = (uint32_t)(videoWidth * displayAspect);
    } else {
        // limited by short height; restrict width
        outHeight = videoHeight;
        outWidth = (uint32_t)(videoHeight / displayAspect);
    }
    uint32_t offX, offY;
    offX = (videoWidth - outWidth) / 2;
    offY = (videoHeight - outHeight) / 2;
    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
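
    // Worked example (illustrative numbers): a 1080x1920 portrait panel captured
    // into a 1280x720 video with no --rotate gives displayAspect = 1920/1080,
    // about 1.78.  Since 720 < 1280 * 1.78 we keep the full 720-pixel height,
    // so outWidth = 720 / 1.78 = 405 and offX = (1280 - 405) / 2 = 437: the
    // content is pillarboxed into a 405x720 rect centered in the frame.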

    if (gVerbose) {
        if (gRotate) {
            printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
                    outHeight, outWidth, offY, offX);
            fflush(stdout);
        } else {
            printf("Content area is %ux%u at offset x=%d y=%d\n",
                    outWidth, outHeight, offX, offY);
            fflush(stdout);
        }
    }

    t.setDisplayProjection(dpy,
            gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
            layerStackRect, displayRect);
    return NO_ERROR;
}

/*
 * Configures the virtual display.  When this completes, virtual display
 * frames will start arriving from the buffer producer.
 */
static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
        const sp<IGraphicBufferProducer>& bufferProducer,
        sp<IBinder>* pDisplayHandle) {
    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
            String8("ScreenRecorder"), false /*secure*/);

    SurfaceComposerClient::Transaction t;
    t.setDisplaySurface(dpy, bufferProducer);
    setDisplayProjection(t, dpy, mainDpyInfo);
    t.setDisplayLayerStack(dpy, 0);    // default stack
    t.apply();

    *pDisplayHandle = dpy;

    return NO_ERROR;
}
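
// Descriptive note: once this transaction applies, SurfaceFlinger composites
// layer stack 0 into the virtual display, frames arrive at whichever
// IGraphicBufferProducer we handed it (the encoder's input surface, or the
// Overlay when --bugreport routes frames through it), and the encoded output
// is drained in runEncoder() below.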

/*
 * Runs the MediaCodec encoder, sending the output to the MediaMuxer.  The
 * input frames are coming from the virtual display as fast as SurfaceFlinger
 * wants to send them.
 *
 * Exactly one of muxer or rawFp must be non-null.
 *
 * The muxer must *not* have been started before calling.
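 *
 * Shape of the loop below: dequeueOutputBuffer() either hands us an encoded
 * buffer (NO_ERROR), times out (-EAGAIN), reports the format change whose CSD
 * the muxer needs before it can be started (INFO_FORMAT_CHANGED), reports new
 * output buffers (INFO_OUTPUT_BUFFERS_CHANGED), or fails, which we treat as
 * fatal.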
 */
static status_t runEncoder(const sp<MediaCodec>& encoder,
        const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
        const sp<IBinder>& virtualDpy, uint8_t orientation) {
    static int kTimeout = 250000;   // 250ms in microseconds; be responsive on signal
    status_t err;
    ssize_t trackIdx = -1;
    uint32_t debugNumFrames = 0;
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
    DisplayInfo mainDpyInfo;

    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));

    Vector<sp<MediaCodecBuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    // Run until we're signaled.
    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            if (gVerbose) {
                printf("Time limit reached\n");
                fflush(stdout);
            }
            break;
        }

        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
        switch (err) {
        case NO_ERROR:
            // got a buffer
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                ALOGV("Got codec config buffer (%zu bytes)", size);
                if (muxer != NULL) {
                    // ignore this -- we passed the CSD into MediaMuxer when
                    // we got the format change notification
                    size = 0;
                }
            }
            if (size != 0) {
                ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
                        bufIndex, size, ptsUsec);

                { // scope
                    ATRACE_NAME("orientation");
                    // Check orientation, update if it has changed.
                    //
                    // Polling for changes is inefficient and wrong, but the
                    // useful stuff is hard to get at without a Dalvik VM.
                    err = SurfaceComposerClient::getDisplayInfo(mainDpy,
                            &mainDpyInfo);
                    if (err != NO_ERROR) {
                        ALOGW("getDisplayInfo(main) failed: %d", err);
                    } else if (orientation != mainDpyInfo.orientation) {
                        ALOGD("orientation changed, now %d", mainDpyInfo.orientation);
                        SurfaceComposerClient::Transaction t;
                        setDisplayProjection(t, virtualDpy, mainDpyInfo);
                        t.apply();
                        orientation = mainDpyInfo.orientation;
                    }
                }

                // If the virtual display isn't providing us with timestamps,
                // use the current time.  This isn't great -- we could get
                // encoded data in clusters -- but we're not expecting
                // to hit this anyway.
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }

                if (muxer == NULL) {
                    fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
                    // Flush the data immediately in case we're streaming.
                    // We don't want to do this if all we've written is
                    // the SPS/PPS data because mplayer gets confused.
                    if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
                        fflush(rawFp);
                    }
                } else {
                    // The MediaMuxer docs are unclear, but it appears that we
                    // need to pass either the full set of BufferInfo flags, or
                    // (flags & BUFFER_FLAG_SYNCFRAME).
                    //
                    // If this blocks for too long we could drop frames.  We may
                    // want to queue these up and do them on a different thread.
                    ATRACE_NAME("write sample");
                    assert(trackIdx != -1);
                    // TODO
                    sp<ABuffer> buffer = new ABuffer(
                            buffers[bufIndex]->data(), buffers[bufIndex]->size());
                    err = muxer->writeSampleData(buffer, trackIdx,
                            ptsUsec, flags);
                    if (err != NO_ERROR) {
                        fprintf(stderr,
                            "Failed writing data to muxer (err=%d)\n", err);
                        return err;
                    }
                }
                debugNumFrames++;
            }
            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                        err);
                return err;
            }
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                // Not expecting EOS from SurfaceFlinger.  Go with it.
                ALOGI("Received end-of-stream");
                gStopRequested = true;
            }
            break;
        case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
            ALOGV("Got -EAGAIN, looping");
            break;
        case INFO_FORMAT_CHANGED:           // INFO_OUTPUT_FORMAT_CHANGED
            {
                // Format includes CSD, which we must provide to muxer.
                ALOGV("Encoder format changed");
                sp<AMessage> newFormat;
                encoder->getOutputFormat(&newFormat);
                if (muxer != NULL) {
                    trackIdx = muxer->addTrack(newFormat);
                    ALOGV("Starting muxer");
                    err = muxer->start();
                    if (err != NO_ERROR) {
                        fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
                        return err;
                    }
                }
            }
            break;
        case INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED
            // Not expected for an encoder; handle it anyway.
            ALOGV("Encoder buffers changed");
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                fprintf(stderr,
                        "Unable to get new output buffers (err=%d)\n", err);
                return err;
            }
            break;
        case INVALID_OPERATION:
            ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
            return err;
        default:
            fprintf(stderr,
                    "Got weird result %d from dequeueOutputBuffer\n", err);
            return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", gStopRequested);
    if (gVerbose) {
        printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
                debugNumFrames, nanoseconds_to_seconds(
                        systemTime(CLOCK_MONOTONIC) - startWhenNsec));
        fflush(stdout);
    }
    return NO_ERROR;
}

/*
 * Raw H.264 byte stream output requested.  Send the output to stdout
 * if desired.  If the output is a tty, reconfigure it to avoid the
 * CRLF line termination that we see with "adb shell" commands.
 */
static FILE* prepareRawOutput(const char* fileName) {
    FILE* rawFp = NULL;

    if (strcmp(fileName, "-") == 0) {
        if (gVerbose) {
            fprintf(stderr, "ERROR: verbose output and '-' not compatible\n");
            return NULL;
        }
        rawFp = stdout;
    } else {
        rawFp = fopen(fileName, "w");
        if (rawFp == NULL) {
            fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
            return NULL;
        }
    }

    int fd = fileno(rawFp);
    if (isatty(fd)) {
        // best effort -- reconfigure tty for "raw"
        ALOGD("raw video output to tty (fd=%d)", fd);
        struct termios term;
        if (tcgetattr(fd, &term) == 0) {
            cfmakeraw(&term);
            if (tcsetattr(fd, TCSANOW, &term) == 0) {
                ALOGD("tty successfully configured for raw");
            }
        }
    }

    return rawFp;
}
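
// Typical raw-stream use (illustrative): pass "-" as the filename together with
// "--output-format h264" to get the H.264 bytestream on stdout, e.g. for piping
// to a host-side player; any other filename is simply fopen()'d in "w" mode.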

/*
 * Main "do work" start point.
 *
 * Configures codec, muxer, and virtual display, then starts moving bits
 * around.
 */
static status_t recordScreen(const char* fileName) {
    status_t err;

    // Configure signal handler.
    err = configureSignals();
    if (err != NO_ERROR) return err;

    // Start Binder thread pool.  MediaCodec needs to be able to receive
    // messages from mediaserver.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    // Get main display parameters.
    sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
            ISurfaceComposer::eDisplayIdMain);
    DisplayInfo mainDpyInfo;
    err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display characteristics\n");
        return err;
    }
    if (gVerbose) {
        printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
                mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps,
                mainDpyInfo.orientation);
        fflush(stdout);
    }

    bool rotated = isDeviceRotated(mainDpyInfo.orientation);
    if (gVideoWidth == 0) {
        gVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
    }
    if (gVideoHeight == 0) {
        gVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
    }

    // Configure and start the encoder.
    sp<MediaCodec> encoder;
    sp<FrameOutput> frameOutput;
    sp<IGraphicBufferProducer> encoderInputSurface;
    if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
        err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);

        if (err != NO_ERROR && !gSizeSpecified) {
            // fallback is defined for landscape; swap if we're in portrait
            bool needSwap = gVideoWidth < gVideoHeight;
            uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
            uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
            if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
                ALOGV("Retrying with 720p");
                fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                        gVideoWidth, gVideoHeight, newWidth, newHeight);
                gVideoWidth = newWidth;
                gVideoHeight = newHeight;
                err = prepareEncoder(mainDpyInfo.fps, &encoder,
                        &encoderInputSurface);
            }
        }
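        // For instance (illustrative): if the codec rejects a native 1440x2560,
        // the retry above drops to 720x1280 -- the fallback swapped to portrait
        // to match the requested video shape.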
        if (err != NO_ERROR) return err;

        // From here on, we must explicitly release() the encoder before it goes
        // out of scope, or we will get an assertion failure from stagefright
        // later on in a different thread.
    } else {
        // We're not using an encoder at all.  The "encoder input surface" we hand to
        // SurfaceFlinger will just feed directly to us.
        frameOutput = new FrameOutput();
        err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
        if (err != NO_ERROR) {
            return err;
        }
    }

    // Draw the "info" page by rendering a frame with GLES and sending
    // it directly to the encoder.
    // TODO: consider displaying this as a regular layer to avoid b/11697754
    if (gWantInfoScreen) {
        Overlay::drawInfoPage(encoderInputSurface);
    }

    // Configure optional overlay.
    sp<IGraphicBufferProducer> bufferProducer;
    sp<Overlay> overlay;
    if (gWantFrameTime) {
        // Send virtual display frames to an external texture.
        overlay = new Overlay(gMonotonicTime);
        err = overlay->start(encoderInputSurface, &bufferProducer);
        if (err != NO_ERROR) {
            if (encoder != NULL) encoder->release();
            return err;
        }
        if (gVerbose) {
            printf("Bugreport overlay created\n");
            fflush(stdout);
        }
    } else {
        // Use the encoder's input surface as the virtual display surface.
        bufferProducer = encoderInputSurface;
    }

    // Configure virtual display.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
    if (err != NO_ERROR) {
        if (encoder != NULL) encoder->release();
        return err;
    }

    sp<MediaMuxer> muxer = NULL;
    FILE* rawFp = NULL;
    switch (gOutputFormat) {
        case FORMAT_MP4: {
            // Configure muxer.  We have to wait for the CSD blob from the encoder
            // before we can start it.
            err = unlink(fileName);
            if (err != 0 && errno != ENOENT) {
                fprintf(stderr, "ERROR: couldn't remove existing file\n");
                abort();
            }
            int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
            if (fd < 0) {
                fprintf(stderr, "ERROR: couldn't open file\n");
                abort();
            }
            muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
            close(fd);
            if (gRotate) {
                muxer->setOrientationHint(90);  // TODO: does this do anything?
            }
            break;
        }
        case FORMAT_H264:
        case FORMAT_FRAMES:
        case FORMAT_RAW_FRAMES: {
            rawFp = prepareRawOutput(fileName);
            if (rawFp == NULL) {
                if (encoder != NULL) encoder->release();
                return -1;
            }
            break;
        }
        default:
            fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
            abort();
    }

    if (gOutputFormat == FORMAT_FRAMES || gOutputFormat == FORMAT_RAW_FRAMES) {
        // TODO: if we want to make this a proper feature, we should output
        //       an outer header with version info.  Right now we never change
        //       the frame size or format, so we could conceivably just send
        //       the current frame header once and then follow it with an
        //       unbroken stream of data.

        // Make the EGL context current again.  This gets unhooked if we're
        // using "--bugreport" mode.
        // TODO: figure out if we can eliminate this
        frameOutput->prepareToCopy();

        while (!gStopRequested) {
            // Poll for frames, the same way we do for MediaCodec.  We do
            // all of the work on the main thread.
            //
            // Ideally we'd sleep indefinitely and wake when the
            // stop was requested, but this will do for now.  (It almost
            // works because wait() wakes when a signal hits, but we
            // need to handle the edge cases.)
            bool rawFrames = gOutputFormat == FORMAT_RAW_FRAMES;
            err = frameOutput->copyFrame(rawFp, 250000, rawFrames);
            if (err == ETIMEDOUT) {
                err = NO_ERROR;
            } else if (err != NO_ERROR) {
                ALOGE("Got error %d from copyFrame()", err);
                break;
            }
        }
    } else {
        // Main encoder loop.
        err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy,
                mainDpyInfo.orientation);
        if (err != NO_ERROR) {
            fprintf(stderr, "Encoder failed (err=%d)\n", err);
            // fall through to cleanup
        }

        if (gVerbose) {
            printf("Stopping encoder and muxer\n");
            fflush(stdout);
        }
    }

    // Shut everything down, starting with the producer side.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    if (overlay != NULL) overlay->stop();
    if (encoder != NULL) encoder->stop();
    if (muxer != NULL) {
        // If we don't stop muxer explicitly, i.e. let the destructor run,
        // it may hang (b/11050628).
        err = muxer->stop();
    } else if (rawFp != stdout) {
        fclose(rawFp);
    }
    if (encoder != NULL) encoder->release();

    return err;
}

/*
 * Sends a broadcast to the media scanner to tell it about the new video.
 *
 * This is optional, but nice to have.
 */
static status_t notifyMediaScanner(const char* fileName) {
    // need to do allocations before the fork()
    String8 fileUrl("file://");
    fileUrl.append(fileName);

    const char* kCommand = "/system/bin/am";
    const char* const argv[] = {
            kCommand,
            "broadcast",
            "-a",
            "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
            "-d",
            fileUrl.string(),
            NULL
    };
    if (gVerbose) {
        printf("Executing:");
        for (int i = 0; argv[i] != NULL; i++) {
            printf(" %s", argv[i]);
        }
        putchar('\n');
        fflush(stdout);
    }

    pid_t pid = fork();
    if (pid < 0) {
        int err = errno;
        ALOGW("fork() failed: %s", strerror(err));
        return -err;
    } else if (pid > 0) {
        // parent; wait for the child, mostly to make the verbose-mode output
        // look right, but also to check for and log failures
        int status;
        pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
        if (actualPid != pid) {
            ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
        } else if (status != 0) {
            ALOGW("'am broadcast' exited with status=%d", status);
        } else {
            ALOGV("'am broadcast' exited successfully");
        }
    } else {
        if (!gVerbose) {
            // non-verbose, suppress 'am' output
            ALOGV("closing stdout/stderr in child");
            int fd = open("/dev/null", O_WRONLY);
            if (fd >= 0) {
                dup2(fd, STDOUT_FILENO);
                dup2(fd, STDERR_FILENO);
                close(fd);
            }
        }
        execv(kCommand, const_cast<char* const*>(argv));
        ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
        exit(1);
    }
    return NO_ERROR;
}
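
// The child ends up exec'ing something like (illustrative file name):
//   /system/bin/am broadcast -a android.intent.action.MEDIA_SCANNER_SCAN_FILE \
//       -d file:///sdcard/demo.mp4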

/*
 * Parses a string of the form "1280x720".
 *
 * Returns true on success.
 */
static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
        uint32_t* pHeight) {
    long width, height;
    char* end;

    // Must specify base 10, or "0x0" gets parsed differently.
    width = strtol(widthHeight, &end, 10);
    if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
        // invalid chars in width, or missing 'x', or missing height
        return false;
    }
    height = strtol(end + 1, &end, 10);
    if (*end != '\0') {
        // invalid chars in height
        return false;
    }

    *pWidth = width;
    *pHeight = height;
    return true;
}
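
// Examples (illustrative): "1280x720" yields width=1280, height=720; "1280X720"
// and "1280" are rejected because the lowercase 'x' separator and an explicit
// height are required.  "0x0" parses here but is rejected later in main(),
// which disallows zero dimensions.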

/*
 * Accepts a string with a bare number ("4000000") or with a single-character
 * unit ("4m").
 *
 * Returns an error if parsing fails.
 */
static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
    long value;
    char* endptr;

    value = strtol(str, &endptr, 10);
    if (*endptr == '\0') {
        // bare number
        *pValue = value;
        return NO_ERROR;
    } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
        *pValue = value * 1000000;  // check for overflow?
        return NO_ERROR;
    } else {
        fprintf(stderr, "Unrecognized value: %s\n", str);
        return UNKNOWN_ERROR;
    }
}
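
// Examples (illustrative): "4000000" and "4M" both yield 4000000, and "4m"
// works too since the suffix check is case-insensitive, but "4k" is rejected.
// As the comment above notes, a large suffixed value can overflow before the
// uint32_t assignment.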

/*
 * Dumps usage on stderr.
 */
static void usage() {
    fprintf(stderr,
        "Usage: screenrecord [options] <filename>\n"
        "\n"
        "Android screenrecord v%d.%d.  Records the device's display to a .mp4 file.\n"
        "\n"
        "Options:\n"
        "--size WIDTHxHEIGHT\n"
        "    Set the video size, e.g. \"1280x720\".  Default is the device's main\n"
        "    display resolution (if supported), 1280x720 if not.  For best results,\n"
        "    use a size supported by the AVC encoder.\n"
        "--bit-rate RATE\n"
        "    Set the video bit rate, in bits per second.  Value may be specified as\n"
        "    bits or megabits, e.g. '4000000' is equivalent to '4M'.  Default %dMbps.\n"
        "--bugreport\n"
        "    Add additional information, such as a timestamp overlay, that is helpful\n"
        "    in videos captured to illustrate bugs.\n"
        "--time-limit TIME\n"
        "    Set the maximum recording time, in seconds.  Default / maximum is %d.\n"
        "--verbose\n"
        "    Display interesting information on stdout.\n"
        "--help\n"
        "    Show this message.\n"
        "\n"
        "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
        "\n",
        kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
        );
}
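
// Example invocation (illustrative path):
//   screenrecord --verbose --bit-rate 8M --time-limit 30 /sdcard/demo.mp4
// records up to 30 seconds at 8Mbps and, on success, broadcasts a media-scanner
// intent for the new file (see notifyMediaScanner() above and main() below).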

/*
 * Parses args and kicks things off.
 */
int main(int argc, char* const argv[]) {
    static const struct option longOptions[] = {
        { "help",               no_argument,        NULL, 'h' },
        { "verbose",            no_argument,        NULL, 'v' },
        { "size",               required_argument,  NULL, 's' },
        { "bit-rate",           required_argument,  NULL, 'b' },
        { "time-limit",         required_argument,  NULL, 't' },
        { "bugreport",          no_argument,        NULL, 'u' },
        // "unofficial" options
        { "show-device-info",   no_argument,        NULL, 'i' },
        { "show-frame-time",    no_argument,        NULL, 'f' },
        { "rotate",             no_argument,        NULL, 'r' },
        { "output-format",      required_argument,  NULL, 'o' },
        { "codec-name",         required_argument,  NULL, 'N' },
        { "monotonic-time",     no_argument,        NULL, 'm' },
        { "persistent-surface", no_argument,        NULL, 'p' },
        { NULL,                 0,                  NULL, 0 }
    };

    while (true) {
        int optionIndex = 0;
        int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
        if (ic == -1) {
            break;
        }

        switch (ic) {
        case 'h':
            usage();
            return 0;
        case 'v':
            gVerbose = true;
            break;
        case 's':
            if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
                fprintf(stderr, "Invalid size '%s', must be width x height\n",
                        optarg);
                return 2;
            }
            if (gVideoWidth == 0 || gVideoHeight == 0) {
                fprintf(stderr,
                    "Invalid size %ux%u, width and height may not be zero\n",
                    gVideoWidth, gVideoHeight);
                return 2;
            }
            gSizeSpecified = true;
            break;
        case 'b':
            if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
                return 2;
            }
            if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
                fprintf(stderr,
                        "Bit rate %dbps outside acceptable range [%d,%d]\n",
                        gBitRate, kMinBitRate, kMaxBitRate);
                return 2;
            }
            break;
        case 't':
            gTimeLimitSec = atoi(optarg);
            if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
                fprintf(stderr,
                        "Time limit %ds outside acceptable range [1,%d]\n",
                        gTimeLimitSec, kMaxTimeLimitSec);
                return 2;
            }
            break;
        case 'u':
            gWantInfoScreen = true;
            gWantFrameTime = true;
            break;
        case 'i':
            gWantInfoScreen = true;
            break;
        case 'f':
            gWantFrameTime = true;
            break;
        case 'r':
            // experimental feature
            gRotate = true;
            break;
        case 'o':
            if (strcmp(optarg, "mp4") == 0) {
                gOutputFormat = FORMAT_MP4;
            } else if (strcmp(optarg, "h264") == 0) {
                gOutputFormat = FORMAT_H264;
            } else if (strcmp(optarg, "frames") == 0) {
                gOutputFormat = FORMAT_FRAMES;
            } else if (strcmp(optarg, "raw-frames") == 0) {
                gOutputFormat = FORMAT_RAW_FRAMES;
            } else {
                fprintf(stderr, "Unknown format '%s'\n", optarg);
                return 2;
            }
            break;
        case 'N':
            gCodecName = optarg;
            break;
        case 'm':
            gMonotonicTime = true;
            break;
        case 'p':
            gPersistentSurface = true;
            break;
        default:
            if (ic != '?') {
                fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
            }
            return 2;
        }
    }

    if (optind != argc - 1) {
        fprintf(stderr, "Must specify output file (see --help).\n");
        return 2;
    }

    const char* fileName = argv[optind];
    if (gOutputFormat == FORMAT_MP4) {
        // MediaMuxer tries to create the file in the constructor, but we don't
        // learn about the failure until muxer.start(), which returns a generic
        // error code without logging anything.  We attempt to create the file
        // now for better diagnostics.
        int fd = open(fileName, O_CREAT | O_RDWR, 0644);
        if (fd < 0) {
            fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
            return 1;
        }
        close(fd);
    }

    status_t err = recordScreen(fileName);
    if (err == NO_ERROR) {
        // Try to notify the media scanner.  Not fatal if this fails.
        notifyMediaScanner(fileName);
    }
    ALOGD(err == NO_ERROR ? "success" : "failed");
    return (int) err;
}