/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <assert.h>
#include <ctype.h>
#include <errno.h>
#include <fcntl.h>
#include <inttypes.h>
#include <getopt.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/wait.h>

#include <termios.h>
#include <unistd.h>

#define LOG_TAG "ScreenRecord"
#define ATRACE_TAG ATRACE_TAG_GRAPHICS
//#define LOG_NDEBUG 0
#include <utils/Log.h>

#include <binder/IPCThreadState.h>
#include <utils/Errors.h>
#include <utils/Timers.h>
#include <utils/Trace.h>

#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/ISurfaceComposer.h>
#include <ui/DisplayInfo.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaMuxer.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/ICrypto.h>
#include <media/MediaCodecBuffer.h>

#include "screenrecord.h"
#include "Overlay.h"
#include "FrameOutput.h"

using android::ABuffer;
using android::ALooper;
using android::AMessage;
using android::AString;
using android::DisplayInfo;
using android::FrameOutput;
using android::IBinder;
using android::IGraphicBufferProducer;
using android::ISurfaceComposer;
using android::MediaCodec;
using android::MediaCodecBuffer;
using android::MediaMuxer;
using android::Overlay;
using android::PersistentSurface;
using android::ProcessState;
using android::Rect;
using android::String8;
using android::SurfaceComposerClient;
using android::Vector;
using android::sp;
using android::status_t;

using android::DISPLAY_ORIENTATION_0;
using android::DISPLAY_ORIENTATION_180;
using android::DISPLAY_ORIENTATION_90;
using android::INVALID_OPERATION;
using android::NAME_NOT_FOUND;
using android::NO_ERROR;
using android::UNKNOWN_ERROR;

static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
static const uint32_t kFallbackWidth = 1280;        // 720p
static const uint32_t kFallbackHeight = 720;
static const char* kMimeTypeAvc = "video/avc";

// Command-line parameters.
static bool gVerbose = false;           // chatty on stdout
static bool gRotate = false;            // rotate 90 degrees
static bool gMonotonicTime = false;     // use system monotonic time for timestamps
static bool gPersistentSurface = false; // use persistent surface
static enum {
    FORMAT_MP4, FORMAT_H264, FORMAT_WEBM, FORMAT_3GPP, FORMAT_FRAMES, FORMAT_RAW_FRAMES
} gOutputFormat = FORMAT_MP4;           // data format for output
static AString gCodecName = "";         // codec name override
static bool gSizeSpecified = false;     // was size explicitly requested?
static bool gWantInfoScreen = false;    // do we want initial info screen?
static bool gWantFrameTime = false;     // do we want times on each frame?
static uint32_t gVideoWidth = 0;        // default width+height
static uint32_t gVideoHeight = 0;
static uint32_t gBitRate = 20000000;    // 20Mbps
static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
static uint32_t gBframes = 0;

// Set by signal handler to stop recording.
static volatile bool gStopRequested = false;

// Previous signal handler state, restored after first hit.
static struct sigaction gOrigSigactionINT;
static struct sigaction gOrigSigactionHUP;


/*
 * Catch keyboard interrupt signals.  On receipt, the "stop requested"
 * flag is raised, and the original handler is restored (so that, if
 * we get stuck finishing, a second Ctrl-C will kill the process).
 */
static void signalCatcher(int signum)
{
    gStopRequested = true;
    switch (signum) {
    case SIGINT:
    case SIGHUP:
        sigaction(SIGINT, &gOrigSigactionINT, NULL);
        sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
        break;
    default:
        abort();
        break;
    }
}

/*
 * Configures signal handlers.  The previous handlers are saved.
 *
 * If the command is run from an interactive adb shell, we get SIGINT
 * when Ctrl-C is hit.  If we're run from the host, the local adb process
 * gets the signal, and we get a SIGHUP when the terminal disconnects.
 */
static status_t configureSignals() {
    struct sigaction act;
    memset(&act, 0, sizeof(act));
    act.sa_handler = signalCatcher;
    if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
        status_t err = -errno;
        fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
                strerror(errno));
        return err;
    }
    if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
        status_t err = -errno;
        fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
                strerror(errno));
        return err;
    }
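    // Ignore SIGPIPE so that a closed raw-output pipe surfaces as a write
    // error instead of silently killing the process.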
    signal(SIGPIPE, SIG_IGN);
    return NO_ERROR;
}

/*
 * Configures and starts the MediaCodec encoder.  Obtains an input surface
 * from the codec.
 */
static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
        sp<IGraphicBufferProducer>* pBufferProducer) {
    status_t err;

    if (gVerbose) {
        printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
                gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
        fflush(stdout);
    }

    sp<AMessage> format = new AMessage;
    format->setInt32(KEY_WIDTH, gVideoWidth);
    format->setInt32(KEY_HEIGHT, gVideoHeight);
    format->setString(KEY_MIME, kMimeTypeAvc);
    format->setInt32(KEY_COLOR_FORMAT, OMX_COLOR_FormatAndroidOpaque);
    format->setInt32(KEY_BIT_RATE, gBitRate);
    format->setFloat(KEY_FRAME_RATE, displayFps);
    format->setInt32(KEY_I_FRAME_INTERVAL, 10);
    format->setInt32(KEY_MAX_B_FRAMES, gBframes);
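    // Note: B-frames require at least Main profile (Baseline has no
    // B-slices), hence the explicit profile/level request below.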
    if (gBframes > 0) {
        format->setInt32(KEY_PROFILE, AVCProfileMain);
        format->setInt32(KEY_LEVEL, AVCLevel41);
    }

    sp<android::ALooper> looper = new android::ALooper;
    looper->setName("screenrecord_looper");
    looper->start();
    ALOGV("Creating codec");
    sp<MediaCodec> codec;
    if (gCodecName.empty()) {
        codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
        if (codec == NULL) {
            fprintf(stderr, "ERROR: unable to create %s codec instance\n",
                    kMimeTypeAvc);
            return UNKNOWN_ERROR;
        }
    } else {
        codec = MediaCodec::CreateByComponentName(looper, gCodecName);
        if (codec == NULL) {
            fprintf(stderr, "ERROR: unable to create %s codec instance\n",
                    gCodecName.c_str());
            return UNKNOWN_ERROR;
        }
    }

    err = codec->configure(format, NULL, NULL,
            MediaCodec::CONFIGURE_FLAG_ENCODE);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
                kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
        codec->release();
        return err;
    }

    ALOGV("Creating encoder input surface");
    sp<IGraphicBufferProducer> bufferProducer;
    if (gPersistentSurface) {
        sp<PersistentSurface> surface = MediaCodec::CreatePersistentInputSurface();
        bufferProducer = surface->getBufferProducer();
        err = codec->setInputSurface(surface);
    } else {
        err = codec->createInputSurface(&bufferProducer);
    }
    if (err != NO_ERROR) {
        fprintf(stderr,
            "ERROR: unable to %s encoder input surface (err=%d)\n",
            gPersistentSurface ? "set" : "create",
            err);
        codec->release();
        return err;
    }

    ALOGV("Starting codec");
    err = codec->start();
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
        codec->release();
        return err;
    }

    ALOGV("Codec prepared");
    *pCodec = codec;
    *pBufferProducer = bufferProducer;
    return NO_ERROR;
}

/*
 * Sets the display projection, based on the display dimensions, video size,
 * and device orientation.
 */
static status_t setDisplayProjection(
        SurfaceComposerClient::Transaction& t,
        const sp<IBinder>& dpy,
        const DisplayInfo& mainDpyInfo) {

    // Set the region of the layer stack we're interested in, which in our
    // case is "all of it".
    Rect layerStackRect(mainDpyInfo.viewportW, mainDpyInfo.viewportH);

    // We need to preserve the aspect ratio of the display.
    float displayAspect = (float) mainDpyInfo.viewportH / (float) mainDpyInfo.viewportW;

    // Set the way we map the output onto the display surface (which will
    // be e.g. 1280x720 for a 720p video).  The rect is interpreted
    // post-rotation, so if the display is rotated 90 degrees we need to
    // "pre-rotate" it by flipping width/height, so that the orientation
    // adjustment changes it back.
    //
    // We might want to encode a portrait display as landscape to use more
    // of the screen real estate.  (If players respect a 90-degree rotation
    // hint, we can essentially get a 720x1280 video instead of 1280x720.)
    // In that case, we swap the configured video width/height and then
    // supply a rotation value to the display projection.
    uint32_t videoWidth, videoHeight;
    uint32_t outWidth, outHeight;
    if (!gRotate) {
        videoWidth = gVideoWidth;
        videoHeight = gVideoHeight;
    } else {
        videoWidth = gVideoHeight;
        videoHeight = gVideoWidth;
    }
    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
        // limited by narrow width; reduce height
        outWidth = videoWidth;
        outHeight = (uint32_t)(videoWidth * displayAspect);
    } else {
        // limited by short height; restrict width
        outHeight = videoHeight;
        outWidth = (uint32_t)(videoHeight / displayAspect);
    }
    uint32_t offX, offY;
    offX = (videoWidth - outWidth) / 2;
    offY = (videoHeight - outHeight) / 2;
    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
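    // Worked example (illustrative numbers): a 1080x1920 portrait panel has
    // displayAspect ~= 1.78.  With a 1280x720 video and no rotation we take
    // the "short height" branch: outHeight = 720, outWidth ~= 405, so the
    // content is pillarboxed at offX = 437, offY = 0.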

    if (gVerbose) {
        if (gRotate) {
            printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
                    outHeight, outWidth, offY, offX);
            fflush(stdout);
        } else {
            printf("Content area is %ux%u at offset x=%d y=%d\n",
                    outWidth, outHeight, offX, offY);
            fflush(stdout);
        }
    }

    t.setDisplayProjection(dpy,
            gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
            layerStackRect, displayRect);
    return NO_ERROR;
}

/*
 * Configures the virtual display.  When this completes, virtual display
 * frames will start arriving from the buffer producer.
 */
static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
        const sp<IGraphicBufferProducer>& bufferProducer,
        sp<IBinder>* pDisplayHandle) {
    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
            String8("ScreenRecorder"), false /*secure*/);

    SurfaceComposerClient::Transaction t;
    t.setDisplaySurface(dpy, bufferProducer);
    setDisplayProjection(t, dpy, mainDpyInfo);
    t.setDisplayLayerStack(dpy, 0);    // default stack
    t.apply();

    *pDisplayHandle = dpy;

    return NO_ERROR;
}

/*
 * Runs the MediaCodec encoder, sending the output to the MediaMuxer.  The
 * input frames are coming from the virtual display as fast as SurfaceFlinger
 * wants to send them.
 *
 * Exactly one of muxer or rawFp must be non-null.
 *
 * The muxer must *not* have been started before calling.
 */
static status_t runEncoder(const sp<MediaCodec>& encoder,
        const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
        const sp<IBinder>& virtualDpy, uint8_t orientation) {
    static int kTimeout = 250000;   // 250ms dequeue timeout (usec); stay responsive to signals
    status_t err;
    ssize_t trackIdx = -1;
    uint32_t debugNumFrames = 0;
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
    DisplayInfo mainDpyInfo;

    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));

    Vector<sp<MediaCodecBuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    // Run until we're signaled.
    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            if (gVerbose) {
                printf("Time limit reached\n");
                fflush(stdout);
            }
            break;
        }

        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
        switch (err) {
        case NO_ERROR:
            // got a buffer
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                ALOGV("Got codec config buffer (%zu bytes)", size);
                if (muxer != NULL) {
                    // ignore this -- we passed the CSD into MediaMuxer when
                    // we got the format change notification
                    size = 0;
                }
            }
            if (size != 0) {
                ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
                        bufIndex, size, ptsUsec);

                { // scope
                    ATRACE_NAME("orientation");
                    // Check orientation, update if it has changed.
                    //
                    // Polling for changes is inefficient and wrong, but the
                    // useful stuff is hard to get at without a Dalvik VM.
                    err = SurfaceComposerClient::getDisplayInfo(mainDpy,
                            &mainDpyInfo);
                    if (err != NO_ERROR) {
                        ALOGW("getDisplayInfo(main) failed: %d", err);
                    } else if (orientation != mainDpyInfo.orientation) {
                        ALOGD("orientation changed, now %d", mainDpyInfo.orientation);
                        SurfaceComposerClient::Transaction t;
                        setDisplayProjection(t, virtualDpy, mainDpyInfo);
                        t.apply();
                        orientation = mainDpyInfo.orientation;
                    }
                }

                // If the virtual display isn't providing us with timestamps,
                // use the current time.  This isn't great -- we could get
                // decoded data in clusters -- but we're not expecting
                // to hit this anyway.
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }

                if (muxer == NULL) {
                    fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
                    // Flush the data immediately in case we're streaming.
                    // We don't want to do this if all we've written is
                    // the SPS/PPS data because mplayer gets confused.
                    if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
                        fflush(rawFp);
                    }
                } else {
                    // The MediaMuxer docs are unclear, but it appears that we
                    // need to pass either the full set of BufferInfo flags, or
                    // (flags & BUFFER_FLAG_SYNCFRAME).
                    //
                    // If this blocks for too long we could drop frames.  We may
                    // want to queue these up and do them on a different thread.
                    ATRACE_NAME("write sample");
                    assert(trackIdx != -1);
                    // TODO
                    sp<ABuffer> buffer = new ABuffer(
                            buffers[bufIndex]->data(), buffers[bufIndex]->size());
                    err = muxer->writeSampleData(buffer, trackIdx,
                            ptsUsec, flags);
                    if (err != NO_ERROR) {
                        fprintf(stderr,
                            "Failed writing data to muxer (err=%d)\n", err);
                        return err;
                    }
                }
                debugNumFrames++;
            }
            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                        err);
                return err;
            }
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                // Not expecting EOS from SurfaceFlinger.  Go with it.
                ALOGI("Received end-of-stream");
                gStopRequested = true;
            }
            break;
        case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
            ALOGV("Got -EAGAIN, looping");
            break;
        case android::INFO_FORMAT_CHANGED:    // INFO_OUTPUT_FORMAT_CHANGED
            {
                // Format includes CSD, which we must provide to muxer.
                ALOGV("Encoder format changed");
                sp<AMessage> newFormat;
                encoder->getOutputFormat(&newFormat);
                if (muxer != NULL) {
                    trackIdx = muxer->addTrack(newFormat);
                    ALOGV("Starting muxer");
                    err = muxer->start();
                    if (err != NO_ERROR) {
                        fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
                        return err;
                    }
                }
            }
            break;
        case android::INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED
            // Not expected for an encoder; handle it anyway.
            ALOGV("Encoder buffers changed");
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                fprintf(stderr,
                        "Unable to get new output buffers (err=%d)\n", err);
                return err;
            }
            break;
        case INVALID_OPERATION:
            ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
            return err;
        default:
            fprintf(stderr,
                    "Got weird result %d from dequeueOutputBuffer\n", err);
            return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", gStopRequested);
    if (gVerbose) {
        printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
                debugNumFrames, nanoseconds_to_seconds(
                        systemTime(CLOCK_MONOTONIC) - startWhenNsec));
        fflush(stdout);
    }
    return NO_ERROR;
}

/*
 * Raw H.264 byte stream output requested.  Send the output to stdout
 * if desired.  If the output is a tty, reconfigure it to avoid the
 * CRLF line termination that we see with "adb shell" commands.
 */
static FILE* prepareRawOutput(const char* fileName) {
    FILE* rawFp = NULL;

    if (strcmp(fileName, "-") == 0) {
        if (gVerbose) {
            fprintf(stderr, "ERROR: verbose output and '-' not compatible\n");
            return NULL;
        }
        rawFp = stdout;
    } else {
        rawFp = fopen(fileName, "w");
        if (rawFp == NULL) {
            fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
            return NULL;
        }
    }

    int fd = fileno(rawFp);
    if (isatty(fd)) {
        // best effort -- reconfigure tty for "raw"
        ALOGD("raw video output to tty (fd=%d)", fd);
        struct termios term;
        if (tcgetattr(fd, &term) == 0) {
            cfmakeraw(&term);
            if (tcsetattr(fd, TCSANOW, &term) == 0) {
                ALOGD("tty successfully configured for raw");
            }
        }
    }

    return rawFp;
}

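/*
 * Rounds a value down to the nearest even number.  The AVC encoder can't
 * accept odd dimensions, so display sizes are forced to even values.
 */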
static inline uint32_t floorToEven(uint32_t num) {
    return num & ~1;
}

/*
 * Main "do work" start point.
 *
 * Configures codec, muxer, and virtual display, then starts moving bits
 * around.
 */
static status_t recordScreen(const char* fileName) {
    status_t err;

    // Configure signal handler.
    err = configureSignals();
    if (err != NO_ERROR) return err;

    // Start Binder thread pool.  MediaCodec needs to be able to receive
    // messages from mediaserver.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    // Get main display parameters.
    const sp<IBinder> mainDpy = SurfaceComposerClient::getInternalDisplayToken();
    if (mainDpy == nullptr) {
        fprintf(stderr, "ERROR: no display\n");
        return NAME_NOT_FOUND;
    }

    DisplayInfo mainDpyInfo;
    err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display characteristics\n");
        return err;
    }

    if (gVerbose) {
        printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
                mainDpyInfo.viewportW, mainDpyInfo.viewportH, mainDpyInfo.fps,
                mainDpyInfo.orientation);
        fflush(stdout);
    }

    // The encoder can't accept odd dimensions, so round down to even.
    if (gVideoWidth == 0) {
        gVideoWidth = floorToEven(mainDpyInfo.viewportW);
    }
    if (gVideoHeight == 0) {
        gVideoHeight = floorToEven(mainDpyInfo.viewportH);
    }

    // Configure and start the encoder.
    sp<MediaCodec> encoder;
    sp<FrameOutput> frameOutput;
    sp<IGraphicBufferProducer> encoderInputSurface;
    if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
        err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);

        if (err != NO_ERROR && !gSizeSpecified) {
            // fallback is defined for landscape; swap if we're in portrait
            bool needSwap = gVideoWidth < gVideoHeight;
            uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
            uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
            if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
                ALOGV("Retrying with 720p");
                fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                        gVideoWidth, gVideoHeight, newWidth, newHeight);
                gVideoWidth = newWidth;
                gVideoHeight = newHeight;
                err = prepareEncoder(mainDpyInfo.fps, &encoder,
                        &encoderInputSurface);
            }
        }
        if (err != NO_ERROR) return err;

        // From here on, we must explicitly release() the encoder before it goes
        // out of scope, or we will get an assertion failure from stagefright
        // later on in a different thread.
    } else {
        // We're not using an encoder at all.  The "encoder input surface" we hand to
        // SurfaceFlinger will just feed directly to us.
        frameOutput = new FrameOutput();
        err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
        if (err != NO_ERROR) {
            return err;
        }
    }

    // Draw the "info" page by rendering a frame with GLES and sending
    // it directly to the encoder.
    // TODO: consider displaying this as a regular layer to avoid b/11697754
    if (gWantInfoScreen) {
        Overlay::drawInfoPage(encoderInputSurface);
    }

    // Configure optional overlay.
    sp<IGraphicBufferProducer> bufferProducer;
    sp<Overlay> overlay;
    if (gWantFrameTime) {
        // Send virtual display frames to an external texture.
        overlay = new Overlay(gMonotonicTime);
        err = overlay->start(encoderInputSurface, &bufferProducer);
        if (err != NO_ERROR) {
            if (encoder != NULL) encoder->release();
            return err;
        }
        if (gVerbose) {
            printf("Bugreport overlay created\n");
            fflush(stdout);
        }
    } else {
        // Use the encoder's input surface as the virtual display surface.
        bufferProducer = encoderInputSurface;
    }

    // Configure virtual display.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
    if (err != NO_ERROR) {
        if (encoder != NULL) encoder->release();
        return err;
    }

    sp<MediaMuxer> muxer = NULL;
    FILE* rawFp = NULL;
    switch (gOutputFormat) {
        case FORMAT_MP4:
        case FORMAT_WEBM:
        case FORMAT_3GPP: {
            // Configure muxer.  We have to wait for the CSD blob from the encoder
            // before we can start it.
            err = unlink(fileName);
            if (err != 0 && errno != ENOENT) {
                fprintf(stderr, "ERROR: couldn't remove existing file\n");
                abort();
            }
            int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
            if (fd < 0) {
                fprintf(stderr, "ERROR: couldn't open file\n");
                abort();
            }
            if (gOutputFormat == FORMAT_MP4) {
                muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
            } else if (gOutputFormat == FORMAT_WEBM) {
                muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_WEBM);
            } else {
                muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_THREE_GPP);
            }
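            // The muxer holds its own reference to the open file, so our
            // descriptor can be closed right away.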
            close(fd);
            if (gRotate) {
                muxer->setOrientationHint(90);  // TODO: does this do anything?
            }
            break;
        }
        case FORMAT_H264:
        case FORMAT_FRAMES:
        case FORMAT_RAW_FRAMES: {
            rawFp = prepareRawOutput(fileName);
            if (rawFp == NULL) {
                if (encoder != NULL) encoder->release();
                return -1;
            }
            break;
        }
        default:
            fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
            abort();
    }

    if (gOutputFormat == FORMAT_FRAMES || gOutputFormat == FORMAT_RAW_FRAMES) {
        // TODO: if we want to make this a proper feature, we should output
        //       an outer header with version info.  Right now we never change
        //       the frame size or format, so we could conceivably just send
        //       the current frame header once and then follow it with an
        //       unbroken stream of data.

        // Make the EGL context current again.  This gets unhooked if we're
        // using "--bugreport" mode.
        // TODO: figure out if we can eliminate this
        frameOutput->prepareToCopy();

        while (!gStopRequested) {
            // Poll for frames, the same way we do for MediaCodec.  We do
            // all of the work on the main thread.
            //
            // Ideally we'd sleep indefinitely and wake when the
            // stop was requested, but this will do for now.  (It almost
            // works because wait() wakes when a signal hits, but we
            // need to handle the edge cases.)
            bool rawFrames = gOutputFormat == FORMAT_RAW_FRAMES;
            err = frameOutput->copyFrame(rawFp, 250000, rawFrames);
            if (err == ETIMEDOUT) {
                err = NO_ERROR;
            } else if (err != NO_ERROR) {
                ALOGE("Got error %d from copyFrame()", err);
                break;
            }
        }
    } else {
        // Main encoder loop.
        err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy,
                mainDpyInfo.orientation);
        if (err != NO_ERROR) {
            fprintf(stderr, "Encoder failed (err=%d)\n", err);
            // fall through to cleanup
        }

        if (gVerbose) {
            printf("Stopping encoder and muxer\n");
            fflush(stdout);
        }
    }

    // Shut everything down, starting with the producer side.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    if (overlay != NULL) overlay->stop();
    if (encoder != NULL) encoder->stop();
    if (muxer != NULL) {
        // If we don't stop muxer explicitly, i.e. let the destructor run,
        // it may hang (b/11050628).
        err = muxer->stop();
    } else if (rawFp != stdout) {
        fclose(rawFp);
    }
    if (encoder != NULL) encoder->release();

    return err;
}

/*
 * Sends a broadcast to the media scanner to tell it about the new video.
 *
 * This is optional, but nice to have.
 */
static status_t notifyMediaScanner(const char* fileName) {
    // need to do allocations before the fork()
    String8 fileUrl("file://");
    fileUrl.append(fileName);

    const char* kCommand = "/system/bin/am";
    const char* const argv[] = {
            kCommand,
            "broadcast",
            "-a",
            "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
            "-d",
            fileUrl.string(),
            NULL
    };
    if (gVerbose) {
        printf("Executing:");
        for (int i = 0; argv[i] != NULL; i++) {
            printf(" %s", argv[i]);
        }
        putchar('\n');
        fflush(stdout);
    }

    pid_t pid = fork();
    if (pid < 0) {
        int err = errno;
        ALOGW("fork() failed: %s", strerror(err));
        return -err;
    } else if (pid > 0) {
        // parent; wait for the child, mostly to make the verbose-mode output
        // look right, but also to check for and log failures
        int status;
        pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
        if (actualPid != pid) {
            ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
        } else if (status != 0) {
            ALOGW("'am broadcast' exited with status=%d", status);
        } else {
            ALOGV("'am broadcast' exited successfully");
        }
    } else {
        if (!gVerbose) {
            // non-verbose, suppress 'am' output
            ALOGV("closing stdout/stderr in child");
            int fd = open("/dev/null", O_WRONLY);
            if (fd >= 0) {
                dup2(fd, STDOUT_FILENO);
                dup2(fd, STDERR_FILENO);
                close(fd);
            }
        }
        execv(kCommand, const_cast<char* const*>(argv));
        ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
        exit(1);
    }
    return NO_ERROR;
}

/*
 * Parses a string of the form "1280x720".
 *
 * Returns true on success.
 */
static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
        uint32_t* pHeight) {
    long width, height;
    char* end;

    // Must specify base 10, or "0x0" gets parsed differently.
    width = strtol(widthHeight, &end, 10);
    if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
        // invalid chars in width, or missing 'x', or missing height
        return false;
    }
    height = strtol(end + 1, &end, 10);
    if (*end != '\0') {
        // invalid chars in height
        return false;
    }

    *pWidth = width;
    *pHeight = height;
    return true;
}

/*
 * Accepts a string with a bare number ("4000000") or with a single-character
 * unit ("4m").
 *
 * Returns an error if parsing fails.
 */
static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
    long value;
    char* endptr;

    value = strtol(str, &endptr, 10);
    if (*endptr == '\0') {
        // bare number
        *pValue = value;
        return NO_ERROR;
    } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
        *pValue = value * 1000000;  // check for overflow?
        return NO_ERROR;
    } else {
        fprintf(stderr, "Unrecognized value: %s\n", str);
        return UNKNOWN_ERROR;
    }
}

/*
 * Dumps usage on stderr.
 */
static void usage() {
    fprintf(stderr,
        "Usage: screenrecord [options] <filename>\n"
        "\n"
        "Android screenrecord v%d.%d.  Records the device's display to a .mp4 file.\n"
        "\n"
        "Options:\n"
        "--size WIDTHxHEIGHT\n"
        "    Set the video size, e.g. \"1280x720\".  Default is the device's main\n"
        "    display resolution (if supported), 1280x720 if not.  For best results,\n"
        "    use a size supported by the AVC encoder.\n"
        "--bit-rate RATE\n"
        "    Set the video bit rate, in bits per second.  Value may be specified as\n"
        "    bits or megabits, e.g. '4000000' is equivalent to '4M'.  Default %dMbps.\n"
        "--bugreport\n"
        "    Add additional information, such as a timestamp overlay, that is helpful\n"
        "    in videos captured to illustrate bugs.\n"
        "--time-limit TIME\n"
        "    Set the maximum recording time, in seconds.  Default / maximum is %d.\n"
        "--verbose\n"
        "    Display interesting information on stdout.\n"
        "--help\n"
        "    Show this message.\n"
        "\n"
        "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
        "\n",
        kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
        );
}

/*
 * Parses args and kicks things off.
 */
int main(int argc, char* const argv[]) {
    static const struct option longOptions[] = {
        { "help",               no_argument,        NULL, 'h' },
        { "verbose",            no_argument,        NULL, 'v' },
        { "size",               required_argument,  NULL, 's' },
        { "bit-rate",           required_argument,  NULL, 'b' },
        { "time-limit",         required_argument,  NULL, 't' },
        { "bugreport",          no_argument,        NULL, 'u' },
        // "unofficial" options
        { "show-device-info",   no_argument,        NULL, 'i' },
        { "show-frame-time",    no_argument,        NULL, 'f' },
        { "rotate",             no_argument,        NULL, 'r' },
        { "output-format",      required_argument,  NULL, 'o' },
        { "codec-name",         required_argument,  NULL, 'N' },
        { "monotonic-time",     no_argument,        NULL, 'm' },
        { "persistent-surface", no_argument,        NULL, 'p' },
        { "bframes",            required_argument,  NULL, 'B' },
        { NULL,                 0,                  NULL, 0 }
    };

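    // Only the long options above are recognized; the optstring passed to
    // getopt_long() is empty, and the single-character codes in the table
    // exist only as labels for the switch below.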
    while (true) {
        int optionIndex = 0;
        int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
        if (ic == -1) {
            break;
        }

        switch (ic) {
        case 'h':
            usage();
            return 0;
        case 'v':
            gVerbose = true;
            break;
        case 's':
            if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
                fprintf(stderr, "Invalid size '%s', must be width x height\n",
                        optarg);
                return 2;
            }
            if (gVideoWidth == 0 || gVideoHeight == 0) {
                fprintf(stderr,
                    "Invalid size %ux%u, width and height may not be zero\n",
                    gVideoWidth, gVideoHeight);
                return 2;
            }
            gSizeSpecified = true;
            break;
        case 'b':
            if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
                return 2;
            }
            if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
                fprintf(stderr,
                        "Bit rate %dbps outside acceptable range [%d,%d]\n",
                        gBitRate, kMinBitRate, kMaxBitRate);
                return 2;
            }
            break;
        case 't':
            gTimeLimitSec = atoi(optarg);
            if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
                fprintf(stderr,
                        "Time limit %ds outside acceptable range [1,%d]\n",
                        gTimeLimitSec, kMaxTimeLimitSec);
                return 2;
            }
            break;
        case 'u':
            gWantInfoScreen = true;
            gWantFrameTime = true;
            break;
        case 'i':
            gWantInfoScreen = true;
            break;
        case 'f':
            gWantFrameTime = true;
            break;
        case 'r':
            // experimental feature
            gRotate = true;
            break;
        case 'o':
            if (strcmp(optarg, "mp4") == 0) {
                gOutputFormat = FORMAT_MP4;
            } else if (strcmp(optarg, "h264") == 0) {
                gOutputFormat = FORMAT_H264;
            } else if (strcmp(optarg, "webm") == 0) {
                gOutputFormat = FORMAT_WEBM;
            } else if (strcmp(optarg, "3gpp") == 0) {
                gOutputFormat = FORMAT_3GPP;
            } else if (strcmp(optarg, "frames") == 0) {
                gOutputFormat = FORMAT_FRAMES;
            } else if (strcmp(optarg, "raw-frames") == 0) {
                gOutputFormat = FORMAT_RAW_FRAMES;
            } else {
                fprintf(stderr, "Unknown format '%s'\n", optarg);
                return 2;
            }
            break;
        case 'N':
            gCodecName = optarg;
            break;
        case 'm':
            gMonotonicTime = true;
            break;
        case 'p':
            gPersistentSurface = true;
            break;
        case 'B':
            if (parseValueWithUnit(optarg, &gBframes) != NO_ERROR) {
                return 2;
            }
            break;
        default:
            if (ic != '?') {
                fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
            }
            return 2;
        }
    }

    if (optind != argc - 1) {
        fprintf(stderr, "Must specify output file (see --help).\n");
        return 2;
    }

    const char* fileName = argv[optind];
    if (gOutputFormat == FORMAT_MP4) {
        // MediaMuxer tries to create the file in the constructor, but we don't
        // learn about the failure until muxer.start(), which returns a generic
        // error code without logging anything.  We attempt to create the file
        // now for better diagnostics.
        int fd = open(fileName, O_CREAT | O_RDWR, 0644);
        if (fd < 0) {
            fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
            return 1;
        }
        close(fd);
    }

    status_t err = recordScreen(fileName);
    if (err == NO_ERROR) {
        // Try to notify the media scanner.  Not fatal if this fails.
        notifyMediaScanner(fileName);
    }
    ALOGD(err == NO_ERROR ? "success" : "failed");
    return (int) err;
}