Home | History | Annotate | Download | only in videoeditor
      1 /*
      2  * Copyright (C) 2010 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package android.media.videoeditor;
     18 
     19 import java.io.File;
     20 import java.io.IOException;
     21 import java.math.BigDecimal;
     22 import java.nio.IntBuffer;
     23 import java.util.Iterator;
     24 import java.util.List;
     25 import java.util.concurrent.Semaphore;
     26 
     27 import android.graphics.Bitmap;
     28 import android.graphics.BitmapFactory;
     29 import android.graphics.Canvas;
     30 import android.graphics.Paint;
     31 import android.graphics.Rect;
     32 import android.graphics.Matrix;
     33 import android.media.videoeditor.VideoEditor.ExportProgressListener;
     34 import android.media.videoeditor.VideoEditor.PreviewProgressListener;
     35 import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener;
     36 import android.util.Log;
     37 import android.util.Pair;
     38 import android.view.Surface;
     39 
     40 /**
     41  *This class provide Native methods to be used by MediaArtist {@hide}
     42  */
     43 class MediaArtistNativeHelper {
     44     private static final String TAG = "MediaArtistNativeHelper";
     45 
     46     static {
     47         System.loadLibrary("videoeditor_jni");
     48     }
     49 
     50     private static final int MAX_THUMBNAIL_PERMITTED = 8;
     51 
     52     public static final int TASK_LOADING_SETTINGS = 1;
     53     public static final int TASK_ENCODING = 2;
     54 
     55     /**
     56      *  The resize paint
     57      */
     58     private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG);
     59 
     60     private final VideoEditor mVideoEditor;
     61     /*
     62      *  Semaphore to control preview calls
     63      */
     64     private final Semaphore mLock;
     65 
     66     private EditSettings mStoryBoardSettings;
     67 
     68     private String mOutputFilename;
     69 
     70     private PreviewClipProperties mClipProperties = null;
     71 
     72     private EditSettings mPreviewEditSettings;
     73 
     74     private AudioSettings mAudioSettings = null;
     75 
     76     private AudioTrack mAudioTrack = null;
     77 
     78     private boolean mInvalidatePreviewArray = true;
     79 
     80     private boolean mRegenerateAudio = true;
     81 
     82     private String mExportFilename = null;
     83     private int mExportVideoCodec = 0;
     84     private int mExportAudioCodec = 0;
     85     private int mProgressToApp;
     86 
     87     private String mRenderPreviewOverlayFile;
     88     private int mRenderPreviewRenderingMode;
     89 
     90     private boolean mIsFirstProgress;
     91 
     92     private static final String AUDIO_TRACK_PCM_FILE = "AudioPcm.pcm";
     93 
     94     // Processing indication
     95     public static final int PROCESSING_NONE          = 0;
     96     public static final int PROCESSING_AUDIO_PCM     = 1;
     97     public static final int PROCESSING_TRANSITION    = 2;
     98     public static final int PROCESSING_KENBURNS      = 3;
     99     public static final int PROCESSING_INTERMEDIATE1 = 11;
    100     public static final int PROCESSING_INTERMEDIATE2 = 12;
    101     public static final int PROCESSING_INTERMEDIATE3 = 13;
    102     public static final int PROCESSING_EXPORT        = 20;
    103 
    104     private int mProcessingState;
    105     private Object mProcessingObject;
    106     private PreviewProgressListener mPreviewProgressListener;
    107     private ExportProgressListener mExportProgressListener;
    108     private ExtractAudioWaveformProgressListener mExtractAudioWaveformProgressListener;
    109     private MediaProcessingProgressListener mMediaProcessingProgressListener;
    110     private final String mProjectPath;
    111 
    112     private long mPreviewProgress;
    113 
    114     private String mAudioTrackPCMFilePath;
    115 
    116     private int mTotalClips = 0;
    117 
    118     private boolean mErrorFlagSet = false;
    119 
    120     @SuppressWarnings("unused")
    121     private int mManualEditContext;
    122 
    123     /* Listeners */
    124 
    125     /**
    126      * Interface definition for a listener to be invoked when there is an update
    127      * in a running task.
    128      */
    129     public interface OnProgressUpdateListener {
    130         /**
    131          * Called when there is an update.
    132          *
    133          * @param taskId id of the task reporting an update.
    134          * @param progress progress of the task [0..100].
    135          * @see BasicEdit#TASK_ENCODING
    136          */
    137         public void OnProgressUpdate(int taskId, int progress);
    138     }
    139 
    140     /** Defines the version. */
    141     public final class Version {
    142 
    143         /** Major version number */
    144         public int major;
    145 
    146         /** Minor version number */
    147         public int minor;
    148 
    149         /** Revision number */
    150         public int revision;
    151 
    152         /** VIDEOEDITOR major version number */
    153         private static final int VIDEOEDITOR_MAJOR_VERSION = 0;
    154 
    155         /** VIDEOEDITOR minor version number */
    156         private static final int VIDEOEDITOR_MINOR_VERSION = 0;
    157 
    158         /** VIDEOEDITOR revision number */
    159         private static final int VIDEOEDITOR_REVISION_VERSION = 1;
    160 
    161         /** Method which returns the current VIDEOEDITOR version */
    162         public Version getVersion() {
    163             Version version = new Version();
    164 
    165             version.major = Version.VIDEOEDITOR_MAJOR_VERSION;
    166             version.minor = Version.VIDEOEDITOR_MINOR_VERSION;
    167             version.revision = Version.VIDEOEDITOR_REVISION_VERSION;
    168 
    169             return version;
    170         }
    171     }
    172 
    173     /**
    174      * Defines output audio formats.
    175      */
    176     public final class AudioFormat {
    177         /** No audio present in output clip. Used to generate video only clip */
    178         public static final int NO_AUDIO = 0;
    179 
    180         /** AMR Narrow Band. */
    181         public static final int AMR_NB = 1;
    182 
    183         /** Advanced Audio Coding (AAC). */
    184         public static final int AAC = 2;
    185 
    186         /** Advanced Audio Codec Plus (HE-AAC v1). */
    187         public static final int AAC_PLUS = 3;
    188 
    189         /** Advanced Audio Codec Plus (HE-AAC v2). */
    190         public static final int ENHANCED_AAC_PLUS = 4;
    191 
    192         /** MPEG layer 3 (MP3). */
    193         public static final int MP3 = 5;
    194 
    195         /** Enhanced Variable RateCodec (EVRC). */
    196         public static final int EVRC = 6;
    197 
    198         /** PCM (PCM). */
    199         public static final int PCM = 7;
    200 
    201         /** No transcoding. Output audio format is same as input audio format */
    202         public static final int NULL_AUDIO = 254;
    203 
    204         /** Unsupported audio format. */
    205         public static final int UNSUPPORTED_AUDIO = 255;
    206     }
    207 
    208     /**
    209      * Defines audio sampling frequencies.
    210      */
    211     public final class AudioSamplingFrequency {
    212         /**
    213          * Default sampling frequency. Uses the default frequency for a specific
    214          * audio format. For AAC the only supported (and thus default) sampling
    215          * frequency is 16 kHz. For this audio format the sampling frequency in
    216          * the OutputParams.
    217          **/
    218         public static final int FREQ_DEFAULT = 0;
    219 
    220         /** Audio sampling frequency of 8000 Hz. */
    221         public static final int FREQ_8000 = 8000;
    222 
    223         /** Audio sampling frequency of 11025 Hz. */
    224         public static final int FREQ_11025 = 11025;
    225 
    226         /** Audio sampling frequency of 12000 Hz. */
    227         public static final int FREQ_12000 = 12000;
    228 
    229         /** Audio sampling frequency of 16000 Hz. */
    230         public static final int FREQ_16000 = 16000;
    231 
    232         /** Audio sampling frequency of 22050 Hz. */
    233         public static final int FREQ_22050 = 22050;
    234 
    235         /** Audio sampling frequency of 24000 Hz. */
    236         public static final int FREQ_24000 = 24000;
    237 
    238         /** Audio sampling frequency of 32000 Hz. */
    239         public static final int FREQ_32000 = 32000;
    240 
    241         /** Audio sampling frequency of 44100 Hz. */
    242         public static final int FREQ_44100 = 44100;
    243 
    244         /** Audio sampling frequency of 48000 Hz. Not available for output file. */
    245         public static final int FREQ_48000 = 48000;
    246     }
    247 
    248     /**
    249      * Defines the supported fixed audio and video bitrates. These values are
    250      * for output audio video only.
    251      */
    252     public final class Bitrate {
    253         /** Variable bitrate. Means no bitrate regulation */
    254         public static final int VARIABLE = -1;
    255 
    256         /** An undefined bitrate. */
    257         public static final int UNDEFINED = 0;
    258 
    259         /** A bitrate of 9.2 kbits/s. */
    260         public static final int BR_9_2_KBPS = 9200;
    261 
    262         /** A bitrate of 12.2 kbits/s. */
    263         public static final int BR_12_2_KBPS = 12200;
    264 
    265         /** A bitrate of 16 kbits/s. */
    266         public static final int BR_16_KBPS = 16000;
    267 
    268         /** A bitrate of 24 kbits/s. */
    269         public static final int BR_24_KBPS = 24000;
    270 
    271         /** A bitrate of 32 kbits/s. */
    272         public static final int BR_32_KBPS = 32000;
    273 
    274         /** A bitrate of 48 kbits/s. */
    275         public static final int BR_48_KBPS = 48000;
    276 
    277         /** A bitrate of 64 kbits/s. */
    278         public static final int BR_64_KBPS = 64000;
    279 
    280         /** A bitrate of 96 kbits/s. */
    281         public static final int BR_96_KBPS = 96000;
    282 
    283         /** A bitrate of 128 kbits/s. */
    284         public static final int BR_128_KBPS = 128000;
    285 
    286         /** A bitrate of 192 kbits/s. */
    287         public static final int BR_192_KBPS = 192000;
    288 
    289         /** A bitrate of 256 kbits/s. */
    290         public static final int BR_256_KBPS = 256000;
    291 
    292         /** A bitrate of 288 kbits/s. */
    293         public static final int BR_288_KBPS = 288000;
    294 
    295         /** A bitrate of 384 kbits/s. */
    296         public static final int BR_384_KBPS = 384000;
    297 
    298         /** A bitrate of 512 kbits/s. */
    299         public static final int BR_512_KBPS = 512000;
    300 
    301         /** A bitrate of 800 kbits/s. */
    302         public static final int BR_800_KBPS = 800000;
    303 
    304         /** A bitrate of 2 Mbits/s. */
    305         public static final int BR_2_MBPS = 2000000;
    306 
    307         /** A bitrate of 5 Mbits/s. */
    308         public static final int BR_5_MBPS = 5000000;
    309 
    310         /** A bitrate of 8 Mbits/s. */
    311         public static final int BR_8_MBPS = 8000000;
    312     }
    313 
    314     /**
    315      * Defines all supported file types.
    316      */
    317     public final class FileType {
    318         /** 3GPP file type. */
    319         public static final int THREE_GPP = 0;
    320 
    321         /** MP4 file type. */
    322         public static final int MP4 = 1;
    323 
    324         /** AMR file type. */
    325         public static final int AMR = 2;
    326 
    327         /** MP3 audio file type. */
    328         public static final int MP3 = 3;
    329 
    330         /** PCM audio file type. */
    331         public static final int PCM = 4;
    332 
    333         /** JPEG image file type. */
    334         public static final int JPG = 5;
    335 
    336         /** GIF image file type. */
    337         public static final int GIF = 7;
    338 
    339         /** PNG image file type. */
    340         public static final int PNG = 8;
    341 
    342         /** M4V file type. */
    343         public static final int M4V = 10;
    344 
    345         /** Unsupported file type. */
    346         public static final int UNSUPPORTED = 255;
    347     }
    348 
    349     /**
    350      * Defines rendering types. Rendering can only be applied to files
    351      * containing video streams.
    352      **/
    353     public final class MediaRendering {
    354         /**
    355          * Resize to fit the output video with changing the aspect ratio if
    356          * needed.
    357          */
    358         public static final int RESIZING = 0;
    359 
    360         /**
    361          * Crop the input video to fit it with the output video resolution.
    362          **/
    363         public static final int CROPPING = 1;
    364 
    365         /**
    366          * Resize to fit the output video resolution but maintain the aspect
    367          * ratio. This framing type adds black borders if needed.
    368          */
    369         public static final int BLACK_BORDERS = 2;
    370     }
    371 
    372     /**
    373      * Defines the results.
    374      */
    375     public final class Result {
    376         /** No error. result OK */
    377         public static final int NO_ERROR = 0;
    378 
    379         /** File not found */
    380         public static final int ERR_FILE_NOT_FOUND = 1;
    381 
    382         /**
    383          * In case of UTF8 conversion, the size of the converted path will be
    384          * more than the corresponding allocated buffer.
    385          */
    386         public static final int ERR_BUFFER_OUT_TOO_SMALL = 2;
    387 
    388         /** Invalid file type. */
    389         public static final int ERR_INVALID_FILE_TYPE = 3;
    390 
    391         /** Invalid effect kind. */
    392         public static final int ERR_INVALID_EFFECT_KIND = 4;
    393 
    394         /** Invalid video effect. */
    395         public static final int ERR_INVALID_VIDEO_EFFECT_TYPE = 5;
    396 
    397         /** Invalid audio effect. */
    398         public static final int ERR_INVALID_AUDIO_EFFECT_TYPE = 6;
    399 
    400         /** Invalid video transition. */
    401         public static final int ERR_INVALID_VIDEO_TRANSITION_TYPE = 7;
    402 
    403         /** Invalid audio transition. */
    404         public static final int ERR_INVALID_AUDIO_TRANSITION_TYPE = 8;
    405 
    406         /** Invalid encoding frame rate. */
    407         public static final int ERR_INVALID_VIDEO_ENCODING_FRAME_RATE = 9;
    408 
    409         /** External effect is called but this function is not set. */
    410         public static final int ERR_EXTERNAL_EFFECT_NULL = 10;
    411 
    412         /** External transition is called but this function is not set. */
    413         public static final int ERR_EXTERNAL_TRANSITION_NULL = 11;
    414 
    415         /** Begin time cut is larger than the video clip duration. */
    416         public static final int ERR_BEGIN_CUT_LARGER_THAN_DURATION = 12;
    417 
    418         /** Begin cut time is larger or equal than end cut. */
    419         public static final int ERR_BEGIN_CUT_LARGER_THAN_END_CUT = 13;
    420 
    421         /** Two consecutive transitions are overlapping on one clip. */
    422         public static final int ERR_OVERLAPPING_TRANSITIONS = 14;
    423 
    424         /** Internal error, type size mismatch. */
    425         public static final int ERR_ANALYSIS_DATA_SIZE_TOO_SMALL = 15;
    426 
    427         /** An input 3GPP file is invalid/corrupted. */
    428         public static final int ERR_INVALID_3GPP_FILE = 16;
    429 
    430         /** A file contains an unsupported video format. */
    431         public static final int ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT = 17;
    432 
    433         /** A file contains an unsupported audio format. */
    434         public static final int ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT = 18;
    435 
    436         /** A file format is not supported. */
    437         public static final int ERR_AMR_EDITING_UNSUPPORTED = 19;
    438 
    439         /** An input clip has an unexpectedly large Video AU. */
    440         public static final int ERR_INPUT_VIDEO_AU_TOO_LARGE = 20;
    441 
    442         /** An input clip has an unexpectedly large Audio AU. */
    443         public static final int ERR_INPUT_AUDIO_AU_TOO_LARGE = 21;
    444 
    445         /** An input clip has a corrupted Audio AU. */
    446         public static final int ERR_INPUT_AUDIO_CORRUPTED_AU = 22;
    447 
    448         /** The video encoder encountered an Access Unit error. */
    449         public static final int ERR_ENCODER_ACCES_UNIT_ERROR = 23;
    450 
    451         /** Unsupported video format for Video Editing. */
    452         public static final int ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT = 24;
    453 
    454         /** Unsupported H263 profile for Video Editing. */
    455         public static final int ERR_EDITING_UNSUPPORTED_H263_PROFILE = 25;
    456 
    457         /** Unsupported MPEG-4 profile for Video Editing. */
    458         public static final int ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE = 26;
    459 
    460         /** Unsupported MPEG-4 RVLC tool for Video Editing. */
    461         public static final int ERR_EDITING_UNSUPPORTED_MPEG4_RVLC = 27;
    462 
    463         /** Unsupported audio format for Video Editing. */
    464         public static final int ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT = 28;
    465 
    466         /** File contains no supported stream. */
    467         public static final int ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE = 29;
    468 
    469         /** File contains no video stream or an unsupported video stream. */
    470         public static final int ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 30;
    471 
    472         /** Internal error, clip analysis version mismatch. */
    473         public static final int ERR_INVALID_CLIP_ANALYSIS_VERSION = 31;
    474 
    475         /**
    476          * At least one of the clip analysis has been generated on another
    477          * platform (WIN32, ARM, etc.).
    478          */
    479         public static final int ERR_INVALID_CLIP_ANALYSIS_PLATFORM = 32;
    480 
    481         /** Clips don't have the same video format (H263 or MPEG4). */
    482         public static final int ERR_INCOMPATIBLE_VIDEO_FORMAT = 33;
    483 
    484         /** Clips don't have the same frame size. */
    485         public static final int ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE = 34;
    486 
    487         /** Clips don't have the same MPEG-4 time scale. */
    488         public static final int ERR_INCOMPATIBLE_VIDEO_TIME_SCALE = 35;
    489 
    490         /** Clips don't have the same use of MPEG-4 data partitioning. */
    491         public static final int ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING = 36;
    492 
    493         /** MP3 clips can't be assembled. */
    494         public static final int ERR_UNSUPPORTED_MP3_ASSEMBLY = 37;
    495 
    496         /**
    497          * The input 3GPP file does not contain any supported audio or video
    498          * track.
    499          */
    500         public static final int ERR_NO_SUPPORTED_STREAM_IN_FILE = 38;
    501 
    502         /**
    503          * The Volume of the added audio track (AddVolume) must be strictly
    504          * superior than zero.
    505          */
    506         public static final int ERR_ADDVOLUME_EQUALS_ZERO = 39;
    507 
    508         /**
    509          * The time at which an audio track is added can't be higher than the
    510          * input video track duration..
    511          */
    512         public static final int ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION = 40;
    513 
    514         /** The audio track file format setting is undefined. */
    515         public static final int ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT = 41;
    516 
    517         /** The added audio track stream has an unsupported format. */
    518         public static final int ERR_UNSUPPORTED_ADDED_AUDIO_STREAM = 42;
    519 
    520         /** The audio mixing feature doesn't support the audio track type. */
    521         public static final int ERR_AUDIO_MIXING_UNSUPPORTED = 43;
    522 
    523         /** The audio mixing feature doesn't support MP3 audio tracks. */
    524         public static final int ERR_AUDIO_MIXING_MP3_UNSUPPORTED = 44;
    525 
    526         /**
    527          * An added audio track limits the available features: uiAddCts must be
    528          * 0 and bRemoveOriginal must be true.
    529          */
    530         public static final int ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK = 45;
    531 
    532         /**
    533          * An added audio track limits the available features: uiAddCts must be
    534          * 0 and bRemoveOriginal must be true.
    535          */
    536         public static final int ERR_FEATURE_UNSUPPORTED_WITH_AAC = 46;
    537 
    538         /** Input audio track is not of a type that can be mixed with output. */
    539         public static final int ERR_AUDIO_CANNOT_BE_MIXED = 47;
    540 
    541         /** Input audio track is not AMR-NB, so it can't be mixed with output. */
    542         public static final int ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED = 48;
    543 
    544         /**
    545          * An added EVRC audio track limit the available features: uiAddCts must
    546          * be 0 and bRemoveOriginal must be true.
    547          */
    548         public static final int ERR_FEATURE_UNSUPPORTED_WITH_EVRC = 49;
    549 
    550         /** H263 profiles other than 0 are not supported. */
    551         public static final int ERR_H263_PROFILE_NOT_SUPPORTED = 51;
    552 
    553         /** File contains no video stream or an unsupported video stream. */
    554         public static final int ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 52;
    555 
    556         /** Transcoding of the input file(s) is necessary. */
    557         public static final int WAR_TRANSCODING_NECESSARY = 53;
    558 
    559         /**
    560          * The size of the output file will exceed the maximum configured value.
    561          */
    562         public static final int WAR_MAX_OUTPUT_SIZE_EXCEEDED = 54;
    563 
    564         /** The time scale is too big. */
    565         public static final int WAR_TIMESCALE_TOO_BIG = 55;
    566 
    567         /** The year is out of range */
    568         public static final int ERR_CLOCK_BAD_REF_YEAR = 56;
    569 
    570         /** The directory could not be opened */
    571         public static final int ERR_DIR_OPEN_FAILED = 57;
    572 
    573         /** The directory could not be read */
    574         public static final int ERR_DIR_READ_FAILED = 58;
    575 
    576         /** There are no more entries in the current directory */
    577         public static final int ERR_DIR_NO_MORE_ENTRY = 59;
    578 
    579         /** The input parameter/s has error */
    580         public static final int ERR_PARAMETER = 60;
    581 
    582         /** There is a state machine error */
    583         public static final int ERR_STATE = 61;
    584 
    585         /** Memory allocation failed */
    586         public static final int ERR_ALLOC = 62;
    587 
    588         /** Context is invalid */
    589         public static final int ERR_BAD_CONTEXT = 63;
    590 
    591         /** Context creation failed */
    592         public static final int ERR_CONTEXT_FAILED = 64;
    593 
    594         /** Invalid stream ID */
    595         public static final int ERR_BAD_STREAM_ID = 65;
    596 
    597         /** Invalid option ID */
    598         public static final int ERR_BAD_OPTION_ID = 66;
    599 
    600         /** The option is write only */
    601         public static final int ERR_WRITE_ONLY = 67;
    602 
    603         /** The option is read only */
    604         public static final int ERR_READ_ONLY = 68;
    605 
    606         /** The feature is not implemented in this version */
    607         public static final int ERR_NOT_IMPLEMENTED = 69;
    608 
    609         /** The media type is not supported */
    610         public static final int ERR_UNSUPPORTED_MEDIA_TYPE = 70;
    611 
    612         /** No data to be encoded */
    613         public static final int WAR_NO_DATA_YET = 71;
    614 
    615         /** No data to be decoded */
    616         public static final int WAR_NO_MORE_STREAM = 72;
    617 
    618         /** Time stamp is invalid */
    619         public static final int WAR_INVALID_TIME = 73;
    620 
    621         /** No more data to be decoded */
    622         public static final int WAR_NO_MORE_AU = 74;
    623 
    624         /** Semaphore timed out */
    625         public static final int WAR_TIME_OUT = 75;
    626 
    627         /** Memory buffer is full */
    628         public static final int WAR_BUFFER_FULL = 76;
    629 
    630         /** Server has asked for redirection */
    631         public static final int WAR_REDIRECT = 77;
    632 
    633         /** Too many streams in input */
    634         public static final int WAR_TOO_MUCH_STREAMS = 78;
    635 
    636         /** The file cannot be opened/ written into as it is locked */
    637         public static final int ERR_FILE_LOCKED = 79;
    638 
    639         /** The file access mode is invalid */
    640         public static final int ERR_FILE_BAD_MODE_ACCESS = 80;
    641 
    642         /** The file pointer points to an invalid location */
    643         public static final int ERR_FILE_INVALID_POSITION = 81;
    644 
    645         /** Invalid string */
    646         public static final int ERR_STR_BAD_STRING = 94;
    647 
    648         /** The input string cannot be converted */
    649         public static final int ERR_STR_CONV_FAILED = 95;
    650 
    651         /** The string size is too large */
    652         public static final int ERR_STR_OVERFLOW = 96;
    653 
    654         /** Bad string arguments */
    655         public static final int ERR_STR_BAD_ARGS = 97;
    656 
    657         /** The string value is larger than maximum size allowed */
    658         public static final int WAR_STR_OVERFLOW = 98;
    659 
    660         /** The string value is not present in this comparison operation */
    661         public static final int WAR_STR_NOT_FOUND = 99;
    662 
    663         /** The thread is not started */
    664         public static final int ERR_THREAD_NOT_STARTED = 100;
    665 
    666         /** Trancoding done warning */
    667         public static final int WAR_TRANSCODING_DONE = 101;
    668 
    669         /** Unsupported mediatype */
    670         public static final int WAR_MEDIATYPE_NOT_SUPPORTED = 102;
    671 
    672         /** Input file contains invalid/unsupported streams */
    673         public static final int ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM = 103;
    674 
    675         /** Invalid input file */
    676         public static final int ERR_INVALID_INPUT_FILE = 104;
    677 
    678         /** Invalid output video format */
    679         public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT = 105;
    680 
    681         /** Invalid output video frame size */
    682         public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE = 106;
    683 
    684         /** Invalid output video frame rate */
    685         public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE = 107;
    686 
    687         /** Invalid output audio format */
    688         public static final int ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT = 108;
    689 
    690         /** Invalid video frame size for H.263 */
    691         public static final int ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 = 109;
    692 
    693         /** Invalid video frame rate for H.263 */
    694         public static final int ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 = 110;
    695 
    696         /** invalid playback duration */
    697         public static final int ERR_DURATION_IS_NULL = 111;
    698 
    699         /** Invalid H.263 profile in file */
    700         public static final int ERR_H263_FORBIDDEN_IN_MP4_FILE = 112;
    701 
    702         /** Invalid AAC sampling frequency */
    703         public static final int ERR_INVALID_AAC_SAMPLING_FREQUENCY = 113;
    704 
    705         /** Audio conversion failure */
    706         public static final int ERR_AUDIO_CONVERSION_FAILED = 114;
    707 
    708         /** Invalid trim start and end times */
    709         public static final int ERR_BEGIN_CUT_EQUALS_END_CUT = 115;
    710 
    711         /** End time smaller than start time for trim */
    712         public static final int ERR_END_CUT_SMALLER_THAN_BEGIN_CUT = 116;
    713 
    714         /** Output file size is small */
    715         public static final int ERR_MAXFILESIZE_TOO_SMALL = 117;
    716 
    717         /** Output video bitrate is too low */
    718         public static final int ERR_VIDEOBITRATE_TOO_LOW = 118;
    719 
    720         /** Output audio bitrate is too low */
    721         public static final int ERR_AUDIOBITRATE_TOO_LOW = 119;
    722 
    723         /** Output video bitrate is too high */
    724         public static final int ERR_VIDEOBITRATE_TOO_HIGH = 120;
    725 
    726         /** Output audio bitrate is too high */
    727         public static final int ERR_AUDIOBITRATE_TOO_HIGH = 121;
    728 
    729         /** Output file size is too small */
    730         public static final int ERR_OUTPUT_FILE_SIZE_TOO_SMALL = 122;
    731 
    732         /** Unknown stream type */
    733         public static final int ERR_READER_UNKNOWN_STREAM_TYPE = 123;
    734 
    735         /** Invalid metadata in input stream */
    736         public static final int WAR_READER_NO_METADATA = 124;
    737 
    738         /** Invalid file reader info warning */
    739         public static final int WAR_READER_INFORMATION_NOT_PRESENT = 125;
    740 
    741         /** Warning to indicate the the writer is being stopped */
    742         public static final int WAR_WRITER_STOP_REQ = 131;
    743 
    744         /** Video decoder failed to provide frame for transcoding */
    745         public static final int WAR_VIDEORENDERER_NO_NEW_FRAME = 132;
    746 
    747         /** Video deblocking filter is not implemented */
    748         public static final int WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED = 133;
    749 
    750         /** H.263 decoder profile not supported */
    751         public static final int ERR_DECODER_H263_PROFILE_NOT_SUPPORTED = 134;
    752 
    753         /** The input file contains unsupported H.263 profile */
    754         public static final int ERR_DECODER_H263_NOT_BASELINE = 135;
    755 
    756         /** There is no more space to store the output file */
    757         public static final int ERR_NOMORE_SPACE_FOR_FILE = 136;
    758 
    759         /** Internal error. */
    760         public static final int ERR_INTERNAL = 255;
    761     }
    762 
    763     /**
    764      * Defines output video formats.
    765      */
    766     public final class VideoFormat {
    767         /** No video present in output clip. Used to generate audio only clip */
    768         public static final int NO_VIDEO = 0;
    769 
    770         /** H263 video format. */
    771         public static final int H263 = 1;
    772 
    773         /** H264 video */
    774         public static final int H264 = 2;
    775 
    776         /** MPEG4 video format. */
    777         public static final int MPEG4 = 3;
    778 
    779         /** No transcoding. Output video format is same as input video format */
    780         public static final int NULL_VIDEO = 254;
    781 
    782         /** Unsupported video format. */
    783         public static final int UNSUPPORTED = 255;
    784     }
    785 
    786     /** Defines video frame sizes. */
    787     public final class VideoFrameSize {
    788 
    789         public static final int SIZE_UNDEFINED = -1;
    790 
    791         /** SQCIF 128 x 96 pixels. */
    792         public static final int SQCIF = 0;
    793 
    794         /** QQVGA 160 x 120 pixels. */
    795         public static final int QQVGA = 1;
    796 
    797         /** QCIF 176 x 144 pixels. */
    798         public static final int QCIF = 2;
    799 
    800         /** QVGA 320 x 240 pixels. */
    801         public static final int QVGA = 3;
    802 
    803         /** CIF 352 x 288 pixels. */
    804         public static final int CIF = 4;
    805 
    806         /** VGA 640 x 480 pixels. */
    807         public static final int VGA = 5;
    808 
    809         /** WVGA 800 X 480 pixels */
    810         public static final int WVGA = 6;
    811 
    812         /** NTSC 720 X 480 pixels */
    813         public static final int NTSC = 7;
    814 
    815         /** 640 x 360 */
    816         public static final int nHD = 8;
    817 
    818         /** 854 x 480 */
    819         public static final int WVGA16x9 = 9;
    820 
    821         /** 720p 1280 X 720 */
    822         public static final int V720p = 10;
    823 
    824         /** W720p 1080 x 720 */
    825         public static final int W720p = 11;
    826 
    827         /** S720p 960 x 720 */
    828         public static final int S720p = 12;
    829 
    830         /** 1080p 1920 x 1080 */
    831         public static final int V1080p = 13;
    832     }
    833 
    834     /**
    835      * Defines output video frame rates.
    836      */
    837     public final class VideoFrameRate {
    838         /** Frame rate of 5 frames per second. */
    839         public static final int FR_5_FPS = 0;
    840 
    841         /** Frame rate of 7.5 frames per second. */
    842         public static final int FR_7_5_FPS = 1;
    843 
    844         /** Frame rate of 10 frames per second. */
    845         public static final int FR_10_FPS = 2;
    846 
    847         /** Frame rate of 12.5 frames per second. */
    848         public static final int FR_12_5_FPS = 3;
    849 
    850         /** Frame rate of 15 frames per second. */
    851         public static final int FR_15_FPS = 4;
    852 
    853         /** Frame rate of 20 frames per second. */
    854         public static final int FR_20_FPS = 5;
    855 
    856         /** Frame rate of 25 frames per second. */
    857         public static final int FR_25_FPS = 6;
    858 
    859         /** Frame rate of 30 frames per second. */
    860         public static final int FR_30_FPS = 7;
    861     }
    862 
    863     /**
    864      * Defines Video Effect Types.
    865      */
    866     public static class VideoEffect {
    867 
    868         public static final int NONE = 0;
    869 
    870         public static final int FADE_FROM_BLACK = 8;
    871 
    872         public static final int FADE_TO_BLACK = 16;
    873 
    874         public static final int EXTERNAL = 256;
    875 
    876         public static final int BLACK_AND_WHITE = 257;
    877 
    878         public static final int PINK = 258;
    879 
    880         public static final int GREEN = 259;
    881 
    882         public static final int SEPIA = 260;
    883 
    884         public static final int NEGATIVE = 261;
    885 
    886         public static final int FRAMING = 262;
    887 
    888         public static final int TEXT = 263;
    889 
    890         public static final int ZOOM_IN = 264;
    891 
    892         public static final int ZOOM_OUT = 265;
    893 
    894         public static final int FIFTIES = 266;
    895 
    896         public static final int COLORRGB16 = 267;
    897 
    898         public static final int GRADIENT = 268;
    899     }
    900 
    901     /**
    902      * Defines the video transitions.
    903      */
    904     public static class VideoTransition {
    905         /** No transition */
    906         public static final int NONE = 0;
    907 
    908         /** Cross fade transition */
    909         public static final int CROSS_FADE = 1;
    910 
    911         /** External transition. Currently not available. */
    912         public static final int EXTERNAL = 256;
    913 
    914         /** AlphaMagic transition. */
    915         public static final int ALPHA_MAGIC = 257;
    916 
    917         /** Slide transition. */
    918         public static final int SLIDE_TRANSITION = 258;
    919 
    920         /** Fade to black transition. */
    921         public static final int FADE_BLACK = 259;
    922     }
    923 
    924     /**
    925      * Defines settings for the AlphaMagic transition
    926      */
    927     public static class AlphaMagicSettings {
    928         /** Name of the alpha file (JPEG file). */
    929         public String file;
    930 
    931         /** Blending percentage [0..100] 0 = no blending. */
    932         public int blendingPercent;
    933 
    934         /** Invert the default rotation direction of the AlphaMagic effect. */
    935         public boolean invertRotation;
    936 
    937         public int rgbWidth;
    938         public int rgbHeight;
    939     }
    940 
    941     /** Defines the direction of the Slide transition. */
    942     public static final class SlideDirection {
    943 
    944         /** Right out left in. */
    945         public static final int RIGHT_OUT_LEFT_IN = 0;
    946 
    947         /** Left out right in. */
    948         public static final int LEFT_OUT_RIGTH_IN = 1;
    949 
    950         /** Top out bottom in. */
    951         public static final int TOP_OUT_BOTTOM_IN = 2;
    952 
    953         /** Bottom out top in */
    954         public static final int BOTTOM_OUT_TOP_IN = 3;
    955     }
    956 
    957     /** Defines the Slide transition settings. */
    958     public static class SlideTransitionSettings {
    959         /**
    960          * Direction of the slide transition. See {@link SlideDirection
    961          * SlideDirection} for valid values.
    962          */
    963         public int direction;
    964     }
    965 
    966     /**
    967      * Defines the settings of a single clip.
    968      */
    969     public static class ClipSettings {
    970 
    971         /**
    972          * The path to the clip file.
    973          * <p>
    974          * File format of the clip, it can be:
    975          * <ul>
    976          * <li>3GP file containing MPEG4/H263/H264 video and AAC/AMR audio
    977          * <li>JPG file
    978          * </ul>
    979          */
    980 
    981         public String clipPath;
    982 
    983         /**
    984          * The path of the decoded file. This is used only for image files.
    985          */
    986         public String clipDecodedPath;
    987 
    988         /**
    989          * The path of the Original file. This is used only for image files.
    990          */
    991         public String clipOriginalPath;
    992 
    993         /**
    994          * File type of the clip. See {@link FileType FileType} for valid
    995          * values.
    996          */
    997         public int fileType;
    998 
    999         /** Begin of the cut in the clip in milliseconds. */
   1000         public int beginCutTime;
   1001 
   1002         /**
   1003          * End of the cut in the clip in milliseconds. Set both
   1004          * <code>beginCutTime</code> and <code>endCutTime</code> to
   1005          * <code>0</code> to get the full length of the clip without a cut. In
   1006          * case of JPG clip, this is the duration of the JPEG file.
   1007          */
   1008         public int endCutTime;
   1009 
   1010         /**
   1011          * Begin of the cut in the clip in percentage of the file duration.
   1012          */
   1013         public int beginCutPercent;
   1014 
   1015         /**
   1016          * End of the cut in the clip in percentage of the file duration. Set
   1017          * both <code>beginCutPercent</code> and <code>endCutPercent</code> to
   1018          * <code>0</code> to get the full length of the clip without a cut.
   1019          */
   1020         public int endCutPercent;
   1021 
   1022         /** Enable panning and zooming. */
   1023         public boolean panZoomEnabled;
   1024 
   1025         /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
   1026         public int panZoomPercentStart;
   1027 
   1028         /** Top left X coordinate at start of clip. */
   1029         public int panZoomTopLeftXStart;
   1030 
   1031         /** Top left Y coordinate at start of clip. */
   1032         public int panZoomTopLeftYStart;
   1033 
   1034         /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
   1035         public int panZoomPercentEnd;
   1036 
   1037         /** Top left X coordinate at end of clip. */
   1038         public int panZoomTopLeftXEnd;
   1039 
   1040         /** Top left Y coordinate at end of clip. */
   1041         public int panZoomTopLeftYEnd;
   1042 
   1043         /**
   1044          * Set The media rendering. See {@link MediaRendering MediaRendering}
   1045          * for valid values.
   1046          */
   1047         public int mediaRendering;
   1048 
   1049         /**
   1050          * RGB width and Height
   1051          */
   1052          public int rgbWidth;
   1053          public int rgbHeight;
   1054          /**
   1055          * Video rotation degree.
   1056          */
   1057          public int rotationDegree;
   1058     }
   1059 
   1060     /**
   1061      * Defines settings for a transition.
   1062      */
   1063     public static class TransitionSettings {
   1064 
   1065         /** Duration of the transition in msec. */
   1066         public int duration;
   1067 
   1068         /**
   1069          * Transition type for video. See {@link VideoTransition
   1070          * VideoTransition} for valid values.
   1071          */
   1072         public int videoTransitionType;
   1073 
   1074         /**
   1075          * Transition type for audio. See {@link AudioTransition
   1076          * AudioTransition} for valid values.
   1077          */
   1078         public int audioTransitionType;
   1079 
   1080         /**
   1081          * Transition behaviour. See {@link TransitionBehaviour
   1082          * TransitionBehaviour} for valid values.
   1083          */
   1084         public int transitionBehaviour;
   1085 
   1086         /**
   1087          * Settings for AlphaMagic transition. Only needs to be set if
   1088          * <code>videoTransitionType</code> is set to
   1089          * <code>VideoTransition.ALPHA_MAGIC</code>. See
   1090          * {@link AlphaMagicSettings AlphaMagicSettings}.
   1091          */
   1092         public AlphaMagicSettings alphaSettings;
   1093 
   1094         /**
   1095          * Settings for the Slide transition. See
   1096          * {@link SlideTransitionSettings SlideTransitionSettings}.
   1097          */
   1098         public SlideTransitionSettings slideSettings;
   1099     }
   1100 
   1101     public static final class AudioTransition {
   1102         /** No audio transition. */
   1103         public static final int NONE = 0;
   1104 
   1105         /** Cross-fade audio transition. */
   1106         public static final int CROSS_FADE = 1;
   1107     }
   1108 
   1109     /**
   1110      * Defines transition behaviors.
   1111      */
   1112     public static final class TransitionBehaviour {
   1113 
   1114         /** The transition uses an increasing speed. */
   1115         public static final int SPEED_UP = 0;
   1116 
   1117         /** The transition uses a linear (constant) speed. */
   1118         public static final int LINEAR = 1;
   1119 
   1120         /** The transition uses a decreasing speed. */
   1121         public static final int SPEED_DOWN = 2;
   1122 
   1123         /**
   1124          * The transition uses a constant speed, but slows down in the middle
   1125          * section.
   1126          */
   1127         public static final int SLOW_MIDDLE = 3;
   1128 
   1129         /**
   1130          * The transition uses a constant speed, but increases speed in the
   1131          * middle section.
   1132          */
   1133         public static final int FAST_MIDDLE = 4;
   1134     }
   1135 
   1136     /**
   1137      * Defines settings for the background music.
   1138      */
   1139     public static class BackgroundMusicSettings {
   1140 
   1141         /** Background music file. */
   1142         public String file;
   1143 
   1144         /** File type. See {@link FileType FileType} for valid values. */
   1145         public int fileType;
   1146 
   1147         /**
   1148          * Insertion time in milliseconds, in the output video where the
   1149          * background music must be inserted.
   1150          */
   1151         public long insertionTime;
   1152 
   1153         /**
   1154          * Volume, as a percentage of the background music track, to use. If
   1155          * this field is set to 100, the background music will replace the audio
   1156          * from the video input file(s).
   1157          */
   1158         public int volumePercent;
   1159 
   1160         /**
   1161          * Start time in milliseconds in the background muisc file from where
   1162          * the background music should loop. Set both <code>beginLoop</code> and
   1163          * <code>endLoop</code> to <code>0</code> to disable looping.
   1164          */
   1165         public long beginLoop;
   1166 
   1167         /**
   1168          * End time in milliseconds in the background music file to where the
   1169          * background music should loop. Set both <code>beginLoop</code> and
   1170          * <code>endLoop</code> to <code>0</code> to disable looping.
   1171          */
   1172         public long endLoop;
   1173 
   1174         public boolean enableDucking;
   1175 
   1176         public int duckingThreshold;
   1177 
   1178         public int lowVolume;
   1179 
   1180         public boolean isLooping;
   1181     }
   1182 
   1183     /** Defines settings for an effect. */
   1184     public static class AudioEffect {
   1185         /** No audio effect. */
   1186         public static final int NONE = 0;
   1187 
   1188         /** Fade-in effect. */
   1189         public static final int FADE_IN = 8;
   1190 
   1191         /** Fade-out effect. */
   1192         public static final int FADE_OUT = 16;
   1193     }
   1194 
   1195     /** Defines the effect settings. */
   1196     public static class EffectSettings {
   1197 
   1198         /** Start time of the effect in milliseconds. */
   1199         public int startTime;
   1200 
   1201         /** Duration of the effect in milliseconds. */
   1202         public int duration;
   1203 
   1204         /**
   1205          * Video effect type. See {@link VideoEffect VideoEffect} for valid
   1206          * values.
   1207          */
   1208         public int videoEffectType;
   1209 
   1210         /**
   1211          * Audio effect type. See {@link AudioEffect AudioEffect} for valid
   1212          * values.
   1213          */
   1214         public int audioEffectType;
   1215 
   1216         /**
   1217          * Start time of the effect in percents of the duration of the clip. A
   1218          * value of 0 percent means start time is from the beginning of the
   1219          * clip.
   1220          */
   1221         public int startPercent;
   1222 
   1223         /**
   1224          * Duration of the effect in percents of the duration of the clip.
   1225          */
   1226         public int durationPercent;
   1227 
   1228         /**
   1229          * Framing file.
   1230          * <p>
   1231          * This field is only used when the field <code>videoEffectType</code>
   1232          * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
   1233          * this field is ignored.
   1234          */
   1235         public String framingFile;
   1236 
   1237         /**
   1238          * Framing buffer.
   1239          * <p>
   1240          * This field is only used when the field <code>videoEffectType</code>
   1241          * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
   1242          * this field is ignored.
   1243          */
   1244         public int[] framingBuffer;
   1245 
   1246         /**
   1247          * Bitmap type Can be from RGB_565 (4), ARGB_4444 (5), ARGB_8888 (6);
   1248          **/
   1249 
   1250         public int bitmapType;
   1251 
   1252         public int width;
   1253 
   1254         public int height;
   1255 
   1256         /**
   1257          * Top left x coordinate. This coordinate is used to set the x
   1258          * coordinate of the picture in the framing file when the framing file
   1259          * is selected. The x coordinate is also used to set the location of the
   1260          * text in the text effect.
   1261          * <p>
   1262          * This field is only used when the field <code>videoEffectType</code>
   1263          * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
   1264          * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
   1265          * ignored.
   1266          */
   1267         public int topLeftX;
   1268 
   1269         /**
   1270          * Top left y coordinate. This coordinate is used to set the y
   1271          * coordinate of the picture in the framing file when the framing file
   1272          * is selected. The y coordinate is also used to set the location of the
   1273          * text in the text effect.
   1274          * <p>
   1275          * This field is only used when the field <code>videoEffectType</code>
   1276          * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
   1277          * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
   1278          * ignored.
   1279          */
   1280         public int topLeftY;
   1281 
   1282         /**
   1283          * Should the frame be resized or not. If this field is set to
   1284          * <link>true</code> then the frame size is matched with the output
   1285          * video size.
   1286          * <p>
   1287          * This field is only used when the field <code>videoEffectType</code>
   1288          * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
   1289          * this field is ignored.
   1290          */
   1291         public boolean framingResize;
   1292 
   1293         /**
   1294          * Size to which the framing buffer needs to be resized to
   1295          * This is valid only if framingResize is true
   1296          */
   1297         public int framingScaledSize;
   1298         /**
   1299          * Text to insert in the video.
   1300          * <p>
   1301          * This field is only used when the field <code>videoEffectType</code>
   1302          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
   1303          * field is ignored.
   1304          */
   1305         public String text;
   1306 
   1307         /**
   1308          * Text attributes for the text to insert in the video.
   1309          * <p>
   1310          * This field is only used when the field <code>videoEffectType</code>
   1311          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
   1312          * field is ignored. For more details about this field see the
   1313          * integration guide.
   1314          */
   1315         public String textRenderingData;
   1316 
   1317         /** Width of the text buffer in pixels. */
   1318         public int textBufferWidth;
   1319 
   1320         /** Height of the text buffer in pixels. */
   1321         public int textBufferHeight;
   1322 
   1323         /**
   1324          * Processing rate for the fifties effect. A high value (e.g. 30)
   1325          * results in high effect strength.
   1326          * <p>
   1327          * This field is only used when the field <code>videoEffectType</code>
   1328          * is set to {@link VideoEffect#FIFTIES VideoEffect.FIFTIES}. Otherwise
   1329          * this field is ignored.
   1330          */
   1331         public int fiftiesFrameRate;
   1332 
   1333         /**
   1334          * RGB 16 color of the RGB16 and gradient color effect.
   1335          * <p>
   1336          * This field is only used when the field <code>videoEffectType</code>
   1337          * is set to {@link VideoEffect#COLORRGB16 VideoEffect.COLORRGB16} or
   1338          * {@link VideoEffect#GRADIENT VideoEffect.GRADIENT}. Otherwise this
   1339          * field is ignored.
   1340          */
   1341         public int rgb16InputColor;
   1342 
   1343         /**
   1344          * Start alpha blending percentage.
   1345          * <p>
   1346          * This field is only used when the field <code>videoEffectType</code>
   1347          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
   1348          * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
   1349          * is ignored.
   1350          */
   1351         public int alphaBlendingStartPercent;
   1352 
   1353         /**
   1354          * Middle alpha blending percentage.
   1355          * <p>
   1356          * This field is only used when the field <code>videoEffectType</code>
   1357          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
   1358          * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
   1359          * is ignored.
   1360          */
   1361         public int alphaBlendingMiddlePercent;
   1362 
   1363         /**
   1364          * End alpha blending percentage.
   1365          * <p>
   1366          * This field is only used when the field <code>videoEffectType</code>
   1367          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
   1368          * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
   1369          * is ignored.
   1370          */
   1371         public int alphaBlendingEndPercent;
   1372 
   1373         /**
   1374          * Duration, in percentage of effect duration of the fade-in phase.
   1375          * <p>
   1376          * This field is only used when the field <code>videoEffectType</code>
   1377          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
   1378          * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
   1379          * is ignored.
   1380          */
   1381         public int alphaBlendingFadeInTimePercent;
   1382 
   1383         /**
   1384          * Duration, in percentage of effect duration of the fade-out phase.
   1385          * <p>
   1386          * This field is only used when the field <code>videoEffectType</code>
   1387          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
   1388          * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
   1389          * is ignored.
   1390          */
   1391         public int alphaBlendingFadeOutTimePercent;
   1392     }
   1393 
   1394     /** Defines the clip properties for preview */
   1395     public static class PreviewClips {
   1396 
   1397         /**
   1398          * The path to the clip file.
   1399          * <p>
   1400          * File format of the clip, it can be:
   1401          * <ul>
   1402          * <li>3GP file containing MPEG4/H263 video and AAC/AMR audio
   1403          * <li>JPG file
   1404          * </ul>
   1405          */
   1406 
   1407         public String clipPath;
   1408 
   1409         /**
   1410          * File type of the clip. See {@link FileType FileType} for valid
   1411          * values.
   1412          */
   1413         public int fileType;
   1414 
   1415         /** Begin of the cut in the clip in milliseconds. */
   1416         public long beginPlayTime;
   1417 
   1418         public long endPlayTime;
   1419 
   1420         /**
   1421          * Set The media rendering. See {@link MediaRendering MediaRendering}
   1422          * for valid values.
   1423          */
   1424         public int mediaRendering;
   1425 
   1426     }
   1427 
   1428     /** Defines the audio settings. */
   1429     public static class AudioSettings {
   1430 
   1431         String pFile;
   1432 
   1433         /** < PCM file path */
   1434         String Id;
   1435 
   1436         boolean bRemoveOriginal;
   1437 
   1438         /** < If true, the original audio track is not taken into account */
   1439         int channels;
   1440 
   1441         /** < Number of channels (1=mono, 2=stereo) of BGM clip */
   1442         int Fs;
   1443 
   1444         /**
   1445          * < Sampling audio frequency (8000 for amr, 16000 or more for aac) of
   1446          * BGM clip
   1447          */
   1448         int ExtendedFs;
   1449 
   1450         /** < Extended frequency for AAC+, eAAC+ streams of BGM clip */
   1451         long startMs;
   1452 
   1453         /** < Time, in milliseconds, at which the added audio track is inserted */
   1454         long beginCutTime;
   1455 
   1456         long endCutTime;
   1457 
   1458         int fileType;
   1459 
   1460         int volume;
   1461 
   1462         /** < Volume, in percentage, of the added audio track */
   1463         boolean loop;
   1464 
   1465         /** < Looping on/off > **/
   1466 
   1467         /** Audio mix and Duck **/
   1468         int ducking_threshold;
   1469 
   1470         int ducking_lowVolume;
   1471 
   1472         boolean bInDucking_enable;
   1473 
   1474         String pcmFilePath;
   1475     }
   1476 
   1477     /** Encapsulates preview clips and effect settings */
   1478     public static class PreviewSettings {
   1479 
   1480         public PreviewClips[] previewClipsArray;
   1481 
   1482         /** The effect settings. */
   1483         public EffectSettings[] effectSettingsArray;
   1484 
   1485     }
   1486 
   1487     /** Encapsulates clip properties */
   1488     public static class PreviewClipProperties {
   1489 
   1490         public Properties[] clipProperties;
   1491 
   1492     }
   1493 
   1494     /** Defines the editing settings. */
   1495     public static class EditSettings {
   1496 
   1497         /**
   1498          * Array of clip settings. There is one <code>clipSetting</code> for
   1499          * each clip.
   1500          */
   1501         public ClipSettings[] clipSettingsArray;
   1502 
   1503         /**
   1504          * Array of transition settings. If there are n clips (and thus n
   1505          * <code>clipSettings</code>) then there are (n-1) transitions and (n-1)
   1506          * <code>transistionSettings</code> in
   1507          * <code>transistionSettingsArray</code>.
   1508          */
   1509         public TransitionSettings[] transitionSettingsArray;
   1510 
   1511         /** The effect settings. */
   1512         public EffectSettings[] effectSettingsArray;
   1513 
   1514         /**
   1515          * Video frame rate of the output clip. See {@link VideoFrameRate
   1516          * VideoFrameRate} for valid values.
   1517          */
   1518         public int videoFrameRate;
   1519 
   1520         /** Output file name. Must be an absolute path. */
   1521         public String outputFile;
   1522 
   1523         /**
   1524          * Size of the video frames in the output clip. See
   1525          * {@link VideoFrameSize VideoFrameSize} for valid values.
   1526          */
   1527         public int videoFrameSize;
   1528 
   1529         /**
   1530          * Format of the video stream in the output clip. See
   1531          * {@link VideoFormat VideoFormat} for valid values.
   1532          */
   1533         public int videoFormat;
   1534 
   1535         /**
   1536          * Profile of the video stream in the output clip.
   1537          */
   1538         public int videoProfile;
   1539 
   1540         /**
   1541          * Level of the video stream in the output clip.
   1542          */
   1543         public int videoLevel;
   1544 
   1545         /**
   1546          * Format of the audio stream in the output clip. See
   1547          * {@link AudioFormat AudioFormat} for valid values.
   1548          */
   1549         public int audioFormat;
   1550 
   1551         /**
   1552          * Sampling frequency of the audio stream in the output clip. See
   1553          * {@link AudioSamplingFrequency AudioSamplingFrequency} for valid
   1554          * values.
   1555          */
   1556         public int audioSamplingFreq;
   1557 
   1558         /**
   1559          * Maximum file size. By setting this you can set the maximum size of
   1560          * the output clip. Set it to <code>0</code> to let the class ignore
   1561          * this filed.
   1562          */
   1563         public int maxFileSize;
   1564 
   1565         /**
   1566          * Number of audio channels in output clip. Use <code>0</code> for none,
   1567          * <code>1</code> for mono or <code>2</code> for stereo. None is only
   1568          * allowed when the <code>audioFormat</code> field is set to
   1569          * {@link AudioFormat#NO_AUDIO AudioFormat.NO_AUDIO} or
   1570          * {@link AudioFormat#NULL_AUDIO AudioFormat.NULL_AUDIO} Mono is only
   1571          * allowed when the <code>audioFormat</code> field is set to
   1572          * {@link AudioFormat#AAC AudioFormat.AAC}
   1573          */
   1574         public int audioChannels;
   1575 
   1576         /** Video bitrate. See {@link Bitrate Bitrate} for valid values. */
   1577         public int videoBitrate;
   1578 
   1579         /** Audio bitrate. See {@link Bitrate Bitrate} for valid values. */
   1580         public int audioBitrate;
   1581 
   1582         /**
   1583          * Background music settings. See {@link BackgroundMusicSettings
   1584          * BackgroundMusicSettings} for valid values.
   1585          */
   1586         public BackgroundMusicSettings backgroundMusicSettings;
   1587 
   1588         public int primaryTrackVolume;
   1589 
   1590     }
   1591 
   1592     /**
   1593      * Defines the media properties.
   1594      **/
   1595 
   1596     public static class Properties {
   1597 
   1598         /**
   1599          * Duration of the media in milliseconds.
   1600          */
   1601 
   1602         public int duration;
   1603 
   1604         /**
   1605          * File type.
   1606          */
   1607 
   1608         public int fileType;
   1609 
   1610         /**
   1611          * Video format.
   1612          */
   1613 
   1614         public int videoFormat;
   1615 
   1616         /**
   1617          * Duration of the video stream of the media in milliseconds.
   1618          */
   1619 
   1620         public int videoDuration;
   1621 
   1622         /**
   1623          * Bitrate of the video stream of the media.
   1624          */
   1625 
   1626         public int videoBitrate;
   1627 
   1628         /**
   1629          * Width of the video frames or the width of the still picture in
   1630          * pixels.
   1631          */
   1632 
   1633         public int width;
   1634 
   1635         /**
   1636          * Height of the video frames or the height of the still picture in
   1637          * pixels.
   1638          */
   1639 
   1640         public int height;
   1641 
   1642         /**
   1643          * Average frame rate of video in the media in frames per second.
   1644          */
   1645 
   1646         public float averageFrameRate;
   1647 
   1648         /**
   1649          * Profile of the video in the media.
   1650          */
   1651 
   1652         public int profile;
   1653 
   1654         /**
   1655          * Level of the video in the media.
   1656          */
   1657 
   1658         public int level;
   1659 
   1660         /**
   1661          * Is Video Profile supported.
   1662          */
   1663 
   1664         public boolean profileSupported;
   1665 
   1666         /**
   1667          * Is Video Level supported.
   1668          */
   1669 
   1670         public boolean levelSupported;
   1671 
   1672         /**
   1673          * Audio format.
   1674          */
   1675 
   1676         public int audioFormat;
   1677 
   1678         /**
   1679          * Duration of the audio stream of the media in milliseconds.
   1680          */
   1681 
   1682         public int audioDuration;
   1683 
   1684         /**
   1685          * Bitrate of the audio stream of the media.
   1686          */
   1687 
   1688         public int audioBitrate;
   1689 
   1690         /**
   1691          * Number of audio channels in the media.
   1692          */
   1693 
   1694         public int audioChannels;
   1695 
   1696         /**
   1697          * Sampling frequency of the audio stream in the media in samples per
   1698          * second.
   1699          */
   1700 
   1701         public int audioSamplingFrequency;
   1702 
   1703         /**
   1704          * Volume value of the audio track as percentage.
   1705          */
   1706         public int audioVolumeValue;
   1707 
   1708         /**
   1709          * Video rotation degree.
   1710          */
   1711         public int videoRotation;
   1712 
   1713         public String Id;
   1714     }
   1715 
   1716     /**
   1717      * Constructor
   1718      *
   1719      * @param projectPath The path where the VideoEditor stores all files
   1720      *        related to the project
   1721      * @param lock The semaphore
   1722      * @param veObj The video editor reference
   1723      */
   1724     public MediaArtistNativeHelper(String projectPath, Semaphore lock, VideoEditor veObj) {
   1725         mProjectPath = projectPath;
   1726         if (veObj != null) {
   1727             mVideoEditor = veObj;
   1728         } else {
   1729             mVideoEditor = null;
   1730             throw new IllegalArgumentException("video editor object is null");
   1731         }
   1732         if (mStoryBoardSettings == null) {
   1733             mStoryBoardSettings = new EditSettings();
   1734         }
   1735 
   1736         mLock = lock;
   1737 
   1738         _init(mProjectPath, "null");
   1739         mAudioTrackPCMFilePath = null;
   1740     }
   1741 
   1742     /**
   1743      * @return The project path
   1744      */
   1745     String getProjectPath() {
   1746         return mProjectPath;
   1747     }
   1748 
   1749     /**
   1750      * @return The Audio Track PCM file path
   1751      */
   1752     String getProjectAudioTrackPCMFilePath() {
   1753         return mAudioTrackPCMFilePath;
   1754     }
   1755 
   1756     /**
   1757      * Invalidates the PCM file
   1758      */
   1759     void invalidatePcmFile() {
   1760         if (mAudioTrackPCMFilePath != null) {
   1761             new File(mAudioTrackPCMFilePath).delete();
   1762             mAudioTrackPCMFilePath = null;
   1763         }
   1764     }
   1765 
   1766     @SuppressWarnings("unused")
   1767     private void onProgressUpdate(int taskId, int progress) {
   1768         if (mProcessingState == PROCESSING_EXPORT) {
   1769             if (mExportProgressListener != null) {
   1770                 if (mProgressToApp < progress) {
   1771                     mExportProgressListener.onProgress(mVideoEditor, mOutputFilename, progress);
   1772                     /* record previous progress */
   1773                     mProgressToApp = progress;
   1774                 }
   1775             }
   1776         }
   1777         else {
   1778             // Adapt progress depending on current state
   1779             int actualProgress = 0;
   1780             int action = 0;
   1781 
   1782             if (mProcessingState == PROCESSING_AUDIO_PCM) {
   1783                 action = MediaProcessingProgressListener.ACTION_DECODE;
   1784             } else {
   1785                 action = MediaProcessingProgressListener.ACTION_ENCODE;
   1786             }
   1787 
   1788             switch (mProcessingState) {
   1789                 case PROCESSING_AUDIO_PCM:
   1790                     actualProgress = progress;
   1791                     break;
   1792                 case PROCESSING_TRANSITION:
   1793                     actualProgress = progress;
   1794                     break;
   1795                 case PROCESSING_KENBURNS:
   1796                     actualProgress = progress;
   1797                     break;
   1798                 case PROCESSING_INTERMEDIATE1:
   1799                     if ((progress == 0) && (mProgressToApp != 0)) {
   1800                         mProgressToApp = 0;
   1801                     }
   1802                     if ((progress != 0) || (mProgressToApp != 0)) {
   1803                         actualProgress = progress/4;
   1804                     }
   1805                     break;
   1806                 case PROCESSING_INTERMEDIATE2:
   1807                     if ((progress != 0) || (mProgressToApp != 0)) {
   1808                         actualProgress = 25 + progress/4;
   1809                     }
   1810                     break;
   1811                 case PROCESSING_INTERMEDIATE3:
   1812                     if ((progress != 0) || (mProgressToApp != 0)) {
   1813                         actualProgress = 50 + progress/2;
   1814                     }
   1815                     break;
   1816                 case PROCESSING_NONE:
   1817 
   1818                 default:
   1819                     Log.e(TAG, "ERROR unexpected State=" + mProcessingState);
   1820                     return;
   1821             }
   1822             if ((mProgressToApp != actualProgress) && (actualProgress != 0)) {
   1823 
   1824                 mProgressToApp = actualProgress;
   1825 
   1826                 if (mMediaProcessingProgressListener != null) {
   1827                     // Send the progress indication
   1828                     mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
   1829                                                                 actualProgress);
   1830                 }
   1831             }
   1832             /* avoid 0 in next intermediate call */
   1833             if (mProgressToApp == 0) {
   1834                 if (mMediaProcessingProgressListener != null) {
   1835                     /*
   1836                      *  Send the progress indication
   1837                      */
   1838                     mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
   1839                                                                 actualProgress);
   1840                 }
   1841                 mProgressToApp = 1;
   1842             }
   1843         }
   1844     }
   1845 
   1846     @SuppressWarnings("unused")
   1847     private void onPreviewProgressUpdate(int progress, boolean isFinished,
   1848                   boolean updateOverlay, String filename, int renderingMode) {
   1849         if (mPreviewProgressListener != null) {
   1850             if (mIsFirstProgress) {
   1851                 mPreviewProgressListener.onStart(mVideoEditor);
   1852                 mIsFirstProgress = false;
   1853             }
   1854 
   1855             final VideoEditor.OverlayData overlayData;
   1856             if (updateOverlay) {
   1857                 overlayData = new VideoEditor.OverlayData();
   1858                 if (filename != null) {
   1859                     overlayData.set(BitmapFactory.decodeFile(filename), renderingMode);
   1860                 } else {
   1861                     overlayData.setClear();
   1862                 }
   1863             } else {
   1864                 overlayData = null;
   1865             }
   1866 
   1867             if (progress != 0) {
   1868                 mPreviewProgress = progress;
   1869             }
   1870 
   1871             if (isFinished) {
   1872                 mPreviewProgressListener.onStop(mVideoEditor);
   1873             } else {
   1874                 mPreviewProgressListener.onProgress(mVideoEditor, progress, overlayData);
   1875             }
   1876         }
   1877     }
   1878 
   1879     /**
   1880      * Release the native helper object
   1881      */
   1882     void releaseNativeHelper() throws InterruptedException {
   1883         release();
   1884     }
   1885 
   1886     /**
   1887      * Release the native helper to end the Audio Graph process
   1888      */
   1889     @SuppressWarnings("unused")
   1890     private void onAudioGraphExtractProgressUpdate(int progress, boolean isVideo) {
   1891         if ((mExtractAudioWaveformProgressListener != null) && (progress > 0)) {
   1892             mExtractAudioWaveformProgressListener.onProgress(progress);
   1893         }
   1894     }
   1895 
   1896     /**
   1897      * Populates the Effect Settings in EffectSettings
   1898      *
   1899      * @param effects The reference of EffectColor
   1900      *
   1901      * @return The populated effect settings in EffectSettings reference
   1902      */
   1903     EffectSettings getEffectSettings(EffectColor effects) {
   1904         EffectSettings effectSettings = new EffectSettings();
   1905         effectSettings.startTime = (int)effects.getStartTime();
   1906         effectSettings.duration = (int)effects.getDuration();
   1907         effectSettings.videoEffectType = getEffectColorType(effects);
   1908         effectSettings.audioEffectType = 0;
   1909         effectSettings.startPercent = 0;
   1910         effectSettings.durationPercent = 0;
   1911         effectSettings.framingFile = null;
   1912         effectSettings.topLeftX = 0;
   1913         effectSettings.topLeftY = 0;
   1914         effectSettings.framingResize = false;
   1915         effectSettings.text = null;
   1916         effectSettings.textRenderingData = null;
   1917         effectSettings.textBufferWidth = 0;
   1918         effectSettings.textBufferHeight = 0;
   1919         if (effects.getType() == EffectColor.TYPE_FIFTIES) {
   1920             effectSettings.fiftiesFrameRate = 15;
   1921         } else {
   1922             effectSettings.fiftiesFrameRate = 0;
   1923         }
   1924 
   1925         if ((effectSettings.videoEffectType == VideoEffect.COLORRGB16)
   1926                 || (effectSettings.videoEffectType == VideoEffect.GRADIENT)) {
   1927             effectSettings.rgb16InputColor = effects.getColor();
   1928         }
   1929 
   1930         effectSettings.alphaBlendingStartPercent = 0;
   1931         effectSettings.alphaBlendingMiddlePercent = 0;
   1932         effectSettings.alphaBlendingEndPercent = 0;
   1933         effectSettings.alphaBlendingFadeInTimePercent = 0;
   1934         effectSettings.alphaBlendingFadeOutTimePercent = 0;
   1935         return effectSettings;
   1936     }
   1937 
   1938     /**
   1939      * Populates the Overlay Settings in EffectSettings
   1940      *
   1941      * @param overlay The reference of OverlayFrame
   1942      *
   1943      * @return The populated overlay settings in EffectSettings reference
   1944      */
   1945     EffectSettings getOverlaySettings(OverlayFrame overlay) {
   1946         EffectSettings effectSettings = new EffectSettings();
   1947         Bitmap bitmap = null;
   1948 
   1949         effectSettings.startTime = (int)overlay.getStartTime();
   1950         effectSettings.duration = (int)overlay.getDuration();
   1951         effectSettings.videoEffectType = VideoEffect.FRAMING;
   1952         effectSettings.audioEffectType = 0;
   1953         effectSettings.startPercent = 0;
   1954         effectSettings.durationPercent = 0;
   1955         effectSettings.framingFile = null;
   1956 
   1957         if ((bitmap = overlay.getBitmap()) != null) {
   1958             effectSettings.framingFile = overlay.getFilename();
   1959 
   1960             if (effectSettings.framingFile == null) {
   1961                 try {
   1962                     (overlay).save(mProjectPath);
   1963                 } catch (IOException e) {
   1964                     Log.e(TAG, "getOverlaySettings : File not found");
   1965                 }
   1966                 effectSettings.framingFile = overlay.getFilename();
   1967             }
   1968             if (bitmap.getConfig() == Bitmap.Config.ARGB_8888)
   1969                 effectSettings.bitmapType = 6;
   1970             else if (bitmap.getConfig() == Bitmap.Config.ARGB_4444)
   1971                 effectSettings.bitmapType = 5;
   1972             else if (bitmap.getConfig() == Bitmap.Config.RGB_565)
   1973                 effectSettings.bitmapType = 4;
   1974             else if (bitmap.getConfig() == Bitmap.Config.ALPHA_8)
   1975                 throw new RuntimeException("Bitmap config not supported");
   1976 
   1977             effectSettings.width = bitmap.getWidth();
   1978             effectSettings.height = bitmap.getHeight();
   1979             effectSettings.framingBuffer = new int[effectSettings.width];
   1980             int tmp = 0;
   1981             short maxAlpha = 0;
   1982             short minAlpha = (short)0xFF;
   1983             short alpha = 0;
   1984             while (tmp < effectSettings.height) {
   1985                 bitmap.getPixels(effectSettings.framingBuffer, 0,
   1986                                  effectSettings.width, 0, tmp,
   1987                                  effectSettings.width, 1);
   1988                 for (int i = 0; i < effectSettings.width; i++) {
   1989                     alpha = (short)((effectSettings.framingBuffer[i] >> 24) & 0xFF);
   1990                     if (alpha > maxAlpha) {
   1991                         maxAlpha = alpha;
   1992                     }
   1993                     if (alpha < minAlpha) {
   1994                         minAlpha = alpha;
   1995                     }
   1996                 }
   1997                 tmp += 1;
   1998             }
   1999             alpha = (short)((maxAlpha + minAlpha) / 2);
   2000             alpha = (short)((alpha * 100) / 256);
   2001             effectSettings.alphaBlendingEndPercent = alpha;
   2002             effectSettings.alphaBlendingMiddlePercent = alpha;
   2003             effectSettings.alphaBlendingStartPercent = alpha;
   2004             effectSettings.alphaBlendingFadeInTimePercent = 100;
   2005             effectSettings.alphaBlendingFadeOutTimePercent = 100;
   2006             effectSettings.framingBuffer = null;
   2007 
   2008             /*
   2009              * Set the resized RGB file dimensions
   2010              */
   2011             effectSettings.width = overlay.getResizedRGBSizeWidth();
   2012             if(effectSettings.width == 0) {
   2013                 effectSettings.width = bitmap.getWidth();
   2014             }
   2015 
   2016             effectSettings.height = overlay.getResizedRGBSizeHeight();
   2017             if(effectSettings.height == 0) {
   2018                 effectSettings.height = bitmap.getHeight();
   2019             }
   2020 
   2021         }
   2022 
   2023         effectSettings.topLeftX = 0;
   2024         effectSettings.topLeftY = 0;
   2025 
   2026         effectSettings.framingResize = true;
   2027         effectSettings.text = null;
   2028         effectSettings.textRenderingData = null;
   2029         effectSettings.textBufferWidth = 0;
   2030         effectSettings.textBufferHeight = 0;
   2031         effectSettings.fiftiesFrameRate = 0;
   2032         effectSettings.rgb16InputColor = 0;
   2033         int mediaItemHeight;
   2034         int aspectRatio;
   2035         if (overlay.getMediaItem() instanceof MediaImageItem) {
   2036             if (((MediaImageItem)overlay.getMediaItem()).getGeneratedImageClip() != null) {
   2037                 // Ken Burns was applied
   2038                 mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipHeight();
   2039                 aspectRatio = getAspectRatio(
   2040                     ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipWidth()
   2041                     , mediaItemHeight);
   2042             } else {
   2043                 //For image get the scaled height. Aspect ratio would remain the same
   2044                 mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getScaledHeight();
   2045                 aspectRatio = overlay.getMediaItem().getAspectRatio();
   2046             }
   2047         } else {
   2048             aspectRatio = overlay.getMediaItem().getAspectRatio();
   2049             mediaItemHeight = overlay.getMediaItem().getHeight();
   2050         }
   2051         effectSettings.framingScaledSize = findVideoResolution(aspectRatio, mediaItemHeight);
   2052         return effectSettings;
   2053     }
   2054 
   2055      /* get Video Editor aspect ratio */
   2056     int nativeHelperGetAspectRatio() {
   2057         return mVideoEditor.getAspectRatio();
   2058     }
   2059 
   2060     /**
   2061      * Sets the export audio codec
   2062      *
   2063      * @param export audio codec
   2064      *
   2065      */
   2066     void setAudioCodec(int codec) {
   2067         mExportAudioCodec = codec;
   2068     }
   2069     /**
   2070      * Sets the export video codec
   2071      *
   2072      * @param export video codec
   2073      *
   2074      */
   2075     void setVideoCodec(int codec) {
   2076         mExportVideoCodec = codec;
   2077     }
   2078 
   2079     /**
   2080      * Sets the audio regenerate flag
   2081      *
   2082      * @param flag The boolean to set the audio regenerate flag
   2083      *
   2084      */
   2085     void setAudioflag(boolean flag) {
   2086         //check if the file exists.
   2087         if (!(new File(String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE)).exists())) {
   2088             flag = true;
   2089         }
   2090         mRegenerateAudio = flag;
   2091     }
   2092 
   2093     /**
   2094      * Gets the audio regenerate flag
   2095      *
   2096      * @param return The boolean to get the audio regenerate flag
   2097      *
   2098      */
   2099     boolean getAudioflag() {
   2100         return mRegenerateAudio;
   2101     }
   2102 
   2103     /**
   2104      * Maps the average frame rate to one of the defined enum values
   2105      *
   2106      * @param averageFrameRate The average frame rate of video item
   2107      *
   2108      * @return The frame rate from one of the defined enum values
   2109      */
   2110     int GetClosestVideoFrameRate(int averageFrameRate) {
   2111         if (averageFrameRate >= 25) {
   2112             return VideoFrameRate.FR_30_FPS;
   2113         } else if (averageFrameRate >= 20) {
   2114             return VideoFrameRate.FR_25_FPS;
   2115         } else if (averageFrameRate >= 15) {
   2116             return VideoFrameRate.FR_20_FPS;
   2117         } else if (averageFrameRate >= 12) {
   2118             return VideoFrameRate.FR_15_FPS;
   2119         } else if (averageFrameRate >= 10) {
   2120             return VideoFrameRate.FR_12_5_FPS;
   2121         } else if (averageFrameRate >= 7) {
   2122             return VideoFrameRate.FR_10_FPS;
   2123         } else if (averageFrameRate >= 5) {
   2124             return VideoFrameRate.FR_7_5_FPS;
   2125         } else {
   2126             return -1;
   2127         }
   2128     }
   2129 
   2130     /**
   2131      * Helper function to adjust the effect or overlay start time
   2132      * depending on the begin and end boundary time of meddia item
   2133      */
   2134     public void adjustEffectsStartTimeAndDuration(EffectSettings lEffect, int beginCutTime,
   2135                                                   int endCutTime) {
   2136 
   2137         int effectStartTime = 0;
   2138         int effectDuration = 0;
   2139 
   2140         /**
   2141          * cbct -> clip begin cut time
   2142          * cect -> clip end cut time
   2143          ****************************************
   2144          *  |                                 |
   2145          *  |         cbct        cect        |
   2146          *  | <-1-->   |           |          |
   2147          *  |       <--|-2->       |          |
   2148          *  |          | <---3---> |          |
   2149          *  |          |        <--|-4--->    |
   2150          *  |          |           | <--5-->  |
   2151          *  |      <---|------6----|---->     |
   2152          *  |                                 |
   2153          *  < : effectStart
   2154          *  > : effectStart + effectDuration
   2155          ****************************************
   2156          **/
   2157 
   2158         /** 1 & 5 */
   2159         /**
   2160          * Effect falls out side the trim duration. In such a case effects shall
   2161          * not be applied.
   2162          */
   2163         if ((lEffect.startTime > endCutTime)
   2164                 || ((lEffect.startTime + lEffect.duration) <= beginCutTime)) {
   2165 
   2166             effectStartTime = 0;
   2167             effectDuration = 0;
   2168 
   2169             lEffect.startTime = effectStartTime;
   2170             lEffect.duration = effectDuration;
   2171             return;
   2172         }
   2173 
   2174         /** 2 */
   2175         if ((lEffect.startTime < beginCutTime)
   2176                 && ((lEffect.startTime + lEffect.duration) > beginCutTime)
   2177                 && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
   2178             effectStartTime = 0;
   2179             effectDuration = lEffect.duration;
   2180 
   2181             effectDuration -= (beginCutTime - lEffect.startTime);
   2182             lEffect.startTime = effectStartTime;
   2183             lEffect.duration = effectDuration;
   2184             return;
   2185         }
   2186 
   2187         /** 3 */
   2188         if ((lEffect.startTime >= beginCutTime)
   2189                 && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
   2190             effectStartTime = lEffect.startTime - beginCutTime;
   2191             lEffect.startTime = effectStartTime;
   2192             lEffect.duration = lEffect.duration;
   2193             return;
   2194         }
   2195 
   2196         /** 4 */
   2197         if ((lEffect.startTime >= beginCutTime)
   2198                 && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
   2199             effectStartTime = lEffect.startTime - beginCutTime;
   2200             effectDuration = endCutTime - lEffect.startTime;
   2201             lEffect.startTime = effectStartTime;
   2202             lEffect.duration = effectDuration;
   2203             return;
   2204         }
   2205 
   2206         /** 6 */
   2207         if ((lEffect.startTime < beginCutTime)
   2208                 && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
   2209             effectStartTime = 0;
   2210             effectDuration = endCutTime - beginCutTime;
   2211             lEffect.startTime = effectStartTime;
   2212             lEffect.duration = effectDuration;
   2213             return;
   2214         }
   2215 
   2216     }
   2217 
   2218     /**
   2219      * Generates the clip for preview or export
   2220      *
   2221      * @param editSettings The EditSettings reference for generating
   2222      * a clip for preview or export
   2223      *
   2224      * @return error value
   2225      */
   2226     public int generateClip(EditSettings editSettings) {
   2227         int err = 0;
   2228 
   2229         try {
   2230             err = nativeGenerateClip(editSettings);
   2231         } catch (IllegalArgumentException ex) {
   2232             Log.e(TAG, "Illegal Argument exception in load settings");
   2233             return -1;
   2234         } catch (IllegalStateException ex) {
   2235             Log.e(TAG, "Illegal state exception in load settings");
   2236             return -1;
   2237         } catch (RuntimeException ex) {
   2238             Log.e(TAG, "Runtime exception in load settings");
   2239             return -1;
   2240         }
   2241         return err;
   2242     }
   2243 
   2244     /**
   2245      * Init function to initialiZe the  ClipSettings reference to
   2246      * default values
   2247      *
   2248      * @param lclipSettings The ClipSettings reference
   2249      */
   2250     void initClipSettings(ClipSettings lclipSettings) {
   2251         lclipSettings.clipPath = null;
   2252         lclipSettings.clipDecodedPath = null;
   2253         lclipSettings.clipOriginalPath = null;
   2254         lclipSettings.fileType = 0;
   2255         lclipSettings.endCutTime = 0;
   2256         lclipSettings.beginCutTime = 0;
   2257         lclipSettings.beginCutPercent = 0;
   2258         lclipSettings.endCutPercent = 0;
   2259         lclipSettings.panZoomEnabled = false;
   2260         lclipSettings.panZoomPercentStart = 0;
   2261         lclipSettings.panZoomTopLeftXStart = 0;
   2262         lclipSettings.panZoomTopLeftYStart = 0;
   2263         lclipSettings.panZoomPercentEnd = 0;
   2264         lclipSettings.panZoomTopLeftXEnd = 0;
   2265         lclipSettings.panZoomTopLeftYEnd = 0;
   2266         lclipSettings.mediaRendering = 0;
   2267         lclipSettings.rotationDegree = 0;
   2268     }
   2269 
   2270 
   2271     /**
   2272      * Populates the settings for generating an effect clip
   2273      *
   2274      * @param lMediaItem The media item for which the effect clip
   2275      * needs to be generated
   2276      * @param lclipSettings The ClipSettings reference containing
   2277      * clips data
   2278      * @param e The EditSettings reference containing effect specific data
   2279      * @param uniqueId The unique id used in the name of the output clip
   2280      * @param clipNo Used for internal purpose
   2281      *
   2282      * @return The name and path of generated clip
   2283      */
   2284     String generateEffectClip(MediaItem lMediaItem, ClipSettings lclipSettings,
   2285             EditSettings e,String uniqueId,int clipNo) {
   2286         int err = 0;
   2287         EditSettings editSettings = null;
   2288         String EffectClipPath = null;
   2289         int outVideoProfile = 0;
   2290         int outVideoLevel = 0;
   2291         editSettings = new EditSettings();
   2292 
   2293         editSettings.clipSettingsArray = new ClipSettings[1];
   2294         editSettings.clipSettingsArray[0] = lclipSettings;
   2295 
   2296         editSettings.backgroundMusicSettings = null;
   2297         editSettings.transitionSettingsArray = null;
   2298         editSettings.effectSettingsArray = e.effectSettingsArray;
   2299 
   2300         EffectClipPath = String.format(mProjectPath + "/" + "ClipEffectIntermediate" + "_"
   2301                 + lMediaItem.getId() + uniqueId + ".3gp");
   2302 
   2303         File tmpFile = new File(EffectClipPath);
   2304         if (tmpFile.exists()) {
   2305             tmpFile.delete();
   2306         }
   2307 
   2308         outVideoProfile = VideoEditorProfile.getExportProfile(VideoFormat.H264);
   2309         outVideoLevel = VideoEditorProfile.getExportLevel(VideoFormat.H264);
   2310         editSettings.videoProfile = outVideoProfile;
   2311         editSettings.videoLevel= outVideoLevel;
   2312 
   2313         if (lMediaItem instanceof MediaVideoItem) {
   2314             MediaVideoItem m = (MediaVideoItem)lMediaItem;
   2315 
   2316             editSettings.audioFormat = AudioFormat.AAC;
   2317             editSettings.audioChannels = 2;
   2318             editSettings.audioBitrate = Bitrate.BR_64_KBPS;
   2319             editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
   2320 
   2321             editSettings.videoFormat = VideoFormat.H264;
   2322             editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
   2323             editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
   2324                     m.getHeight());
   2325             editSettings.videoBitrate = findVideoBitrate(editSettings.videoFrameSize);
   2326         } else {
   2327             MediaImageItem m = (MediaImageItem)lMediaItem;
   2328             editSettings.audioBitrate = Bitrate.BR_64_KBPS;
   2329             editSettings.audioChannels = 2;
   2330             editSettings.audioFormat = AudioFormat.AAC;
   2331             editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
   2332 
   2333             editSettings.videoFormat = VideoFormat.H264;
   2334             editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
   2335             editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
   2336                     m.getScaledHeight());
   2337             editSettings.videoBitrate = findVideoBitrate(editSettings.videoFrameSize);
   2338         }
   2339 
   2340         editSettings.outputFile = EffectClipPath;
   2341 
   2342         if (clipNo == 1) {
   2343             mProcessingState  = PROCESSING_INTERMEDIATE1;
   2344         } else if (clipNo == 2) {
   2345             mProcessingState  = PROCESSING_INTERMEDIATE2;
   2346         }
   2347         mProcessingObject = lMediaItem;
   2348         err = generateClip(editSettings);
   2349         mProcessingState  = PROCESSING_NONE;
   2350 
   2351         if (err == 0) {
   2352             lclipSettings.clipPath = EffectClipPath;
   2353             lclipSettings.fileType = FileType.THREE_GPP;
   2354             return EffectClipPath;
   2355         } else {
   2356             throw new RuntimeException("preview generation cannot be completed");
   2357         }
   2358     }
   2359 
   2360 
   2361     /**
   2362      * Populates the settings for generating a Ken Burn effect clip
   2363      *
   2364      * @param m The media image item for which the Ken Burn effect clip
   2365      * needs to be generated
   2366      * @param e The EditSettings reference clip specific data
   2367      *
   2368      * @return The name and path of generated clip
   2369      */
   2370     String generateKenBurnsClip(EditSettings e, MediaImageItem m) {
   2371         String output = null;
   2372         int err = 0;
   2373         int outVideoProfile = 0;
   2374         int outVideoLevel = 0;
   2375 
   2376         e.backgroundMusicSettings = null;
   2377         e.transitionSettingsArray = null;
   2378         e.effectSettingsArray = null;
   2379         output = String.format(mProjectPath + "/" + "ImageClip-" + m.getId() + ".3gp");
   2380 
   2381         File tmpFile = new File(output);
   2382         if (tmpFile.exists()) {
   2383             tmpFile.delete();
   2384         }
   2385 
   2386         outVideoProfile = VideoEditorProfile.getExportProfile(VideoFormat.H264);
   2387         outVideoLevel = VideoEditorProfile.getExportLevel(VideoFormat.H264);
   2388         e.videoProfile = outVideoProfile;
   2389         e.videoLevel = outVideoLevel;
   2390 
   2391         e.outputFile = output;
   2392         e.audioBitrate = Bitrate.BR_64_KBPS;
   2393         e.audioChannels = 2;
   2394         e.audioFormat = AudioFormat.AAC;
   2395         e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
   2396 
   2397         e.videoFormat = VideoFormat.H264;
   2398         e.videoFrameRate = VideoFrameRate.FR_30_FPS;
   2399         e.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
   2400                                                            m.getScaledHeight());
   2401         e.videoBitrate = findVideoBitrate(e.videoFrameSize);
   2402 
   2403         mProcessingState  = PROCESSING_KENBURNS;
   2404         mProcessingObject = m;
   2405         err = generateClip(e);
   2406         // Reset the processing state and check for errors
   2407         mProcessingState  = PROCESSING_NONE;
   2408         if (err != 0) {
   2409             throw new RuntimeException("preview generation cannot be completed");
   2410         }
   2411         return output;
   2412     }
   2413 
   2414 
   2415     /**
   2416      * Calculates the output resolution for transition clip
   2417      *
   2418      * @param m1 First media item associated with transition
   2419      * @param m2 Second media item associated with transition
   2420      *
   2421      * @return The transition resolution
   2422      */
   2423     private int getTransitionResolution(MediaItem m1, MediaItem m2) {
   2424         int clip1Height = 0;
   2425         int clip2Height = 0;
   2426         int videoSize = 0;
   2427 
   2428         if (m1 != null && m2 != null) {
   2429             if (m1 instanceof MediaVideoItem) {
   2430                 clip1Height = m1.getHeight();
   2431             } else if (m1 instanceof MediaImageItem) {
   2432                 clip1Height = ((MediaImageItem)m1).getScaledHeight();
   2433             }
   2434             if (m2 instanceof MediaVideoItem) {
   2435                 clip2Height = m2.getHeight();
   2436             } else if (m2 instanceof MediaImageItem) {
   2437                 clip2Height = ((MediaImageItem)m2).getScaledHeight();
   2438             }
   2439             if (clip1Height > clip2Height) {
   2440                 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height);
   2441             } else {
   2442                 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height);
   2443             }
   2444         } else if (m1 == null && m2 != null) {
   2445             if (m2 instanceof MediaVideoItem) {
   2446                 clip2Height = m2.getHeight();
   2447             } else if (m2 instanceof MediaImageItem) {
   2448                 clip2Height = ((MediaImageItem)m2).getScaledHeight();
   2449             }
   2450             videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height);
   2451         } else if (m1 != null && m2 == null) {
   2452             if (m1 instanceof MediaVideoItem) {
   2453                 clip1Height = m1.getHeight();
   2454             } else if (m1 instanceof MediaImageItem) {
   2455                 clip1Height = ((MediaImageItem)m1).getScaledHeight();
   2456             }
   2457             videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height);
   2458         }
   2459         return videoSize;
   2460     }
   2461 
   2462     /**
   2463      * Populates the settings for generating an transition clip
   2464      *
   2465      * @param m1 First media item associated with transition
   2466      * @param m2 Second media item associated with transition
   2467      * @param e The EditSettings reference containing
   2468      * clip specific data
   2469      * @param uniqueId The unique id used in the name of the output clip
   2470      * @param t The Transition specific data
   2471      *
   2472      * @return The name and path of generated clip
   2473      */
   2474     String generateTransitionClip(EditSettings e, String uniqueId,
   2475             MediaItem m1, MediaItem m2,Transition t) {
   2476         String outputFilename = null;
   2477         int err = 0;
   2478         int outVideoProfile = 0;
   2479         int outVideoLevel = 0;
   2480         outputFilename = String.format(mProjectPath + "/" + uniqueId + ".3gp");
   2481 
   2482         outVideoProfile = VideoEditorProfile.getExportProfile(VideoFormat.H264);
   2483         outVideoLevel = VideoEditorProfile.getExportLevel(VideoFormat.H264);
   2484         e.videoProfile = outVideoProfile;
   2485         e.videoLevel = outVideoLevel;
   2486 
   2487         e.outputFile = outputFilename;
   2488         e.audioBitrate = Bitrate.BR_64_KBPS;
   2489         e.audioChannels = 2;
   2490         e.audioFormat = AudioFormat.AAC;
   2491         e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
   2492 
   2493         e.videoFormat = VideoFormat.H264;
   2494         e.videoFrameRate = VideoFrameRate.FR_30_FPS;
   2495         e.videoFrameSize = getTransitionResolution(m1, m2);
   2496         e.videoBitrate = findVideoBitrate(e.videoFrameSize);
   2497 
   2498         if (new File(outputFilename).exists()) {
   2499             new File(outputFilename).delete();
   2500         }
   2501         mProcessingState  = PROCESSING_INTERMEDIATE3;
   2502         mProcessingObject = t;
   2503         err = generateClip(e);
   2504         // Reset the processing state and check for errors
   2505         mProcessingState  = PROCESSING_NONE;
   2506         if (err != 0) {
   2507             throw new RuntimeException("preview generation cannot be completed");
   2508         }
   2509         return outputFilename;
   2510     }
   2511 
   2512     /**
   2513      * Populates effects and overlays in EffectSettings structure
   2514      * and also adjust the start time and duration of effects and overlays
   2515      * w.r.t to total story board time
   2516      *
   2517      * @param m1 Media item associated with effect
   2518      * @param effectSettings The EffectSettings reference containing
   2519      *      effect specific data
   2520      * @param beginCutTime The begin cut time of the clip associated with effect
   2521      * @param endCutTime The end cut time of the clip associated with effect
   2522      * @param storyBoardTime The current story board time
   2523      *
   2524      * @return The updated index
   2525      */
   2526     private int populateEffects(MediaItem m, EffectSettings[] effectSettings, int i,
   2527             int beginCutTime, int endCutTime, int storyBoardTime) {
   2528 
   2529         if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
   2530                 && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
   2531             beginCutTime += m.getBeginTransition().getDuration();
   2532             endCutTime -= m.getEndTransition().getDuration();
   2533         } else if (m.getBeginTransition() == null && m.getEndTransition() != null
   2534                 && m.getEndTransition().getDuration() > 0) {
   2535             endCutTime -= m.getEndTransition().getDuration();
   2536         } else if (m.getEndTransition() == null && m.getBeginTransition() != null
   2537                 && m.getBeginTransition().getDuration() > 0) {
   2538             beginCutTime += m.getBeginTransition().getDuration();
   2539         }
   2540 
   2541         final List<Effect> effects = m.getAllEffects();
   2542         final List<Overlay> overlays = m.getAllOverlays();
   2543 
   2544         for (Overlay overlay : overlays) {
   2545             effectSettings[i] = getOverlaySettings((OverlayFrame)overlay);
   2546             adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime);
   2547             effectSettings[i].startTime += storyBoardTime;
   2548             i++;
   2549         }
   2550 
   2551         for (Effect effect : effects) {
   2552             if (effect instanceof EffectColor) {
   2553                 effectSettings[i] = getEffectSettings((EffectColor)effect);
   2554                 adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime);
   2555                 effectSettings[i].startTime += storyBoardTime;
   2556                 i++;
   2557             }
   2558         }
   2559 
   2560         return i;
   2561     }
   2562 
   2563     /**
   2564      * Adjusts the media item boundaries for use in export or preview
   2565      *
   2566      * @param clipSettings The ClipSettings reference
   2567      * @param clipProperties The Properties reference
   2568      * @param m The media item
   2569      */
   2570     private void adjustMediaItemBoundary(ClipSettings clipSettings,
   2571                                          Properties clipProperties, MediaItem m) {
   2572         if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
   2573                 && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
   2574             clipSettings.beginCutTime += m.getBeginTransition().getDuration();
   2575             clipSettings.endCutTime -= m.getEndTransition().getDuration();
   2576         } else if (m.getBeginTransition() == null && m.getEndTransition() != null
   2577                 && m.getEndTransition().getDuration() > 0) {
   2578             clipSettings.endCutTime -= m.getEndTransition().getDuration();
   2579         } else if (m.getEndTransition() == null && m.getBeginTransition() != null
   2580                 && m.getBeginTransition().getDuration() > 0) {
   2581             clipSettings.beginCutTime += m.getBeginTransition().getDuration();
   2582         }
   2583 
   2584         clipProperties.duration = clipSettings.endCutTime - clipSettings.beginCutTime;
   2585 
   2586         if (clipProperties.videoDuration != 0) {
   2587             clipProperties.videoDuration = clipSettings.endCutTime - clipSettings.beginCutTime;
   2588         }
   2589 
   2590         if (clipProperties.audioDuration != 0) {
   2591             clipProperties.audioDuration = clipSettings.endCutTime - clipSettings.beginCutTime;
   2592         }
   2593     }
   2594 
   2595     /**
   2596      * Generates the transition if transition is present
   2597      * and is in invalidated state
   2598      *
   2599      * @param transition The Transition reference
   2600      * @param editSettings The EditSettings reference
   2601      * @param clipPropertiesArray The clip Properties array
   2602      * @param i The index in clip Properties array for current clip
   2603      */
   2604     private void generateTransition(Transition transition, EditSettings editSettings,
   2605             PreviewClipProperties clipPropertiesArray, int index) {
   2606         if (!(transition.isGenerated())) {
   2607             transition.generate();
   2608         }
   2609         editSettings.clipSettingsArray[index] = new ClipSettings();
   2610         editSettings.clipSettingsArray[index].clipPath = transition.getFilename();
   2611         editSettings.clipSettingsArray[index].fileType = FileType.THREE_GPP;
   2612         editSettings.clipSettingsArray[index].beginCutTime = 0;
   2613         editSettings.clipSettingsArray[index].endCutTime = (int)transition.getDuration();
   2614         editSettings.clipSettingsArray[index].mediaRendering = MediaRendering.BLACK_BORDERS;
   2615 
   2616         try {
   2617             clipPropertiesArray.clipProperties[index] =
   2618                 getMediaProperties(transition.getFilename());
   2619         } catch (Exception e) {
   2620             throw new IllegalArgumentException("Unsupported file or file not found");
   2621         }
   2622 
   2623         clipPropertiesArray.clipProperties[index].Id = null;
   2624         clipPropertiesArray.clipProperties[index].audioVolumeValue = 100;
   2625         clipPropertiesArray.clipProperties[index].duration = (int)transition.getDuration();
   2626         if (clipPropertiesArray.clipProperties[index].videoDuration != 0) {
   2627             clipPropertiesArray.clipProperties[index].videoDuration = (int)transition.getDuration();
   2628         }
   2629 
   2630         if (clipPropertiesArray.clipProperties[index].audioDuration != 0) {
   2631             clipPropertiesArray.clipProperties[index].audioDuration = (int)transition.getDuration();
   2632         }
   2633     }
   2634 
   2635     /**
   2636      * Sets the volume for current media item in clip properties array
   2637      *
   2638      * @param m The media item
   2639      * @param clipProperties The clip properties array reference
   2640      * @param i The index in clip Properties array for current clip
   2641      */
   2642     private void adjustVolume(MediaItem m, PreviewClipProperties clipProperties,
   2643                               int index) {
   2644         if (m instanceof MediaVideoItem) {
   2645             final boolean videoMuted = ((MediaVideoItem)m).isMuted();
   2646             if (videoMuted == false) {
   2647                 mClipProperties.clipProperties[index].audioVolumeValue =
   2648                     ((MediaVideoItem)m).getVolume();
   2649             } else {
   2650                 mClipProperties.clipProperties[index].audioVolumeValue = 0;
   2651             }
   2652         } else if (m instanceof MediaImageItem) {
   2653             mClipProperties.clipProperties[index].audioVolumeValue = 0;
   2654         }
   2655     }
   2656 
   2657     /**
   2658      * Checks for odd size image width and height
   2659      *
   2660      * @param m The media item
   2661      * @param clipProperties The clip properties array reference
   2662      * @param i The index in clip Properties array for current clip
   2663      */
   2664     private void checkOddSizeImage(MediaItem m, PreviewClipProperties clipProperties, int index) {
   2665         if (m instanceof MediaImageItem) {
   2666             int width = mClipProperties.clipProperties[index].width;
   2667             int height = mClipProperties.clipProperties[index].height;
   2668 
   2669             if ((width % 2) != 0) {
   2670                 width -= 1;
   2671             }
   2672             if ((height % 2) != 0) {
   2673                 height -= 1;
   2674             }
   2675             mClipProperties.clipProperties[index].width = width;
   2676             mClipProperties.clipProperties[index].height = height;
   2677         }
   2678     }
   2679 
   2680     /**
   2681      * Populates the media item properties and calculates the maximum
   2682      * height among all the clips
   2683      *
   2684      * @param m The media item
   2685      * @param i The index in clip Properties array for current clip
   2686      * @param maxHeight The max height from the clip properties
   2687      *
   2688      * @return Updates the max height if current clip's height is greater
   2689      * than all previous clips height
   2690      */
   2691     private int populateMediaItemProperties(MediaItem m, int index, int maxHeight) {
   2692         mPreviewEditSettings.clipSettingsArray[index] = new ClipSettings();
   2693         if (m instanceof MediaVideoItem) {
   2694             mPreviewEditSettings.clipSettingsArray[index] =
   2695                 ((MediaVideoItem)m).getVideoClipProperties();
   2696             if (((MediaVideoItem)m).getHeight() > maxHeight) {
   2697                 maxHeight = ((MediaVideoItem)m).getHeight();
   2698             }
   2699         } else if (m instanceof MediaImageItem) {
   2700             mPreviewEditSettings.clipSettingsArray[index] =
   2701                 ((MediaImageItem)m).getImageClipProperties();
   2702             if (((MediaImageItem)m).getScaledHeight() > maxHeight) {
   2703                 maxHeight = ((MediaImageItem)m).getScaledHeight();
   2704             }
   2705         }
   2706         /** + Handle the image files here */
   2707         if (mPreviewEditSettings.clipSettingsArray[index].fileType == FileType.JPG) {
   2708             mPreviewEditSettings.clipSettingsArray[index].clipDecodedPath =
   2709                 ((MediaImageItem)m).getDecodedImageFileName();
   2710 
   2711             mPreviewEditSettings.clipSettingsArray[index].clipOriginalPath =
   2712                          mPreviewEditSettings.clipSettingsArray[index].clipPath;
   2713         }
   2714         return maxHeight;
   2715     }
   2716 
   2717     /**
   2718      * Populates the background music track properties
   2719      *
   2720      * @param mediaBGMList The background music list
   2721      *
   2722      */
   2723     private void populateBackgroundMusicProperties(List<AudioTrack> mediaBGMList) {
   2724 
   2725         if (mediaBGMList.size() == 1) {
   2726             mAudioTrack = mediaBGMList.get(0);
   2727         } else {
   2728             mAudioTrack = null;
   2729         }
   2730 
   2731         if (mAudioTrack != null) {
   2732             mAudioSettings = new AudioSettings();
   2733             Properties mAudioProperties = new Properties();
   2734             mAudioSettings.pFile = null;
   2735             mAudioSettings.Id = mAudioTrack.getId();
   2736             try {
   2737                 mAudioProperties = getMediaProperties(mAudioTrack.getFilename());
   2738             } catch (Exception e) {
   2739                throw new IllegalArgumentException("Unsupported file or file not found");
   2740             }
   2741             mAudioSettings.bRemoveOriginal = false;
   2742             mAudioSettings.channels = mAudioProperties.audioChannels;
   2743             mAudioSettings.Fs = mAudioProperties.audioSamplingFrequency;
   2744             mAudioSettings.loop = mAudioTrack.isLooping();
   2745             mAudioSettings.ExtendedFs = 0;
   2746             mAudioSettings.pFile = mAudioTrack.getFilename();
   2747             mAudioSettings.startMs = mAudioTrack.getStartTime();
   2748             mAudioSettings.beginCutTime = mAudioTrack.getBoundaryBeginTime();
   2749             mAudioSettings.endCutTime = mAudioTrack.getBoundaryEndTime();
   2750             if (mAudioTrack.isMuted()) {
   2751                 mAudioSettings.volume = 0;
   2752             } else {
   2753                 mAudioSettings.volume = mAudioTrack.getVolume();
   2754             }
   2755             mAudioSettings.fileType = mAudioProperties.fileType;
   2756             mAudioSettings.ducking_lowVolume = mAudioTrack.getDuckedTrackVolume();
   2757             mAudioSettings.ducking_threshold = mAudioTrack.getDuckingThreshhold();
   2758             mAudioSettings.bInDucking_enable = mAudioTrack.isDuckingEnabled();
   2759             mAudioTrackPCMFilePath = String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE);
   2760             mAudioSettings.pcmFilePath = mAudioTrackPCMFilePath;
   2761 
   2762             mPreviewEditSettings.backgroundMusicSettings = new BackgroundMusicSettings();
   2763             mPreviewEditSettings.backgroundMusicSettings.file = mAudioTrackPCMFilePath;
   2764             mPreviewEditSettings.backgroundMusicSettings.fileType = mAudioProperties.fileType;
   2765             mPreviewEditSettings.backgroundMusicSettings.insertionTime =
   2766                 mAudioTrack.getStartTime();
   2767             mPreviewEditSettings.backgroundMusicSettings.volumePercent = mAudioTrack.getVolume();
   2768             mPreviewEditSettings.backgroundMusicSettings.beginLoop =
   2769                 mAudioTrack.getBoundaryBeginTime();
   2770             mPreviewEditSettings.backgroundMusicSettings.endLoop =
   2771                                                mAudioTrack.getBoundaryEndTime();
   2772             mPreviewEditSettings.backgroundMusicSettings.enableDucking =
   2773                 mAudioTrack.isDuckingEnabled();
   2774             mPreviewEditSettings.backgroundMusicSettings.duckingThreshold =
   2775                 mAudioTrack.getDuckingThreshhold();
   2776             mPreviewEditSettings.backgroundMusicSettings.lowVolume =
   2777                 mAudioTrack.getDuckedTrackVolume();
   2778             mPreviewEditSettings.backgroundMusicSettings.isLooping = mAudioTrack.isLooping();
   2779             mPreviewEditSettings.primaryTrackVolume = 100;
   2780             mProcessingState  = PROCESSING_AUDIO_PCM;
   2781             mProcessingObject = mAudioTrack;
   2782         } else {
   2783             mAudioSettings = null;
   2784             mPreviewEditSettings.backgroundMusicSettings = null;
   2785             mAudioTrackPCMFilePath = null;
   2786         }
   2787     }
   2788 
   2789     /**
   2790      * Calculates all the effects in all the media items
   2791      * in media items list
   2792      *
   2793      * @param mediaItemsList The media item list
   2794      *
   2795      * @return The total number of effects
   2796      *
   2797      */
   2798     private int getTotalEffects(List<MediaItem> mediaItemsList) {
   2799         int totalEffects = 0;
   2800         final Iterator<MediaItem> it = mediaItemsList.iterator();
   2801         while (it.hasNext()) {
   2802             final MediaItem t = it.next();
   2803             totalEffects += t.getAllEffects().size();
   2804             totalEffects += t.getAllOverlays().size();
   2805             final Iterator<Effect> ef = t.getAllEffects().iterator();
   2806             while (ef.hasNext()) {
   2807                 final Effect e = ef.next();
   2808                 if (e instanceof EffectKenBurns) {
   2809                     totalEffects--;
   2810                 }
   2811             }
   2812         }
   2813         return totalEffects;
   2814     }
   2815 
   2816     /**
   2817      * This function is responsible for forming clip settings
   2818      * array and clip properties array including transition clips
   2819      * and effect settings for preview purpose or export.
   2820      *
   2821      *
   2822      * @param mediaItemsList The media item list
   2823      * @param mediaTransitionList The transitions list
   2824      * @param mediaBGMList The background music list
   2825      * @param listener The MediaProcessingProgressListener
   2826      *
   2827      */
   2828     void previewStoryBoard(List<MediaItem> mediaItemsList,
   2829             List<Transition> mediaTransitionList, List<AudioTrack> mediaBGMList,
   2830             MediaProcessingProgressListener listener) {
   2831         if (mInvalidatePreviewArray) {
   2832             int previewIndex = 0;
   2833             int totalEffects = 0;
   2834             int storyBoardTime = 0;
   2835             int maxHeight = 0;
   2836             int beginCutTime = 0;
   2837             int endCutTime = 0;
   2838             int effectIndex = 0;
   2839             Transition lTransition = null;
   2840             MediaItem lMediaItem = null;
   2841             mPreviewEditSettings = new EditSettings();
   2842             mClipProperties = new PreviewClipProperties();
   2843             mTotalClips = 0;
   2844 
   2845             mTotalClips = mediaItemsList.size();
   2846             for (Transition transition : mediaTransitionList) {
   2847                 if (transition.getDuration() > 0) {
   2848                     mTotalClips++;
   2849                 }
   2850             }
   2851 
   2852             totalEffects = getTotalEffects(mediaItemsList);
   2853 
   2854             mPreviewEditSettings.clipSettingsArray = new ClipSettings[mTotalClips];
   2855             mPreviewEditSettings.effectSettingsArray = new EffectSettings[totalEffects];
   2856             mClipProperties.clipProperties = new Properties[mTotalClips];
   2857 
   2858             /** record the call back progress listener */
   2859             mMediaProcessingProgressListener = listener;
   2860             mProgressToApp = 0;
   2861 
   2862             if (mediaItemsList.size() > 0) {
   2863                 for (int i = 0; i < mediaItemsList.size(); i++) {
   2864                     /* Get the Media Item from the list */
   2865                     lMediaItem = mediaItemsList.get(i);
   2866                     if (lMediaItem instanceof MediaVideoItem) {
   2867                         beginCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryBeginTime();
   2868                         endCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryEndTime();
   2869                     } else if (lMediaItem instanceof MediaImageItem) {
   2870                         beginCutTime = 0;
   2871                         endCutTime = (int)((MediaImageItem)lMediaItem).getTimelineDuration();
   2872                     }
   2873                     /* Get the transition associated with Media Item */
   2874                     lTransition = lMediaItem.getBeginTransition();
   2875                     if (lTransition != null && (lTransition.getDuration() > 0)) {
   2876                         /* generate transition clip */
   2877                         generateTransition(lTransition, mPreviewEditSettings,
   2878                                            mClipProperties, previewIndex);
   2879                         storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
   2880                         previewIndex++;
   2881                     }
   2882                     /* Populate media item properties */
   2883                     maxHeight = populateMediaItemProperties(lMediaItem, previewIndex, maxHeight);
   2884                     /* Get the clip properties of the media item. */
   2885                     if (lMediaItem instanceof MediaImageItem) {
   2886                         int tmpCnt = 0;
   2887                         boolean bEffectKbPresent = false;
   2888                         final List<Effect> effectList = lMediaItem.getAllEffects();
   2889                         /**
   2890                          * Check if Ken Burns effect is present
   2891                          */
   2892                         while (tmpCnt < effectList.size()) {
   2893                             if (effectList.get(tmpCnt) instanceof EffectKenBurns) {
   2894                                 bEffectKbPresent = true;
   2895                                 break;
   2896                             }
   2897                             tmpCnt++;
   2898                         }
   2899 
   2900                         if (bEffectKbPresent) {
   2901                             try {
   2902                                   if(((MediaImageItem)lMediaItem).getGeneratedImageClip() != null) {
   2903                                      mClipProperties.clipProperties[previewIndex]
   2904                                         = getMediaProperties(((MediaImageItem)lMediaItem).
   2905                                                              getGeneratedImageClip());
   2906                                   }
   2907                                   else {
   2908                                    mClipProperties.clipProperties[previewIndex]
   2909                                       = getMediaProperties(((MediaImageItem)lMediaItem).
   2910                                                              getScaledImageFileName());
   2911                                    mClipProperties.clipProperties[previewIndex].width =
   2912                                              ((MediaImageItem)lMediaItem).getScaledWidth();
   2913                                    mClipProperties.clipProperties[previewIndex].height =
   2914                                              ((MediaImageItem)lMediaItem).getScaledHeight();
   2915                                   }
   2916                                 } catch (Exception e) {
   2917                                    throw new IllegalArgumentException("Unsupported file or file not found");
   2918                                 }
   2919                          } else {
   2920                               try {
   2921                                   mClipProperties.clipProperties[previewIndex]
   2922                                       = getMediaProperties(((MediaImageItem)lMediaItem).
   2923                                                                getScaledImageFileName());
   2924                               } catch (Exception e) {
   2925                                 throw new IllegalArgumentException("Unsupported file or file not found");
   2926                               }
   2927                             mClipProperties.clipProperties[previewIndex].width =
   2928                                         ((MediaImageItem)lMediaItem).getScaledWidth();
   2929                             mClipProperties.clipProperties[previewIndex].height =
   2930                                         ((MediaImageItem)lMediaItem).getScaledHeight();
   2931                         }
   2932                     } else {
   2933                         try {
   2934                             mClipProperties.clipProperties[previewIndex]
   2935                                  = getMediaProperties(lMediaItem.getFilename());
   2936                             } catch (Exception e) {
   2937                               throw new IllegalArgumentException("Unsupported file or file not found");
   2938                           }
   2939                     }
   2940                     mClipProperties.clipProperties[previewIndex].Id = lMediaItem.getId();
   2941                     checkOddSizeImage(lMediaItem, mClipProperties, previewIndex);
   2942                     adjustVolume(lMediaItem, mClipProperties, previewIndex);
   2943 
   2944                     /*
   2945                      * Adjust media item start time and end time w.r.t to begin
   2946                      * and end transitions associated with media item
   2947                      */
   2948 
   2949                     adjustMediaItemBoundary(mPreviewEditSettings.clipSettingsArray[previewIndex],
   2950                             mClipProperties.clipProperties[previewIndex], lMediaItem);
   2951 
   2952                     /*
   2953                      * Get all the effects and overlays for that media item and
   2954                      * adjust start time and duration of effects
   2955                      */
   2956 
   2957                     effectIndex = populateEffects(lMediaItem,
   2958                             mPreviewEditSettings.effectSettingsArray, effectIndex, beginCutTime,
   2959                             endCutTime, storyBoardTime);
   2960                     storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
   2961                     previewIndex++;
   2962 
   2963                     /* Check if there is any end transition at last media item */
   2964 
   2965                     if (i == (mediaItemsList.size() - 1)) {
   2966                         lTransition = lMediaItem.getEndTransition();
   2967                         if (lTransition != null && (lTransition.getDuration() > 0)) {
   2968                             generateTransition(lTransition, mPreviewEditSettings, mClipProperties,
   2969                                     previewIndex);
   2970                             break;
   2971                         }
   2972                     }
   2973                 }
   2974 
   2975                 if (!mErrorFlagSet) {
   2976                     mPreviewEditSettings.videoFrameSize = findVideoResolution(mVideoEditor
   2977                             .getAspectRatio(), maxHeight);
   2978                     populateBackgroundMusicProperties(mediaBGMList);
   2979 
   2980                     /** call to native populate settings */
   2981                     try {
   2982                         nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
   2983                     } catch (IllegalArgumentException ex) {
   2984                         Log.e(TAG, "Illegal argument exception in nativePopulateSettings");
   2985                         throw ex;
   2986                     } catch (IllegalStateException ex) {
   2987                         Log.e(TAG, "Illegal state exception in nativePopulateSettings");
   2988                         throw ex;
   2989                     } catch (RuntimeException ex) {
   2990                         Log.e(TAG, "Runtime exception in nativePopulateSettings");
   2991                         throw ex;
   2992                     }
   2993                     mInvalidatePreviewArray = false;
   2994                     mProcessingState  = PROCESSING_NONE;
   2995                 }
   2996             }
   2997             if (mErrorFlagSet) {
   2998                 mErrorFlagSet = false;
   2999                 throw new RuntimeException("preview generation cannot be completed");
   3000             }
   3001         }
   3002     } /* END of previewStoryBoard */
   3003 
   3004     /**
   3005      * This function is responsible for starting the preview
   3006      *
   3007      *
   3008      * @param surface The surface on which preview has to be displayed
   3009      * @param fromMs The time in ms from which preview has to be started
   3010      * @param toMs The time in ms till preview has to be played
   3011      * @param loop To loop the preview or not
   3012      * @param callbackAfterFrameCount INdicated after how many frames
   3013      * the callback is needed
   3014      * @param listener The PreviewProgressListener
   3015      */
   3016     void doPreview(Surface surface, long fromMs, long toMs, boolean loop,
   3017             int callbackAfterFrameCount, PreviewProgressListener listener) {
   3018         mPreviewProgress = fromMs;
   3019         mIsFirstProgress = true;
   3020         mPreviewProgressListener = listener;
   3021 
   3022         if (!mInvalidatePreviewArray) {
   3023             try {
   3024                 /** Modify the image files names to rgb image files. */
   3025                 for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
   3026                     clipCnt++) {
   3027                     if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
   3028                         mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
   3029                             mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
   3030                     }
   3031                 }
   3032                 nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
   3033                 nativeStartPreview(surface, fromMs, toMs, callbackAfterFrameCount, loop);
   3034             } catch (IllegalArgumentException ex) {
   3035                 Log.e(TAG, "Illegal argument exception in nativeStartPreview");
   3036                 throw ex;
   3037             } catch (IllegalStateException ex) {
   3038                 Log.e(TAG, "Illegal state exception in nativeStartPreview");
   3039                 throw ex;
   3040             } catch (RuntimeException ex) {
   3041                 Log.e(TAG, "Runtime exception in nativeStartPreview");
   3042                 throw ex;
   3043             }
   3044         } else {
   3045             throw new IllegalStateException("generatePreview is in progress");
   3046         }
   3047     }
   3048 
   3049     /**
   3050      * This function is responsible for stopping the preview
   3051      */
   3052     long stopPreview() {
   3053         return nativeStopPreview();
   3054     }
   3055 
   3056     /**
   3057      * This function is responsible for rendering a single frame
   3058      * from the complete story board on the surface
   3059      *
   3060      * @param surface The surface on which frame has to be rendered
   3061      * @param time The time in ms at which the frame has to be rendered
   3062      * @param surfaceWidth The surface width
   3063      * @param surfaceHeight The surface height
   3064      * @param overlayData The overlay data
   3065      *
   3066      * @return The actual time from the story board at which the  frame was extracted
   3067      * and rendered
   3068      */
   3069     long renderPreviewFrame(Surface surface, long time, int surfaceWidth,
   3070             int surfaceHeight, VideoEditor.OverlayData overlayData) {
   3071         if (mInvalidatePreviewArray) {
   3072             if (Log.isLoggable(TAG, Log.DEBUG)) {
   3073                 Log.d(TAG, "Call generate preview first");
   3074             }
   3075             throw new IllegalStateException("Call generate preview first");
   3076         }
   3077 
   3078         long timeMs = 0;
   3079         try {
   3080             for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
   3081                   clipCnt++) {
   3082                 if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
   3083                     mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
   3084                         mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
   3085                 }
   3086             }
   3087 
   3088             // Reset the render preview frame params that shall be set by native.
   3089             mRenderPreviewOverlayFile = null;
   3090             mRenderPreviewRenderingMode = MediaRendering.RESIZING;
   3091 
   3092             nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
   3093 
   3094             timeMs = (long)nativeRenderPreviewFrame(surface, time, surfaceWidth, surfaceHeight);
   3095 
   3096             if (mRenderPreviewOverlayFile != null) {
   3097                 overlayData.set(BitmapFactory.decodeFile(mRenderPreviewOverlayFile),
   3098                         mRenderPreviewRenderingMode);
   3099             } else {
   3100                 overlayData.setClear();
   3101             }
   3102         } catch (IllegalArgumentException ex) {
   3103             Log.e(TAG, "Illegal Argument exception in nativeRenderPreviewFrame");
   3104             throw ex;
   3105         } catch (IllegalStateException ex) {
   3106             Log.e(TAG, "Illegal state exception in nativeRenderPreviewFrame");
   3107             throw ex;
   3108         } catch (RuntimeException ex) {
   3109             Log.e(TAG, "Runtime exception in nativeRenderPreviewFrame");
   3110             throw ex;
   3111         }
   3112 
   3113         return timeMs;
   3114     }
   3115 
   3116     private void previewFrameEditInfo(String filename, int renderingMode) {
   3117         mRenderPreviewOverlayFile = filename;
   3118         mRenderPreviewRenderingMode = renderingMode;
   3119     }
   3120 
   3121 
   3122     /**
   3123      * This function is responsible for rendering a single frame
   3124      * from a single media item on the surface
   3125      *
   3126      * @param surface The surface on which frame has to be rendered
   3127      * @param filepath The file path for which the frame needs to be displayed
   3128      * @param time The time in ms at which the frame has to be rendered
   3129      * @param framewidth The frame width
   3130      * @param framewidth The frame height
   3131      *
   3132      * @return The actual time from media item at which the  frame was extracted
   3133      * and rendered
   3134      */
   3135     long renderMediaItemPreviewFrame(Surface surface, String filepath,
   3136                                             long time, int framewidth, int frameheight) {
   3137         long timeMs = 0;
   3138         try {
   3139             timeMs = (long)nativeRenderMediaItemPreviewFrame(surface, filepath, framewidth,
   3140                     frameheight, 0, 0, time);
   3141         } catch (IllegalArgumentException ex) {
   3142             Log.e(TAG, "Illegal Argument exception in renderMediaItemPreviewFrame");
   3143             throw ex;
   3144         } catch (IllegalStateException ex) {
   3145             Log.e(TAG, "Illegal state exception in renderMediaItemPreviewFrame");
   3146             throw ex;
   3147         } catch (RuntimeException ex) {
   3148             Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame");
   3149             throw ex;
   3150         }
   3151 
   3152         return timeMs;
   3153     }
   3154 
   3155     /**
   3156      * This function sets the flag to invalidate the preview array
   3157      * and for generating the preview again
   3158      */
   3159     void setGeneratePreview(boolean isRequired) {
   3160         boolean semAcquiredDone = false;
   3161         try {
   3162             lock();
   3163             semAcquiredDone = true;
   3164             mInvalidatePreviewArray = isRequired;
   3165         } catch (InterruptedException ex) {
   3166             Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame");
   3167         } finally {
   3168             if (semAcquiredDone) {
   3169                 unlock();
   3170             }
   3171         }
   3172     }
   3173 
   3174     /**
   3175      * @return Returns the current status of preview invalidation
   3176      * flag
   3177      */
   3178     boolean getGeneratePreview() {
   3179         return mInvalidatePreviewArray;
   3180     }
   3181 
   3182     /**
   3183      * Calculates the aspect ratio from widht and height
   3184      *
   3185      * @param w The width of media item
   3186      * @param h The height of media item
   3187      *
   3188      * @return The calculated aspect ratio
   3189      */
   3190     int getAspectRatio(int w, int h) {
   3191         double apRatio = (double)(w) / (double)(h);
   3192         BigDecimal bd = new BigDecimal(apRatio);
   3193         bd = bd.setScale(3, BigDecimal.ROUND_HALF_UP);
   3194         apRatio = bd.doubleValue();
   3195         int var = MediaProperties.ASPECT_RATIO_16_9;
   3196         if (apRatio >= 1.7) {
   3197             var = MediaProperties.ASPECT_RATIO_16_9;
   3198         } else if (apRatio >= 1.6) {
   3199             var = MediaProperties.ASPECT_RATIO_5_3;
   3200         } else if (apRatio >= 1.5) {
   3201             var = MediaProperties.ASPECT_RATIO_3_2;
   3202         } else if (apRatio > 1.3) {
   3203             var = MediaProperties.ASPECT_RATIO_4_3;
   3204         } else if (apRatio >= 1.2) {
   3205             var = MediaProperties.ASPECT_RATIO_11_9;
   3206         }
   3207         return var;
   3208     }
   3209 
   3210     /**
   3211      * Maps the file type used in native layer
   3212      * to file type used in JAVA layer
   3213      *
   3214      * @param fileType The file type in native layer
   3215      *
   3216      * @return The File type in JAVA layer
   3217      */
   3218     int getFileType(int fileType) {
   3219         int retValue = -1;
   3220         switch (fileType) {
   3221             case FileType.UNSUPPORTED:
   3222                 retValue = MediaProperties.FILE_UNSUPPORTED;
   3223                 break;
   3224             case FileType.THREE_GPP:
   3225                 retValue = MediaProperties.FILE_3GP;
   3226                 break;
   3227             case FileType.MP4:
   3228                 retValue = MediaProperties.FILE_MP4;
   3229                 break;
   3230             case FileType.JPG:
   3231                 retValue = MediaProperties.FILE_JPEG;
   3232                 break;
   3233             case FileType.PNG:
   3234                 retValue = MediaProperties.FILE_PNG;
   3235                 break;
   3236             case FileType.MP3:
   3237                 retValue = MediaProperties.FILE_MP3;
   3238                 break;
   3239             case FileType.M4V:
   3240                 retValue = MediaProperties.FILE_M4V;
   3241                 break;
   3242             case FileType.AMR:
   3243                 retValue = MediaProperties.FILE_AMR;
   3244                 break;
   3245 
   3246             default:
   3247                 retValue = -1;
   3248         }
   3249         return retValue;
   3250     }
   3251 
   3252     /**
   3253      * Maps the video codec type used in native layer
   3254      * to video codec type used in JAVA layer
   3255      *
   3256      * @param codecType The video codec type in native layer
   3257      *
   3258      * @return The video codec type in JAVA layer
   3259      */
   3260     int getVideoCodecType(int codecType) {
   3261         int retValue = -1;
   3262         switch (codecType) {
   3263             case VideoFormat.H263:
   3264                 retValue = MediaProperties.VCODEC_H263;
   3265                 break;
   3266             case VideoFormat.H264:
   3267                 retValue = MediaProperties.VCODEC_H264;
   3268                 break;
   3269             case VideoFormat.MPEG4:
   3270                 retValue = MediaProperties.VCODEC_MPEG4;
   3271                 break;
   3272             case VideoFormat.UNSUPPORTED:
   3273 
   3274             default:
   3275                 retValue = -1;
   3276         }
   3277         return retValue;
   3278     }
   3279 
   3280     /**
   3281      * Maps the audio codec type used in native layer
   3282      * to audio codec type used in JAVA layer
   3283      *
   3284      * @param audioType The audio codec type in native layer
   3285      *
   3286      * @return The audio codec type in JAVA layer
   3287      */
   3288     int getAudioCodecType(int codecType) {
   3289         int retValue = -1;
   3290         switch (codecType) {
   3291             case AudioFormat.AMR_NB:
   3292                 retValue = MediaProperties.ACODEC_AMRNB;
   3293                 break;
   3294             case AudioFormat.AAC:
   3295                 retValue = MediaProperties.ACODEC_AAC_LC;
   3296                 break;
   3297             case AudioFormat.MP3:
   3298                 retValue = MediaProperties.ACODEC_MP3;
   3299                 break;
   3300 
   3301             default:
   3302                 retValue = -1;
   3303         }
   3304         return retValue;
   3305     }
   3306 
   3307     /**
   3308      * Returns the frame rate as integer
   3309      *
   3310      * @param fps The fps as enum
   3311      *
   3312      * @return The frame rate as integer
   3313      */
   3314     int getFrameRate(int fps) {
   3315         int retValue = -1;
   3316         switch (fps) {
   3317             case VideoFrameRate.FR_5_FPS:
   3318                 retValue = 5;
   3319                 break;
   3320             case VideoFrameRate.FR_7_5_FPS:
   3321                 retValue = 8;
   3322                 break;
   3323             case VideoFrameRate.FR_10_FPS:
   3324                 retValue = 10;
   3325                 break;
   3326             case VideoFrameRate.FR_12_5_FPS:
   3327                 retValue = 13;
   3328                 break;
   3329             case VideoFrameRate.FR_15_FPS:
   3330                 retValue = 15;
   3331                 break;
   3332             case VideoFrameRate.FR_20_FPS:
   3333                 retValue = 20;
   3334                 break;
   3335             case VideoFrameRate.FR_25_FPS:
   3336                 retValue = 25;
   3337                 break;
   3338             case VideoFrameRate.FR_30_FPS:
   3339                 retValue = 30;
   3340                 break;
   3341 
   3342             default:
   3343                 retValue = -1;
   3344         }
   3345         return retValue;
   3346     }
   3347 
   3348     /**
   3349      * Maps the file type used in JAVA layer
   3350      * to file type used in native layer
   3351      *
   3352      * @param fileType The file type in JAVA layer
   3353      *
   3354      * @return The File type in native layer
   3355      */
   3356     int getMediaItemFileType(int fileType) {
   3357         int retValue = -1;
   3358 
   3359         switch (fileType) {
   3360             case MediaProperties.FILE_UNSUPPORTED:
   3361                 retValue = FileType.UNSUPPORTED;
   3362                 break;
   3363             case MediaProperties.FILE_3GP:
   3364                 retValue = FileType.THREE_GPP;
   3365                 break;
   3366             case MediaProperties.FILE_MP4:
   3367                 retValue = FileType.MP4;
   3368                 break;
   3369             case MediaProperties.FILE_JPEG:
   3370                 retValue = FileType.JPG;
   3371                 break;
   3372             case MediaProperties.FILE_PNG:
   3373                 retValue = FileType.PNG;
   3374                 break;
   3375             case MediaProperties.FILE_M4V:
   3376                 retValue = FileType.M4V;
   3377                 break;
   3378 
   3379             default:
   3380                 retValue = -1;
   3381         }
   3382         return retValue;
   3383 
   3384     }
   3385 
   3386     /**
   3387      * Maps the rendering mode used in native layer
   3388      * to rendering mode used in JAVA layer
   3389      *
   3390      * @param renderingMode The rendering mode in JAVA layer
   3391      *
   3392      * @return The rendering mode in native layer
   3393      */
   3394     int getMediaItemRenderingMode(int renderingMode) {
   3395         int retValue = -1;
   3396         switch (renderingMode) {
   3397             case MediaItem.RENDERING_MODE_BLACK_BORDER:
   3398                 retValue = MediaRendering.BLACK_BORDERS;
   3399                 break;
   3400             case MediaItem.RENDERING_MODE_STRETCH:
   3401                 retValue = MediaRendering.RESIZING;
   3402                 break;
   3403             case MediaItem.RENDERING_MODE_CROPPING:
   3404                 retValue = MediaRendering.CROPPING;
   3405                 break;
   3406 
   3407             default:
   3408                 retValue = -1;
   3409         }
   3410         return retValue;
   3411     }
   3412 
   3413     /**
   3414      * Maps the transition behavior used in JAVA layer
   3415      * to transition behavior used in native layer
   3416      *
   3417      * @param transitionType The transition behavior in JAVA layer
   3418      *
   3419      * @return The transition behavior in native layer
   3420      */
   3421     int getVideoTransitionBehaviour(int transitionType) {
   3422         int retValue = -1;
   3423         switch (transitionType) {
   3424             case Transition.BEHAVIOR_SPEED_UP:
   3425                 retValue = TransitionBehaviour.SPEED_UP;
   3426                 break;
   3427             case Transition.BEHAVIOR_SPEED_DOWN:
   3428                 retValue = TransitionBehaviour.SPEED_DOWN;
   3429                 break;
   3430             case Transition.BEHAVIOR_LINEAR:
   3431                 retValue = TransitionBehaviour.LINEAR;
   3432                 break;
   3433             case Transition.BEHAVIOR_MIDDLE_SLOW:
   3434                 retValue = TransitionBehaviour.SLOW_MIDDLE;
   3435                 break;
   3436             case Transition.BEHAVIOR_MIDDLE_FAST:
   3437                 retValue = TransitionBehaviour.FAST_MIDDLE;
   3438                 break;
   3439 
   3440             default:
   3441                 retValue = -1;
   3442         }
   3443         return retValue;
   3444     }
   3445 
   3446     /**
   3447      * Maps the transition slide direction used in JAVA layer
   3448      * to transition slide direction used in native layer
   3449      *
   3450      * @param slideDirection The transition slide direction
   3451      * in JAVA layer
   3452      *
   3453      * @return The transition slide direction in native layer
   3454      */
   3455     int getSlideSettingsDirection(int slideDirection) {
   3456         int retValue = -1;
   3457         switch (slideDirection) {
   3458             case TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN:
   3459                 retValue = SlideDirection.RIGHT_OUT_LEFT_IN;
   3460                 break;
   3461             case TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN:
   3462                 retValue = SlideDirection.LEFT_OUT_RIGTH_IN;
   3463                 break;
   3464             case TransitionSliding.DIRECTION_TOP_OUT_BOTTOM_IN:
   3465                 retValue = SlideDirection.TOP_OUT_BOTTOM_IN;
   3466                 break;
   3467             case TransitionSliding.DIRECTION_BOTTOM_OUT_TOP_IN:
   3468                 retValue = SlideDirection.BOTTOM_OUT_TOP_IN;
   3469                 break;
   3470 
   3471             default:
   3472                 retValue = -1;
   3473         }
   3474         return retValue;
   3475     }
   3476 
   3477     /**
   3478      * Maps the effect color type used in JAVA layer
   3479      * to effect color type used in native layer
   3480      *
   3481      * @param effect The EffectColor reference
   3482      *
   3483      * @return The color effect value from native layer
   3484      */
   3485     private int getEffectColorType(EffectColor effect) {
   3486         int retValue = -1;
   3487         switch (effect.getType()) {
   3488             case EffectColor.TYPE_COLOR:
   3489                 if (effect.getColor() == EffectColor.GREEN) {
   3490                     retValue = VideoEffect.GREEN;
   3491                 } else if (effect.getColor() == EffectColor.PINK) {
   3492                     retValue = VideoEffect.PINK;
   3493                 } else if (effect.getColor() == EffectColor.GRAY) {
   3494                     retValue = VideoEffect.BLACK_AND_WHITE;
   3495                 } else {
   3496                     retValue = VideoEffect.COLORRGB16;
   3497                 }
   3498                 break;
   3499             case EffectColor.TYPE_GRADIENT:
   3500                 retValue = VideoEffect.GRADIENT;
   3501                 break;
   3502             case EffectColor.TYPE_SEPIA:
   3503                 retValue = VideoEffect.SEPIA;
   3504                 break;
   3505             case EffectColor.TYPE_NEGATIVE:
   3506                 retValue = VideoEffect.NEGATIVE;
   3507                 break;
   3508             case EffectColor.TYPE_FIFTIES:
   3509                 retValue = VideoEffect.FIFTIES;
   3510                 break;
   3511 
   3512             default:
   3513                 retValue = -1;
   3514         }
   3515         return retValue;
   3516     }
   3517 
   3518     /**
   3519      * Calculates video resolution for output clip
   3520      * based on clip's height and aspect ratio of storyboard
   3521      *
   3522      * @param aspectRatio The aspect ratio of story board
   3523      * @param height The height of clip
   3524      *
   3525      * @return The video resolution
   3526      */
   3527     private int findVideoResolution(int aspectRatio, int height) {
   3528         final Pair<Integer, Integer>[] resolutions;
   3529         final Pair<Integer, Integer> maxResolution;
   3530         int retValue = VideoFrameSize.SIZE_UNDEFINED;
   3531         switch (aspectRatio) {
   3532             case MediaProperties.ASPECT_RATIO_3_2:
   3533                 if (height == MediaProperties.HEIGHT_480)
   3534                     retValue = VideoFrameSize.NTSC;
   3535                 else if (height == MediaProperties.HEIGHT_720)
   3536                     retValue = VideoFrameSize.W720p;
   3537                 break;
   3538             case MediaProperties.ASPECT_RATIO_16_9:
   3539                 if (height == MediaProperties.HEIGHT_480)
   3540                     retValue = VideoFrameSize.WVGA16x9;
   3541                 else if (height == MediaProperties.HEIGHT_720)
   3542                     retValue = VideoFrameSize.V720p;
   3543                 else if (height == MediaProperties.HEIGHT_1080)
   3544                     retValue = VideoFrameSize.V1080p;
   3545                 break;
   3546             case MediaProperties.ASPECT_RATIO_4_3:
   3547                 if (height == MediaProperties.HEIGHT_480)
   3548                     retValue = VideoFrameSize.VGA;
   3549                 else if (height == MediaProperties.HEIGHT_720)
   3550                     retValue = VideoFrameSize.S720p;
   3551                 break;
   3552             case MediaProperties.ASPECT_RATIO_5_3:
   3553                 if (height == MediaProperties.HEIGHT_480)
   3554                     retValue = VideoFrameSize.WVGA;
   3555                 break;
   3556             case MediaProperties.ASPECT_RATIO_11_9:
   3557                 if (height == MediaProperties.HEIGHT_144)
   3558                     retValue = VideoFrameSize.QCIF;
   3559                 else if (height == MediaProperties.HEIGHT_288)
   3560                     retValue = VideoFrameSize.CIF;
   3561                 break;
   3562         }
   3563         if (retValue == VideoFrameSize.SIZE_UNDEFINED) {
   3564             resolutions = MediaProperties.getSupportedResolutions(mVideoEditor.getAspectRatio());
   3565             // Get the highest resolution
   3566             maxResolution = resolutions[resolutions.length - 1];
   3567             retValue = findVideoResolution(mVideoEditor.getAspectRatio(), maxResolution.second);
   3568         }
   3569 
   3570         return retValue;
   3571     }
   3572 
   3573     /**
   3574      *  Calculate a reasonable bitrate for generating intermediate clips.
   3575      */
   3576     private int findVideoBitrate(int videoFrameSize) {
   3577         switch (videoFrameSize) {
   3578             case VideoFrameSize.SQCIF:
   3579             case VideoFrameSize.QQVGA:
   3580             case VideoFrameSize.QCIF:
   3581                 return Bitrate.BR_128_KBPS;
   3582             case VideoFrameSize.QVGA:
   3583             case VideoFrameSize.CIF:
   3584                 return Bitrate.BR_384_KBPS;
   3585             case VideoFrameSize.VGA:
   3586             case VideoFrameSize.WVGA:
   3587             case VideoFrameSize.NTSC:
   3588             case VideoFrameSize.nHD:
   3589             case VideoFrameSize.WVGA16x9:
   3590                 return Bitrate.BR_2_MBPS;
   3591             case VideoFrameSize.V720p:
   3592             case VideoFrameSize.W720p:
   3593             case VideoFrameSize.S720p:
   3594                 return Bitrate.BR_5_MBPS;
   3595             case VideoFrameSize.V1080p:
   3596             default:
   3597                 return Bitrate.BR_8_MBPS;
   3598         }
   3599     }
   3600 
   3601     /**
   3602      * This method is responsible for exporting a movie
   3603      *
   3604      * @param filePath The output file path
   3605      * @param projectDir The output project directory
   3606      * @param height The height of clip
   3607      * @param bitrate The bitrate at which the movie should be exported
   3608      * @param mediaItemsList The media items list
   3609      * @param mediaTransitionList The transitions list
   3610      * @param mediaBGMList The background track list
   3611      * @param listener The ExportProgressListener
   3612      *
   3613      */
   3614     void export(String filePath, String projectDir, int height, int bitrate,
   3615             List<MediaItem> mediaItemsList, List<Transition> mediaTransitionList,
   3616             List<AudioTrack> mediaBGMList, ExportProgressListener listener) {
   3617 
   3618         int outBitrate = 0;
   3619         mExportFilename = filePath;
   3620         previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList,null);
   3621         mExportProgressListener = listener;
   3622         int outVideoProfile = 0;
   3623         int outVideoLevel = 0;
   3624 
   3625         /** Check the platform specific maximum export resolution */
   3626         VideoEditorProfile veProfile = VideoEditorProfile.get();
   3627         if (veProfile == null) {
   3628             throw new RuntimeException("Can't get the video editor profile");
   3629         }
   3630         final int maxOutputHeight = veProfile.maxOutputVideoFrameHeight;
   3631         final int maxOutputWidth = veProfile.maxOutputVideoFrameWidth;
   3632         if (height > maxOutputHeight) {
   3633             throw new IllegalArgumentException(
   3634                     "Unsupported export resolution. Supported maximum width:" +
   3635                     maxOutputWidth + " height:" + maxOutputHeight +
   3636                     " current height:" + height);
   3637         }
   3638         outVideoProfile = VideoEditorProfile.getExportProfile(mExportVideoCodec);
   3639         outVideoLevel = VideoEditorProfile.getExportLevel(mExportVideoCodec);
   3640 
   3641         mProgressToApp = 0;
   3642 
   3643         switch (bitrate) {
   3644             case MediaProperties.BITRATE_28K:
   3645                 outBitrate = Bitrate.BR_32_KBPS;
   3646                 break;
   3647             case MediaProperties.BITRATE_40K:
   3648                 outBitrate = Bitrate.BR_48_KBPS;
   3649                 break;
   3650             case MediaProperties.BITRATE_64K:
   3651                 outBitrate = Bitrate.BR_64_KBPS;
   3652                 break;
   3653             case MediaProperties.BITRATE_96K:
   3654                 outBitrate = Bitrate.BR_96_KBPS;
   3655                 break;
   3656             case MediaProperties.BITRATE_128K:
   3657                 outBitrate = Bitrate.BR_128_KBPS;
   3658                 break;
   3659             case MediaProperties.BITRATE_192K:
   3660                 outBitrate = Bitrate.BR_192_KBPS;
   3661                 break;
   3662             case MediaProperties.BITRATE_256K:
   3663                 outBitrate = Bitrate.BR_256_KBPS;
   3664                 break;
   3665             case MediaProperties.BITRATE_384K:
   3666                 outBitrate = Bitrate.BR_384_KBPS;
   3667                 break;
   3668             case MediaProperties.BITRATE_512K:
   3669                 outBitrate = Bitrate.BR_512_KBPS;
   3670                 break;
   3671             case MediaProperties.BITRATE_800K:
   3672                 outBitrate = Bitrate.BR_800_KBPS;
   3673                 break;
   3674             case MediaProperties.BITRATE_2M:
   3675                 outBitrate = Bitrate.BR_2_MBPS;
   3676                 break;
   3677             case MediaProperties.BITRATE_5M:
   3678                 outBitrate = Bitrate.BR_5_MBPS;
   3679                 break;
   3680             case MediaProperties.BITRATE_8M:
   3681                 outBitrate = Bitrate.BR_8_MBPS;
   3682                 break;
   3683 
   3684             default:
   3685                 throw new IllegalArgumentException("Argument Bitrate incorrect");
   3686         }
   3687         mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
   3688         mPreviewEditSettings.outputFile = mOutputFilename = filePath;
   3689 
   3690         int aspectRatio = mVideoEditor.getAspectRatio();
   3691         mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
   3692         mPreviewEditSettings.videoFormat = mExportVideoCodec;
   3693         mPreviewEditSettings.audioFormat = mExportAudioCodec;
   3694         mPreviewEditSettings.videoProfile = outVideoProfile;
   3695         mPreviewEditSettings.videoLevel = outVideoLevel;
   3696         mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
   3697         mPreviewEditSettings.maxFileSize = 0;
   3698         mPreviewEditSettings.audioChannels = 2;
   3699         mPreviewEditSettings.videoBitrate = outBitrate;
   3700         mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;
   3701 
   3702         mPreviewEditSettings.transitionSettingsArray = new TransitionSettings[mTotalClips - 1];
   3703         for (int index = 0; index < mTotalClips - 1; index++) {
   3704             mPreviewEditSettings.transitionSettingsArray[index] = new TransitionSettings();
   3705             mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType =
   3706                 VideoTransition.NONE;
   3707             mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType =
   3708                 AudioTransition.NONE;
   3709         }
   3710 
   3711         for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
   3712             if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
   3713                 mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
   3714                 mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
   3715             }
   3716         }
   3717         nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
   3718 
   3719         int err = 0;
   3720         try {
   3721             mProcessingState  = PROCESSING_EXPORT;
   3722             mProcessingObject = null;
   3723             err = generateClip(mPreviewEditSettings);
   3724             mProcessingState  = PROCESSING_NONE;
   3725         } catch (IllegalArgumentException ex) {
   3726             Log.e(TAG, "IllegalArgument for generateClip");
   3727             throw ex;
   3728         } catch (IllegalStateException ex) {
   3729             Log.e(TAG, "IllegalStateExceptiont for generateClip");
   3730             throw ex;
   3731         } catch (RuntimeException ex) {
   3732             Log.e(TAG, "RuntimeException for generateClip");
   3733             throw ex;
   3734         }
   3735 
   3736         if (err != 0) {
   3737             Log.e(TAG, "RuntimeException for generateClip");
   3738             throw new RuntimeException("generateClip failed with error=" + err);
   3739         }
   3740 
   3741         mExportProgressListener = null;
   3742     }
   3743 
   3744     /**
   3745      * This methods takes care of stopping the Export process
   3746      *
   3747      * @param The input file name for which export has to be stopped
   3748      */
   3749     void stop(String filename) {
   3750         try {
   3751             stopEncoding();
   3752             new File(mExportFilename).delete();
   3753         } catch (IllegalStateException ex) {
   3754             Log.e(TAG, "Illegal state exception in unload settings");
   3755             throw ex;
   3756         } catch (RuntimeException ex) {
   3757             Log.e(TAG, "Runtime exception in unload settings");
   3758             throw ex;
   3759         }
   3760     }
   3761 
   3762     /**
   3763      * This method extracts a frame from the input file
   3764      * and returns the frame as a bitmap. See getPixelsList() for more information.
   3765      */
   3766     Bitmap getPixels(String filename, int width, int height, long timeMs,
   3767             int videoRotation) {
   3768         final Bitmap result[] = new Bitmap[1];
   3769         getPixelsList(filename, width, height, timeMs, timeMs, 1, new int[] {0},
   3770                 new MediaItem.GetThumbnailListCallback() {
   3771             public void onThumbnail(Bitmap bitmap, int index) {
   3772                 result[0] = bitmap;
   3773             }
   3774         }, videoRotation);
   3775         return result[0];
   3776     }
   3777 
   3778     /**
   3779      * This method extracts a list of frame from the
   3780      * input file and returns the frame in bitmap array
   3781      *
   3782      * @param filename The input file name
   3783      * @param width The width of the output frame, before rotation
   3784      * @param height The height of the output frame, before rotation
   3785      * @param startMs The starting time in ms
   3786      * @param endMs The end time in ms
   3787      * @param thumbnailCount The number of frames to be extracted
   3788      * @param indices The indices of thumbnails wanted
   3789      * @param callback The callback used to pass back the bitmaps
   3790      * @param videoRotation The rotation degree need to be done for the bitmap
   3791      *
   3792      * @return The frames as bitmaps in bitmap array
   3793      **/
   3794     void getPixelsList(String filename, final int width, final int height,
   3795             long startMs, long endMs, int thumbnailCount, int[] indices,
   3796             final MediaItem.GetThumbnailListCallback callback,
   3797             final int videoRotation) {
   3798 
   3799         // The decoder needs output width and height as even
   3800         final int decWidth = (width + 1) & 0xFFFFFFFE;
   3801         final int decHeight = (height + 1) & 0xFFFFFFFE;
   3802         final int thumbnailSize = decWidth * decHeight;
   3803 
   3804         // We convert the decoder output (in int[]) to a bitmap by first
   3805         // copy it into an IntBuffer, then use Bitmap.copyPixelsFromBuffer to
   3806         // copy it to the bitmap.
   3807         final int[] decArray = new int[thumbnailSize];
   3808         final IntBuffer decBuffer = IntBuffer.allocate(thumbnailSize);
   3809 
   3810         // If we need to resize and/or rotate the decoder output, we need a
   3811         // temporary bitmap to hold the decoded output.
   3812         final boolean needToMassage =
   3813                 (decWidth != width || decHeight != height || videoRotation != 0);
   3814         final Bitmap tmpBitmap = needToMassage
   3815                 ? Bitmap.createBitmap(decWidth, decHeight, Bitmap.Config.ARGB_8888)
   3816                 : null;
   3817 
   3818         // The final output bitmap width/height may swap because of rotation.
   3819         final boolean needToSwapWH = (videoRotation == 90 || videoRotation == 270);
   3820         final int outWidth = needToSwapWH ? height : width;
   3821         final int outHeight = needToSwapWH ? width : height;
   3822 
   3823         nativeGetPixelsList(filename, decArray, decWidth, decHeight,
   3824                 thumbnailCount, startMs, endMs, indices,
   3825                 new NativeGetPixelsListCallback() {
   3826             public void onThumbnail(int index) {
   3827                 // This is the bitmap we will output to the client
   3828                 Bitmap outBitmap = Bitmap.createBitmap(
   3829                         outWidth, outHeight, Bitmap.Config.ARGB_8888);
   3830 
   3831                 // Copy int[] to IntBuffer
   3832                 decBuffer.put(decArray, 0, thumbnailSize);
   3833                 decBuffer.rewind();
   3834 
   3835                 if (!needToMassage) {
   3836                     // We can directly read the decoded result to output bitmap
   3837                     outBitmap.copyPixelsFromBuffer(decBuffer);
   3838                 } else {
   3839                     // Copy the decoded result to an intermediate bitmap first
   3840                     tmpBitmap.copyPixelsFromBuffer(decBuffer);
   3841 
   3842                     // Create a canvas to resize/rotate the bitmap
   3843                     // First scale the decoded bitmap to (0,0)-(1,1), rotate it
   3844                     // with (0.5, 0.5) as center, then scale it to
   3845                     // (outWidth, outHeight).
   3846                     final Canvas canvas = new Canvas(outBitmap);
   3847                     Matrix m = new Matrix();
   3848                     float sx = 1f / decWidth;
   3849                     float sy = 1f / decHeight;
   3850                     m.postScale(sx, sy);
   3851                     m.postRotate(videoRotation, 0.5f, 0.5f);
   3852                     m.postScale(outWidth, outHeight);
   3853                     canvas.drawBitmap(tmpBitmap, m, sResizePaint);
   3854                 }
   3855                 callback.onThumbnail(outBitmap, index);
   3856             }
   3857         });
   3858 
   3859         if (tmpBitmap != null) {
   3860             tmpBitmap.recycle();
   3861         }
   3862     }
   3863 
   3864     interface NativeGetPixelsListCallback {
   3865         public void onThumbnail(int index);
   3866     }
   3867 
   3868     /**
   3869      * This method generates the audio graph
   3870      *
   3871      * @param uniqueId The unique id
   3872      * @param inFileName The inputFile
   3873      * @param OutAudiGraphFileName output filename
   3874      * @param frameDuration The each frame duration
   3875      * @param audioChannels The number of audio channels
   3876      * @param samplesCount Total number of samples count
   3877      * @param listener ExtractAudioWaveformProgressListener reference
   3878      * @param isVideo The flag to indicate if the file is video file or not
   3879      *
   3880      **/
   3881     void generateAudioGraph(String uniqueId, String inFileName, String OutAudiGraphFileName,
   3882             int frameDuration, int audioChannels, int samplesCount,
   3883             ExtractAudioWaveformProgressListener listener, boolean isVideo) {
   3884         String tempPCMFileName;
   3885 
   3886         mExtractAudioWaveformProgressListener = listener;
   3887 
   3888         /**
   3889          * In case of Video, first call will generate the PCM file to make the
   3890          * audio graph
   3891          */
   3892         if (isVideo) {
   3893             tempPCMFileName = String.format(mProjectPath + "/" + uniqueId + ".pcm");
   3894         } else {
   3895             tempPCMFileName = mAudioTrackPCMFilePath;
   3896         }
   3897 
   3898         /**
   3899          * For Video item, generate the PCM
   3900          */
   3901         if (isVideo) {
   3902             nativeGenerateRawAudio(inFileName, tempPCMFileName);
   3903         }
   3904 
   3905         nativeGenerateAudioGraph(tempPCMFileName, OutAudiGraphFileName, frameDuration,
   3906                 audioChannels, samplesCount);
   3907 
   3908         /**
   3909          * Once the audio graph file is generated, delete the pcm file
   3910          */
   3911         if (isVideo) {
   3912             new File(tempPCMFileName).delete();
   3913         }
   3914     }
   3915 
   3916     void clearPreviewSurface(Surface surface) {
   3917         nativeClearSurface(surface);
   3918     }
   3919 
   3920     /**
   3921      * Grab the semaphore which arbitrates access to the editor
   3922      *
   3923      * @throws InterruptedException
   3924      */
   3925     private void lock() throws InterruptedException {
   3926         if (Log.isLoggable(TAG, Log.DEBUG)) {
   3927             Log.d(TAG, "lock: grabbing semaphore", new Throwable());
   3928         }
   3929         mLock.acquire();
   3930         if (Log.isLoggable(TAG, Log.DEBUG)) {
   3931             Log.d(TAG, "lock: grabbed semaphore");
   3932         }
   3933     }
   3934 
   3935     /**
   3936      * Release the semaphore which arbitrates access to the editor
   3937      */
   3938     private void unlock() {
   3939         if (Log.isLoggable(TAG, Log.DEBUG)) {
   3940             Log.d(TAG, "unlock: releasing semaphore");
   3941         }
   3942         mLock.release();
   3943     }
   3944 
   3945     /**     Native Methods        */
   3946     native Properties getMediaProperties(String file) throws IllegalArgumentException,
   3947             IllegalStateException, RuntimeException, Exception;
   3948 
   3949     /**
   3950      * Get the version of ManualEdit.
   3951      *
   3952      * @return version of ManualEdit
   3953      * @throws RuntimeException if an error occurred
   3954      * @see Version
   3955      */
   3956     private static native Version getVersion() throws RuntimeException;
   3957 
   3958     /**
   3959      * Returns the video thumbnail in an array of integers. Output format is
   3960      * ARGB8888.
   3961      *
   3962      * @param pixelArray the array that receives the pixel values
   3963      * @param width width of the video thumbnail
   3964      * @param height height of the video thumbnail
   3965      * @param timeMS desired time of the thumbnail in ms
   3966      * @return actual time in ms of the thumbnail generated
   3967      * @throws IllegalStateException if the class has not been initialized
   3968      * @throws IllegalArgumentException if the pixelArray is not available or
   3969      *             one of the dimensions is negative or zero or the time is
   3970      *             negative
   3971      * @throws RuntimeException on runtime errors in native code
   3972      */
   3973     private native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height,
   3974             long timeMS);
   3975 
   3976     private native int nativeGetPixelsList(String fileName, int[] pixelArray,
   3977             int width, int height, int nosofTN, long startTimeMs,
   3978             long endTimeMs, int[] indices, NativeGetPixelsListCallback callback);
   3979 
   3980     /**
   3981      * Releases the JNI and cleans up the core native module.. Should be called
   3982      * only after init( )
   3983      *
   3984      * @throws IllegalStateException if the method could not be called
   3985      */
   3986     private native void release() throws IllegalStateException, RuntimeException;
   3987 
   3988     /*
   3989      * Clear the preview surface
   3990      */
   3991     private native void nativeClearSurface(Surface surface);
   3992 
   3993     /**
   3994      * Stops the encoding. This method should only be called after encoding has
   3995      * started using method <code> startEncoding</code>
   3996      *
   3997      * @throws IllegalStateException if the method could not be called
   3998      */
   3999     private native void stopEncoding() throws IllegalStateException, RuntimeException;
   4000 
   4001 
   4002     private native void _init(String tempPath, String libraryPath)
   4003             throws IllegalArgumentException, IllegalStateException, RuntimeException;
   4004 
   4005     private native void nativeStartPreview(Surface mSurface, long fromMs, long toMs,
   4006             int callbackAfterFrameCount, boolean loop) throws IllegalArgumentException,
   4007             IllegalStateException, RuntimeException;
   4008 
   4009     private native void nativePopulateSettings(EditSettings editSettings,
   4010             PreviewClipProperties mProperties, AudioSettings mAudioSettings)
   4011     throws IllegalArgumentException, IllegalStateException, RuntimeException;
   4012 
   4013     private native int nativeRenderPreviewFrame(Surface mSurface, long timeMs,
   4014                                                  int surfaceWidth, int surfaceHeight)
   4015                                                  throws IllegalArgumentException,
   4016                                                  IllegalStateException, RuntimeException;
   4017 
   4018     private native int nativeRenderMediaItemPreviewFrame(Surface mSurface, String filepath,
   4019             int framewidth, int frameheight, int surfacewidth, int surfaceheight, long timeMs)
   4020     throws IllegalArgumentException, IllegalStateException, RuntimeException;
   4021 
   4022     private native int nativeStopPreview();
   4023 
   4024     private native int nativeGenerateAudioGraph(String pcmFilePath, String outGraphPath,
   4025             int frameDuration, int channels, int sampleCount);
   4026 
   4027     private native int nativeGenerateRawAudio(String InFileName, String PCMFileName);
   4028 
   4029     private native int nativeGenerateClip(EditSettings editSettings)
   4030     throws IllegalArgumentException, IllegalStateException, RuntimeException;
   4031 
   4032 }
   4033