      1 /*
      2  * Copyright (C) 2010 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 
     17 package android.media.videoeditor;
     18 
     19 import java.io.File;
     20 import java.io.IOException;
     21 import java.math.BigDecimal;
     22 import java.nio.IntBuffer;
     23 import java.util.Iterator;
     24 import java.util.List;
     25 import java.util.concurrent.Semaphore;
     26 
     27 import android.graphics.Bitmap;
     28 import android.graphics.BitmapFactory;
     29 import android.graphics.Canvas;
     30 import android.graphics.Paint;
     31 import android.graphics.Rect;
     32 import android.graphics.Matrix;
     33 import android.media.videoeditor.VideoEditor.ExportProgressListener;
     34 import android.media.videoeditor.VideoEditor.PreviewProgressListener;
     35 import android.media.videoeditor.VideoEditor.MediaProcessingProgressListener;
     36 import android.util.Log;
     37 import android.util.Pair;
     38 import android.view.Surface;
     39 
     40 /**
     41  * This class provides native methods to be used by MediaArtist. {@hide}
     42  */
     43 class MediaArtistNativeHelper {
     44     private static final String TAG = "MediaArtistNativeHelper";
     45 
     46     static {
     47         System.loadLibrary("videoeditor_jni");
     48     }
     49 
     50     private static final int MAX_THUMBNAIL_PERMITTED = 8;
     51 
     52     public static final int TASK_LOADING_SETTINGS = 1;
     53     public static final int TASK_ENCODING = 2;
     54 
     55     /**
     56      *  The resize paint
     57      */
     58     private static final Paint sResizePaint = new Paint(Paint.FILTER_BITMAP_FLAG);
     59 
     60     private final VideoEditor mVideoEditor;
     61     /*
     62      *  Semaphore to control preview calls
     63      */
     64     private final Semaphore mLock;
     65 
     66     private EditSettings mStoryBoardSettings;
     67 
     68     private String mOutputFilename;
     69 
     70     private PreviewClipProperties mClipProperties = null;
     71 
     72     private EditSettings mPreviewEditSettings;
     73 
     74     private AudioSettings mAudioSettings = null;
     75 
     76     private AudioTrack mAudioTrack = null;
     77 
     78     private boolean mInvalidatePreviewArray = true;
     79 
     80     private boolean mRegenerateAudio = true;
     81 
     82     private String mExportFilename = null;
     83     private int mExportVideoCodec = 0;
     84     private int mExportAudioCodec = 0;
     85     private int mProgressToApp;
     86 
     87     private String mRenderPreviewOverlayFile;
     88     private int mRenderPreviewRenderingMode;
     89 
     90     private boolean mIsFirstProgress;
     91 
     92     private static final String AUDIO_TRACK_PCM_FILE = "AudioPcm.pcm";
     93 
     94     // Processing indication
     95     public static final int PROCESSING_NONE          = 0;
     96     public static final int PROCESSING_AUDIO_PCM     = 1;
     97     public static final int PROCESSING_TRANSITION    = 2;
     98     public static final int PROCESSING_KENBURNS      = 3;
     99     public static final int PROCESSING_INTERMEDIATE1 = 11;
    100     public static final int PROCESSING_INTERMEDIATE2 = 12;
    101     public static final int PROCESSING_INTERMEDIATE3 = 13;
    102     public static final int PROCESSING_EXPORT        = 20;
    103 
    104     private int mProcessingState;
    105     private Object mProcessingObject;
    106     private PreviewProgressListener mPreviewProgressListener;
    107     private ExportProgressListener mExportProgressListener;
    108     private ExtractAudioWaveformProgressListener mExtractAudioWaveformProgressListener;
    109     private MediaProcessingProgressListener mMediaProcessingProgressListener;
    110     private final String mProjectPath;
    111 
    112     private long mPreviewProgress;
    113 
    114     private String mAudioTrackPCMFilePath;
    115 
    116     private int mTotalClips = 0;
    117 
    118     private boolean mErrorFlagSet = false;
    119 
    120     @SuppressWarnings("unused")
    121     private int mManualEditContext;
    122 
    123     /* Listeners */
    124 
    125     /**
    126      * Interface definition for a listener to be invoked when there is an update
    127      * in a running task.
    128      */
    129     public interface OnProgressUpdateListener {
    130         /**
    131          * Called when there is an update.
    132          *
    133          * @param taskId id of the task reporting an update.
    134          * @param progress progress of the task [0..100].
    135          * @see MediaArtistNativeHelper#TASK_ENCODING
    136          */
    137         public void OnProgressUpdate(int taskId, int progress);
    138     }
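
            /*
             * Illustrative usage sketch (not part of the original source): a minimal
             * listener that logs encoding progress. How the listener is registered is
             * an assumption and is not shown by this file.
             *
             *   OnProgressUpdateListener listener = new OnProgressUpdateListener() {
             *       public void OnProgressUpdate(int taskId, int progress) {
             *           if (taskId == TASK_ENCODING) {
             *               Log.d(TAG, "Encoding progress: " + progress + "%");
             *           }
             *       }
             *   };
             */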
    139 
    140     /** Defines the version. */
    141     public final class Version {
    142 
    143         /** Major version number */
    144         public int major;
    145 
    146         /** Minor version number */
    147         public int minor;
    148 
    149         /** Revision number */
    150         public int revision;
    151 
    152         /** VIDEOEDITOR major version number */
    153         private static final int VIDEOEDITOR_MAJOR_VERSION = 0;
    154 
    155         /** VIDEOEDITOR minor version number */
    156         private static final int VIDEOEDITOR_MINOR_VERSION = 0;
    157 
    158         /** VIDEOEDITOR revision number */
    159         private static final int VIDEOEDITOR_REVISION_VERSION = 1;
    160 
    161         /** Method which returns the current VIDEOEDITOR version */
    162         public Version getVersion() {
    163             Version version = new Version();
    164 
    165             version.major = Version.VIDEOEDITOR_MAJOR_VERSION;
    166             version.minor = Version.VIDEOEDITOR_MINOR_VERSION;
    167             version.revision = Version.VIDEOEDITOR_REVISION_VERSION;
    168 
    169             return version;
    170         }
    171     }
    172 
    173     /**
    174      * Defines output audio formats.
    175      */
    176     public final class AudioFormat {
    177         /** No audio present in output clip. Used to generate video only clip */
    178         public static final int NO_AUDIO = 0;
    179 
    180         /** AMR Narrow Band. */
    181         public static final int AMR_NB = 1;
    182 
    183         /** Advanced Audio Coding (AAC). */
    184         public static final int AAC = 2;
    185 
    186         /** Advanced Audio Codec Plus (HE-AAC v1). */
    187         public static final int AAC_PLUS = 3;
    188 
    189         /** Advanced Audio Codec Plus (HE-AAC v2). */
    190         public static final int ENHANCED_AAC_PLUS = 4;
    191 
    192         /** MPEG layer 3 (MP3). */
    193         public static final int MP3 = 5;
    194 
    195         /** Enhanced Variable Rate Codec (EVRC). */
    196         public static final int EVRC = 6;
    197 
    198         /** Pulse Code Modulation (PCM). */
    199         public static final int PCM = 7;
    200 
    201         /** No transcoding. Output audio format is same as input audio format */
    202         public static final int NULL_AUDIO = 254;
    203 
    204         /** Unsupported audio format. */
    205         public static final int UNSUPPORTED_AUDIO = 255;
    206     }
    207 
    208     /**
    209      * Defines audio sampling frequencies.
    210      */
    211     public final class AudioSamplingFrequency {
    212         /**
    213          * Default sampling frequency. Uses the default frequency for a specific
    214          * audio format. For AAC the only supported (and thus default) sampling
    215          * frequency is 16 kHz; for this audio format the sampling frequency set
    216          * in the OutputParams is therefore ignored.
    217          **/
    218         public static final int FREQ_DEFAULT = 0;
    219 
    220         /** Audio sampling frequency of 8000 Hz. */
    221         public static final int FREQ_8000 = 8000;
    222 
    223         /** Audio sampling frequency of 11025 Hz. */
    224         public static final int FREQ_11025 = 11025;
    225 
    226         /** Audio sampling frequency of 12000 Hz. */
    227         public static final int FREQ_12000 = 12000;
    228 
    229         /** Audio sampling frequency of 16000 Hz. */
    230         public static final int FREQ_16000 = 16000;
    231 
    232         /** Audio sampling frequency of 22050 Hz. */
    233         public static final int FREQ_22050 = 22050;
    234 
    235         /** Audio sampling frequency of 24000 Hz. */
    236         public static final int FREQ_24000 = 24000;
    237 
    238         /** Audio sampling frequency of 32000 Hz. */
    239         public static final int FREQ_32000 = 32000;
    240 
    241         /** Audio sampling frequency of 44100 Hz. */
    242         public static final int FREQ_44100 = 44100;
    243 
    244         /** Audio sampling frequency of 48000 Hz. Not available for output file. */
    245         public static final int FREQ_48000 = 48000;
    246     }
    247 
    248     /**
    249      * Defines the supported fixed audio and video bitrates. These values are
    250      * for output audio and video only.
    251      */
    252     public final class Bitrate {
    253         /** Variable bitrate. No bitrate regulation is applied. */
    254         public static final int VARIABLE = -1;
    255 
    256         /** An undefined bitrate. */
    257         public static final int UNDEFINED = 0;
    258 
    259         /** A bitrate of 9.2 kbits/s. */
    260         public static final int BR_9_2_KBPS = 9200;
    261 
    262         /** A bitrate of 12.2 kbits/s. */
    263         public static final int BR_12_2_KBPS = 12200;
    264 
    265         /** A bitrate of 16 kbits/s. */
    266         public static final int BR_16_KBPS = 16000;
    267 
    268         /** A bitrate of 24 kbits/s. */
    269         public static final int BR_24_KBPS = 24000;
    270 
    271         /** A bitrate of 32 kbits/s. */
    272         public static final int BR_32_KBPS = 32000;
    273 
    274         /** A bitrate of 48 kbits/s. */
    275         public static final int BR_48_KBPS = 48000;
    276 
    277         /** A bitrate of 64 kbits/s. */
    278         public static final int BR_64_KBPS = 64000;
    279 
    280         /** A bitrate of 96 kbits/s. */
    281         public static final int BR_96_KBPS = 96000;
    282 
    283         /** A bitrate of 128 kbits/s. */
    284         public static final int BR_128_KBPS = 128000;
    285 
    286         /** A bitrate of 192 kbits/s. */
    287         public static final int BR_192_KBPS = 192000;
    288 
    289         /** A bitrate of 256 kbits/s. */
    290         public static final int BR_256_KBPS = 256000;
    291 
    292         /** A bitrate of 288 kbits/s. */
    293         public static final int BR_288_KBPS = 288000;
    294 
    295         /** A bitrate of 384 kbits/s. */
    296         public static final int BR_384_KBPS = 384000;
    297 
    298         /** A bitrate of 512 kbits/s. */
    299         public static final int BR_512_KBPS = 512000;
    300 
    301         /** A bitrate of 800 kbits/s. */
    302         public static final int BR_800_KBPS = 800000;
    303 
    304         /** A bitrate of 2 Mbits/s. */
    305         public static final int BR_2_MBPS = 2000000;
    306 
    307         /** A bitrate of 5 Mbits/s. */
    308         public static final int BR_5_MBPS = 5000000;
    309 
    310         /** A bitrate of 8 Mbits/s. */
    311         public static final int BR_8_MBPS = 8000000;
    312     }
    313 
    314     /**
    315      * Defines all supported file types.
    316      */
    317     public final class FileType {
    318         /** 3GPP file type. */
    319         public static final int THREE_GPP = 0;
    320 
    321         /** MP4 file type. */
    322         public static final int MP4 = 1;
    323 
    324         /** AMR file type. */
    325         public static final int AMR = 2;
    326 
    327         /** MP3 audio file type. */
    328         public static final int MP3 = 3;
    329 
    330         /** PCM audio file type. */
    331         public static final int PCM = 4;
    332 
    333         /** JPEG image file type. */
    334         public static final int JPG = 5;
    335 
    336         /** GIF image file type. */
    337         public static final int GIF = 7;
    338 
    339         /** PNG image file type. */
    340         public static final int PNG = 8;
    341 
    342         /** M4V file type. */
    343         public static final int M4V = 10;
    344 
    345         /** Unsupported file type. */
    346         public static final int UNSUPPORTED = 255;
    347     }
    348 
    349     /**
    350      * Defines rendering types. Rendering can only be applied to files
    351      * containing video streams.
    352      **/
    353     public final class MediaRendering {
    354         /**
    355          * Resize to fit the output video, changing the aspect ratio if
    356          * needed.
    357          */
    358         public static final int RESIZING = 0;
    359 
    360         /**
    361          * Crop the input video to fit the output video resolution.
    362          **/
    363         public static final int CROPPING = 1;
    364 
    365         /**
    366          * Resize to fit the output video resolution but maintain the aspect
    367          * ratio. This framing type adds black borders if needed.
    368          */
    369         public static final int BLACK_BORDERS = 2;
    370     }
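
            /*
             * Illustrative sketch (not part of the original source) of how the
             * BLACK_BORDERS mode can be understood: the input is scaled by the largest
             * factor that still fits the output frame, preserving its aspect ratio, and
             * the remaining area is filled with black.
             *
             *   float scale = Math.min((float) outWidth / inWidth,
             *           (float) outHeight / inHeight);
             *   int scaledWidth = Math.round(inWidth * scale);
             *   int scaledHeight = Math.round(inHeight * scale);
             *   int borderX = (outWidth - scaledWidth) / 2;   // left/right borders
             *   int borderY = (outHeight - scaledHeight) / 2; // top/bottom borders
             */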
    371 
    372     /**
    373      * Defines the results.
    374      */
    375     public final class Result {
    376         /** No error. Result OK. */
    377         public static final int NO_ERROR = 0;
    378 
    379         /** File not found */
    380         public static final int ERR_FILE_NOT_FOUND = 1;
    381 
    382         /**
    383          * In case of UTF-8 conversion, the size of the converted path is
    384          * larger than the corresponding allocated buffer.
    385          */
    386         public static final int ERR_BUFFER_OUT_TOO_SMALL = 2;
    387 
    388         /** Invalid file type. */
    389         public static final int ERR_INVALID_FILE_TYPE = 3;
    390 
    391         /** Invalid effect kind. */
    392         public static final int ERR_INVALID_EFFECT_KIND = 4;
    393 
    394         /** Invalid video effect. */
    395         public static final int ERR_INVALID_VIDEO_EFFECT_TYPE = 5;
    396 
    397         /** Invalid audio effect. */
    398         public static final int ERR_INVALID_AUDIO_EFFECT_TYPE = 6;
    399 
    400         /** Invalid video transition. */
    401         public static final int ERR_INVALID_VIDEO_TRANSITION_TYPE = 7;
    402 
    403         /** Invalid audio transition. */
    404         public static final int ERR_INVALID_AUDIO_TRANSITION_TYPE = 8;
    405 
    406         /** Invalid encoding frame rate. */
    407         public static final int ERR_INVALID_VIDEO_ENCODING_FRAME_RATE = 9;
    408 
    409         /** External effect is called but this function is not set. */
    410         public static final int ERR_EXTERNAL_EFFECT_NULL = 10;
    411 
    412         /** External transition is called but this function is not set. */
    413         public static final int ERR_EXTERNAL_TRANSITION_NULL = 11;
    414 
    415         /** Begin time cut is larger than the video clip duration. */
    416         public static final int ERR_BEGIN_CUT_LARGER_THAN_DURATION = 12;
    417 
    418         /** Begin cut time is larger than or equal to the end cut time. */
    419         public static final int ERR_BEGIN_CUT_LARGER_THAN_END_CUT = 13;
    420 
    421         /** Two consecutive transitions are overlapping on one clip. */
    422         public static final int ERR_OVERLAPPING_TRANSITIONS = 14;
    423 
    424         /** Internal error, type size mismatch. */
    425         public static final int ERR_ANALYSIS_DATA_SIZE_TOO_SMALL = 15;
    426 
    427         /** An input 3GPP file is invalid/corrupted. */
    428         public static final int ERR_INVALID_3GPP_FILE = 16;
    429 
    430         /** A file contains an unsupported video format. */
    431         public static final int ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT = 17;
    432 
    433         /** A file contains an unsupported audio format. */
    434         public static final int ERR_UNSUPPORTED_INPUT_AUDIO_FORMAT = 18;
    435 
    436         /** A file format is not supported. */
    437         public static final int ERR_AMR_EDITING_UNSUPPORTED = 19;
    438 
    439         /** An input clip has an unexpectedly large Video AU. */
    440         public static final int ERR_INPUT_VIDEO_AU_TOO_LARGE = 20;
    441 
    442         /** An input clip has an unexpectedly large Audio AU. */
    443         public static final int ERR_INPUT_AUDIO_AU_TOO_LARGE = 21;
    444 
    445         /** An input clip has a corrupted Audio AU. */
    446         public static final int ERR_INPUT_AUDIO_CORRUPTED_AU = 22;
    447 
    448         /** The video encoder encountered an Access Unit error. */
    449         public static final int ERR_ENCODER_ACCES_UNIT_ERROR = 23;
    450 
    451         /** Unsupported video format for Video Editing. */
    452         public static final int ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT = 24;
    453 
    454         /** Unsupported H263 profile for Video Editing. */
    455         public static final int ERR_EDITING_UNSUPPORTED_H263_PROFILE = 25;
    456 
    457         /** Unsupported MPEG-4 profile for Video Editing. */
    458         public static final int ERR_EDITING_UNSUPPORTED_MPEG4_PROFILE = 26;
    459 
    460         /** Unsupported MPEG-4 RVLC tool for Video Editing. */
    461         public static final int ERR_EDITING_UNSUPPORTED_MPEG4_RVLC = 27;
    462 
    463         /** Unsupported audio format for Video Editing. */
    464         public static final int ERR_EDITING_UNSUPPORTED_AUDIO_FORMAT = 28;
    465 
    466         /** File contains no supported stream. */
    467         public static final int ERR_EDITING_NO_SUPPORTED_STREAM_IN_FILE = 29;
    468 
    469         /** File contains no video stream or an unsupported video stream. */
    470         public static final int ERR_EDITING_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 30;
    471 
    472         /** Internal error, clip analysis version mismatch. */
    473         public static final int ERR_INVALID_CLIP_ANALYSIS_VERSION = 31;
    474 
    475         /**
    476          * At least one of the clip analyses was generated on another
    477          * platform (WIN32, ARM, etc.).
    478          */
    479         public static final int ERR_INVALID_CLIP_ANALYSIS_PLATFORM = 32;
    480 
    481         /** Clips don't have the same video format (H263 or MPEG4). */
    482         public static final int ERR_INCOMPATIBLE_VIDEO_FORMAT = 33;
    483 
    484         /** Clips don't have the same frame size. */
    485         public static final int ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE = 34;
    486 
    487         /** Clips don't have the same MPEG-4 time scale. */
    488         public static final int ERR_INCOMPATIBLE_VIDEO_TIME_SCALE = 35;
    489 
    490         /** Clips don't have the same use of MPEG-4 data partitioning. */
    491         public static final int ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING = 36;
    492 
    493         /** MP3 clips can't be assembled. */
    494         public static final int ERR_UNSUPPORTED_MP3_ASSEMBLY = 37;
    495 
    496         /**
    497          * The input 3GPP file does not contain any supported audio or video
    498          * track.
    499          */
    500         public static final int ERR_NO_SUPPORTED_STREAM_IN_FILE = 38;
    501 
    502         /**
    503          * The volume of the added audio track (AddVolume) must be strictly
    504          * greater than zero.
    505          */
    506         public static final int ERR_ADDVOLUME_EQUALS_ZERO = 39;
    507 
    508         /**
    509          * The time at which an audio track is added can't be greater than the
    510          * input video track duration.
    511          */
    512         public static final int ERR_ADDCTS_HIGHER_THAN_VIDEO_DURATION = 40;
    513 
    514         /** The audio track file format setting is undefined. */
    515         public static final int ERR_UNDEFINED_AUDIO_TRACK_FILE_FORMAT = 41;
    516 
    517         /** The added audio track stream has an unsupported format. */
    518         public static final int ERR_UNSUPPORTED_ADDED_AUDIO_STREAM = 42;
    519 
    520         /** The audio mixing feature doesn't support the audio track type. */
    521         public static final int ERR_AUDIO_MIXING_UNSUPPORTED = 43;
    522 
    523         /** The audio mixing feature doesn't support MP3 audio tracks. */
    524         public static final int ERR_AUDIO_MIXING_MP3_UNSUPPORTED = 44;
    525 
    526         /**
    527          * An added audio track limits the available features: uiAddCts must be
    528          * 0 and bRemoveOriginal must be true.
    529          */
    530         public static final int ERR_FEATURE_UNSUPPORTED_WITH_AUDIO_TRACK = 45;
    531 
    532         /**
    533          * An added AAC audio track limits the available features: uiAddCts must
    534          * be 0 and bRemoveOriginal must be true.
    535          */
    536         public static final int ERR_FEATURE_UNSUPPORTED_WITH_AAC = 46;
    537 
    538         /** Input audio track is not of a type that can be mixed with output. */
    539         public static final int ERR_AUDIO_CANNOT_BE_MIXED = 47;
    540 
    541         /** Input audio track is not AMR-NB, so it can't be mixed with output. */
    542         public static final int ERR_ONLY_AMRNB_INPUT_CAN_BE_MIXED = 48;
    543 
    544         /**
    545          * An added EVRC audio track limits the available features: uiAddCts must
    546          * be 0 and bRemoveOriginal must be true.
    547          */
    548         public static final int ERR_FEATURE_UNSUPPORTED_WITH_EVRC = 49;
    549 
    550         /** H263 profiles other than 0 are not supported. */
    551         public static final int ERR_H263_PROFILE_NOT_SUPPORTED = 51;
    552 
    553         /** File contains no video stream or an unsupported video stream. */
    554         public static final int ERR_NO_SUPPORTED_VIDEO_STREAM_IN_FILE = 52;
    555 
    556         /** Transcoding of the input file(s) is necessary. */
    557         public static final int WAR_TRANSCODING_NECESSARY = 53;
    558 
    559         /**
    560          * The size of the output file will exceed the maximum configured value.
    561          */
    562         public static final int WAR_MAX_OUTPUT_SIZE_EXCEEDED = 54;
    563 
    564         /** The time scale is too big. */
    565         public static final int WAR_TIMESCALE_TOO_BIG = 55;
    566 
    567         /** The year is out of range */
    568         public static final int ERR_CLOCK_BAD_REF_YEAR = 56;
    569 
    570         /** The directory could not be opened */
    571         public static final int ERR_DIR_OPEN_FAILED = 57;
    572 
    573         /** The directory could not be read */
    574         public static final int ERR_DIR_READ_FAILED = 58;
    575 
    576         /** There are no more entries in the current directory */
    577         public static final int ERR_DIR_NO_MORE_ENTRY = 59;
    578 
    579         /** One or more input parameters are invalid */
    580         public static final int ERR_PARAMETER = 60;
    581 
    582         /** There is a state machine error */
    583         public static final int ERR_STATE = 61;
    584 
    585         /** Memory allocation failed */
    586         public static final int ERR_ALLOC = 62;
    587 
    588         /** Context is invalid */
    589         public static final int ERR_BAD_CONTEXT = 63;
    590 
    591         /** Context creation failed */
    592         public static final int ERR_CONTEXT_FAILED = 64;
    593 
    594         /** Invalid stream ID */
    595         public static final int ERR_BAD_STREAM_ID = 65;
    596 
    597         /** Invalid option ID */
    598         public static final int ERR_BAD_OPTION_ID = 66;
    599 
    600         /** The option is write only */
    601         public static final int ERR_WRITE_ONLY = 67;
    602 
    603         /** The option is read only */
    604         public static final int ERR_READ_ONLY = 68;
    605 
    606         /** The feature is not implemented in this version */
    607         public static final int ERR_NOT_IMPLEMENTED = 69;
    608 
    609         /** The media type is not supported */
    610         public static final int ERR_UNSUPPORTED_MEDIA_TYPE = 70;
    611 
    612         /** No data to be encoded */
    613         public static final int WAR_NO_DATA_YET = 71;
    614 
    615         /** No data to be decoded */
    616         public static final int WAR_NO_MORE_STREAM = 72;
    617 
    618         /** Time stamp is invalid */
    619         public static final int WAR_INVALID_TIME = 73;
    620 
    621         /** No more data to be decoded */
    622         public static final int WAR_NO_MORE_AU = 74;
    623 
    624         /** Semaphore timed out */
    625         public static final int WAR_TIME_OUT = 75;
    626 
    627         /** Memory buffer is full */
    628         public static final int WAR_BUFFER_FULL = 76;
    629 
    630         /** Server has asked for redirection */
    631         public static final int WAR_REDIRECT = 77;
    632 
    633         /** Too many streams in input */
    634         public static final int WAR_TOO_MUCH_STREAMS = 78;
    635 
    636         /** The file cannot be opened or written to because it is locked */
    637         public static final int ERR_FILE_LOCKED = 79;
    638 
    639         /** The file access mode is invalid */
    640         public static final int ERR_FILE_BAD_MODE_ACCESS = 80;
    641 
    642         /** The file pointer points to an invalid location */
    643         public static final int ERR_FILE_INVALID_POSITION = 81;
    644 
    645         /** Invalid string */
    646         public static final int ERR_STR_BAD_STRING = 94;
    647 
    648         /** The input string cannot be converted */
    649         public static final int ERR_STR_CONV_FAILED = 95;
    650 
    651         /** The string size is too large */
    652         public static final int ERR_STR_OVERFLOW = 96;
    653 
    654         /** Bad string arguments */
    655         public static final int ERR_STR_BAD_ARGS = 97;
    656 
    657         /** The string value is larger than the maximum size allowed */
    658         public static final int WAR_STR_OVERFLOW = 98;
    659 
    660         /** The string value is not present in this comparison operation */
    661         public static final int WAR_STR_NOT_FOUND = 99;
    662 
    663         /** The thread is not started */
    664         public static final int ERR_THREAD_NOT_STARTED = 100;
    665 
    666         /** Transcoding done warning */
    667         public static final int WAR_TRANSCODING_DONE = 101;
    668 
    669         /** Unsupported media type */
    670         public static final int WAR_MEDIATYPE_NOT_SUPPORTED = 102;
    671 
    672         /** Input file contains invalid/unsupported streams */
    673         public static final int ERR_INPUT_FILE_CONTAINS_NO_SUPPORTED_STREAM = 103;
    674 
    675         /** Invalid input file */
    676         public static final int ERR_INVALID_INPUT_FILE = 104;
    677 
    678         /** Invalid output video format */
    679         public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FORMAT = 105;
    680 
    681         /** Invalid output video frame size */
    682         public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_SIZE = 106;
    683 
    684         /** Invalid output video frame rate */
    685         public static final int ERR_UNDEFINED_OUTPUT_VIDEO_FRAME_RATE = 107;
    686 
    687         /** Invalid output audio format */
    688         public static final int ERR_UNDEFINED_OUTPUT_AUDIO_FORMAT = 108;
    689 
    690         /** Invalid video frame size for H.263 */
    691         public static final int ERR_INVALID_VIDEO_FRAME_SIZE_FOR_H263 = 109;
    692 
    693         /** Invalid video frame rate for H.263 */
    694         public static final int ERR_INVALID_VIDEO_FRAME_RATE_FOR_H263 = 110;
    695 
    696         /** Invalid playback duration */
    697         public static final int ERR_DURATION_IS_NULL = 111;
    698 
    699         /** Invalid H.263 profile in file */
    700         public static final int ERR_H263_FORBIDDEN_IN_MP4_FILE = 112;
    701 
    702         /** Invalid AAC sampling frequency */
    703         public static final int ERR_INVALID_AAC_SAMPLING_FREQUENCY = 113;
    704 
    705         /** Audio conversion failure */
    706         public static final int ERR_AUDIO_CONVERSION_FAILED = 114;
    707 
    708         /** Invalid trim start and end times */
    709         public static final int ERR_BEGIN_CUT_EQUALS_END_CUT = 115;
    710 
    711         /** End time smaller than start time for trim */
    712         public static final int ERR_END_CUT_SMALLER_THAN_BEGIN_CUT = 116;
    713 
    714         /** The configured maximum output file size is too small */
    715         public static final int ERR_MAXFILESIZE_TOO_SMALL = 117;
    716 
    717         /** Output video bitrate is too low */
    718         public static final int ERR_VIDEOBITRATE_TOO_LOW = 118;
    719 
    720         /** Output audio bitrate is too low */
    721         public static final int ERR_AUDIOBITRATE_TOO_LOW = 119;
    722 
    723         /** Output video bitrate is too high */
    724         public static final int ERR_VIDEOBITRATE_TOO_HIGH = 120;
    725 
    726         /** Output audio bitrate is too high */
    727         public static final int ERR_AUDIOBITRATE_TOO_HIGH = 121;
    728 
    729         /** Output file size is too small */
    730         public static final int ERR_OUTPUT_FILE_SIZE_TOO_SMALL = 122;
    731 
    732         /** Unknown stream type */
    733         public static final int ERR_READER_UNKNOWN_STREAM_TYPE = 123;
    734 
    735         /** Invalid metadata in input stream */
    736         public static final int WAR_READER_NO_METADATA = 124;
    737 
    738         /** Invalid file reader info warning */
    739         public static final int WAR_READER_INFORMATION_NOT_PRESENT = 125;
    740 
    741         /** Warning to indicate that the writer is being stopped */
    742         public static final int WAR_WRITER_STOP_REQ = 131;
    743 
    744         /** Video decoder failed to provide a frame for transcoding */
    745         public static final int WAR_VIDEORENDERER_NO_NEW_FRAME = 132;
    746 
    747         /** Video deblocking filter is not implemented */
    748         public static final int WAR_DEBLOCKING_FILTER_NOT_IMPLEMENTED = 133;
    749 
    750         /** H.263 decoder profile not supported */
    751         public static final int ERR_DECODER_H263_PROFILE_NOT_SUPPORTED = 134;
    752 
    753         /** The input file contains an unsupported H.263 profile */
    754         public static final int ERR_DECODER_H263_NOT_BASELINE = 135;
    755 
    756         /** There is no more space to store the output file */
    757         public static final int ERR_NOMORE_SPACE_FOR_FILE = 136;
    758 
    759         /** Internal error. */
    760         public static final int ERR_INTERNAL = 255;
    761     }
    762 
    763     /**
    764      * Defines output video formats.
    765      */
    766     public final class VideoFormat {
    767         /** No video present in output clip. Used to generate audio only clip */
    768         public static final int NO_VIDEO = 0;
    769 
    770         /** H263 video format. */
    771         public static final int H263 = 1;
    772 
    773         /** H264 video format. */
    774         public static final int H264 = 2;
    775 
    776         /** MPEG4 video format. */
    777         public static final int MPEG4 = 3;
    778 
    779         /** No transcoding. Output video format is same as input video format */
    780         public static final int NULL_VIDEO = 254;
    781 
    782         /** Unsupported video format. */
    783         public static final int UNSUPPORTED = 255;
    784     }
    785 
    786     /** Defines video frame sizes. */
    787     public final class VideoFrameSize {
    788 
    789         public static final int SIZE_UNDEFINED = -1;
    790 
    791         /** SQCIF 128 x 96 pixels. */
    792         public static final int SQCIF = 0;
    793 
    794         /** QQVGA 160 x 120 pixels. */
    795         public static final int QQVGA = 1;
    796 
    797         /** QCIF 176 x 144 pixels. */
    798         public static final int QCIF = 2;
    799 
    800         /** QVGA 320 x 240 pixels. */
    801         public static final int QVGA = 3;
    802 
    803         /** CIF 352 x 288 pixels. */
    804         public static final int CIF = 4;
    805 
    806         /** VGA 640 x 480 pixels. */
    807         public static final int VGA = 5;
    808 
    809         /** WVGA 800 x 480 pixels. */
    810         public static final int WVGA = 6;
    811 
    812         /** NTSC 720 x 480 pixels. */
    813         public static final int NTSC = 7;
    814 
    815         /** 640 x 360 */
    816         public static final int nHD = 8;
    817 
    818         /** 854 x 480 */
    819         public static final int WVGA16x9 = 9;
    820 
    821         /** 720p 1280 X 720 */
    822         public static final int V720p = 10;
    823 
    824         /** W720p 1080 x 720 */
    825         public static final int W720p = 11;
    826 
    827         /** S720p 960 x 720 */
    828         public static final int S720p = 12;
    829 
    830         /** 1080p 1920 x 1080 */
    831         public static final int V1080p = 13;
    832     }
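
            /*
             * Illustrative sketch (not part of the original source): mapping a few
             * VideoFrameSize constants to the pixel dimensions documented above, using
             * android.util.Pair (already imported by this file). The helper name is
             * hypothetical.
             *
             *   static Pair<Integer, Integer> frameDimensions(int frameSize) {
             *       switch (frameSize) {
             *           case VideoFrameSize.QVGA:   return Pair.create(320, 240);
             *           case VideoFrameSize.VGA:    return Pair.create(640, 480);
             *           case VideoFrameSize.nHD:    return Pair.create(640, 360);
             *           case VideoFrameSize.V720p:  return Pair.create(1280, 720);
             *           case VideoFrameSize.V1080p: return Pair.create(1920, 1080);
             *           default:                    return null; // other sizes omitted
             *       }
             *   }
             */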
    833 
    834     /**
    835      * Defines output video frame rates.
    836      */
    837     public final class VideoFrameRate {
    838         /** Frame rate of 5 frames per second. */
    839         public static final int FR_5_FPS = 0;
    840 
    841         /** Frame rate of 7.5 frames per second. */
    842         public static final int FR_7_5_FPS = 1;
    843 
    844         /** Frame rate of 10 frames per second. */
    845         public static final int FR_10_FPS = 2;
    846 
    847         /** Frame rate of 12.5 frames per second. */
    848         public static final int FR_12_5_FPS = 3;
    849 
    850         /** Frame rate of 15 frames per second. */
    851         public static final int FR_15_FPS = 4;
    852 
    853         /** Frame rate of 20 frames per second. */
    854         public static final int FR_20_FPS = 5;
    855 
    856         /** Frame rate of 25 frames per second. */
    857         public static final int FR_25_FPS = 6;
    858 
    859         /** Frame rate of 30 frames per second. */
    860         public static final int FR_30_FPS = 7;
    861     }
    862 
    863     /**
    864      * Defines Video Effect Types.
    865      */
    866     public static class VideoEffect {
    867 
    868         public static final int NONE = 0;
    869 
    870         public static final int FADE_FROM_BLACK = 8;
    871 
    872         public static final int FADE_TO_BLACK = 16;
    873 
    874         public static final int EXTERNAL = 256;
    875 
    876         public static final int BLACK_AND_WHITE = 257;
    877 
    878         public static final int PINK = 258;
    879 
    880         public static final int GREEN = 259;
    881 
    882         public static final int SEPIA = 260;
    883 
    884         public static final int NEGATIVE = 261;
    885 
    886         public static final int FRAMING = 262;
    887 
    888         public static final int TEXT = 263;
    889 
    890         public static final int ZOOM_IN = 264;
    891 
    892         public static final int ZOOM_OUT = 265;
    893 
    894         public static final int FIFTIES = 266;
    895 
    896         public static final int COLORRGB16 = 267;
    897 
    898         public static final int GRADIENT = 268;
    899     }
    900 
    901     /**
    902      * Defines the video transitions.
    903      */
    904     public static class VideoTransition {
    905         /** No transition */
    906         public static final int NONE = 0;
    907 
    908         /** Cross fade transition */
    909         public static final int CROSS_FADE = 1;
    910 
    911         /** External transition. Currently not available. */
    912         public static final int EXTERNAL = 256;
    913 
    914         /** AlphaMagic transition. */
    915         public static final int ALPHA_MAGIC = 257;
    916 
    917         /** Slide transition. */
    918         public static final int SLIDE_TRANSITION = 258;
    919 
    920         /** Fade to black transition. */
    921         public static final int FADE_BLACK = 259;
    922     }
    923 
    924     /**
    925      * Defines settings for the AlphaMagic transition
    926      */
    927     public static class AlphaMagicSettings {
    928         /** Name of the alpha file (JPEG file). */
    929         public String file;
    930 
    931         /** Blending percentage [0..100] 0 = no blending. */
    932         public int blendingPercent;
    933 
    934         /** Invert the default rotation direction of the AlphaMagic effect. */
    935         public boolean invertRotation;
    936 
    937         public int rgbWidth;
    938         public int rgbHeight;
    939     }
    940 
    941     /** Defines the direction of the Slide transition. */
    942     public static final class SlideDirection {
    943 
    944         /** Right out left in. */
    945         public static final int RIGHT_OUT_LEFT_IN = 0;
    946 
    947         /** Left out right in. */
    948         public static final int LEFT_OUT_RIGTH_IN = 1;
    949 
    950         /** Top out bottom in. */
    951         public static final int TOP_OUT_BOTTOM_IN = 2;
    952 
    953         /** Bottom out top in */
    954         public static final int BOTTOM_OUT_TOP_IN = 3;
    955     }
    956 
    957     /** Defines the Slide transition settings. */
    958     public static class SlideTransitionSettings {
    959         /**
    960          * Direction of the slide transition. See {@link SlideDirection
    961          * SlideDirection} for valid values.
    962          */
    963         public int direction;
    964     }
    965 
    966     /**
    967      * Defines the settings of a single clip.
    968      */
    969     public static class ClipSettings {
    970 
    971         /**
    972          * The path to the clip file.
    973          * <p>
    974          * File format of the clip. It can be:
    975          * <ul>
    976          * <li>3GP file containing MPEG4/H263/H264 video and AAC/AMR audio
    977          * <li>JPG file
    978          * </ul>
    979          */
    980 
    981         public String clipPath;
    982 
    983         /**
    984          * The path of the decoded file. This is used only for image files.
    985          */
    986         public String clipDecodedPath;
    987 
    988         /**
    989          * The path of the original file. This is used only for image files.
    990          */
    991         public String clipOriginalPath;
    992 
    993         /**
    994          * File type of the clip. See {@link FileType FileType} for valid
    995          * values.
    996          */
    997         public int fileType;
    998 
    999         /** Begin of the cut in the clip in milliseconds. */
   1000         public int beginCutTime;
   1001 
   1002         /**
   1003          * End of the cut in the clip in milliseconds. Set both
   1004          * <code>beginCutTime</code> and <code>endCutTime</code> to
   1005          * <code>0</code> to get the full length of the clip without a cut. In
   1006          * the case of a JPG clip, this is the duration of the JPEG file.
   1007          */
   1008         public int endCutTime;
   1009 
   1010         /**
   1011          * Begin of the cut in the clip, as a percentage of the file duration.
   1012          */
   1013         public int beginCutPercent;
   1014 
   1015         /**
   1016          * End of the cut in the clip, as a percentage of the file duration. Set
   1017          * both <code>beginCutPercent</code> and <code>endCutPercent</code> to
   1018          * <code>0</code> to get the full length of the clip without a cut.
   1019          */
   1020         public int endCutPercent;
   1021 
   1022         /** Enable panning and zooming. */
   1023         public boolean panZoomEnabled;
   1024 
   1025         /** Zoom percentage at start of clip. 0 = no zoom, 100 = full zoom */
   1026         public int panZoomPercentStart;
   1027 
   1028         /** Top left X coordinate at start of clip. */
   1029         public int panZoomTopLeftXStart;
   1030 
   1031         /** Top left Y coordinate at start of clip. */
   1032         public int panZoomTopLeftYStart;
   1033 
   1034         /** Zoom percentage at end of clip. 0 = no zoom, 100 = full zoom */
   1035         public int panZoomPercentEnd;
   1036 
   1037         /** Top left X coordinate at end of clip. */
   1038         public int panZoomTopLeftXEnd;
   1039 
   1040         /** Top left Y coordinate at end of clip. */
   1041         public int panZoomTopLeftYEnd;
   1042 
   1043         /**
   1044          * Set the media rendering. See {@link MediaRendering MediaRendering}
   1045          * for valid values.
   1046          */
   1047         public int mediaRendering;
   1048 
   1049         /**
   1050          * RGB width and height.
   1051          */
   1052         public int rgbWidth;
   1053 
   1054         public int rgbHeight;
   1055 
   1056         /** Video rotation in degrees. */
   1057         public int rotationDegree;
   1058     }
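
            /*
             * Illustrative example (not part of the original source): populating a
             * ClipSettings for a 3GP clip trimmed to the 2 s .. 8 s range and rendered
             * with black borders. The file path is hypothetical.
             *
             *   ClipSettings clip = new ClipSettings();
             *   clip.clipPath = "/sdcard/movie.3gp";              // hypothetical path
             *   clip.fileType = FileType.THREE_GPP;
             *   clip.beginCutTime = 2000;                         // milliseconds
             *   clip.endCutTime = 8000;                           // milliseconds
             *   clip.mediaRendering = MediaRendering.BLACK_BORDERS;
             */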
   1059 
   1060     /**
   1061      * Defines settings for a transition.
   1062      */
   1063     public static class TransitionSettings {
   1064 
   1065         /** Duration of the transition in milliseconds. */
   1066         public int duration;
   1067 
   1068         /**
   1069          * Transition type for video. See {@link VideoTransition
   1070          * VideoTransition} for valid values.
   1071          */
   1072         public int videoTransitionType;
   1073 
   1074         /**
   1075          * Transition type for audio. See {@link AudioTransition
   1076          * AudioTransition} for valid values.
   1077          */
   1078         public int audioTransitionType;
   1079 
   1080         /**
   1081          * Transition behaviour. See {@link TransitionBehaviour
   1082          * TransitionBehaviour} for valid values.
   1083          */
   1084         public int transitionBehaviour;
   1085 
   1086         /**
   1087          * Settings for AlphaMagic transition. Only needs to be set if
   1088          * <code>videoTransitionType</code> is set to
   1089          * <code>VideoTransition.ALPHA_MAGIC</code>. See
   1090          * {@link AlphaMagicSettings AlphaMagicSettings}.
   1091          */
   1092         public AlphaMagicSettings alphaSettings;
   1093 
   1094         /**
   1095          * Settings for the Slide transition. See
   1096          * {@link SlideTransitionSettings SlideTransitionSettings}.
   1097          */
   1098         public SlideTransitionSettings slideSettings;
   1099     }
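
            /*
             * Illustrative example (not part of the original source): a one-second
             * cross-fade for both video and audio with a linear behaviour, built from
             * the constants defined in this file.
             *
             *   TransitionSettings transition = new TransitionSettings();
             *   transition.duration = 1000;                       // milliseconds
             *   transition.videoTransitionType = VideoTransition.CROSS_FADE;
             *   transition.audioTransitionType = AudioTransition.CROSS_FADE;
             *   transition.transitionBehaviour = TransitionBehaviour.LINEAR;
             */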
   1100 
   1101     public static final class AudioTransition {
   1102         /** No audio transition. */
   1103         public static final int NONE = 0;
   1104 
   1105         /** Cross-fade audio transition. */
   1106         public static final int CROSS_FADE = 1;
   1107     }
   1108 
   1109     /**
   1110      * Defines transition behaviors.
   1111      */
   1112     public static final class TransitionBehaviour {
   1113 
   1114         /** The transition uses an increasing speed. */
   1115         public static final int SPEED_UP = 0;
   1116 
   1117         /** The transition uses a linear (constant) speed. */
   1118         public static final int LINEAR = 1;
   1119 
   1120         /** The transition uses a decreasing speed. */
   1121         public static final int SPEED_DOWN = 2;
   1122 
   1123         /**
   1124          * The transition uses a constant speed, but slows down in the middle
   1125          * section.
   1126          */
   1127         public static final int SLOW_MIDDLE = 3;
   1128 
   1129         /**
   1130          * The transition uses a constant speed, but increases speed in the
   1131          * middle section.
   1132          */
   1133         public static final int FAST_MIDDLE = 4;
   1134     }
   1135 
   1136     /**
   1137      * Defines settings for the background music.
   1138      */
   1139     public static class BackgroundMusicSettings {
   1140 
   1141         /** Background music file. */
   1142         public String file;
   1143 
   1144         /** File type. See {@link FileType FileType} for valid values. */
   1145         public int fileType;
   1146 
   1147         /**
   1148          * Insertion time, in milliseconds, in the output video at which the
   1149          * background music must be inserted.
   1150          */
   1151         public long insertionTime;
   1152 
   1153         /**
   1154          * Volume of the background music track, as a percentage. If this
   1155          * field is set to 100, the background music will replace the audio
   1156          * from the video input file(s).
   1157          */
   1158         public int volumePercent;
   1159 
   1160         /**
   1161          * Start time in milliseconds in the background music file from where
   1162          * the background music should loop. Set both <code>beginLoop</code> and
   1163          * <code>endLoop</code> to <code>0</code> to disable looping.
   1164          */
   1165         public long beginLoop;
   1166 
   1167         /**
   1168          * End time in milliseconds in the background music file to where the
   1169          * background music should loop. Set both <code>beginLoop</code> and
   1170          * <code>endLoop</code> to <code>0</code> to disable looping.
   1171          */
   1172         public long endLoop;
   1173 
   1174         public boolean enableDucking;
   1175 
   1176         public int duckingThreshold;
   1177 
   1178         public int lowVolume;
   1179 
   1180         public boolean isLooping;
   1181     }
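
            /*
             * Illustrative example (not part of the original source): background music
             * mixed at half volume, inserted at the start of the output video and
             * looped over its first 30 seconds. The file path is hypothetical.
             *
             *   BackgroundMusicSettings music = new BackgroundMusicSettings();
             *   music.file = "/sdcard/music.mp3";                 // hypothetical path
             *   music.fileType = FileType.MP3;
             *   music.insertionTime = 0;                          // milliseconds
             *   music.volumePercent = 50;
             *   music.beginLoop = 0;                              // milliseconds
             *   music.endLoop = 30000;                            // milliseconds
             *   music.isLooping = true;
             */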
   1182 
   1183     /** Defines settings for an effect. */
   1184     public static class AudioEffect {
   1185         /** No audio effect. */
   1186         public static final int NONE = 0;
   1187 
   1188         /** Fade-in effect. */
   1189         public static final int FADE_IN = 8;
   1190 
   1191         /** Fade-out effect. */
   1192         public static final int FADE_OUT = 16;
   1193     }
   1194 
   1195     /** Defines the effect settings. */
   1196     public static class EffectSettings {
   1197 
   1198         /** Start time of the effect in milliseconds. */
   1199         public int startTime;
   1200 
   1201         /** Duration of the effect in milliseconds. */
   1202         public int duration;
   1203 
   1204         /**
   1205          * Video effect type. See {@link VideoEffect VideoEffect} for valid
   1206          * values.
   1207          */
   1208         public int videoEffectType;
   1209 
   1210         /**
   1211          * Audio effect type. See {@link AudioEffect AudioEffect} for valid
   1212          * values.
   1213          */
   1214         public int audioEffectType;
   1215 
   1216         /**
   1217          * Start time of the effect as a percentage of the clip duration. A
   1218          * value of 0 percent means the effect starts at the beginning of the
   1219          * clip.
   1220          */
   1221         public int startPercent;
   1222 
   1223         /**
   1224          * Duration of the effect as a percentage of the clip duration.
   1225          */
   1226         public int durationPercent;
   1227 
   1228         /**
   1229          * Framing file.
   1230          * <p>
   1231          * This field is only used when the field <code>videoEffectType</code>
   1232          * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
   1233          * this field is ignored.
   1234          */
   1235         public String framingFile;
   1236 
   1237         /**
   1238          * Framing buffer.
   1239          * <p>
   1240          * This field is only used when the field <code>videoEffectType</code>
   1241          * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
   1242          * this field is ignored.
   1243          */
   1244         public int[] framingBuffer;
   1245 
   1246         /**
   1247          * Bitmap type. Can be RGB_565 (4), ARGB_4444 (5), or ARGB_8888 (6).
   1248          **/
   1249 
   1250         public int bitmapType;
   1251 
   1252         public int width;
   1253 
   1254         public int height;
   1255 
   1256         /**
   1257          * Top left x coordinate. This coordinate is used to set the x
   1258          * coordinate of the picture in the framing file when the framing file
   1259          * is selected. The x coordinate is also used to set the location of the
   1260          * text in the text effect.
   1261          * <p>
   1262          * This field is only used when the field <code>videoEffectType</code>
   1263          * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
   1264          * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
   1265          * ignored.
   1266          */
   1267         public int topLeftX;
   1268 
   1269         /**
   1270          * Top left y coordinate. This coordinate is used to set the y
   1271          * coordinate of the picture in the framing file when the framing file
   1272          * is selected. The y coordinate is also used to set the location of the
   1273          * text in the text effect.
   1274          * <p>
   1275          * This field is only used when the field <code>videoEffectType</code>
   1276          * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING} or
   1277          * {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this field is
   1278          * ignored.
   1279          */
   1280         public int topLeftY;
   1281 
   1282         /**
   1283          * Whether the frame should be resized. If this field is set to
   1284          * <code>true</code>, the frame size is matched to the output
   1285          * video size.
   1286          * <p>
   1287          * This field is only used when the field <code>videoEffectType</code>
   1288          * is set to {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise
   1289          * this field is ignored.
   1290          */
   1291         public boolean framingResize;
   1292 
   1293         /**
   1294          * Size to which the framing buffer needs to be resized.
   1295          * This is valid only if framingResize is true.
   1296          */
   1297         public int framingScaledSize;
   1298         /**
   1299          * Text to insert in the video.
   1300          * <p>
   1301          * This field is only used when the field <code>videoEffectType</code>
   1302          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
   1303          * field is ignored.
   1304          */
   1305         public String text;
   1306 
   1307         /**
   1308          * Text attributes for the text to insert in the video.
   1309          * <p>
   1310          * This field is only used when the field <code>videoEffectType</code>
   1311          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT}. Otherwise this
   1312          * field is ignored. For more details about this field see the
   1313          * integration guide.
   1314          */
   1315         public String textRenderingData;
   1316 
   1317         /** Width of the text buffer in pixels. */
   1318         public int textBufferWidth;
   1319 
   1320         /** Height of the text buffer in pixels. */
   1321         public int textBufferHeight;
   1322 
   1323         /**
   1324          * Processing rate for the fifties effect. A high value (e.g. 30)
   1325          * results in high effect strength.
   1326          * <p>
   1327          * This field is only used when the field <code>videoEffectType</code>
   1328          * is set to {@link VideoEffect#FIFTIES VideoEffect.FIFTIES}. Otherwise
   1329          * this field is ignored.
   1330          */
   1331         public int fiftiesFrameRate;
   1332 
   1333         /**
   1334          * RGB 16 color of the RGB16 and gradient color effect.
   1335          * <p>
   1336          * This field is only used when the field <code>videoEffectType</code>
   1337          * is set to {@link VideoEffect#COLORRGB16 VideoEffect.COLORRGB16} or
   1338          * {@link VideoEffect#GRADIENT VideoEffect.GRADIENT}. Otherwise this
   1339          * field is ignored.
   1340          */
   1341         public int rgb16InputColor;
   1342 
   1343         /**
   1344          * Start alpha blending percentage.
   1345          * <p>
   1346          * This field is only used when the field <code>videoEffectType</code>
   1347          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
   1348          * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
   1349          * is ignored.
   1350          */
   1351         public int alphaBlendingStartPercent;
   1352 
   1353         /**
   1354          * Middle alpha blending percentage.
   1355          * <p>
   1356          * This field is only used when the field <code>videoEffectType</code>
   1357          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
   1358          * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
   1359          * is ignored.
   1360          */
   1361         public int alphaBlendingMiddlePercent;
   1362 
   1363         /**
   1364          * End alpha blending percentage.
   1365          * <p>
   1366          * This field is only used when the field <code>videoEffectType</code>
   1367          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
   1368          * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
   1369          * is ignored.
   1370          */
   1371         public int alphaBlendingEndPercent;
   1372 
   1373         /**
   1374          * Duration of the fade-in phase, as a percentage of the effect duration.
   1375          * <p>
   1376          * This field is only used when the field <code>videoEffectType</code>
   1377          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
   1378          * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
   1379          * is ignored.
   1380          */
   1381         public int alphaBlendingFadeInTimePercent;
   1382 
   1383         /**
   1384          * Duration of the fade-out phase, as a percentage of the effect duration.
   1385          * <p>
   1386          * This field is only used when the field <code>videoEffectType</code>
   1387          * is set to {@link VideoEffect#TEXT VideoEffect.TEXT} or
   1388          * {@link VideoEffect#FRAMING VideoEffect.FRAMING}. Otherwise this field
   1389          * is ignored.
   1390          */
   1391         public int alphaBlendingFadeOutTimePercent;
   1392     }
   1393 
   1394     /** Defines the clip properties for preview */
   1395     public static class PreviewClips {
   1396 
   1397         /**
   1398          * The path to the clip file.
   1399          * <p>
    1400          * The file format of the clip can be one of the following:
   1401          * <ul>
   1402          * <li>3GP file containing MPEG4/H263 video and AAC/AMR audio
   1403          * <li>JPG file
   1404          * </ul>
   1405          */
   1406 
   1407         public String clipPath;
   1408 
   1409         /**
   1410          * File type of the clip. See {@link FileType FileType} for valid
   1411          * values.
   1412          */
   1413         public int fileType;
   1414 
    1415         /** Beginning of the cut in the clip, in milliseconds. */
   1416         public long beginPlayTime;
   1417 
    1418         /** End of the cut in the clip, in milliseconds. */
                 public long endPlayTime;
   1419 
   1420         /**
    1421          * The media rendering mode. See {@link MediaRendering MediaRendering}
   1422          * for valid values.
   1423          */
   1424         public int mediaRendering;
   1425 
   1426     }
   1427 
   1428     /** Defines the audio settings. */
   1429     public static class AudioSettings {
   1430 
    1431         /** Path of the added audio track file. */
    1432         String pFile;
    1433
    1434         /** Id of the added audio track. */
    1435         String Id;
    1436
    1437         /** If true, the original audio track is not taken into account. */
    1438         boolean bRemoveOriginal;
    1439
    1440         /** Number of channels (1 = mono, 2 = stereo) of the BGM clip. */
    1441         int channels;
    1442
    1443         /** Sampling frequency (8000 for AMR, 16000 or more for AAC) of the BGM clip. */
    1444         int Fs;
    1445
    1446         /** Extended frequency for AAC+ and eAAC+ streams of the BGM clip. */
    1447         int ExtendedFs;
    1448
    1449         /** Time, in milliseconds, at which the added audio track is inserted. */
    1450         long startMs;
    1451
    1452         /** Begin cut time of the added audio track, in milliseconds. */
    1453         long beginCutTime;
    1454         /** End cut time of the added audio track, in milliseconds. */
    1455         long endCutTime;
    1456         /** File type of the added audio track. See {@link FileType FileType}. */
    1457         int fileType;
    1458
    1459         /** Volume, in percentage, of the added audio track. */
    1460         int volume;
    1461
    1462         /** Looping on/off. */
    1463         boolean loop;
    1464
    1465         /* Audio mix and ducking settings */
    1466
    1467         /** Ducking threshold. */
    1468         int ducking_threshold;
    1469         /** Volume of the background music track when ducked. */
    1470         int ducking_lowVolume;
    1471         /** True if ducking is enabled. */
    1472         boolean bInDucking_enable;
    1473
    1474         /** Path of the PCM file. */
                 String pcmFilePath;
   1475     }
   1476 
   1477     /** Encapsulates preview clips and effect settings */
   1478     public static class PreviewSettings {
   1479 
   1480         public PreviewClips[] previewClipsArray;
   1481 
   1482         /** The effect settings. */
   1483         public EffectSettings[] effectSettingsArray;
   1484 
   1485     }
   1486 
   1487     /** Encapsulates clip properties */
   1488     public static class PreviewClipProperties {
   1489 
   1490         public Properties[] clipProperties;
   1491 
   1492     }
   1493 
   1494     /** Defines the editing settings. */
   1495     public static class EditSettings {
   1496 
   1497         /**
   1498          * Array of clip settings. There is one <code>clipSetting</code> for
   1499          * each clip.
   1500          */
   1501         public ClipSettings[] clipSettingsArray;
   1502 
   1503         /**
   1504          * Array of transition settings. If there are n clips (and thus n
   1505          * <code>clipSettings</code>) then there are (n-1) transitions and (n-1)
    1506          * <code>transitionSettings</code> in
    1507          * <code>transitionSettingsArray</code>.
   1508          */
   1509         public TransitionSettings[] transitionSettingsArray;
   1510 
   1511         /** The effect settings. */
   1512         public EffectSettings[] effectSettingsArray;
   1513 
   1514         /**
   1515          * Video frame rate of the output clip. See {@link VideoFrameRate
   1516          * VideoFrameRate} for valid values.
   1517          */
   1518         public int videoFrameRate;
   1519 
   1520         /** Output file name. Must be an absolute path. */
   1521         public String outputFile;
   1522 
   1523         /**
   1524          * Size of the video frames in the output clip. See
   1525          * {@link VideoFrameSize VideoFrameSize} for valid values.
   1526          */
   1527         public int videoFrameSize;
   1528 
   1529         /**
   1530          * Format of the video stream in the output clip. See
   1531          * {@link VideoFormat VideoFormat} for valid values.
   1532          */
   1533         public int videoFormat;
   1534 
   1535         /**
   1536          * Profile of the video stream in the output clip.
   1537          */
   1538         public int videoProfile;
   1539 
   1540         /**
   1541          * Level of the video stream in the output clip.
   1542          */
   1543         public int videoLevel;
   1544 
   1545         /**
   1546          * Format of the audio stream in the output clip. See
   1547          * {@link AudioFormat AudioFormat} for valid values.
   1548          */
   1549         public int audioFormat;
   1550 
   1551         /**
   1552          * Sampling frequency of the audio stream in the output clip. See
   1553          * {@link AudioSamplingFrequency AudioSamplingFrequency} for valid
   1554          * values.
   1555          */
   1556         public int audioSamplingFreq;
   1557 
   1558         /**
    1559          * Maximum file size of the output clip. Set it to <code>0</code> to let
    1560          * the class ignore this field.
   1562          */
   1563         public int maxFileSize;
   1564 
   1565         /**
   1566          * Number of audio channels in output clip. Use <code>0</code> for none,
   1567          * <code>1</code> for mono or <code>2</code> for stereo. None is only
   1568          * allowed when the <code>audioFormat</code> field is set to
   1569          * {@link AudioFormat#NO_AUDIO AudioFormat.NO_AUDIO} or
    1570          * {@link AudioFormat#NULL_AUDIO AudioFormat.NULL_AUDIO}. Mono is only
    1571          * allowed when the <code>audioFormat</code> field is set to
    1572          * {@link AudioFormat#AAC AudioFormat.AAC}.
   1573          */
   1574         public int audioChannels;
   1575 
   1576         /** Video bitrate. See {@link Bitrate Bitrate} for valid values. */
   1577         public int videoBitrate;
   1578 
   1579         /** Audio bitrate. See {@link Bitrate Bitrate} for valid values. */
   1580         public int audioBitrate;
   1581 
   1582         /**
   1583          * Background music settings. See {@link BackgroundMusicSettings
   1584          * BackgroundMusicSettings} for valid values.
   1585          */
   1586         public BackgroundMusicSettings backgroundMusicSettings;
   1587 
    1588         /** Volume of the primary audio track. */
                 public int primaryTrackVolume;
   1589 
   1590     }
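
             /*
              * Illustrative sketch (not part of the original file): how an EditSettings
              * instance is typically filled in for a 30 fps H.264/AAC output clip,
              * mirroring the defaults used by generateEffectClip() and
              * generateKenBurnsClip() below. The method name and the outputPath,
              * aspectRatio and height parameters are hypothetical.
              */
             private EditSettings buildExampleEditSettings(String outputPath, int aspectRatio,
                     int height) {
                 final EditSettings settings = new EditSettings();
                 /* Audio defaults used throughout this helper */
                 settings.audioFormat = AudioFormat.AAC;
                 settings.audioChannels = 2;
                 settings.audioBitrate = Bitrate.BR_64_KBPS;
                 settings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
                 /* Video defaults: H.264 at 30 fps, sized and rated from the source clip */
                 settings.videoFormat = VideoFormat.H264;
                 settings.videoFrameRate = VideoFrameRate.FR_30_FPS;
                 settings.videoProfile = VideoEditorProfile.getExportProfile(VideoFormat.H264);
                 settings.videoLevel = VideoEditorProfile.getExportLevel(VideoFormat.H264);
                 settings.videoFrameSize = findVideoResolution(aspectRatio, height);
                 settings.videoBitrate = findVideoBitrate(settings.videoFrameSize);
                 settings.outputFile = outputPath;
                 return settings;
             }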
   1591 
   1592     /**
   1593      * Defines the media properties.
   1594      **/
   1595 
   1596     public static class Properties {
   1597 
   1598         /**
   1599          * Duration of the media in milliseconds.
   1600          */
   1601 
   1602         public int duration;
   1603 
   1604         /**
   1605          * File type.
   1606          */
   1607 
   1608         public int fileType;
   1609 
   1610         /**
   1611          * Video format.
   1612          */
   1613 
   1614         public int videoFormat;
   1615 
   1616         /**
   1617          * Duration of the video stream of the media in milliseconds.
   1618          */
   1619 
   1620         public int videoDuration;
   1621 
   1622         /**
   1623          * Bitrate of the video stream of the media.
   1624          */
   1625 
   1626         public int videoBitrate;
   1627 
   1628         /**
   1629          * Width of the video frames or the width of the still picture in
   1630          * pixels.
   1631          */
   1632 
   1633         public int width;
   1634 
   1635         /**
   1636          * Height of the video frames or the height of the still picture in
   1637          * pixels.
   1638          */
   1639 
   1640         public int height;
   1641 
   1642         /**
   1643          * Average frame rate of video in the media in frames per second.
   1644          */
   1645 
   1646         public float averageFrameRate;
   1647 
   1648         /**
   1649          * Profile of the video in the media.
   1650          */
   1651 
   1652         public int profile;
   1653 
   1654         /**
   1655          * Level of the video in the media.
   1656          */
   1657 
   1658         public int level;
   1659 
   1660         /**
    1661          * Whether the video profile is supported.
   1662          */
   1663 
   1664         public boolean profileSupported;
   1665 
   1666         /**
    1667          * Whether the video level is supported.
   1668          */
   1669 
   1670         public boolean levelSupported;
   1671 
   1672         /**
   1673          * Audio format.
   1674          */
   1675 
   1676         public int audioFormat;
   1677 
   1678         /**
   1679          * Duration of the audio stream of the media in milliseconds.
   1680          */
   1681 
   1682         public int audioDuration;
   1683 
   1684         /**
   1685          * Bitrate of the audio stream of the media.
   1686          */
   1687 
   1688         public int audioBitrate;
   1689 
   1690         /**
   1691          * Number of audio channels in the media.
   1692          */
   1693 
   1694         public int audioChannels;
   1695 
   1696         /**
   1697          * Sampling frequency of the audio stream in the media in samples per
   1698          * second.
   1699          */
   1700 
   1701         public int audioSamplingFrequency;
   1702 
   1703         /**
    1704          * Volume of the audio track, as a percentage.
   1705          */
   1706         public int audioVolumeValue;
   1707 
   1708         /**
    1709          * Video rotation, in degrees.
   1710          */
   1711         public int videoRotation;
   1712 
   1713         public String Id;
   1714     }
   1715 
   1716     /**
   1717      * Constructor
   1718      *
   1719      * @param projectPath The path where the VideoEditor stores all files
   1720      *        related to the project
   1721      * @param lock The semaphore
   1722      * @param veObj The video editor reference
   1723      */
   1724     public MediaArtistNativeHelper(String projectPath, Semaphore lock, VideoEditor veObj) {
   1725         mProjectPath = projectPath;
    1726         if (veObj == null) {
    1727             throw new IllegalArgumentException("video editor object is null");
    1728         }
    1729         mVideoEditor = veObj;
    1730
    1731         if (mStoryBoardSettings == null) {
    1732             mStoryBoardSettings = new EditSettings();
    1733         }
   1735 
   1736         mLock = lock;
   1737 
   1738         _init(mProjectPath, "null");
   1739         mAudioTrackPCMFilePath = null;
   1740     }
   1741 
   1742     /**
   1743      * @return The project path
   1744      */
   1745     String getProjectPath() {
   1746         return mProjectPath;
   1747     }
   1748 
   1749     /**
   1750      * @return The Audio Track PCM file path
   1751      */
   1752     String getProjectAudioTrackPCMFilePath() {
   1753         return mAudioTrackPCMFilePath;
   1754     }
   1755 
   1756     /**
   1757      * Invalidates the PCM file
   1758      */
   1759     void invalidatePcmFile() {
   1760         if (mAudioTrackPCMFilePath != null) {
   1761             new File(mAudioTrackPCMFilePath).delete();
   1762             mAudioTrackPCMFilePath = null;
   1763         }
   1764     }
   1765 
   1766     @SuppressWarnings("unused")
   1767     private void onProgressUpdate(int taskId, int progress) {
   1768         if (mProcessingState == PROCESSING_EXPORT) {
   1769             if (mExportProgressListener != null) {
   1770                 if (mProgressToApp < progress) {
   1771                     mExportProgressListener.onProgress(mVideoEditor, mOutputFilename, progress);
   1772                     /* record previous progress */
   1773                     mProgressToApp = progress;
   1774                 }
   1775             }
    1776         } else {
   1778             // Adapt progress depending on current state
   1779             int actualProgress = 0;
   1780             int action = 0;
   1781 
   1782             if (mProcessingState == PROCESSING_AUDIO_PCM) {
   1783                 action = MediaProcessingProgressListener.ACTION_DECODE;
   1784             } else {
   1785                 action = MediaProcessingProgressListener.ACTION_ENCODE;
   1786             }
   1787 
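                     // Map the native progress of each step onto a single 0-100% scale:
                     // intermediate clip 1 covers 0-25%, intermediate clip 2 covers 25-50%
                     // and intermediate clip 3 covers 50-100%, while PCM decoding,
                     // transition and Ken Burns generation report their progress unchanged.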
   1788             switch (mProcessingState) {
   1789                 case PROCESSING_AUDIO_PCM:
   1790                     actualProgress = progress;
   1791                     break;
   1792                 case PROCESSING_TRANSITION:
   1793                     actualProgress = progress;
   1794                     break;
   1795                 case PROCESSING_KENBURNS:
   1796                     actualProgress = progress;
   1797                     break;
   1798                 case PROCESSING_INTERMEDIATE1:
   1799                     if ((progress == 0) && (mProgressToApp != 0)) {
   1800                         mProgressToApp = 0;
   1801                     }
   1802                     if ((progress != 0) || (mProgressToApp != 0)) {
   1803                         actualProgress = progress/4;
   1804                     }
   1805                     break;
   1806                 case PROCESSING_INTERMEDIATE2:
   1807                     if ((progress != 0) || (mProgressToApp != 0)) {
   1808                         actualProgress = 25 + progress/4;
   1809                     }
   1810                     break;
   1811                 case PROCESSING_INTERMEDIATE3:
   1812                     if ((progress != 0) || (mProgressToApp != 0)) {
   1813                         actualProgress = 50 + progress/2;
   1814                     }
   1815                     break;
   1816                 case PROCESSING_NONE:
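                             // Fall through: PROCESSING_NONE is not expected while a task
                             // is reporting progress.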
   1817 
   1818                 default:
   1819                     Log.e(TAG, "ERROR unexpected State=" + mProcessingState);
   1820                     return;
   1821             }
   1822             if ((mProgressToApp != actualProgress) && (actualProgress != 0)) {
   1823 
   1824                 mProgressToApp = actualProgress;
   1825 
   1826                 if (mMediaProcessingProgressListener != null) {
   1827                     // Send the progress indication
   1828                     mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
   1829                                                                 actualProgress);
   1830                 }
   1831             }
   1832             /* avoid 0 in next intermediate call */
   1833             if (mProgressToApp == 0) {
   1834                 if (mMediaProcessingProgressListener != null) {
   1835                     /*
   1836                      *  Send the progress indication
   1837                      */
   1838                     mMediaProcessingProgressListener.onProgress(mProcessingObject, action,
   1839                                                                 actualProgress);
   1840                 }
   1841                 mProgressToApp = 1;
   1842             }
   1843         }
   1844     }
   1845 
   1846     @SuppressWarnings("unused")
   1847     private void onPreviewProgressUpdate(int progress, boolean isFinished,
   1848                   boolean updateOverlay, String filename, int renderingMode, int error) {
   1849         if (mPreviewProgressListener != null) {
   1850             if (mIsFirstProgress) {
   1851                 mPreviewProgressListener.onStart(mVideoEditor);
   1852                 mIsFirstProgress = false;
   1853             }
   1854 
   1855             final VideoEditor.OverlayData overlayData;
   1856             if (updateOverlay) {
   1857                 overlayData = new VideoEditor.OverlayData();
   1858                 if (filename != null) {
   1859                     overlayData.set(BitmapFactory.decodeFile(filename), renderingMode);
   1860                 } else {
   1861                     overlayData.setClear();
   1862                 }
   1863             } else {
   1864                 overlayData = null;
   1865             }
   1866 
   1867             if (progress != 0) {
   1868                 mPreviewProgress = progress;
   1869             }
   1870 
   1871             if (isFinished) {
   1872                 mPreviewProgressListener.onStop(mVideoEditor);
   1873             } else if (error != 0) {
   1874                 mPreviewProgressListener.onError(mVideoEditor, error);
   1875             } else {
   1876                 mPreviewProgressListener.onProgress(mVideoEditor, progress, overlayData);
   1877             }
   1878         }
   1879     }
   1880 
   1881     /**
   1882      * Release the native helper object
   1883      */
   1884     void releaseNativeHelper() throws InterruptedException {
   1885         release();
   1886     }
   1887 
   1888     /**
    1889      * Forwards the audio graph extraction progress to the registered listener
   1890      */
   1891     @SuppressWarnings("unused")
   1892     private void onAudioGraphExtractProgressUpdate(int progress, boolean isVideo) {
   1893         if ((mExtractAudioWaveformProgressListener != null) && (progress > 0)) {
   1894             mExtractAudioWaveformProgressListener.onProgress(progress);
   1895         }
   1896     }
   1897 
   1898     /**
   1899      * Populates the Effect Settings in EffectSettings
   1900      *
   1901      * @param effects The reference of EffectColor
    1902      * @param effects The EffectColor reference
    1903      *
    1904      * @return The populated EffectSettings instance
   1905     EffectSettings getEffectSettings(EffectColor effects) {
   1906         EffectSettings effectSettings = new EffectSettings();
   1907         effectSettings.startTime = (int)effects.getStartTime();
   1908         effectSettings.duration = (int)effects.getDuration();
   1909         effectSettings.videoEffectType = getEffectColorType(effects);
   1910         effectSettings.audioEffectType = 0;
   1911         effectSettings.startPercent = 0;
   1912         effectSettings.durationPercent = 0;
   1913         effectSettings.framingFile = null;
   1914         effectSettings.topLeftX = 0;
   1915         effectSettings.topLeftY = 0;
   1916         effectSettings.framingResize = false;
   1917         effectSettings.text = null;
   1918         effectSettings.textRenderingData = null;
   1919         effectSettings.textBufferWidth = 0;
   1920         effectSettings.textBufferHeight = 0;
   1921         if (effects.getType() == EffectColor.TYPE_FIFTIES) {
   1922             effectSettings.fiftiesFrameRate = 15;
   1923         } else {
   1924             effectSettings.fiftiesFrameRate = 0;
   1925         }
   1926 
   1927         if ((effectSettings.videoEffectType == VideoEffect.COLORRGB16)
   1928                 || (effectSettings.videoEffectType == VideoEffect.GRADIENT)) {
   1929             effectSettings.rgb16InputColor = effects.getColor();
   1930         }
   1931 
   1932         effectSettings.alphaBlendingStartPercent = 0;
   1933         effectSettings.alphaBlendingMiddlePercent = 0;
   1934         effectSettings.alphaBlendingEndPercent = 0;
   1935         effectSettings.alphaBlendingFadeInTimePercent = 0;
   1936         effectSettings.alphaBlendingFadeOutTimePercent = 0;
   1937         return effectSettings;
   1938     }
   1939 
   1940     /**
   1941      * Populates the Overlay Settings in EffectSettings
   1942      *
    1943      * @param overlay The OverlayFrame reference
    1944      *
    1945      * @return The populated EffectSettings instance
   1946      */
   1947     EffectSettings getOverlaySettings(OverlayFrame overlay) {
   1948         EffectSettings effectSettings = new EffectSettings();
   1949         Bitmap bitmap = null;
   1950 
   1951         effectSettings.startTime = (int)overlay.getStartTime();
   1952         effectSettings.duration = (int)overlay.getDuration();
   1953         effectSettings.videoEffectType = VideoEffect.FRAMING;
   1954         effectSettings.audioEffectType = 0;
   1955         effectSettings.startPercent = 0;
   1956         effectSettings.durationPercent = 0;
   1957         effectSettings.framingFile = null;
   1958 
   1959         if ((bitmap = overlay.getBitmap()) != null) {
   1960             effectSettings.framingFile = overlay.getFilename();
   1961 
   1962             if (effectSettings.framingFile == null) {
   1963                 try {
   1964                     (overlay).save(mProjectPath);
   1965                 } catch (IOException e) {
    1966                     Log.e(TAG, "getOverlaySettings: failed to save the overlay file", e);
   1967                 }
   1968                 effectSettings.framingFile = overlay.getFilename();
   1969             }
   1970             if (bitmap.getConfig() == Bitmap.Config.ARGB_8888)
   1971                 effectSettings.bitmapType = 6;
   1972             else if (bitmap.getConfig() == Bitmap.Config.ARGB_4444)
   1973                 effectSettings.bitmapType = 5;
   1974             else if (bitmap.getConfig() == Bitmap.Config.RGB_565)
   1975                 effectSettings.bitmapType = 4;
   1976             else if (bitmap.getConfig() == Bitmap.Config.ALPHA_8)
   1977                 throw new RuntimeException("Bitmap config not supported");
   1978 
   1979             effectSettings.width = bitmap.getWidth();
   1980             effectSettings.height = bitmap.getHeight();
   1981             effectSettings.framingBuffer = new int[effectSettings.width];
   1982             int tmp = 0;
   1983             short maxAlpha = 0;
   1984             short minAlpha = (short)0xFF;
   1985             short alpha = 0;
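                 /*
                  * Scan every row of the overlay bitmap to find the minimum and
                  * maximum alpha values; their midpoint, converted to a percentage
                  * below, is used as the static alpha blending level of the framing
                  * effect.
                  */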
   1986             while (tmp < effectSettings.height) {
   1987                 bitmap.getPixels(effectSettings.framingBuffer, 0,
   1988                                  effectSettings.width, 0, tmp,
   1989                                  effectSettings.width, 1);
   1990                 for (int i = 0; i < effectSettings.width; i++) {
   1991                     alpha = (short)((effectSettings.framingBuffer[i] >> 24) & 0xFF);
   1992                     if (alpha > maxAlpha) {
   1993                         maxAlpha = alpha;
   1994                     }
   1995                     if (alpha < minAlpha) {
   1996                         minAlpha = alpha;
   1997                     }
   1998                 }
   1999                 tmp += 1;
   2000             }
   2001             alpha = (short)((maxAlpha + minAlpha) / 2);
   2002             alpha = (short)((alpha * 100) / 256);
   2003             effectSettings.alphaBlendingEndPercent = alpha;
   2004             effectSettings.alphaBlendingMiddlePercent = alpha;
   2005             effectSettings.alphaBlendingStartPercent = alpha;
   2006             effectSettings.alphaBlendingFadeInTimePercent = 100;
   2007             effectSettings.alphaBlendingFadeOutTimePercent = 100;
   2008             effectSettings.framingBuffer = null;
   2009 
   2010             /*
   2011              * Set the resized RGB file dimensions
   2012              */
   2013             effectSettings.width = overlay.getResizedRGBSizeWidth();
   2014             if(effectSettings.width == 0) {
   2015                 effectSettings.width = bitmap.getWidth();
   2016             }
   2017 
   2018             effectSettings.height = overlay.getResizedRGBSizeHeight();
   2019             if(effectSettings.height == 0) {
   2020                 effectSettings.height = bitmap.getHeight();
   2021             }
   2022 
   2023         }
   2024 
   2025         effectSettings.topLeftX = 0;
   2026         effectSettings.topLeftY = 0;
   2027 
   2028         effectSettings.framingResize = true;
   2029         effectSettings.text = null;
   2030         effectSettings.textRenderingData = null;
   2031         effectSettings.textBufferWidth = 0;
   2032         effectSettings.textBufferHeight = 0;
   2033         effectSettings.fiftiesFrameRate = 0;
   2034         effectSettings.rgb16InputColor = 0;
   2035         int mediaItemHeight;
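             // Use the Ken Burns generated clip dimensions when one exists; otherwise
             // fall back to the media item's own (scaled, for images) dimensions to
             // size the framing effect.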
   2036         int aspectRatio;
   2037         if (overlay.getMediaItem() instanceof MediaImageItem) {
   2038             if (((MediaImageItem)overlay.getMediaItem()).getGeneratedImageClip() != null) {
   2039                 // Ken Burns was applied
   2040                 mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipHeight();
   2041                 aspectRatio = getAspectRatio(
   2042                     ((MediaImageItem)overlay.getMediaItem()).getGeneratedClipWidth()
   2043                     , mediaItemHeight);
   2044             } else {
   2045                 //For image get the scaled height. Aspect ratio would remain the same
   2046                 mediaItemHeight = ((MediaImageItem)overlay.getMediaItem()).getScaledHeight();
   2047                 aspectRatio = overlay.getMediaItem().getAspectRatio();
   2048             }
   2049         } else {
   2050             aspectRatio = overlay.getMediaItem().getAspectRatio();
   2051             mediaItemHeight = overlay.getMediaItem().getHeight();
   2052         }
   2053         effectSettings.framingScaledSize = findVideoResolution(aspectRatio, mediaItemHeight);
   2054         return effectSettings;
   2055     }
   2056 
    2057     /** Gets the video editor aspect ratio. */
   2058     int nativeHelperGetAspectRatio() {
   2059         return mVideoEditor.getAspectRatio();
   2060     }
   2061 
   2062     /**
   2063      * Sets the export audio codec
   2064      *
    2065      * @param codec The audio codec to be used for export
   2066      *
   2067      */
   2068     void setAudioCodec(int codec) {
   2069         mExportAudioCodec = codec;
   2070     }
   2071     /**
   2072      * Sets the export video codec
   2073      *
    2074      * @param codec The video codec to be used for export
   2075      *
   2076      */
   2077     void setVideoCodec(int codec) {
   2078         mExportVideoCodec = codec;
   2079     }
   2080 
   2081     /**
   2082      * Sets the audio regenerate flag
   2083      *
   2084      * @param flag The boolean to set the audio regenerate flag
   2085      *
   2086      */
   2087     void setAudioflag(boolean flag) {
    2088         // If the PCM file does not exist, the audio must be regenerated.
   2089         if (!(new File(String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE)).exists())) {
   2090             flag = true;
   2091         }
   2092         mRegenerateAudio = flag;
   2093     }
   2094 
   2095     /**
   2096      * Gets the audio regenerate flag
   2097      *
    2098      * @return The audio regenerate flag
   2099      *
   2100      */
   2101     boolean getAudioflag() {
   2102         return mRegenerateAudio;
   2103     }
   2104 
   2105     /**
   2106      * Maps the average frame rate to one of the defined enum values
   2107      *
   2108      * @param averageFrameRate The average frame rate of video item
   2109      *
    2110      * @return The frame rate from one of the defined enum values, or -1 if the
               *         average frame rate is below 5 fps
   2111      */
   2112     int GetClosestVideoFrameRate(int averageFrameRate) {
   2113         if (averageFrameRate >= 25) {
   2114             return VideoFrameRate.FR_30_FPS;
   2115         } else if (averageFrameRate >= 20) {
   2116             return VideoFrameRate.FR_25_FPS;
   2117         } else if (averageFrameRate >= 15) {
   2118             return VideoFrameRate.FR_20_FPS;
   2119         } else if (averageFrameRate >= 12) {
   2120             return VideoFrameRate.FR_15_FPS;
   2121         } else if (averageFrameRate >= 10) {
   2122             return VideoFrameRate.FR_12_5_FPS;
   2123         } else if (averageFrameRate >= 7) {
   2124             return VideoFrameRate.FR_10_FPS;
   2125         } else if (averageFrameRate >= 5) {
   2126             return VideoFrameRate.FR_7_5_FPS;
   2127         } else {
   2128             return -1;
   2129         }
   2130     }
   2131 
   2132     /**
   2133      * Helper function to adjust the effect or overlay start time
    2134      * depending on the begin and end boundary time of the media item
   2135      */
   2136     public void adjustEffectsStartTimeAndDuration(EffectSettings lEffect, int beginCutTime,
   2137                                                   int endCutTime) {
   2138 
   2139         int effectStartTime = 0;
   2140         int effectDuration = 0;
   2141 
   2142         /**
   2143          * cbct -> clip begin cut time
   2144          * cect -> clip end cut time
   2145          ****************************************
   2146          *  |                                 |
   2147          *  |         cbct        cect        |
   2148          *  | <-1-->   |           |          |
   2149          *  |       <--|-2->       |          |
   2150          *  |          | <---3---> |          |
   2151          *  |          |        <--|-4--->    |
   2152          *  |          |           | <--5-->  |
   2153          *  |      <---|------6----|---->     |
   2154          *  |                                 |
   2155          *  < : effectStart
   2156          *  > : effectStart + effectDuration
   2157          ****************************************
   2158          **/
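             /*
              * Worked example (case 2): with cbct = 2000 ms and cect = 8000 ms, an
              * effect with startTime = 1000 ms and duration = 3000 ms overlaps the
              * begin boundary, so it is rewritten below to startTime = 0 ms and
              * duration = 2000 ms (the part before cbct is dropped).
              */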
   2159 
   2160         /** 1 & 5 */
   2161         /**
    2162          * Effect falls outside the trim duration. In such a case the effect
    2163          * is not applied.
   2164          */
   2165         if ((lEffect.startTime > endCutTime)
   2166                 || ((lEffect.startTime + lEffect.duration) <= beginCutTime)) {
   2167 
   2168             effectStartTime = 0;
   2169             effectDuration = 0;
   2170 
   2171             lEffect.startTime = effectStartTime;
   2172             lEffect.duration = effectDuration;
   2173             return;
   2174         }
   2175 
   2176         /** 2 */
   2177         if ((lEffect.startTime < beginCutTime)
   2178                 && ((lEffect.startTime + lEffect.duration) > beginCutTime)
   2179                 && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
   2180             effectStartTime = 0;
   2181             effectDuration = lEffect.duration;
   2182 
   2183             effectDuration -= (beginCutTime - lEffect.startTime);
   2184             lEffect.startTime = effectStartTime;
   2185             lEffect.duration = effectDuration;
   2186             return;
   2187         }
   2188 
   2189         /** 3 */
   2190         if ((lEffect.startTime >= beginCutTime)
   2191                 && ((lEffect.startTime + lEffect.duration) <= endCutTime)) {
   2192             effectStartTime = lEffect.startTime - beginCutTime;
   2193             lEffect.startTime = effectStartTime;
    2194             // The effect duration is unchanged in this case.
   2195             return;
   2196         }
   2197 
   2198         /** 4 */
   2199         if ((lEffect.startTime >= beginCutTime)
   2200                 && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
   2201             effectStartTime = lEffect.startTime - beginCutTime;
   2202             effectDuration = endCutTime - lEffect.startTime;
   2203             lEffect.startTime = effectStartTime;
   2204             lEffect.duration = effectDuration;
   2205             return;
   2206         }
   2207 
   2208         /** 6 */
   2209         if ((lEffect.startTime < beginCutTime)
   2210                 && ((lEffect.startTime + lEffect.duration) > endCutTime)) {
   2211             effectStartTime = 0;
   2212             effectDuration = endCutTime - beginCutTime;
   2213             lEffect.startTime = effectStartTime;
   2214             lEffect.duration = effectDuration;
   2215             return;
   2216         }
   2217 
   2218     }
   2219 
   2220     /**
   2221      * Generates the clip for preview or export
   2222      *
   2223      * @param editSettings The EditSettings reference for generating
   2224      * a clip for preview or export
   2225      *
    2226      * @return 0 on success, or an error value otherwise (-1 if an exception was caught)
   2227      */
   2228     public int generateClip(EditSettings editSettings) {
   2229         int err = 0;
   2230 
   2231         try {
   2232             err = nativeGenerateClip(editSettings);
   2233         } catch (IllegalArgumentException ex) {
    2234             Log.e(TAG, "Illegal Argument exception in load settings", ex);
   2235             return -1;
   2236         } catch (IllegalStateException ex) {
    2237             Log.e(TAG, "Illegal state exception in load settings", ex);
   2238             return -1;
   2239         } catch (RuntimeException ex) {
    2240             Log.e(TAG, "Runtime exception in load settings", ex);
   2241             return -1;
   2242         }
   2243         return err;
   2244     }
   2245 
   2246     /**
    2247      * Initializes the ClipSettings reference to
    2248      * default values
   2249      *
   2250      * @param lclipSettings The ClipSettings reference
   2251      */
   2252     void initClipSettings(ClipSettings lclipSettings) {
   2253         lclipSettings.clipPath = null;
   2254         lclipSettings.clipDecodedPath = null;
   2255         lclipSettings.clipOriginalPath = null;
   2256         lclipSettings.fileType = 0;
   2257         lclipSettings.endCutTime = 0;
   2258         lclipSettings.beginCutTime = 0;
   2259         lclipSettings.beginCutPercent = 0;
   2260         lclipSettings.endCutPercent = 0;
   2261         lclipSettings.panZoomEnabled = false;
   2262         lclipSettings.panZoomPercentStart = 0;
   2263         lclipSettings.panZoomTopLeftXStart = 0;
   2264         lclipSettings.panZoomTopLeftYStart = 0;
   2265         lclipSettings.panZoomPercentEnd = 0;
   2266         lclipSettings.panZoomTopLeftXEnd = 0;
   2267         lclipSettings.panZoomTopLeftYEnd = 0;
   2268         lclipSettings.mediaRendering = 0;
   2269         lclipSettings.rotationDegree = 0;
   2270     }
   2271 
   2272 
   2273     /**
   2274      * Populates the settings for generating an effect clip
   2275      *
   2276      * @param lMediaItem The media item for which the effect clip
   2277      * needs to be generated
   2278      * @param lclipSettings The ClipSettings reference containing
   2279      * clips data
   2280      * @param e The EditSettings reference containing effect specific data
   2281      * @param uniqueId The unique id used in the name of the output clip
   2282      * @param clipNo Used for internal purpose
   2283      *
   2284      * @return The name and path of generated clip
   2285      */
   2286     String generateEffectClip(MediaItem lMediaItem, ClipSettings lclipSettings,
   2287             EditSettings e,String uniqueId,int clipNo) {
   2288         int err = 0;
   2289         EditSettings editSettings = null;
   2290         String EffectClipPath = null;
   2291         int outVideoProfile = 0;
   2292         int outVideoLevel = 0;
   2293         editSettings = new EditSettings();
   2294 
   2295         editSettings.clipSettingsArray = new ClipSettings[1];
   2296         editSettings.clipSettingsArray[0] = lclipSettings;
   2297 
   2298         editSettings.backgroundMusicSettings = null;
   2299         editSettings.transitionSettingsArray = null;
   2300         editSettings.effectSettingsArray = e.effectSettingsArray;
   2301 
   2302         EffectClipPath = String.format(mProjectPath + "/" + "ClipEffectIntermediate" + "_"
   2303                 + lMediaItem.getId() + uniqueId + ".3gp");
   2304 
   2305         File tmpFile = new File(EffectClipPath);
   2306         if (tmpFile.exists()) {
   2307             tmpFile.delete();
   2308         }
   2309 
   2310         outVideoProfile = VideoEditorProfile.getExportProfile(VideoFormat.H264);
   2311         outVideoLevel = VideoEditorProfile.getExportLevel(VideoFormat.H264);
   2312         editSettings.videoProfile = outVideoProfile;
   2313         editSettings.videoLevel= outVideoLevel;
   2314 
   2315         if (lMediaItem instanceof MediaVideoItem) {
   2316             MediaVideoItem m = (MediaVideoItem)lMediaItem;
   2317 
   2318             editSettings.audioFormat = AudioFormat.AAC;
   2319             editSettings.audioChannels = 2;
   2320             editSettings.audioBitrate = Bitrate.BR_64_KBPS;
   2321             editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
   2322 
   2323             editSettings.videoFormat = VideoFormat.H264;
   2324             editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
   2325             editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
   2326                     m.getHeight());
   2327             editSettings.videoBitrate = findVideoBitrate(editSettings.videoFrameSize);
   2328         } else {
   2329             MediaImageItem m = (MediaImageItem)lMediaItem;
   2330             editSettings.audioBitrate = Bitrate.BR_64_KBPS;
   2331             editSettings.audioChannels = 2;
   2332             editSettings.audioFormat = AudioFormat.AAC;
   2333             editSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
   2334 
   2335             editSettings.videoFormat = VideoFormat.H264;
   2336             editSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
   2337             editSettings.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
   2338                     m.getScaledHeight());
   2339             editSettings.videoBitrate = findVideoBitrate(editSettings.videoFrameSize);
   2340         }
   2341 
   2342         editSettings.outputFile = EffectClipPath;
   2343 
   2344         if (clipNo == 1) {
   2345             mProcessingState  = PROCESSING_INTERMEDIATE1;
   2346         } else if (clipNo == 2) {
   2347             mProcessingState  = PROCESSING_INTERMEDIATE2;
   2348         }
   2349         mProcessingObject = lMediaItem;
   2350         err = generateClip(editSettings);
   2351         mProcessingState  = PROCESSING_NONE;
   2352 
   2353         if (err == 0) {
   2354             lclipSettings.clipPath = EffectClipPath;
   2355             lclipSettings.fileType = FileType.THREE_GPP;
   2356             return EffectClipPath;
   2357         } else {
   2358             throw new RuntimeException("preview generation cannot be completed");
   2359         }
   2360     }
   2361 
   2362 
   2363     /**
    2364      * Populates the settings for generating a Ken Burns effect clip
    2365      *
    2366      * @param m The media image item for which the Ken Burns effect clip
    2367      * needs to be generated
    2368      * @param e The EditSettings reference containing clip specific data
   2369      *
   2370      * @return The name and path of generated clip
   2371      */
   2372     String generateKenBurnsClip(EditSettings e, MediaImageItem m) {
   2373         String output = null;
   2374         int err = 0;
   2375         int outVideoProfile = 0;
   2376         int outVideoLevel = 0;
   2377 
   2378         e.backgroundMusicSettings = null;
   2379         e.transitionSettingsArray = null;
   2380         e.effectSettingsArray = null;
   2381         output = String.format(mProjectPath + "/" + "ImageClip-" + m.getId() + ".3gp");
   2382 
   2383         File tmpFile = new File(output);
   2384         if (tmpFile.exists()) {
   2385             tmpFile.delete();
   2386         }
   2387 
   2388         outVideoProfile = VideoEditorProfile.getExportProfile(VideoFormat.H264);
   2389         outVideoLevel = VideoEditorProfile.getExportLevel(VideoFormat.H264);
   2390         e.videoProfile = outVideoProfile;
   2391         e.videoLevel = outVideoLevel;
   2392 
   2393         e.outputFile = output;
   2394         e.audioBitrate = Bitrate.BR_64_KBPS;
   2395         e.audioChannels = 2;
   2396         e.audioFormat = AudioFormat.AAC;
   2397         e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
   2398 
   2399         e.videoFormat = VideoFormat.H264;
   2400         e.videoFrameRate = VideoFrameRate.FR_30_FPS;
   2401         e.videoFrameSize = findVideoResolution(mVideoEditor.getAspectRatio(),
   2402                                                            m.getScaledHeight());
   2403         e.videoBitrate = findVideoBitrate(e.videoFrameSize);
   2404 
   2405         mProcessingState  = PROCESSING_KENBURNS;
   2406         mProcessingObject = m;
   2407         err = generateClip(e);
   2408         // Reset the processing state and check for errors
   2409         mProcessingState  = PROCESSING_NONE;
   2410         if (err != 0) {
   2411             throw new RuntimeException("preview generation cannot be completed");
   2412         }
   2413         return output;
   2414     }
   2415 
   2416 
   2417     /**
   2418      * Calculates the output resolution for transition clip
   2419      *
   2420      * @param m1 First media item associated with transition
   2421      * @param m2 Second media item associated with transition
   2422      *
   2423      * @return The transition resolution
   2424      */
   2425     private int getTransitionResolution(MediaItem m1, MediaItem m2) {
   2426         int clip1Height = 0;
   2427         int clip2Height = 0;
   2428         int videoSize = 0;
   2429 
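             // Use the taller of the two clips (scaled height for images) to choose
             // the transition resolution.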
   2430         if (m1 != null && m2 != null) {
   2431             if (m1 instanceof MediaVideoItem) {
   2432                 clip1Height = m1.getHeight();
   2433             } else if (m1 instanceof MediaImageItem) {
   2434                 clip1Height = ((MediaImageItem)m1).getScaledHeight();
   2435             }
   2436             if (m2 instanceof MediaVideoItem) {
   2437                 clip2Height = m2.getHeight();
   2438             } else if (m2 instanceof MediaImageItem) {
   2439                 clip2Height = ((MediaImageItem)m2).getScaledHeight();
   2440             }
   2441             if (clip1Height > clip2Height) {
   2442                 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height);
   2443             } else {
   2444                 videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height);
   2445             }
   2446         } else if (m1 == null && m2 != null) {
   2447             if (m2 instanceof MediaVideoItem) {
   2448                 clip2Height = m2.getHeight();
   2449             } else if (m2 instanceof MediaImageItem) {
   2450                 clip2Height = ((MediaImageItem)m2).getScaledHeight();
   2451             }
   2452             videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip2Height);
   2453         } else if (m1 != null && m2 == null) {
   2454             if (m1 instanceof MediaVideoItem) {
   2455                 clip1Height = m1.getHeight();
   2456             } else if (m1 instanceof MediaImageItem) {
   2457                 clip1Height = ((MediaImageItem)m1).getScaledHeight();
   2458             }
   2459             videoSize = findVideoResolution(mVideoEditor.getAspectRatio(), clip1Height);
   2460         }
   2461         return videoSize;
   2462     }
   2463 
   2464     /**
    2465      * Populates the settings for generating a transition clip
   2466      *
   2467      * @param m1 First media item associated with transition
   2468      * @param m2 Second media item associated with transition
   2469      * @param e The EditSettings reference containing
   2470      * clip specific data
   2471      * @param uniqueId The unique id used in the name of the output clip
   2472      * @param t The Transition specific data
   2473      *
   2474      * @return The name and path of generated clip
   2475      */
   2476     String generateTransitionClip(EditSettings e, String uniqueId,
   2477             MediaItem m1, MediaItem m2,Transition t) {
   2478         String outputFilename = null;
   2479         int err = 0;
   2480         int outVideoProfile = 0;
   2481         int outVideoLevel = 0;
   2482         outputFilename = String.format(mProjectPath + "/" + uniqueId + ".3gp");
   2483 
   2484         outVideoProfile = VideoEditorProfile.getExportProfile(VideoFormat.H264);
   2485         outVideoLevel = VideoEditorProfile.getExportLevel(VideoFormat.H264);
   2486         e.videoProfile = outVideoProfile;
   2487         e.videoLevel = outVideoLevel;
   2488 
   2489         e.outputFile = outputFilename;
   2490         e.audioBitrate = Bitrate.BR_64_KBPS;
   2491         e.audioChannels = 2;
   2492         e.audioFormat = AudioFormat.AAC;
   2493         e.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
   2494 
   2495         e.videoFormat = VideoFormat.H264;
   2496         e.videoFrameRate = VideoFrameRate.FR_30_FPS;
   2497         e.videoFrameSize = getTransitionResolution(m1, m2);
   2498         e.videoBitrate = findVideoBitrate(e.videoFrameSize);
   2499 
   2500         if (new File(outputFilename).exists()) {
   2501             new File(outputFilename).delete();
   2502         }
   2503         mProcessingState  = PROCESSING_INTERMEDIATE3;
   2504         mProcessingObject = t;
   2505         err = generateClip(e);
   2506         // Reset the processing state and check for errors
   2507         mProcessingState  = PROCESSING_NONE;
   2508         if (err != 0) {
   2509             throw new RuntimeException("preview generation cannot be completed");
   2510         }
   2511         return outputFilename;
   2512     }
   2513 
   2514     /**
   2515      * Populates effects and overlays in EffectSettings structure
    2516      * and also adjusts the start time and duration of effects and overlays
    2517      * with respect to the total story board time
   2518      *
    2519      * @param m The media item associated with the effects and overlays
    2520      * @param effectSettings The EffectSettings array containing
    2521      *      effect specific data
               * @param i The index in the effect settings array at which to start populating
   2522      * @param beginCutTime The begin cut time of the clip associated with effect
   2523      * @param endCutTime The end cut time of the clip associated with effect
   2524      * @param storyBoardTime The current story board time
   2525      *
   2526      * @return The updated index
   2527      */
   2528     private int populateEffects(MediaItem m, EffectSettings[] effectSettings, int i,
   2529             int beginCutTime, int endCutTime, int storyBoardTime) {
   2530 
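             // Narrow the effective cut window by any begin/end transition durations
             // before mapping the effects and overlays onto story board time.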
   2531         if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
   2532                 && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
   2533             beginCutTime += m.getBeginTransition().getDuration();
   2534             endCutTime -= m.getEndTransition().getDuration();
   2535         } else if (m.getBeginTransition() == null && m.getEndTransition() != null
   2536                 && m.getEndTransition().getDuration() > 0) {
   2537             endCutTime -= m.getEndTransition().getDuration();
   2538         } else if (m.getEndTransition() == null && m.getBeginTransition() != null
   2539                 && m.getBeginTransition().getDuration() > 0) {
   2540             beginCutTime += m.getBeginTransition().getDuration();
   2541         }
   2542 
   2543         final List<Effect> effects = m.getAllEffects();
   2544         final List<Overlay> overlays = m.getAllOverlays();
   2545 
   2546         for (Overlay overlay : overlays) {
   2547             effectSettings[i] = getOverlaySettings((OverlayFrame)overlay);
   2548             adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime);
   2549             effectSettings[i].startTime += storyBoardTime;
   2550             i++;
   2551         }
   2552 
   2553         for (Effect effect : effects) {
   2554             if (effect instanceof EffectColor) {
   2555                 effectSettings[i] = getEffectSettings((EffectColor)effect);
   2556                 adjustEffectsStartTimeAndDuration(effectSettings[i], beginCutTime, endCutTime);
   2557                 effectSettings[i].startTime += storyBoardTime;
   2558                 i++;
   2559             }
   2560         }
   2561 
   2562         return i;
   2563     }
   2564 
   2565     /**
   2566      * Adjusts the media item boundaries for use in export or preview
   2567      *
   2568      * @param clipSettings The ClipSettings reference
   2569      * @param clipProperties The Properties reference
   2570      * @param m The media item
   2571      */
   2572     private void adjustMediaItemBoundary(ClipSettings clipSettings,
   2573                                          Properties clipProperties, MediaItem m) {
   2574         if (m.getBeginTransition() != null && m.getBeginTransition().getDuration() > 0
   2575                 && m.getEndTransition() != null && m.getEndTransition().getDuration() > 0) {
   2576             clipSettings.beginCutTime += m.getBeginTransition().getDuration();
   2577             clipSettings.endCutTime -= m.getEndTransition().getDuration();
   2578         } else if (m.getBeginTransition() == null && m.getEndTransition() != null
   2579                 && m.getEndTransition().getDuration() > 0) {
   2580             clipSettings.endCutTime -= m.getEndTransition().getDuration();
   2581         } else if (m.getEndTransition() == null && m.getBeginTransition() != null
   2582                 && m.getBeginTransition().getDuration() > 0) {
   2583             clipSettings.beginCutTime += m.getBeginTransition().getDuration();
   2584         }
   2585 
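             // Example: a clip cut to [1000, 9000] ms with 500 ms transitions on both
             // sides is tightened above to [1500, 8500] ms, so the durations below
             // become 7000 ms.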
   2586         clipProperties.duration = clipSettings.endCutTime - clipSettings.beginCutTime;
   2587 
   2588         if (clipProperties.videoDuration != 0) {
   2589             clipProperties.videoDuration = clipSettings.endCutTime - clipSettings.beginCutTime;
   2590         }
   2591 
   2592         if (clipProperties.audioDuration != 0) {
   2593             clipProperties.audioDuration = clipSettings.endCutTime - clipSettings.beginCutTime;
   2594         }
   2595     }
   2596 
   2597     /**
    2598      * Generates the transition if the transition is present
    2599      * and is in an invalidated state
   2600      *
   2601      * @param transition The Transition reference
   2602      * @param editSettings The EditSettings reference
   2603      * @param clipPropertiesArray The clip Properties array
    2604      * @param index The index in the clip properties array for the current clip
   2605      */
   2606     private void generateTransition(Transition transition, EditSettings editSettings,
   2607             PreviewClipProperties clipPropertiesArray, int index) {
   2608         if (!(transition.isGenerated())) {
   2609             transition.generate();
   2610         }
   2611         editSettings.clipSettingsArray[index] = new ClipSettings();
   2612         editSettings.clipSettingsArray[index].clipPath = transition.getFilename();
   2613         editSettings.clipSettingsArray[index].fileType = FileType.THREE_GPP;
   2614         editSettings.clipSettingsArray[index].beginCutTime = 0;
   2615         editSettings.clipSettingsArray[index].endCutTime = (int)transition.getDuration();
   2616         editSettings.clipSettingsArray[index].mediaRendering = MediaRendering.BLACK_BORDERS;
   2617 
   2618         try {
   2619             clipPropertiesArray.clipProperties[index] =
   2620                 getMediaProperties(transition.getFilename());
   2621         } catch (Exception e) {
   2622             throw new IllegalArgumentException("Unsupported file or file not found");
   2623         }
   2624 
   2625         clipPropertiesArray.clipProperties[index].Id = null;
   2626         clipPropertiesArray.clipProperties[index].audioVolumeValue = 100;
   2627         clipPropertiesArray.clipProperties[index].duration = (int)transition.getDuration();
   2628         if (clipPropertiesArray.clipProperties[index].videoDuration != 0) {
   2629             clipPropertiesArray.clipProperties[index].videoDuration = (int)transition.getDuration();
   2630         }
   2631 
   2632         if (clipPropertiesArray.clipProperties[index].audioDuration != 0) {
   2633             clipPropertiesArray.clipProperties[index].audioDuration = (int)transition.getDuration();
   2634         }
   2635     }
   2636 
   2637     /**
   2638      * Sets the volume for current media item in clip properties array
   2639      *
   2640      * @param m The media item
   2641      * @param clipProperties The clip properties array reference
    2642      * @param index The index in the clip properties array for the current clip
   2643      */
   2644     private void adjustVolume(MediaItem m, PreviewClipProperties clipProperties,
   2645                               int index) {
   2646         if (m instanceof MediaVideoItem) {
   2647             final boolean videoMuted = ((MediaVideoItem)m).isMuted();
   2648             if (videoMuted == false) {
   2649                 mClipProperties.clipProperties[index].audioVolumeValue =
   2650                     ((MediaVideoItem)m).getVolume();
   2651             } else {
   2652                 mClipProperties.clipProperties[index].audioVolumeValue = 0;
   2653             }
   2654         } else if (m instanceof MediaImageItem) {
   2655             mClipProperties.clipProperties[index].audioVolumeValue = 0;
   2656         }
   2657     }
   2658 
   2659     /**
    2660      * Checks for odd image width and height and rounds them down to even values
   2661      *
   2662      * @param m The media item
   2663      * @param clipProperties The clip properties array reference
    2664      * @param index The index in the clip properties array for the current clip
   2665      */
   2666     private void checkOddSizeImage(MediaItem m, PreviewClipProperties clipProperties, int index) {
   2667         if (m instanceof MediaImageItem) {
   2668             int width = mClipProperties.clipProperties[index].width;
   2669             int height = mClipProperties.clipProperties[index].height;
   2670 
   2671             if ((width % 2) != 0) {
   2672                 width -= 1;
   2673             }
   2674             if ((height % 2) != 0) {
   2675                 height -= 1;
   2676             }
   2677             mClipProperties.clipProperties[index].width = width;
   2678             mClipProperties.clipProperties[index].height = height;
   2679         }
   2680     }
   2681 
   2682     /**
   2683      * Populates the media item properties and calculates the maximum
   2684      * height among all the clips
   2685      *
   2686      * @param m The media item
    2687      * @param index The index in the clip properties array for the current clip
    2688      * @param maxHeight The maximum height seen so far among the clip properties
    2689      *
    2690      * @return The updated maximum height, i.e. the larger of maxHeight and the
    2691      *         current clip's height
   2692      */
   2693     private int populateMediaItemProperties(MediaItem m, int index, int maxHeight) {
   2694         mPreviewEditSettings.clipSettingsArray[index] = new ClipSettings();
   2695         if (m instanceof MediaVideoItem) {
   2696             mPreviewEditSettings.clipSettingsArray[index] =
   2697                 ((MediaVideoItem)m).getVideoClipProperties();
   2698             if (((MediaVideoItem)m).getHeight() > maxHeight) {
   2699                 maxHeight = ((MediaVideoItem)m).getHeight();
   2700             }
   2701         } else if (m instanceof MediaImageItem) {
   2702             mPreviewEditSettings.clipSettingsArray[index] =
   2703                 ((MediaImageItem)m).getImageClipProperties();
   2704             if (((MediaImageItem)m).getScaledHeight() > maxHeight) {
   2705                 maxHeight = ((MediaImageItem)m).getScaledHeight();
   2706             }
   2707         }
    2708         /* Image clips: keep the decoded path for preview and the original path for export. */
   2709         if (mPreviewEditSettings.clipSettingsArray[index].fileType == FileType.JPG) {
   2710             mPreviewEditSettings.clipSettingsArray[index].clipDecodedPath =
   2711                 ((MediaImageItem)m).getDecodedImageFileName();
   2712 
   2713             mPreviewEditSettings.clipSettingsArray[index].clipOriginalPath =
   2714                          mPreviewEditSettings.clipSettingsArray[index].clipPath;
   2715         }
   2716         return maxHeight;
   2717     }
   2718 
   2719     /**
   2720      * Populates the background music track properties
   2721      *
   2722      * @param mediaBGMList The background music list
   2723      *
   2724      */
   2725     private void populateBackgroundMusicProperties(List<AudioTrack> mediaBGMList) {
   2726 
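                  // Only a single background music track is supported; any other list size clears the track.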
   2727         if (mediaBGMList.size() == 1) {
   2728             mAudioTrack = mediaBGMList.get(0);
   2729         } else {
   2730             mAudioTrack = null;
   2731         }
   2732 
   2733         if (mAudioTrack != null) {
   2734             mAudioSettings = new AudioSettings();
   2735             Properties mAudioProperties = new Properties();
   2736             mAudioSettings.pFile = null;
   2737             mAudioSettings.Id = mAudioTrack.getId();
   2738             try {
   2739                 mAudioProperties = getMediaProperties(mAudioTrack.getFilename());
   2740             } catch (Exception e) {
   2741                throw new IllegalArgumentException("Unsupported file or file not found");
   2742             }
   2743             mAudioSettings.bRemoveOriginal = false;
   2744             mAudioSettings.channels = mAudioProperties.audioChannels;
   2745             mAudioSettings.Fs = mAudioProperties.audioSamplingFrequency;
   2746             mAudioSettings.loop = mAudioTrack.isLooping();
   2747             mAudioSettings.ExtendedFs = 0;
   2748             mAudioSettings.pFile = mAudioTrack.getFilename();
   2749             mAudioSettings.startMs = mAudioTrack.getStartTime();
   2750             mAudioSettings.beginCutTime = mAudioTrack.getBoundaryBeginTime();
   2751             mAudioSettings.endCutTime = mAudioTrack.getBoundaryEndTime();
   2752             if (mAudioTrack.isMuted()) {
   2753                 mAudioSettings.volume = 0;
   2754             } else {
   2755                 mAudioSettings.volume = mAudioTrack.getVolume();
   2756             }
   2757             mAudioSettings.fileType = mAudioProperties.fileType;
   2758             mAudioSettings.ducking_lowVolume = mAudioTrack.getDuckedTrackVolume();
   2759             mAudioSettings.ducking_threshold = mAudioTrack.getDuckingThreshhold();
   2760             mAudioSettings.bInDucking_enable = mAudioTrack.isDuckingEnabled();
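                      // The background track is decoded to a PCM file under the project directory,
                      // which is used for mixing and for audio waveform generation.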
   2761             mAudioTrackPCMFilePath = String.format(mProjectPath + "/" + AUDIO_TRACK_PCM_FILE);
   2762             mAudioSettings.pcmFilePath = mAudioTrackPCMFilePath;
   2763 
   2764             mPreviewEditSettings.backgroundMusicSettings = new BackgroundMusicSettings();
   2765             mPreviewEditSettings.backgroundMusicSettings.file = mAudioTrackPCMFilePath;
   2766             mPreviewEditSettings.backgroundMusicSettings.fileType = mAudioProperties.fileType;
   2767             mPreviewEditSettings.backgroundMusicSettings.insertionTime =
   2768                 mAudioTrack.getStartTime();
   2769             mPreviewEditSettings.backgroundMusicSettings.volumePercent = mAudioTrack.getVolume();
   2770             mPreviewEditSettings.backgroundMusicSettings.beginLoop =
   2771                 mAudioTrack.getBoundaryBeginTime();
   2772             mPreviewEditSettings.backgroundMusicSettings.endLoop =
   2773                                                mAudioTrack.getBoundaryEndTime();
   2774             mPreviewEditSettings.backgroundMusicSettings.enableDucking =
   2775                 mAudioTrack.isDuckingEnabled();
   2776             mPreviewEditSettings.backgroundMusicSettings.duckingThreshold =
   2777                 mAudioTrack.getDuckingThreshhold();
   2778             mPreviewEditSettings.backgroundMusicSettings.lowVolume =
   2779                 mAudioTrack.getDuckedTrackVolume();
   2780             mPreviewEditSettings.backgroundMusicSettings.isLooping = mAudioTrack.isLooping();
   2781             mPreviewEditSettings.primaryTrackVolume = 100;
   2782             mProcessingState  = PROCESSING_AUDIO_PCM;
   2783             mProcessingObject = mAudioTrack;
   2784         } else {
   2785             mAudioSettings = null;
   2786             mPreviewEditSettings.backgroundMusicSettings = null;
   2787             mAudioTrackPCMFilePath = null;
   2788         }
   2789     }
   2790 
   2791     /**
    2792      * Counts the effects and overlays across all the media items
    2793      * in the media items list.
    2794      *
    2795      * @param mediaItemsList The media item list
    2796      *
    2797      * @return The total number of effects and overlays, excluding Ken Burns effects
   2798      *
   2799      */
   2800     private int getTotalEffects(List<MediaItem> mediaItemsList) {
   2801         int totalEffects = 0;
   2802         final Iterator<MediaItem> it = mediaItemsList.iterator();
   2803         while (it.hasNext()) {
   2804             final MediaItem t = it.next();
   2805             totalEffects += t.getAllEffects().size();
   2806             totalEffects += t.getAllOverlays().size();
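                      // Ken Burns effects are excluded from the count; they are applied as
                      // generated image clips rather than as effect settings.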
   2807             final Iterator<Effect> ef = t.getAllEffects().iterator();
   2808             while (ef.hasNext()) {
   2809                 final Effect e = ef.next();
   2810                 if (e instanceof EffectKenBurns) {
   2811                     totalEffects--;
   2812                 }
   2813             }
   2814         }
   2815         return totalEffects;
   2816     }
   2817 
   2818     /**
    2819      * This function is responsible for forming the clip settings
    2820      * array and the clip properties array, including transition clips
    2821      * and effect settings, for preview or export.
   2822      *
   2823      *
   2824      * @param mediaItemsList The media item list
   2825      * @param mediaTransitionList The transitions list
   2826      * @param mediaBGMList The background music list
   2827      * @param listener The MediaProcessingProgressListener
   2828      *
   2829      */
   2830     void previewStoryBoard(List<MediaItem> mediaItemsList,
   2831             List<Transition> mediaTransitionList, List<AudioTrack> mediaBGMList,
   2832             MediaProcessingProgressListener listener) {
   2833         if (mInvalidatePreviewArray) {
   2834             int previewIndex = 0;
   2835             int totalEffects = 0;
   2836             int storyBoardTime = 0;
   2837             int maxHeight = 0;
   2838             int beginCutTime = 0;
   2839             int endCutTime = 0;
   2840             int effectIndex = 0;
   2841             Transition lTransition = null;
   2842             MediaItem lMediaItem = null;
   2843             mPreviewEditSettings = new EditSettings();
   2844             mClipProperties = new PreviewClipProperties();
   2845             mTotalClips = 0;
   2846 
   2847             mTotalClips = mediaItemsList.size();
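                      // Each transition with a non-zero duration is rendered as an extra clip in the storyboard.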
   2848             for (Transition transition : mediaTransitionList) {
   2849                 if (transition.getDuration() > 0) {
   2850                     mTotalClips++;
   2851                 }
   2852             }
   2853 
   2854             totalEffects = getTotalEffects(mediaItemsList);
   2855 
   2856             mPreviewEditSettings.clipSettingsArray = new ClipSettings[mTotalClips];
   2857             mPreviewEditSettings.effectSettingsArray = new EffectSettings[totalEffects];
   2858             mClipProperties.clipProperties = new Properties[mTotalClips];
   2859 
    2860             /** Record the callback progress listener */
   2861             mMediaProcessingProgressListener = listener;
   2862             mProgressToApp = 0;
   2863 
   2864             if (mediaItemsList.size() > 0) {
   2865                 for (int i = 0; i < mediaItemsList.size(); i++) {
   2866                     /* Get the Media Item from the list */
   2867                     lMediaItem = mediaItemsList.get(i);
   2868                     if (lMediaItem instanceof MediaVideoItem) {
   2869                         beginCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryBeginTime();
   2870                         endCutTime = (int)((MediaVideoItem)lMediaItem).getBoundaryEndTime();
   2871                     } else if (lMediaItem instanceof MediaImageItem) {
   2872                         beginCutTime = 0;
   2873                         endCutTime = (int)((MediaImageItem)lMediaItem).getTimelineDuration();
   2874                     }
   2875                     /* Get the transition associated with Media Item */
   2876                     lTransition = lMediaItem.getBeginTransition();
   2877                     if (lTransition != null && (lTransition.getDuration() > 0)) {
   2878                         /* generate transition clip */
   2879                         generateTransition(lTransition, mPreviewEditSettings,
   2880                                            mClipProperties, previewIndex);
   2881                         storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
   2882                         previewIndex++;
   2883                     }
   2884                     /* Populate media item properties */
   2885                     maxHeight = populateMediaItemProperties(lMediaItem, previewIndex, maxHeight);
   2886                     /* Get the clip properties of the media item. */
   2887                     if (lMediaItem instanceof MediaImageItem) {
   2888                         int tmpCnt = 0;
   2889                         boolean bEffectKbPresent = false;
   2890                         final List<Effect> effectList = lMediaItem.getAllEffects();
   2891                         /**
   2892                          * Check if Ken Burns effect is present
   2893                          */
   2894                         while (tmpCnt < effectList.size()) {
   2895                             if (effectList.get(tmpCnt) instanceof EffectKenBurns) {
   2896                                 bEffectKbPresent = true;
   2897                                 break;
   2898                             }
   2899                             tmpCnt++;
   2900                         }
   2901 
    2902                         if (bEffectKbPresent) {
    2903                             try {
    2904                                 if (((MediaImageItem)lMediaItem).getGeneratedImageClip() != null) {
    2905                                     mClipProperties.clipProperties[previewIndex]
    2906                                         = getMediaProperties(((MediaImageItem)lMediaItem).
    2907                                                 getGeneratedImageClip());
    2908                                 }
    2909                                 else {
    2910                                     mClipProperties.clipProperties[previewIndex]
    2911                                         = getMediaProperties(((MediaImageItem)lMediaItem).
    2912                                                 getScaledImageFileName());
    2913                                     mClipProperties.clipProperties[previewIndex].width =
    2914                                             ((MediaImageItem)lMediaItem).getScaledWidth();
    2915                                     mClipProperties.clipProperties[previewIndex].height =
    2916                                             ((MediaImageItem)lMediaItem).getScaledHeight();
    2917                                 }
    2918                             } catch (Exception e) {
    2919                                 throw new IllegalArgumentException("Unsupported file or file not found");
    2920                             }
    2921                         } else {
    2922                             try {
    2923                                 mClipProperties.clipProperties[previewIndex]
    2924                                     = getMediaProperties(((MediaImageItem)lMediaItem).
    2925                                             getScaledImageFileName());
    2926                             } catch (Exception e) {
    2927                                 throw new IllegalArgumentException("Unsupported file or file not found");
    2928                             }
    2929                             mClipProperties.clipProperties[previewIndex].width =
    2930                                     ((MediaImageItem)lMediaItem).getScaledWidth();
    2931                             mClipProperties.clipProperties[previewIndex].height =
    2932                                     ((MediaImageItem)lMediaItem).getScaledHeight();
    2933                         }
    2934                     } else {
    2935                         try {
    2936                             mClipProperties.clipProperties[previewIndex]
    2937                                 = getMediaProperties(lMediaItem.getFilename());
    2938                         } catch (Exception e) {
    2939                             throw new IllegalArgumentException("Unsupported file or file not found");
    2940                         }
    2941                     }
   2942                     mClipProperties.clipProperties[previewIndex].Id = lMediaItem.getId();
   2943                     checkOddSizeImage(lMediaItem, mClipProperties, previewIndex);
   2944                     adjustVolume(lMediaItem, mClipProperties, previewIndex);
   2945 
   2946                     /*
    2947                      * Adjust media item start time and end time with respect to the begin
    2948                      * and end transitions associated with the media item
   2949                      */
   2950 
   2951                     adjustMediaItemBoundary(mPreviewEditSettings.clipSettingsArray[previewIndex],
   2952                             mClipProperties.clipProperties[previewIndex], lMediaItem);
   2953 
   2954                     /*
   2955                      * Get all the effects and overlays for that media item and
   2956                      * adjust start time and duration of effects
   2957                      */
   2958 
   2959                     effectIndex = populateEffects(lMediaItem,
   2960                             mPreviewEditSettings.effectSettingsArray, effectIndex, beginCutTime,
   2961                             endCutTime, storyBoardTime);
   2962                     storyBoardTime += mClipProperties.clipProperties[previewIndex].duration;
   2963                     previewIndex++;
   2964 
   2965                     /* Check if there is any end transition at last media item */
   2966 
   2967                     if (i == (mediaItemsList.size() - 1)) {
   2968                         lTransition = lMediaItem.getEndTransition();
   2969                         if (lTransition != null && (lTransition.getDuration() > 0)) {
   2970                             generateTransition(lTransition, mPreviewEditSettings, mClipProperties,
   2971                                     previewIndex);
   2972                             break;
   2973                         }
   2974                     }
   2975                 }
   2976 
   2977                 if (!mErrorFlagSet) {
   2978                     mPreviewEditSettings.videoFrameSize = findVideoResolution(mVideoEditor
   2979                             .getAspectRatio(), maxHeight);
   2980                     populateBackgroundMusicProperties(mediaBGMList);
   2981 
   2982                     /** call to native populate settings */
   2983                     try {
   2984                         nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
   2985                     } catch (IllegalArgumentException ex) {
   2986                         Log.e(TAG, "Illegal argument exception in nativePopulateSettings");
   2987                         throw ex;
   2988                     } catch (IllegalStateException ex) {
   2989                         Log.e(TAG, "Illegal state exception in nativePopulateSettings");
   2990                         throw ex;
   2991                     } catch (RuntimeException ex) {
   2992                         Log.e(TAG, "Runtime exception in nativePopulateSettings");
   2993                         throw ex;
   2994                     }
   2995                     mInvalidatePreviewArray = false;
   2996                     mProcessingState  = PROCESSING_NONE;
   2997                 }
   2998             }
   2999             if (mErrorFlagSet) {
   3000                 mErrorFlagSet = false;
   3001                 throw new RuntimeException("preview generation cannot be completed");
   3002             }
   3003         }
   3004     } /* END of previewStoryBoard */
   3005 
   3006     /**
   3007      * This function is responsible for starting the preview
   3008      *
   3009      *
   3010      * @param surface The surface on which preview has to be displayed
   3011      * @param fromMs The time in ms from which preview has to be started
    3012      * @param toMs The time in ms until which the preview has to be played
    3013      * @param loop Whether the preview should loop
    3014      * @param callbackAfterFrameCount Indicates after how many frames
    3015      * the callback is needed
   3016      * @param listener The PreviewProgressListener
   3017      */
   3018     void doPreview(Surface surface, long fromMs, long toMs, boolean loop,
   3019             int callbackAfterFrameCount, PreviewProgressListener listener) {
   3020         mPreviewProgress = fromMs;
   3021         mIsFirstProgress = true;
   3022         mPreviewProgressListener = listener;
   3023 
   3024         if (!mInvalidatePreviewArray) {
   3025             try {
    3026                 /** Modify the image file names to point to the RGB image files. */
   3027                 for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
   3028                     clipCnt++) {
   3029                     if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
   3030                         mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
   3031                             mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
   3032                     }
   3033                 }
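                          // Push the updated edit settings down to the native engine before starting playback.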
   3034                 nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
   3035                 nativeStartPreview(surface, fromMs, toMs, callbackAfterFrameCount, loop);
   3036             } catch (IllegalArgumentException ex) {
   3037                 Log.e(TAG, "Illegal argument exception in nativeStartPreview");
   3038                 throw ex;
   3039             } catch (IllegalStateException ex) {
   3040                 Log.e(TAG, "Illegal state exception in nativeStartPreview");
   3041                 throw ex;
   3042             } catch (RuntimeException ex) {
   3043                 Log.e(TAG, "Runtime exception in nativeStartPreview");
   3044                 throw ex;
   3045             }
   3046         } else {
   3047             throw new IllegalStateException("generatePreview is in progress");
   3048         }
   3049     }
   3050 
   3051     /**
   3052      * This function is responsible for stopping the preview
   3053      */
   3054     long stopPreview() {
   3055         return nativeStopPreview();
   3056     }
   3057 
   3058     /**
   3059      * This function is responsible for rendering a single frame
   3060      * from the complete story board on the surface
   3061      *
   3062      * @param surface The surface on which frame has to be rendered
   3063      * @param time The time in ms at which the frame has to be rendered
   3064      * @param surfaceWidth The surface width
   3065      * @param surfaceHeight The surface height
   3066      * @param overlayData The overlay data
   3067      *
    3068      * @return The actual time from the story board at which the frame was extracted
   3069      * and rendered
   3070      */
   3071     long renderPreviewFrame(Surface surface, long time, int surfaceWidth,
   3072             int surfaceHeight, VideoEditor.OverlayData overlayData) {
   3073         if (mInvalidatePreviewArray) {
   3074             if (Log.isLoggable(TAG, Log.DEBUG)) {
   3075                 Log.d(TAG, "Call generate preview first");
   3076             }
   3077             throw new IllegalStateException("Call generate preview first");
   3078         }
   3079 
   3080         long timeMs = 0;
   3081         try {
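                      // Point image clips at their decoded RGB copies before rendering.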
   3082             for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length;
   3083                   clipCnt++) {
   3084                 if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
   3085                     mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
   3086                         mPreviewEditSettings.clipSettingsArray[clipCnt].clipDecodedPath;
   3087                 }
   3088             }
   3089 
   3090             // Reset the render preview frame params that shall be set by native.
   3091             mRenderPreviewOverlayFile = null;
   3092             mRenderPreviewRenderingMode = MediaRendering.RESIZING;
   3093 
   3094             nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
   3095 
   3096             timeMs = (long)nativeRenderPreviewFrame(surface, time, surfaceWidth, surfaceHeight);
   3097 
   3098             if (mRenderPreviewOverlayFile != null) {
   3099                 overlayData.set(BitmapFactory.decodeFile(mRenderPreviewOverlayFile),
   3100                         mRenderPreviewRenderingMode);
   3101             } else {
   3102                 overlayData.setClear();
   3103             }
   3104         } catch (IllegalArgumentException ex) {
   3105             Log.e(TAG, "Illegal Argument exception in nativeRenderPreviewFrame");
   3106             throw ex;
   3107         } catch (IllegalStateException ex) {
   3108             Log.e(TAG, "Illegal state exception in nativeRenderPreviewFrame");
   3109             throw ex;
   3110         } catch (RuntimeException ex) {
   3111             Log.e(TAG, "Runtime exception in nativeRenderPreviewFrame");
   3112             throw ex;
   3113         }
   3114 
   3115         return timeMs;
   3116     }
   3117 
   3118     private void previewFrameEditInfo(String filename, int renderingMode) {
   3119         mRenderPreviewOverlayFile = filename;
   3120         mRenderPreviewRenderingMode = renderingMode;
   3121     }
   3122 
   3123 
   3124     /**
   3125      * This function is responsible for rendering a single frame
   3126      * from a single media item on the surface
   3127      *
   3128      * @param surface The surface on which frame has to be rendered
   3129      * @param filepath The file path for which the frame needs to be displayed
   3130      * @param time The time in ms at which the frame has to be rendered
   3131      * @param framewidth The frame width
    3132      * @param frameheight The frame height
    3133      *
    3134      * @return The actual time from the media item at which the frame was extracted
   3135      * and rendered
   3136      */
   3137     long renderMediaItemPreviewFrame(Surface surface, String filepath,
   3138                                             long time, int framewidth, int frameheight) {
   3139         long timeMs = 0;
   3140         try {
   3141             timeMs = (long)nativeRenderMediaItemPreviewFrame(surface, filepath, framewidth,
   3142                     frameheight, 0, 0, time);
   3143         } catch (IllegalArgumentException ex) {
   3144             Log.e(TAG, "Illegal Argument exception in renderMediaItemPreviewFrame");
   3145             throw ex;
   3146         } catch (IllegalStateException ex) {
   3147             Log.e(TAG, "Illegal state exception in renderMediaItemPreviewFrame");
   3148             throw ex;
   3149         } catch (RuntimeException ex) {
   3150             Log.e(TAG, "Runtime exception in renderMediaItemPreviewFrame");
   3151             throw ex;
   3152         }
   3153 
   3154         return timeMs;
   3155     }
   3156 
   3157     /**
    3158      * This function sets the flag that invalidates the preview array
    3159      * so that the preview is generated again
   3160      */
   3161     void setGeneratePreview(boolean isRequired) {
   3162         boolean semAcquiredDone = false;
   3163         try {
   3164             lock();
   3165             semAcquiredDone = true;
   3166             mInvalidatePreviewArray = isRequired;
   3167         } catch (InterruptedException ex) {
    3168             Log.e(TAG, "Interrupted exception in setGeneratePreview");
   3169         } finally {
   3170             if (semAcquiredDone) {
   3171                 unlock();
   3172             }
   3173         }
   3174     }
   3175 
   3176     /**
    3177      * @return The current status of the preview invalidation
    3178      * flag
   3179      */
   3180     boolean getGeneratePreview() {
   3181         return mInvalidatePreviewArray;
   3182     }
   3183 
   3184     /**
    3185      * Calculates the aspect ratio from width and height
   3186      *
   3187      * @param w The width of media item
   3188      * @param h The height of media item
   3189      *
   3190      * @return The calculated aspect ratio
   3191      */
   3192     int getAspectRatio(int w, int h) {
   3193         double apRatio = (double)(w) / (double)(h);
   3194         BigDecimal bd = new BigDecimal(apRatio);
   3195         bd = bd.setScale(3, BigDecimal.ROUND_HALF_UP);
   3196         apRatio = bd.doubleValue();
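                  // The ratio is rounded to three decimals and mapped to the closest bucket,
                  // e.g. 1280x720 -> 1.778 -> ASPECT_RATIO_16_9 and 720x480 -> 1.5 -> ASPECT_RATIO_3_2.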
   3197         int var = MediaProperties.ASPECT_RATIO_16_9;
   3198         if (apRatio >= 1.7) {
   3199             var = MediaProperties.ASPECT_RATIO_16_9;
   3200         } else if (apRatio >= 1.6) {
   3201             var = MediaProperties.ASPECT_RATIO_5_3;
   3202         } else if (apRatio >= 1.5) {
   3203             var = MediaProperties.ASPECT_RATIO_3_2;
   3204         } else if (apRatio > 1.3) {
   3205             var = MediaProperties.ASPECT_RATIO_4_3;
   3206         } else if (apRatio >= 1.2) {
   3207             var = MediaProperties.ASPECT_RATIO_11_9;
   3208         }
   3209         return var;
   3210     }
   3211 
   3212     /**
   3213      * Maps the file type used in native layer
   3214      * to file type used in JAVA layer
   3215      *
   3216      * @param fileType The file type in native layer
   3217      *
   3218      * @return The File type in JAVA layer
   3219      */
   3220     int getFileType(int fileType) {
   3221         int retValue = -1;
   3222         switch (fileType) {
   3223             case FileType.UNSUPPORTED:
   3224                 retValue = MediaProperties.FILE_UNSUPPORTED;
   3225                 break;
   3226             case FileType.THREE_GPP:
   3227                 retValue = MediaProperties.FILE_3GP;
   3228                 break;
   3229             case FileType.MP4:
   3230                 retValue = MediaProperties.FILE_MP4;
   3231                 break;
   3232             case FileType.JPG:
   3233                 retValue = MediaProperties.FILE_JPEG;
   3234                 break;
   3235             case FileType.PNG:
   3236                 retValue = MediaProperties.FILE_PNG;
   3237                 break;
   3238             case FileType.MP3:
   3239                 retValue = MediaProperties.FILE_MP3;
   3240                 break;
   3241             case FileType.M4V:
   3242                 retValue = MediaProperties.FILE_M4V;
   3243                 break;
   3244             case FileType.AMR:
   3245                 retValue = MediaProperties.FILE_AMR;
   3246                 break;
   3247 
   3248             default:
   3249                 retValue = -1;
   3250         }
   3251         return retValue;
   3252     }
   3253 
   3254     /**
   3255      * Maps the video codec type used in native layer
   3256      * to video codec type used in JAVA layer
   3257      *
   3258      * @param codecType The video codec type in native layer
   3259      *
   3260      * @return The video codec type in JAVA layer
   3261      */
   3262     int getVideoCodecType(int codecType) {
   3263         int retValue = -1;
   3264         switch (codecType) {
   3265             case VideoFormat.H263:
   3266                 retValue = MediaProperties.VCODEC_H263;
   3267                 break;
   3268             case VideoFormat.H264:
   3269                 retValue = MediaProperties.VCODEC_H264;
   3270                 break;
   3271             case VideoFormat.MPEG4:
   3272                 retValue = MediaProperties.VCODEC_MPEG4;
   3273                 break;
   3274             case VideoFormat.UNSUPPORTED:
   3275 
   3276             default:
   3277                 retValue = -1;
   3278         }
   3279         return retValue;
   3280     }
   3281 
   3282     /**
   3283      * Maps the audio codec type used in native layer
   3284      * to audio codec type used in JAVA layer
   3285      *
    3286      * @param codecType The audio codec type in native layer
   3287      *
   3288      * @return The audio codec type in JAVA layer
   3289      */
   3290     int getAudioCodecType(int codecType) {
   3291         int retValue = -1;
   3292         switch (codecType) {
   3293             case AudioFormat.AMR_NB:
   3294                 retValue = MediaProperties.ACODEC_AMRNB;
   3295                 break;
   3296             case AudioFormat.AAC:
   3297                 retValue = MediaProperties.ACODEC_AAC_LC;
   3298                 break;
   3299             case AudioFormat.MP3:
   3300                 retValue = MediaProperties.ACODEC_MP3;
   3301                 break;
   3302 
   3303             default:
   3304                 retValue = -1;
   3305         }
   3306         return retValue;
   3307     }
   3308 
   3309     /**
   3310      * Returns the frame rate as integer
   3311      *
   3312      * @param fps The fps as enum
   3313      *
   3314      * @return The frame rate as integer
   3315      */
   3316     int getFrameRate(int fps) {
   3317         int retValue = -1;
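                  // Fractional frame rates (7.5 and 12.5 fps) are rounded up to 8 and 13 respectively.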
   3318         switch (fps) {
   3319             case VideoFrameRate.FR_5_FPS:
   3320                 retValue = 5;
   3321                 break;
   3322             case VideoFrameRate.FR_7_5_FPS:
   3323                 retValue = 8;
   3324                 break;
   3325             case VideoFrameRate.FR_10_FPS:
   3326                 retValue = 10;
   3327                 break;
   3328             case VideoFrameRate.FR_12_5_FPS:
   3329                 retValue = 13;
   3330                 break;
   3331             case VideoFrameRate.FR_15_FPS:
   3332                 retValue = 15;
   3333                 break;
   3334             case VideoFrameRate.FR_20_FPS:
   3335                 retValue = 20;
   3336                 break;
   3337             case VideoFrameRate.FR_25_FPS:
   3338                 retValue = 25;
   3339                 break;
   3340             case VideoFrameRate.FR_30_FPS:
   3341                 retValue = 30;
   3342                 break;
   3343 
   3344             default:
   3345                 retValue = -1;
   3346         }
   3347         return retValue;
   3348     }
   3349 
   3350     /**
   3351      * Maps the file type used in JAVA layer
   3352      * to file type used in native layer
   3353      *
   3354      * @param fileType The file type in JAVA layer
   3355      *
   3356      * @return The File type in native layer
   3357      */
   3358     int getMediaItemFileType(int fileType) {
   3359         int retValue = -1;
   3360 
   3361         switch (fileType) {
   3362             case MediaProperties.FILE_UNSUPPORTED:
   3363                 retValue = FileType.UNSUPPORTED;
   3364                 break;
   3365             case MediaProperties.FILE_3GP:
   3366                 retValue = FileType.THREE_GPP;
   3367                 break;
   3368             case MediaProperties.FILE_MP4:
   3369                 retValue = FileType.MP4;
   3370                 break;
   3371             case MediaProperties.FILE_JPEG:
   3372                 retValue = FileType.JPG;
   3373                 break;
   3374             case MediaProperties.FILE_PNG:
   3375                 retValue = FileType.PNG;
   3376                 break;
   3377             case MediaProperties.FILE_M4V:
   3378                 retValue = FileType.M4V;
   3379                 break;
   3380 
   3381             default:
   3382                 retValue = -1;
   3383         }
   3384         return retValue;
   3385 
   3386     }
   3387 
   3388     /**
    3389      * Maps the rendering mode used in JAVA layer
    3390      * to the rendering mode used in native layer
   3391      *
   3392      * @param renderingMode The rendering mode in JAVA layer
   3393      *
   3394      * @return The rendering mode in native layer
   3395      */
   3396     int getMediaItemRenderingMode(int renderingMode) {
   3397         int retValue = -1;
   3398         switch (renderingMode) {
   3399             case MediaItem.RENDERING_MODE_BLACK_BORDER:
   3400                 retValue = MediaRendering.BLACK_BORDERS;
   3401                 break;
   3402             case MediaItem.RENDERING_MODE_STRETCH:
   3403                 retValue = MediaRendering.RESIZING;
   3404                 break;
   3405             case MediaItem.RENDERING_MODE_CROPPING:
   3406                 retValue = MediaRendering.CROPPING;
   3407                 break;
   3408 
   3409             default:
   3410                 retValue = -1;
   3411         }
   3412         return retValue;
   3413     }
   3414 
   3415     /**
   3416      * Maps the transition behavior used in JAVA layer
   3417      * to transition behavior used in native layer
   3418      *
   3419      * @param transitionType The transition behavior in JAVA layer
   3420      *
   3421      * @return The transition behavior in native layer
   3422      */
   3423     int getVideoTransitionBehaviour(int transitionType) {
   3424         int retValue = -1;
   3425         switch (transitionType) {
   3426             case Transition.BEHAVIOR_SPEED_UP:
   3427                 retValue = TransitionBehaviour.SPEED_UP;
   3428                 break;
   3429             case Transition.BEHAVIOR_SPEED_DOWN:
   3430                 retValue = TransitionBehaviour.SPEED_DOWN;
   3431                 break;
   3432             case Transition.BEHAVIOR_LINEAR:
   3433                 retValue = TransitionBehaviour.LINEAR;
   3434                 break;
   3435             case Transition.BEHAVIOR_MIDDLE_SLOW:
   3436                 retValue = TransitionBehaviour.SLOW_MIDDLE;
   3437                 break;
   3438             case Transition.BEHAVIOR_MIDDLE_FAST:
   3439                 retValue = TransitionBehaviour.FAST_MIDDLE;
   3440                 break;
   3441 
   3442             default:
   3443                 retValue = -1;
   3444         }
   3445         return retValue;
   3446     }
   3447 
   3448     /**
   3449      * Maps the transition slide direction used in JAVA layer
   3450      * to transition slide direction used in native layer
   3451      *
   3452      * @param slideDirection The transition slide direction
   3453      * in JAVA layer
   3454      *
   3455      * @return The transition slide direction in native layer
   3456      */
   3457     int getSlideSettingsDirection(int slideDirection) {
   3458         int retValue = -1;
   3459         switch (slideDirection) {
   3460             case TransitionSliding.DIRECTION_RIGHT_OUT_LEFT_IN:
   3461                 retValue = SlideDirection.RIGHT_OUT_LEFT_IN;
   3462                 break;
   3463             case TransitionSliding.DIRECTION_LEFT_OUT_RIGHT_IN:
   3464                 retValue = SlideDirection.LEFT_OUT_RIGTH_IN;
   3465                 break;
   3466             case TransitionSliding.DIRECTION_TOP_OUT_BOTTOM_IN:
   3467                 retValue = SlideDirection.TOP_OUT_BOTTOM_IN;
   3468                 break;
   3469             case TransitionSliding.DIRECTION_BOTTOM_OUT_TOP_IN:
   3470                 retValue = SlideDirection.BOTTOM_OUT_TOP_IN;
   3471                 break;
   3472 
   3473             default:
   3474                 retValue = -1;
   3475         }
   3476         return retValue;
   3477     }
   3478 
   3479     /**
   3480      * Maps the effect color type used in JAVA layer
   3481      * to effect color type used in native layer
   3482      *
   3483      * @param effect The EffectColor reference
   3484      *
   3485      * @return The color effect value from native layer
   3486      */
   3487     private int getEffectColorType(EffectColor effect) {
   3488         int retValue = -1;
   3489         switch (effect.getType()) {
   3490             case EffectColor.TYPE_COLOR:
   3491                 if (effect.getColor() == EffectColor.GREEN) {
   3492                     retValue = VideoEffect.GREEN;
   3493                 } else if (effect.getColor() == EffectColor.PINK) {
   3494                     retValue = VideoEffect.PINK;
   3495                 } else if (effect.getColor() == EffectColor.GRAY) {
   3496                     retValue = VideoEffect.BLACK_AND_WHITE;
   3497                 } else {
   3498                     retValue = VideoEffect.COLORRGB16;
   3499                 }
   3500                 break;
   3501             case EffectColor.TYPE_GRADIENT:
   3502                 retValue = VideoEffect.GRADIENT;
   3503                 break;
   3504             case EffectColor.TYPE_SEPIA:
   3505                 retValue = VideoEffect.SEPIA;
   3506                 break;
   3507             case EffectColor.TYPE_NEGATIVE:
   3508                 retValue = VideoEffect.NEGATIVE;
   3509                 break;
   3510             case EffectColor.TYPE_FIFTIES:
   3511                 retValue = VideoEffect.FIFTIES;
   3512                 break;
   3513 
   3514             default:
   3515                 retValue = -1;
   3516         }
   3517         return retValue;
   3518     }
   3519 
   3520     /**
    3521      * Calculates the video resolution for the output clip
    3522      * based on the clip's height and the aspect ratio of the storyboard
   3523      *
   3524      * @param aspectRatio The aspect ratio of story board
   3525      * @param height The height of clip
   3526      *
   3527      * @return The video resolution
   3528      */
   3529     private int findVideoResolution(int aspectRatio, int height) {
   3530         final Pair<Integer, Integer>[] resolutions;
   3531         final Pair<Integer, Integer> maxResolution;
   3532         int retValue = VideoFrameSize.SIZE_UNDEFINED;
   3533         switch (aspectRatio) {
   3534             case MediaProperties.ASPECT_RATIO_3_2:
   3535                 if (height == MediaProperties.HEIGHT_480)
   3536                     retValue = VideoFrameSize.NTSC;
   3537                 else if (height == MediaProperties.HEIGHT_720)
   3538                     retValue = VideoFrameSize.W720p;
   3539                 break;
   3540             case MediaProperties.ASPECT_RATIO_16_9:
   3541                 if (height == MediaProperties.HEIGHT_480)
   3542                     retValue = VideoFrameSize.WVGA16x9;
   3543                 else if (height == MediaProperties.HEIGHT_720)
   3544                     retValue = VideoFrameSize.V720p;
   3545                 else if (height == MediaProperties.HEIGHT_1080)
   3546                     retValue = VideoFrameSize.V1080p;
   3547                 break;
   3548             case MediaProperties.ASPECT_RATIO_4_3:
   3549                 if (height == MediaProperties.HEIGHT_480)
   3550                     retValue = VideoFrameSize.VGA;
   3551                 else if (height == MediaProperties.HEIGHT_720)
   3552                     retValue = VideoFrameSize.S720p;
   3553                 break;
   3554             case MediaProperties.ASPECT_RATIO_5_3:
   3555                 if (height == MediaProperties.HEIGHT_480)
   3556                     retValue = VideoFrameSize.WVGA;
   3557                 break;
   3558             case MediaProperties.ASPECT_RATIO_11_9:
   3559                 if (height == MediaProperties.HEIGHT_144)
   3560                     retValue = VideoFrameSize.QCIF;
   3561                 else if (height == MediaProperties.HEIGHT_288)
   3562                     retValue = VideoFrameSize.CIF;
   3563                 break;
   3564         }
   3565         if (retValue == VideoFrameSize.SIZE_UNDEFINED) {
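                      // No direct match for this height; fall back to the highest resolution
                      // supported for the storyboard aspect ratio.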
   3566             resolutions = MediaProperties.getSupportedResolutions(mVideoEditor.getAspectRatio());
   3567             // Get the highest resolution
   3568             maxResolution = resolutions[resolutions.length - 1];
   3569             retValue = findVideoResolution(mVideoEditor.getAspectRatio(), maxResolution.second);
   3570         }
   3571 
   3572         return retValue;
   3573     }
   3574 
   3575     /**
   3576      *  Calculate a reasonable bitrate for generating intermediate clips.
   3577      */
   3578     private int findVideoBitrate(int videoFrameSize) {
   3579         switch (videoFrameSize) {
   3580             case VideoFrameSize.SQCIF:
   3581             case VideoFrameSize.QQVGA:
   3582             case VideoFrameSize.QCIF:
   3583                 return Bitrate.BR_128_KBPS;
   3584             case VideoFrameSize.QVGA:
   3585             case VideoFrameSize.CIF:
   3586                 return Bitrate.BR_384_KBPS;
   3587             case VideoFrameSize.VGA:
   3588             case VideoFrameSize.WVGA:
   3589             case VideoFrameSize.NTSC:
   3590             case VideoFrameSize.nHD:
   3591             case VideoFrameSize.WVGA16x9:
   3592                 return Bitrate.BR_2_MBPS;
   3593             case VideoFrameSize.V720p:
   3594             case VideoFrameSize.W720p:
   3595             case VideoFrameSize.S720p:
   3596                 return Bitrate.BR_5_MBPS;
   3597             case VideoFrameSize.V1080p:
   3598             default:
   3599                 return Bitrate.BR_8_MBPS;
   3600         }
   3601     }
   3602 
   3603     /**
   3604      * This method is responsible for exporting a movie
   3605      *
   3606      * @param filePath The output file path
   3607      * @param projectDir The output project directory
   3608      * @param height The height of clip
   3609      * @param bitrate The bitrate at which the movie should be exported
   3610      * @param mediaItemsList The media items list
   3611      * @param mediaTransitionList The transitions list
   3612      * @param mediaBGMList The background track list
   3613      * @param listener The ExportProgressListener
   3614      *
   3615      */
   3616     void export(String filePath, String projectDir, int height, int bitrate,
   3617             List<MediaItem> mediaItemsList, List<Transition> mediaTransitionList,
   3618             List<AudioTrack> mediaBGMList, ExportProgressListener listener) {
   3619 
   3620         int outBitrate = 0;
   3621         mExportFilename = filePath;
    3622         previewStoryBoard(mediaItemsList, mediaTransitionList, mediaBGMList, null);
   3623         mExportProgressListener = listener;
   3624         int outVideoProfile = 0;
   3625         int outVideoLevel = 0;
   3626 
   3627         /** Check the platform specific maximum export resolution */
   3628         VideoEditorProfile veProfile = VideoEditorProfile.get();
   3629         if (veProfile == null) {
   3630             throw new RuntimeException("Can't get the video editor profile");
   3631         }
   3632         final int maxOutputHeight = veProfile.maxOutputVideoFrameHeight;
   3633         final int maxOutputWidth = veProfile.maxOutputVideoFrameWidth;
   3634         if (height > maxOutputHeight) {
   3635             throw new IllegalArgumentException(
   3636                     "Unsupported export resolution. Supported maximum width:" +
   3637                     maxOutputWidth + " height:" + maxOutputHeight +
   3638                     " current height:" + height);
   3639         }
   3640         outVideoProfile = VideoEditorProfile.getExportProfile(mExportVideoCodec);
   3641         outVideoLevel = VideoEditorProfile.getExportLevel(mExportVideoCodec);
   3642 
   3643         mProgressToApp = 0;
   3644 
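                  // Map the public MediaProperties bitrate constants to native Bitrate values;
                  // the two lowest rates are bumped up to the nearest supported native value.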
   3645         switch (bitrate) {
   3646             case MediaProperties.BITRATE_28K:
   3647                 outBitrate = Bitrate.BR_32_KBPS;
   3648                 break;
   3649             case MediaProperties.BITRATE_40K:
   3650                 outBitrate = Bitrate.BR_48_KBPS;
   3651                 break;
   3652             case MediaProperties.BITRATE_64K:
   3653                 outBitrate = Bitrate.BR_64_KBPS;
   3654                 break;
   3655             case MediaProperties.BITRATE_96K:
   3656                 outBitrate = Bitrate.BR_96_KBPS;
   3657                 break;
   3658             case MediaProperties.BITRATE_128K:
   3659                 outBitrate = Bitrate.BR_128_KBPS;
   3660                 break;
   3661             case MediaProperties.BITRATE_192K:
   3662                 outBitrate = Bitrate.BR_192_KBPS;
   3663                 break;
   3664             case MediaProperties.BITRATE_256K:
   3665                 outBitrate = Bitrate.BR_256_KBPS;
   3666                 break;
   3667             case MediaProperties.BITRATE_384K:
   3668                 outBitrate = Bitrate.BR_384_KBPS;
   3669                 break;
   3670             case MediaProperties.BITRATE_512K:
   3671                 outBitrate = Bitrate.BR_512_KBPS;
   3672                 break;
   3673             case MediaProperties.BITRATE_800K:
   3674                 outBitrate = Bitrate.BR_800_KBPS;
   3675                 break;
   3676             case MediaProperties.BITRATE_2M:
   3677                 outBitrate = Bitrate.BR_2_MBPS;
   3678                 break;
   3679             case MediaProperties.BITRATE_5M:
   3680                 outBitrate = Bitrate.BR_5_MBPS;
   3681                 break;
   3682             case MediaProperties.BITRATE_8M:
   3683                 outBitrate = Bitrate.BR_8_MBPS;
   3684                 break;
   3685 
   3686             default:
   3687                 throw new IllegalArgumentException("Argument Bitrate incorrect");
   3688         }
   3689         mPreviewEditSettings.videoFrameRate = VideoFrameRate.FR_30_FPS;
   3690         mPreviewEditSettings.outputFile = mOutputFilename = filePath;
   3691 
   3692         int aspectRatio = mVideoEditor.getAspectRatio();
   3693         mPreviewEditSettings.videoFrameSize = findVideoResolution(aspectRatio, height);
   3694         mPreviewEditSettings.videoFormat = mExportVideoCodec;
   3695         mPreviewEditSettings.audioFormat = mExportAudioCodec;
   3696         mPreviewEditSettings.videoProfile = outVideoProfile;
   3697         mPreviewEditSettings.videoLevel = outVideoLevel;
   3698         mPreviewEditSettings.audioSamplingFreq = AudioSamplingFrequency.FREQ_32000;
   3699         mPreviewEditSettings.maxFileSize = 0;
   3700         mPreviewEditSettings.audioChannels = 2;
   3701         mPreviewEditSettings.videoBitrate = outBitrate;
   3702         mPreviewEditSettings.audioBitrate = Bitrate.BR_96_KBPS;
   3703 
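                  // Transitions were already flattened into dedicated clips by previewStoryBoard(),
                  // so the export-time transition array only carries NONE placeholders.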
   3704         mPreviewEditSettings.transitionSettingsArray = new TransitionSettings[mTotalClips - 1];
   3705         for (int index = 0; index < mTotalClips - 1; index++) {
   3706             mPreviewEditSettings.transitionSettingsArray[index] = new TransitionSettings();
   3707             mPreviewEditSettings.transitionSettingsArray[index].videoTransitionType =
   3708                 VideoTransition.NONE;
   3709             mPreviewEditSettings.transitionSettingsArray[index].audioTransitionType =
   3710                 AudioTransition.NONE;
   3711         }
   3712 
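                  // For image clips, export uses the original full-resolution file instead of the
                  // decoded preview copy.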
   3713         for (int clipCnt = 0; clipCnt < mPreviewEditSettings.clipSettingsArray.length; clipCnt++) {
   3714             if (mPreviewEditSettings.clipSettingsArray[clipCnt].fileType == FileType.JPG) {
   3715                 mPreviewEditSettings.clipSettingsArray[clipCnt].clipPath =
   3716                 mPreviewEditSettings.clipSettingsArray[clipCnt].clipOriginalPath;
   3717             }
   3718         }
   3719         nativePopulateSettings(mPreviewEditSettings, mClipProperties, mAudioSettings);
   3720 
   3721         int err = 0;
   3722         try {
   3723             mProcessingState  = PROCESSING_EXPORT;
   3724             mProcessingObject = null;
   3725             err = generateClip(mPreviewEditSettings);
   3726             mProcessingState  = PROCESSING_NONE;
   3727         } catch (IllegalArgumentException ex) {
   3728             Log.e(TAG, "IllegalArgument for generateClip");
   3729             throw ex;
   3730         } catch (IllegalStateException ex) {
    3731             Log.e(TAG, "IllegalStateException for generateClip");
   3732             throw ex;
   3733         } catch (RuntimeException ex) {
   3734             Log.e(TAG, "RuntimeException for generateClip");
   3735             throw ex;
   3736         }
   3737 
   3738         if (err != 0) {
   3739             Log.e(TAG, "RuntimeException for generateClip");
   3740             throw new RuntimeException("generateClip failed with error=" + err);
   3741         }
   3742 
   3743         mExportProgressListener = null;
   3744     }
   3745 
   3746     /**
    3747      * This method takes care of stopping the export process
    3748      *
    3749      * @param filename The input file name for which export has to be stopped
   3750      */
   3751     void stop(String filename) {
   3752         try {
   3753             stopEncoding();
   3754             new File(mExportFilename).delete();
   3755         } catch (IllegalStateException ex) {
    3756             Log.e(TAG, "Illegal state exception in stop export");
   3757             throw ex;
   3758         } catch (RuntimeException ex) {
    3759             Log.e(TAG, "Runtime exception in stop export");
   3760             throw ex;
   3761         }
   3762     }
   3763 
   3764     /**
   3765      * This method extracts a frame from the input file
   3766      * and returns the frame as a bitmap. See getPixelsList() for more information.
   3767      */
   3768     Bitmap getPixels(String filename, int width, int height, long timeMs,
   3769             int videoRotation) {
   3770         final Bitmap result[] = new Bitmap[1];
   3771         getPixelsList(filename, width, height, timeMs, timeMs, 1, new int[] {0},
   3772                 new MediaItem.GetThumbnailListCallback() {
   3773             public void onThumbnail(Bitmap bitmap, int index) {
   3774                 result[0] = bitmap;
   3775             }
   3776         }, videoRotation);
   3777         return result[0];
   3778     }
   3779 
   3780     /**
    3781      * This method extracts a list of frames from the
    3782      * input file and returns each frame as a bitmap via the callback
   3783      *
   3784      * @param filename The input file name
   3785      * @param width The width of the output frame, before rotation
   3786      * @param height The height of the output frame, before rotation
   3787      * @param startMs The starting time in ms
   3788      * @param endMs The end time in ms
   3789      * @param thumbnailCount The number of frames to be extracted
   3790      * @param indices The indices of thumbnails wanted
   3791      * @param callback The callback used to pass back the bitmaps
    3792      * @param videoRotation The rotation in degrees to be applied to the bitmap
    3793      *
    3794      * The extracted frames are passed back as bitmaps through the callback
   3795      **/
   3796     void getPixelsList(String filename, final int width, final int height,
   3797             long startMs, long endMs, int thumbnailCount, int[] indices,
   3798             final MediaItem.GetThumbnailListCallback callback,
   3799             final int videoRotation) {
   3800 
    3801         // The decoder needs the output width and height to be even
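                  // (x + 1) & 0xFFFFFFFE rounds x up to the nearest even value.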
   3802         final int decWidth = (width + 1) & 0xFFFFFFFE;
   3803         final int decHeight = (height + 1) & 0xFFFFFFFE;
   3804         final int thumbnailSize = decWidth * decHeight;
   3805 
    3806         // We convert the decoder output (an int[]) to a bitmap by first
    3807         // copying it into an IntBuffer, then using Bitmap.copyPixelsFromBuffer
    3808         // to copy it into the bitmap.
   3809         final int[] decArray = new int[thumbnailSize];
   3810         final IntBuffer decBuffer = IntBuffer.allocate(thumbnailSize);
   3811 
   3812         // If we need to resize and/or rotate the decoder output, we need a
   3813         // temporary bitmap to hold the decoded output.
   3814         final boolean needToMassage =
   3815                 (decWidth != width || decHeight != height || videoRotation != 0);
   3816         final Bitmap tmpBitmap = needToMassage
   3817                 ? Bitmap.createBitmap(decWidth, decHeight, Bitmap.Config.ARGB_8888)
   3818                 : null;
   3819 
   3820         // The final output bitmap width/height may swap because of rotation.
   3821         final boolean needToSwapWH = (videoRotation == 90 || videoRotation == 270);
   3822         final int outWidth = needToSwapWH ? height : width;
   3823         final int outHeight = needToSwapWH ? width : height;
   3824 
   3825         nativeGetPixelsList(filename, decArray, decWidth, decHeight,
   3826                 thumbnailCount, startMs, endMs, indices,
   3827                 new NativeGetPixelsListCallback() {
   3828             public void onThumbnail(int index) {
   3829                 // This is the bitmap we will output to the client
   3830                 Bitmap outBitmap = Bitmap.createBitmap(
   3831                         outWidth, outHeight, Bitmap.Config.ARGB_8888);
   3832 
   3833                 // Copy int[] to IntBuffer
   3834                 decBuffer.put(decArray, 0, thumbnailSize);
   3835                 decBuffer.rewind();
   3836 
   3837                 if (!needToMassage) {
   3838                     // We can directly read the decoded result to output bitmap
   3839                     outBitmap.copyPixelsFromBuffer(decBuffer);
   3840                 } else {
   3841                     // Copy the decoded result to an intermediate bitmap first
   3842                     tmpBitmap.copyPixelsFromBuffer(decBuffer);
   3843 
   3844                     // Create a canvas to resize/rotate the bitmap
   3845                     // First scale the decoded bitmap to (0,0)-(1,1), rotate it
   3846                     // with (0.5, 0.5) as center, then scale it to
   3847                     // (outWidth, outHeight).
   3848                     final Canvas canvas = new Canvas(outBitmap);
   3849                     Matrix m = new Matrix();
   3850                     float sx = 1f / decWidth;
   3851                     float sy = 1f / decHeight;
   3852                     m.postScale(sx, sy);
   3853                     m.postRotate(videoRotation, 0.5f, 0.5f);
   3854                     m.postScale(outWidth, outHeight);
   3855                     canvas.drawBitmap(tmpBitmap, m, sResizePaint);
   3856                 }
   3857                 callback.onThumbnail(outBitmap, index);
   3858             }
   3859         });
   3860 
   3861         if (tmpBitmap != null) {
   3862             tmpBitmap.recycle();
   3863         }
   3864     }
   3865 
   3866     interface NativeGetPixelsListCallback {
   3867         public void onThumbnail(int index);
   3868     }
   3869 
   3870     /**
   3871      * This method generates the audio graph
   3872      *
   3873      * @param uniqueId The unique id
    3874      * @param inFileName The input file name
    3875      * @param OutAudiGraphFileName The output audio graph file name
    3876      * @param frameDuration The duration of each frame
    3877      * @param audioChannels The number of audio channels
    3878      * @param samplesCount The total number of samples
    3879      * @param listener The ExtractAudioWaveformProgressListener reference
    3880      * @param isVideo The flag indicating whether the input file is a video file
   3881      *
   3882      **/
   3883     void generateAudioGraph(String uniqueId, String inFileName, String OutAudiGraphFileName,
   3884             int frameDuration, int audioChannels, int samplesCount,
   3885             ExtractAudioWaveformProgressListener listener, boolean isVideo) {
   3886         String tempPCMFileName;
   3887 
   3888         mExtractAudioWaveformProgressListener = listener;
   3889 
   3890         /**
    3891          * For a video item, a temporary PCM file is generated first and the
    3892          * audio graph is built from it
   3893          */
   3894         if (isVideo) {
    3895             tempPCMFileName = mProjectPath + "/" + uniqueId + ".pcm";
   3896         } else {
   3897             tempPCMFileName = mAudioTrackPCMFilePath;
   3898         }
   3899 
   3900         /**
    3901          * For a video item, extract the raw PCM audio
   3902          */
   3903         if (isVideo) {
   3904             nativeGenerateRawAudio(inFileName, tempPCMFileName);
   3905         }
   3906 
   3907         nativeGenerateAudioGraph(tempPCMFileName, OutAudiGraphFileName, frameDuration,
   3908                 audioChannels, samplesCount);
   3909 
   3910         /**
    3911          * Once the audio graph file has been generated, delete the temporary PCM file
   3912          */
   3913         if (isVideo) {
   3914             new File(tempPCMFileName).delete();
   3915         }
   3916     }
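
             /**
              * Sketch of driving generateAudioGraph() above for the two input kinds it
              * handles. The id, paths, frame duration and sample count below are
              * illustrative placeholders; the listener is supplied by the caller.
              */
             private void generateAudioGraphSketch(ExtractAudioWaveformProgressListener listener) {
                 // Video media item: a temporary <id>.pcm is first extracted under the
                 // project path, used to build the graph, then deleted.
                 generateAudioGraph("mediaItem1", "/sdcard/clip.mp4",
                         mProjectPath + "/mediaItem1.graph", 40, 2, 1600, listener, true);

                 // Audio track: the PCM file already produced for the audio track
                 // (mAudioTrackPCMFilePath) is reused, so the input file name is not
                 // read and no extraction takes place.
                 generateAudioGraph("audioTrack1", "/sdcard/track.mp3",
                         mProjectPath + "/audioTrack1.graph", 40, 2, 1600, listener, false);
             }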
   3917 
   3918     void clearPreviewSurface(Surface surface) {
   3919         nativeClearSurface(surface);
   3920     }
   3921 
   3922     /**
   3923      * Grab the semaphore which arbitrates access to the editor
   3924      *
   3925      * @throws InterruptedException
   3926      */
   3927     private void lock() throws InterruptedException {
   3928         if (Log.isLoggable(TAG, Log.DEBUG)) {
   3929             Log.d(TAG, "lock: grabbing semaphore", new Throwable());
   3930         }
   3931         mLock.acquire();
   3932         if (Log.isLoggable(TAG, Log.DEBUG)) {
   3933             Log.d(TAG, "lock: grabbed semaphore");
   3934         }
   3935     }
   3936 
   3937     /**
   3938      * Release the semaphore which arbitrates access to the editor
   3939      */
   3940     private void unlock() {
   3941         if (Log.isLoggable(TAG, Log.DEBUG)) {
   3942             Log.d(TAG, "unlock: releasing semaphore");
   3943         }
   3944         mLock.release();
   3945     }
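
             /**
              * Sketch of the intended usage pattern for the lock()/unlock() pair above;
              * the body is a hypothetical placeholder, not an operation from this class.
              */
             private void lockedOperationSketch() throws InterruptedException {
                 lock();
                 try {
                     // Any call into the native editor that must not run concurrently
                     // with preview, export or clip generation would go here.
                 } finally {
                     unlock();
                 }
             }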
   3946 
   3947     /**     Native Methods        */
   3948     native Properties getMediaProperties(String file) throws IllegalArgumentException,
   3949             IllegalStateException, RuntimeException, Exception;
   3950 
   3951     /**
   3952      * Get the version of ManualEdit.
   3953      *
   3954      * @return version of ManualEdit
   3955      * @throws RuntimeException if an error occurred
   3956      * @see Version
   3957      */
   3958     private static native Version getVersion() throws RuntimeException;
   3959 
   3960     /**
   3961      * Returns the video thumbnail in an array of integers. Output format is
   3962      * ARGB8888.
   3963      *
   3964      * @param pixelArray the array that receives the pixel values
   3965      * @param width width of the video thumbnail
   3966      * @param height height of the video thumbnail
   3967      * @param timeMS desired time of the thumbnail in ms
   3968      * @return actual time in ms of the thumbnail generated
   3969      * @throws IllegalStateException if the class has not been initialized
   3970      * @throws IllegalArgumentException if the pixelArray is not available or
   3971      *             one of the dimensions is negative or zero or the time is
   3972      *             negative
   3973      * @throws RuntimeException on runtime errors in native code
   3974      */
   3975     private native int nativeGetPixels(String fileName, int[] pixelArray, int width, int height,
   3976             long timeMS);
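
             /**
              * Sketch of wrapping the nativeGetPixels() output into a Bitmap (the
              * wrapper name is illustrative), mirroring the IntBuffer copy done for
              * thumbnail lists above.
              */
             private Bitmap getPixelsAsBitmapSketch(String fileName, int width, int height,
                     long timeMs) {
                 final int[] pixels = new int[width * height];
                 nativeGetPixels(fileName, pixels, width, height, timeMs);
                 final Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
                 bitmap.copyPixelsFromBuffer(IntBuffer.wrap(pixels));
                 return bitmap;
             }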
   3977 
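             /**
              * Native counterpart of the thumbnail-list generation above: one
              * onThumbnail() callback is issued per decoded frame between startTimeMs
              * and endTimeMs, with the frame's pixels written into pixelArray (read
              * back as ARGB8888 by the caller).
              */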
   3978     private native int nativeGetPixelsList(String fileName, int[] pixelArray,
   3979             int width, int height, int nosofTN, long startTimeMs,
   3980             long endTimeMs, int[] indices, NativeGetPixelsListCallback callback);
   3981 
   3982     /**
    3983      * Releases the JNI resources and cleans up the core native module. Should be
    3984      * called only after <code>_init()</code>
   3985      *
   3986      * @throws IllegalStateException if the method could not be called
   3987      */
   3988     private native void release() throws IllegalStateException, RuntimeException;
   3989 
   3990     /*
   3991      * Clear the preview surface
   3992      */
   3993     private native void nativeClearSurface(Surface surface);
   3994 
   3995     /**
   3996      * Stops the encoding. This method should only be called after encoding has
    3997      * been started with <code>startEncoding()</code>
   3998      *
   3999      * @throws IllegalStateException if the method could not be called
   4000      */
   4001     private native void stopEncoding() throws IllegalStateException, RuntimeException;
   4002 
   4003 
   4004     private native void _init(String tempPath, String libraryPath)
   4005             throws IllegalArgumentException, IllegalStateException, RuntimeException;
   4006 
   4007     private native void nativeStartPreview(Surface mSurface, long fromMs, long toMs,
   4008             int callbackAfterFrameCount, boolean loop) throws IllegalArgumentException,
   4009             IllegalStateException, RuntimeException;
   4010 
   4011     private native void nativePopulateSettings(EditSettings editSettings,
   4012             PreviewClipProperties mProperties, AudioSettings mAudioSettings)
   4013     throws IllegalArgumentException, IllegalStateException, RuntimeException;
   4014 
   4015     private native int nativeRenderPreviewFrame(Surface mSurface, long timeMs,
   4016                                                  int surfaceWidth, int surfaceHeight)
   4017                                                  throws IllegalArgumentException,
   4018                                                  IllegalStateException, RuntimeException;
   4019 
   4020     private native int nativeRenderMediaItemPreviewFrame(Surface mSurface, String filepath,
   4021             int framewidth, int frameheight, int surfacewidth, int surfaceheight, long timeMs)
   4022     throws IllegalArgumentException, IllegalStateException, RuntimeException;
   4023 
   4024     private native int nativeStopPreview();
   4025 
   4026     private native int nativeGenerateAudioGraph(String pcmFilePath, String outGraphPath,
   4027             int frameDuration, int channels, int sampleCount);
   4028 
   4029     private native int nativeGenerateRawAudio(String InFileName, String PCMFileName);
   4030 
   4031     private native int nativeGenerateClip(EditSettings editSettings)
   4032     throws IllegalArgumentException, IllegalStateException, RuntimeException;
   4033 
   4034 }
   4035