Home | History | Annotate | Download | only in src
      1 /*
      2  * Copyright (C) 2011 The Android Open Source Project
      3  *
      4  * Licensed under the Apache License, Version 2.0 (the "License");
      5  * you may not use this file except in compliance with the License.
      6  * You may obtain a copy of the License at
      7  *
      8  *      http://www.apache.org/licenses/LICENSE-2.0
      9  *
     10  * Unless required by applicable law or agreed to in writing, software
     11  * distributed under the License is distributed on an "AS IS" BASIS,
     12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     13  * See the License for the specific language governing permissions and
     14  * limitations under the License.
     15  */
     16 /**
     17  ******************************************************************************
     18  * @file    M4xVSS_internal.c
     19  * @brief    Internal functions of extended Video Studio Service (Video Studio 2.1)
     20  * @note
     21  ******************************************************************************
     22  */
     23 #include "M4OSA_Debug.h"
     24 #include "M4OSA_CharStar.h"
     25 
     26 #include "NXPSW_CompilerSwitches.h"
     27 
     28 #include "M4VSS3GPP_API.h"
     29 #include "M4VSS3GPP_ErrorCodes.h"
     30 
     31 #include "M4xVSS_API.h"
     32 #include "M4xVSS_Internal.h"
     33 
     34 /*for rgb16 color effect*/
     35 #include "M4VIFI_Defines.h"
     36 #include "M4VIFI_Clip.h"
     37 
     38 /**
     39  * component includes */
     40 #include "M4VFL_transition.h"            /**< video effects */
     41 
     42 /* Internal header file of VSS is included because of MMS use case */
     43 #include "M4VSS3GPP_InternalTypes.h"
     44 
     45 /*Exif header files to add image rendering support (cropping, black borders)*/
     46 #include "M4EXIFC_CommonAPI.h"
     47 // StageFright encoders require %16 resolution
     48 #include "M4ENCODER_common.h"
     49 
     50 #define TRANSPARENT_COLOR 0x7E0
     51 
     52 /* Prototype of M4VIFI_xVSS_RGB565toYUV420 function (avoid green effect of transparency color) */
     53 M4VIFI_UInt8 M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
     54                                         M4VIFI_ImagePlane *pPlaneOut);
     55 
     56 
     57 /*special MCS function used only in VideoArtist and VideoStudio to open the media in the normal
     58  mode. That way the media duration is accurate*/
     59 extern M4OSA_ERR M4MCS_open_normalMode(M4MCS_Context pContext, M4OSA_Void* pFileIn,
     60                                          M4VIDEOEDITING_FileType InputFileType,
     61                                          M4OSA_Void* pFileOut, M4OSA_Void* pTempFile);
     62 
     63 
     64 /**
     65  ******************************************************************************
     66  * prototype    M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext)
     67  * @brief        This function initializes MCS (3GP transcoder) with the given
     68  *                parameters
     69  * @note        The transcoding parameters are given by the internal xVSS context.
     70  *                This context contains a pointer on the current element of the
     71  *                chained list of MCS parameters.
     72  *
     73  * @param    pContext            (IN) Pointer on the xVSS edit context
     74  * @return    M4NO_ERROR:            No error
     75  * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL
     76  * @return    M4ERR_ALLOC:        Memory allocation has failed
     77  ******************************************************************************
     78  */
     79 M4OSA_ERR M4xVSS_internalStartTranscoding(M4OSA_Context pContext,
     80                                           M4OSA_UInt32 *rotationDegree)
     81 {
     82     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
     83     M4OSA_ERR err;
     84     M4MCS_Context mcs_context;
     85     M4MCS_OutputParams Params;
     86     M4MCS_EncodingParams Rates;
     87     M4OSA_UInt32 i;
     88     M4VIDEOEDITING_ClipProperties clipProps;
     89 
     90     err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
     91     if(err != M4NO_ERROR)
     92     {
     93         M4OSA_TRACE1_1("Error in M4MCS_init: 0x%x", err);
     94         return err;
     95     }
     96 
     97     err = M4MCS_open(mcs_context, xVSS_context->pMCScurrentParams->pFileIn,
     98          xVSS_context->pMCScurrentParams->InputFileType,
     99              xVSS_context->pMCScurrentParams->pFileOut,
    100              xVSS_context->pMCScurrentParams->pFileTemp);
    101     if (err != M4NO_ERROR)
    102     {
    103         M4OSA_TRACE1_1("Error in M4MCS_open: 0x%x", err);
    104         M4MCS_abort(mcs_context);
    105         return err;
    106     }
    107 
    108     /** Get the clip properties
    109      */
    110     err = M4MCS_getInputFileProperties(mcs_context, &clipProps);
    111     if (err != M4NO_ERROR) {
    112         M4OSA_TRACE1_1("Error in M4MCS_getInputFileProperties: 0x%x", err);
    113         M4MCS_abort(mcs_context);
    114         return err;
    115     }
    116     *rotationDegree = clipProps.videoRotationDegrees;
    117 
    118     /**
    119      * Fill MCS parameters with the parameters contained in the current element of the
    120        MCS parameters chained list */
    121     Params.OutputFileType = xVSS_context->pMCScurrentParams->OutputFileType;
    122     Params.OutputVideoFormat = xVSS_context->pMCScurrentParams->OutputVideoFormat;
    123     Params.outputVideoProfile= xVSS_context->pMCScurrentParams->outputVideoProfile;
    124     Params.outputVideoLevel = xVSS_context->pMCScurrentParams->outputVideoLevel;
    125     Params.OutputVideoFrameSize = xVSS_context->pMCScurrentParams->OutputVideoFrameSize;
    126     Params.OutputVideoFrameRate = xVSS_context->pMCScurrentParams->OutputVideoFrameRate;
    127     Params.OutputAudioFormat = xVSS_context->pMCScurrentParams->OutputAudioFormat;
    128     Params.OutputAudioSamplingFrequency =
    129          xVSS_context->pMCScurrentParams->OutputAudioSamplingFrequency;
    130     Params.bAudioMono = xVSS_context->pMCScurrentParams->bAudioMono;
    131     Params.pOutputPCMfile = M4OSA_NULL;
    132     /*FB 2008/10/20: add media rendering parameter to keep aspect ratio*/
    133     switch(xVSS_context->pMCScurrentParams->MediaRendering)
    134     {
    135     case M4xVSS_kResizing:
    136         Params.MediaRendering = M4MCS_kResizing;
    137         break;
    138     case M4xVSS_kCropping:
    139         Params.MediaRendering = M4MCS_kCropping;
    140         break;
    141     case M4xVSS_kBlackBorders:
    142         Params.MediaRendering = M4MCS_kBlackBorders;
    143         break;
    144     default:
    145         break;
    146     }
    147     /**/
    148     // new params after integrating MCS 2.0
    149     // Set the number of audio effects; 0 for now.
    150     Params.nbEffects = 0;
    151 
    152     // Set the audio effect; null for now.
    153     Params.pEffects = NULL;
    154 
    155     // Set the audio effect; null for now.
    156     Params.bDiscardExif = M4OSA_FALSE;
    157 
    158     // Set the audio effect; null for now.
    159     Params.bAdjustOrientation = M4OSA_FALSE;
    160     // new params after integrating MCS 2.0
    161 
    162     /**
    163      * Set output parameters */
    164     err = M4MCS_setOutputParams(mcs_context, &Params);
    165     if (err != M4NO_ERROR)
    166     {
    167         M4OSA_TRACE1_1("Error in M4MCS_setOutputParams: 0x%x", err);
    168         M4MCS_abort(mcs_context);
    169         return err;
    170     }
    171 
    172     Rates.OutputVideoBitrate = xVSS_context->pMCScurrentParams->OutputVideoBitrate;
    173     Rates.OutputAudioBitrate = xVSS_context->pMCScurrentParams->OutputAudioBitrate;
    174     Rates.BeginCutTime = 0;
    175     Rates.EndCutTime = 0;
    176     Rates.OutputFileSize = 0;
    177 
    178     /*FB: transcoding per parts*/
    179     Rates.BeginCutTime = xVSS_context->pMCScurrentParams->BeginCutTime;
    180     Rates.EndCutTime = xVSS_context->pMCScurrentParams->EndCutTime;
    181     Rates.OutputVideoTimescale = xVSS_context->pMCScurrentParams->OutputVideoTimescale;
    182 
    183     err = M4MCS_setEncodingParams(mcs_context, &Rates);
    184     if (err != M4NO_ERROR)
    185     {
    186         M4OSA_TRACE1_1("Error in M4MCS_setEncodingParams: 0x%x", err);
    187         M4MCS_abort(mcs_context);
    188         return err;
    189     }
    190 
    191     err = M4MCS_checkParamsAndStart(mcs_context);
    192     if (err != M4NO_ERROR)
    193     {
    194         M4OSA_TRACE1_1("Error in M4MCS_checkParamsAndStart: 0x%x", err);
    195         M4MCS_abort(mcs_context);
    196         return err;
    197     }
    198 
    199     /**
    200      * Save MCS context to be able to call MCS step function in M4xVSS_step function */
    201     xVSS_context->pMCS_Ctxt = mcs_context;
    202 
    203     return M4NO_ERROR;
    204 }
    205 
    206 /**
    207  ******************************************************************************
    208  * prototype    M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext)
    209  * @brief        This function cleans up MCS (3GP transcoder)
    210  * @note
    211  *
    212  * @param    pContext            (IN) Pointer on the xVSS edit context
    213  * @return    M4NO_ERROR:            No error
    214  * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL
    215  * @return    M4ERR_ALLOC:        Memory allocation has failed
    216  ******************************************************************************
    217  */
    218 M4OSA_ERR M4xVSS_internalStopTranscoding(M4OSA_Context pContext)
    219 {
    220     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
    221     M4OSA_ERR err;
    222 
    223     err = M4MCS_close(xVSS_context->pMCS_Ctxt);
    224     if (err != M4NO_ERROR)
    225     {
    226         M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_close: 0x%x", err);
    227         M4MCS_abort(xVSS_context->pMCS_Ctxt);
    228         return err;
    229     }
    230 
    231     /**
    232      * Free this MCS instance */
    233     err = M4MCS_cleanUp(xVSS_context->pMCS_Ctxt);
    234     if (err != M4NO_ERROR)
    235     {
    236         M4OSA_TRACE1_1("M4xVSS_internalStopTranscoding: Error in M4MCS_cleanUp: 0x%x", err);
    237         return err;
    238     }
    239 
    240     xVSS_context->pMCS_Ctxt = M4OSA_NULL;
    241 
    242     return M4NO_ERROR;
    243 }
    244 
    245 /**
    246  ******************************************************************************
    247  * M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
    248  *                                             M4OSA_FileReadPointer* pFileReadPtr,
    249  *                                                M4VIFI_ImagePlane* pImagePlanes,
    250  *                                                 M4OSA_UInt32 width,
    251  *                                                M4OSA_UInt32 height);
    252  * @brief    It Coverts and resizes a ARGB8888 image to YUV420
    253  * @note
    254  * @param    pFileIn            (IN) The Image input file
    255  * @param    pFileReadPtr    (IN) Pointer on filesystem functions
    256  * @param    pImagePlanes    (IN/OUT) Pointer on YUV420 output planes allocated by the user
    257  *                            ARGB8888 image  will be converted and resized  to output
    258  *                             YUV420 plane size
    259  *@param    width        (IN) width of the ARGB8888
    260  *@param    height            (IN) height of the ARGB8888
    261  * @return    M4NO_ERROR:    No error
    262  * @return    M4ERR_ALLOC: memory error
    263  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
    264  ******************************************************************************
    265  */
    266 
    267 M4OSA_ERR M4xVSS_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn,
    268                                                           M4OSA_FileReadPointer* pFileReadPtr,
    269                                                           M4VIFI_ImagePlane* pImagePlanes,
    270                                                           M4OSA_UInt32 width,M4OSA_UInt32 height)
    271 {
    272     M4OSA_Context pARGBIn;
    273     M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2;
    274     M4OSA_UInt32 frameSize_argb=(width * height * 4);
    275     M4OSA_UInt32 frameSize = (width * height * 3); //Size of RGB888 data.
    276     M4OSA_UInt32 i = 0,j= 0;
    277     M4OSA_ERR err=M4NO_ERROR;
    278 
    279 
    280     M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb,
    281          M4VS, (M4OSA_Char*)"Image argb data");
    282         M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Entering :");
    283     if(pTmpData == M4OSA_NULL) {
    284         M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
    285             Failed to allocate memory for Image clip");
    286         return M4ERR_ALLOC;
    287     }
    288 
    289     M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :width and height %d %d",
    290         width ,height);
    291     /* Get file size (mandatory for chunk decoding) */
    292     err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead);
    293     if(err != M4NO_ERROR)
    294     {
    295         M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
    296             Can't open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err);
    297         free(pTmpData);
    298         pTmpData = M4OSA_NULL;
    299         goto cleanup;
    300     }
    301 
    302     err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
    303     if(err != M4NO_ERROR)
    304     {
    305         M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888\
    306              file %s, error: 0x%x\n",pFileIn, err);
    307         pFileReadPtr->closeRead(pARGBIn);
    308         free(pTmpData);
    309         pTmpData = M4OSA_NULL;
    310         goto cleanup;
    311     }
    312 
    313     err = pFileReadPtr->closeRead(pARGBIn);
    314     if(err != M4NO_ERROR)
    315     {
    316         M4OSA_TRACE1_2("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Can't close ARGB8888 \
    317              file %s, error: 0x%x\n",pFileIn, err);
    318         free(pTmpData);
    319         pTmpData = M4OSA_NULL;
    320         goto cleanup;
    321     }
    322 
    323     rgbPlane1.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
    324          (M4OSA_Char*)"Image clip RGB888 data");
    325     if(rgbPlane1.pac_data == M4OSA_NULL)
    326     {
    327         M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 \
    328             Failed to allocate memory for Image clip");
    329         free(pTmpData);
    330         return M4ERR_ALLOC;
    331     }
    332 
    333         rgbPlane1.u_height = height;
    334         rgbPlane1.u_width = width;
    335         rgbPlane1.u_stride = width*3;
    336         rgbPlane1.u_topleft = 0;
    337 
    338 
    339     /** Remove the alpha channel */
    340     for (i=0, j = 0; i < frameSize_argb; i++) {
    341         if ((i % 4) == 0) continue;
    342         rgbPlane1.pac_data[j] = pTmpData[i];
    343         j++;
    344     }
    345         free(pTmpData);
    346 
    347     /* To Check if resizing is required with color conversion */
    348     if(width != pImagePlanes->u_width || height != pImagePlanes->u_height)
    349     {
    350         M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 Resizing :");
    351         frameSize =  ( pImagePlanes->u_width * pImagePlanes->u_height * 3);
    352         rgbPlane2.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
    353              (M4OSA_Char*)"Image clip RGB888 data");
    354         if(rgbPlane2.pac_data == M4OSA_NULL)
    355         {
    356             M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
    357             free(pTmpData);
    358             return M4ERR_ALLOC;
    359         }
    360             rgbPlane2.u_height =  pImagePlanes->u_height;
    361             rgbPlane2.u_width = pImagePlanes->u_width;
    362             rgbPlane2.u_stride = pImagePlanes->u_width*3;
    363             rgbPlane2.u_topleft = 0;
    364 
    365         /* Resizing RGB888 to RGB888 */
    366         err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane1, &rgbPlane2);
    367         if(err != M4NO_ERROR)
    368         {
    369             M4OSA_TRACE1_1("error when converting from Resize RGB888 to RGB888: 0x%x\n", err);
    370             free(rgbPlane2.pac_data);
    371             free(rgbPlane1.pac_data);
    372             return err;
    373         }
    374         /*Converting Resized RGB888 to YUV420 */
    375         err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes);
    376         if(err != M4NO_ERROR)
    377         {
    378             M4OSA_TRACE1_1("error when converting from RGB888 to YUV: 0x%x\n", err);
    379             free(rgbPlane2.pac_data);
    380             free(rgbPlane1.pac_data);
    381             return err;
    382         }
    383             free(rgbPlane2.pac_data);
    384             free(rgbPlane1.pac_data);
    385 
    386             M4OSA_TRACE1_0("RGB to YUV done");
    387 
    388 
    389     }
    390     else
    391     {
    392         M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 NO  Resizing :");
    393         err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes);
    394         if(err != M4NO_ERROR)
    395         {
    396             M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err);
    397         }
    398             free(rgbPlane1.pac_data);
    399 
    400             M4OSA_TRACE1_0("RGB to YUV done");
    401     }
    402 cleanup:
    403     M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 leaving :");
    404     return err;
    405 }
    406 
    407 /**
    408  ******************************************************************************
    409  * M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn,
    410  *                                             M4OSA_FileReadPointer* pFileReadPtr,
    411  *                                                M4VIFI_ImagePlane* pImagePlanes,
    412  *                                                 M4OSA_UInt32 width,
    413  *                                                M4OSA_UInt32 height);
    414  * @brief    It Coverts a ARGB8888 image to YUV420
    415  * @note
    416  * @param    pFileIn            (IN) The Image input file
    417  * @param    pFileReadPtr    (IN) Pointer on filesystem functions
    418  * @param    pImagePlanes    (IN/OUT) Pointer on YUV420 output planes allocated by the user
    419  *                            ARGB8888 image  will be converted and resized  to output
    420  *                            YUV420 plane size
    421  * @param    width        (IN) width of the ARGB8888
    422  * @param    height            (IN) height of the ARGB8888
    423  * @return    M4NO_ERROR:    No error
    424  * @return    M4ERR_ALLOC: memory error
    425  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
    426  ******************************************************************************
    427  */
    428 
    429 M4OSA_ERR M4xVSS_internalConvertARGB8888toYUV420(M4OSA_Void* pFileIn,
    430                                                  M4OSA_FileReadPointer* pFileReadPtr,
    431                                                  M4VIFI_ImagePlane** pImagePlanes,
    432                                                  M4OSA_UInt32 width,M4OSA_UInt32 height)
    433 {
    434     M4OSA_ERR err = M4NO_ERROR;
    435     M4VIFI_ImagePlane *yuvPlane = M4OSA_NULL;
    436 
    437     yuvPlane = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
    438                 M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
    439     if(yuvPlane == M4OSA_NULL) {
    440         M4OSA_TRACE1_0("M4xVSS_internalConvertAndResizeARGB8888toYUV420 :\
    441             Failed to allocate memory for Image clip");
    442         return M4ERR_ALLOC;
    443     }
    444     yuvPlane[0].u_height = height;
    445     yuvPlane[0].u_width = width;
    446     yuvPlane[0].u_stride = width;
    447     yuvPlane[0].u_topleft = 0;
    448     yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(yuvPlane[0].u_height \
    449         * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data");
    450 
    451     yuvPlane[1].u_height = yuvPlane[0].u_height >>1;
    452     yuvPlane[1].u_width = yuvPlane[0].u_width >> 1;
    453     yuvPlane[1].u_stride = yuvPlane[1].u_width;
    454     yuvPlane[1].u_topleft = 0;
    455     yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height \
    456         * yuvPlane[0].u_width);
    457 
    458     yuvPlane[2].u_height = yuvPlane[0].u_height >>1;
    459     yuvPlane[2].u_width = yuvPlane[0].u_width >> 1;
    460     yuvPlane[2].u_stride = yuvPlane[2].u_width;
    461     yuvPlane[2].u_topleft = 0;
    462     yuvPlane[2].pac_data = (M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height \
    463         * yuvPlane[1].u_width);
    464     err = M4xVSS_internalConvertAndResizeARGB8888toYUV420( pFileIn,pFileReadPtr,
    465                                                           yuvPlane, width, height);
    466     if(err != M4NO_ERROR)
    467     {
    468         M4OSA_TRACE1_1("M4xVSS_internalConvertAndResizeARGB8888toYUV420 return error: 0x%x\n", err);
    469         free(yuvPlane);
    470         return err;
    471     }
    472 
    473         *pImagePlanes = yuvPlane;
    474 
    475     M4OSA_TRACE1_0("M4xVSS_internalConvertARGB8888toYUV420 :Leaving");
    476     return err;
    477 
    478 }
    479 
    480 /**
    481  ******************************************************************************
    482  * M4OSA_ERR M4xVSS_PictureCallbackFct (M4OSA_Void* pPictureCtxt,
    483  *                                        M4VIFI_ImagePlane* pImagePlanes,
    484  *                                        M4OSA_UInt32* pPictureDuration);
    485  * @brief    It feeds the PTO3GPP with YUV420 pictures.
    486  * @note    This function is given to the PTO3GPP in the M4PTO3GPP_Params structure
    487  * @param    pContext    (IN) The integrator own context
    488  * @param    pImagePlanes(IN/OUT) Pointer to an array of three valid image planes
    489  * @param    pPictureDuration(OUT) Duration of the returned picture
    490  *
    491  * @return    M4NO_ERROR:    No error
    492  * @return    M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one
    493  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
    494  ******************************************************************************
    495  */
    496 M4OSA_ERR M4xVSS_PictureCallbackFct(M4OSA_Void* pPictureCtxt, M4VIFI_ImagePlane* pImagePlanes,
    497                                      M4OSA_Double* pPictureDuration)
    498 {
    499     M4OSA_ERR err = M4NO_ERROR;
    500     M4OSA_UInt8    last_frame_flag = 0;
    501     M4xVSS_PictureCallbackCtxt* pC = (M4xVSS_PictureCallbackCtxt*) (pPictureCtxt);
    502 
    503     /*Used for pan&zoom*/
    504     M4OSA_UInt8 tempPanzoomXa = 0;
    505     M4OSA_UInt8 tempPanzoomXb = 0;
    506     M4AIR_Params Params;
    507     /**/
    508 
    509     /*Used for cropping and black borders*/
    510     M4OSA_Context    pPictureContext = M4OSA_NULL;
    511     M4OSA_FilePosition    pictureSize = 0 ;
    512     M4OSA_UInt8*    pictureBuffer = M4OSA_NULL;
    513     //M4EXIFC_Context pExifContext = M4OSA_NULL;
    514     M4EXIFC_BasicTags pBasicTags;
    515     M4VIFI_ImagePlane pImagePlanes1 = pImagePlanes[0];
    516     M4VIFI_ImagePlane pImagePlanes2 = pImagePlanes[1];
    517     M4VIFI_ImagePlane pImagePlanes3 = pImagePlanes[2];
    518     /**/
    519 
    520     /**
    521      * Check input parameters */
    522     M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureCtxt),        M4ERR_PARAMETER,
    523          "M4xVSS_PictureCallbackFct: pPictureCtxt is M4OSA_NULL");
    524     M4OSA_DEBUG_IF2((M4OSA_NULL==pImagePlanes),        M4ERR_PARAMETER,
    525          "M4xVSS_PictureCallbackFct: pImagePlanes is M4OSA_NULL");
    526     M4OSA_DEBUG_IF2((M4OSA_NULL==pPictureDuration), M4ERR_PARAMETER,
    527          "M4xVSS_PictureCallbackFct: pPictureDuration is M4OSA_NULL");
    528     M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct :Entering");
    529     /*PR P4ME00003181 In case the image number is 0, pan&zoom can not be used*/
    530     if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom && pC->m_NbImage == 0)
    531     {
    532         pC->m_pPto3GPPparams->isPanZoom = M4OSA_FALSE;
    533     }
    534 
    535     /*If no cropping/black borders or pan&zoom, just decode and resize the picture*/
    536     if(pC->m_mediaRendering == M4xVSS_kResizing && M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
    537     {
    538         /**
    539          * Convert and resize input ARGB8888 file to YUV420 */
    540         /*To support ARGB8888 : */
    541         M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 1: width and heght %d %d",
    542             pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
    543         err = M4xVSS_internalConvertAndResizeARGB8888toYUV420(pC->m_FileIn,
    544              pC->m_pFileReadPtr, pImagePlanes,pC->m_pPto3GPPparams->width,
    545                 pC->m_pPto3GPPparams->height);
    546         if(err != M4NO_ERROR)
    547         {
    548             M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when decoding JPEG: 0x%x\n", err);
    549             return err;
    550         }
    551     }
    552     /*In case of cropping, black borders or pan&zoom, call the EXIF reader and the AIR*/
    553     else
    554     {
    555         /**
    556          * Computes ratios */
    557         if(pC->m_pDecodedPlane == M4OSA_NULL)
    558         {
    559             /**
    560              * Convert input ARGB8888 file to YUV420 */
    561              M4OSA_TRACE1_2("M4xVSS_PictureCallbackFct 2: width and heght %d %d",
    562                 pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
    563             err = M4xVSS_internalConvertARGB8888toYUV420(pC->m_FileIn, pC->m_pFileReadPtr,
    564                 &(pC->m_pDecodedPlane),pC->m_pPto3GPPparams->width,pC->m_pPto3GPPparams->height);
    565             if(err != M4NO_ERROR)
    566             {
    567                 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when decoding JPEG: 0x%x\n", err);
    568                 if(pC->m_pDecodedPlane != M4OSA_NULL)
    569                 {
    570                     /* YUV420 planar is returned but allocation is made only once
    571                         (contigous planes in memory) */
    572                     if(pC->m_pDecodedPlane->pac_data != M4OSA_NULL)
    573                     {
    574                         free(pC->m_pDecodedPlane->pac_data);
    575                     }
    576                     free(pC->m_pDecodedPlane);
    577                     pC->m_pDecodedPlane = M4OSA_NULL;
    578                 }
    579                 return err;
    580             }
    581         }
    582 
    583         /*Initialize AIR Params*/
    584         Params.m_inputCoord.m_x = 0;
    585         Params.m_inputCoord.m_y = 0;
    586         Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
    587         Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
    588         Params.m_outputSize.m_width = pImagePlanes->u_width;
    589         Params.m_outputSize.m_height = pImagePlanes->u_height;
    590         Params.m_bOutputStripe = M4OSA_FALSE;
    591         Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
    592 
    593         /*Initialize Exif params structure*/
    594         pBasicTags.orientation = M4COMMON_kOrientationUnknown;
    595 
    596         /**
    597         Pan&zoom params*/
    598         if(M4OSA_TRUE == pC->m_pPto3GPPparams->isPanZoom)
    599         {
    600             /*Save ratio values, they can be reused if the new ratios are 0*/
    601             tempPanzoomXa = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXa;
    602             tempPanzoomXb = (M4OSA_UInt8)pC->m_pPto3GPPparams->PanZoomXb;
    603             /*Check that the ratio is not 0*/
    604             /*Check (a) parameters*/
    605             if(pC->m_pPto3GPPparams->PanZoomXa == 0)
    606             {
    607                 M4OSA_UInt8 maxRatio = 0;
    608                 if(pC->m_pPto3GPPparams->PanZoomTopleftXa >=
    609                      pC->m_pPto3GPPparams->PanZoomTopleftYa)
    610                 {
    611                     /*The ratio is 0, that means the area of the picture defined with (a)
    612                     parameters is bigger than the image size*/
    613                     if(pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa > 1000)
    614                     {
    615                         /*The oversize is maxRatio*/
    616                         maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXa + tempPanzoomXa - 1000;
    617                     }
    618                 }
    619                 else
    620                 {
    621                     /*The ratio is 0, that means the area of the picture defined with (a)
    622                      parameters is bigger than the image size*/
    623                     if(pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa > 1000)
    624                     {
    625                         /*The oversize is maxRatio*/
    626                         maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYa + tempPanzoomXa - 1000;
    627                     }
    628                 }
    629                 /*Modify the (a) parameters:*/
    630                 if(pC->m_pPto3GPPparams->PanZoomTopleftXa >= maxRatio)
    631                 {
    632                     /*The (a) topleft parameters can be moved to keep the same area size*/
    633                     pC->m_pPto3GPPparams->PanZoomTopleftXa -= maxRatio;
    634                 }
    635                 else
    636                 {
    637                     /*Move the (a) topleft parameter to 0 but the ratio will be also further
    638                     modified to match the image size*/
    639                     pC->m_pPto3GPPparams->PanZoomTopleftXa = 0;
    640                 }
    641                 if(pC->m_pPto3GPPparams->PanZoomTopleftYa >= maxRatio)
    642                 {
    643                     /*The (a) topleft parameters can be moved to keep the same area size*/
    644                     pC->m_pPto3GPPparams->PanZoomTopleftYa -= maxRatio;
    645                 }
    646                 else
    647                 {
    648                     /*Move the (a) topleft parameter to 0 but the ratio will be also further
    649                      modified to match the image size*/
    650                     pC->m_pPto3GPPparams->PanZoomTopleftYa = 0;
    651                 }
    652                 /*The new ratio is the original one*/
    653                 pC->m_pPto3GPPparams->PanZoomXa = tempPanzoomXa;
    654                 if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftXa > 1000)
    655                 {
    656                     /*Change the ratio if the area of the picture defined with (a) parameters is
    657                     bigger than the image size*/
    658                     pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXa;
    659                 }
    660                 if(pC->m_pPto3GPPparams->PanZoomXa + pC->m_pPto3GPPparams->PanZoomTopleftYa > 1000)
    661                 {
    662                     /*Change the ratio if the area of the picture defined with (a) parameters is
    663                     bigger than the image size*/
    664                     pC->m_pPto3GPPparams->PanZoomXa = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYa;
    665                 }
    666             }
    667             /*Check (b) parameters*/
    668             if(pC->m_pPto3GPPparams->PanZoomXb == 0)
    669             {
    670                 M4OSA_UInt8 maxRatio = 0;
    671                 if(pC->m_pPto3GPPparams->PanZoomTopleftXb >=
    672                      pC->m_pPto3GPPparams->PanZoomTopleftYb)
    673                 {
    674                     /*The ratio is 0, that means the area of the picture defined with (b)
    675                      parameters is bigger than the image size*/
    676                     if(pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb > 1000)
    677                     {
    678                         /*The oversize is maxRatio*/
    679                         maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftXb + tempPanzoomXb - 1000;
    680                     }
    681                 }
    682                 else
    683                 {
    684                     /*The ratio is 0, that means the area of the picture defined with (b)
    685                      parameters is bigger than the image size*/
    686                     if(pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb > 1000)
    687                     {
    688                         /*The oversize is maxRatio*/
    689                         maxRatio = pC->m_pPto3GPPparams->PanZoomTopleftYb + tempPanzoomXb - 1000;
    690                     }
    691                 }
    692                 /*Modify the (b) parameters:*/
    693                 if(pC->m_pPto3GPPparams->PanZoomTopleftXb >= maxRatio)
    694                 {
    695                     /*The (b) topleft parameters can be moved to keep the same area size*/
    696                     pC->m_pPto3GPPparams->PanZoomTopleftXb -= maxRatio;
    697                 }
    698                 else
    699                 {
    700                     /*Move the (b) topleft parameter to 0 but the ratio will be also further
    701                      modified to match the image size*/
    702                     pC->m_pPto3GPPparams->PanZoomTopleftXb = 0;
    703                 }
    704                 if(pC->m_pPto3GPPparams->PanZoomTopleftYb >= maxRatio)
    705                 {
    706                     /*The (b) topleft parameters can be moved to keep the same area size*/
    707                     pC->m_pPto3GPPparams->PanZoomTopleftYb -= maxRatio;
    708                 }
    709                 else
    710                 {
    711                     /*Move the (b) topleft parameter to 0 but the ratio will be also further
    712                     modified to match the image size*/
    713                     pC->m_pPto3GPPparams->PanZoomTopleftYb = 0;
    714                 }
    715                 /*The new ratio is the original one*/
    716                 pC->m_pPto3GPPparams->PanZoomXb = tempPanzoomXb;
    717                 if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftXb > 1000)
    718                 {
    719                     /*Change the ratio if the area of the picture defined with (b) parameters is
    720                     bigger than the image size*/
    721                     pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftXb;
    722                 }
    723                 if(pC->m_pPto3GPPparams->PanZoomXb + pC->m_pPto3GPPparams->PanZoomTopleftYb > 1000)
    724                 {
    725                     /*Change the ratio if the area of the picture defined with (b) parameters is
    726                     bigger than the image size*/
    727                     pC->m_pPto3GPPparams->PanZoomXb = 1000 - pC->m_pPto3GPPparams->PanZoomTopleftYb;
    728                 }
    729             }
    730 
    731             /**
    732              * Computes AIR parameters */
    733 /*        Params.m_inputCoord.m_x = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width *
    734             (pC->m_pPto3GPPparams->PanZoomTopleftXa +
    735             (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftXb \
    736                 - pC->m_pPto3GPPparams->PanZoomTopleftXa) *
    737             pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
    738         Params.m_inputCoord.m_y = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height *
    739             (pC->m_pPto3GPPparams->PanZoomTopleftYa +
    740             (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomTopleftYb\
    741                  - pC->m_pPto3GPPparams->PanZoomTopleftYa) *
    742             pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
    743 
    744         Params.m_inputSize.m_width = (M4OSA_UInt32)(pC->m_pDecodedPlane->u_width *
    745             (pC->m_pPto3GPPparams->PanZoomXa +
    746             (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) *
    747             pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
    748 
    749         Params.m_inputSize.m_height =  (M4OSA_UInt32)(pC->m_pDecodedPlane->u_height *
    750             (pC->m_pPto3GPPparams->PanZoomXa +
    751             (M4OSA_Int16)((pC->m_pPto3GPPparams->PanZoomXb - pC->m_pPto3GPPparams->PanZoomXa) *
    752             pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage)) / 100;
    753  */
    754             // Instead of using pC->m_NbImage we have to use (pC->m_NbImage-1) as pC->m_ImageCounter
    755             // will be x-1 max for x no. of frames
    756             Params.m_inputCoord.m_x = (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width *
    757                 (pC->m_pPto3GPPparams->PanZoomTopleftXa +
    758                 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftXb\
    759                      - pC->m_pPto3GPPparams->PanZoomTopleftXa) *
    760                 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
    761             Params.m_inputCoord.m_y =
    762                  (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height *
    763                 (pC->m_pPto3GPPparams->PanZoomTopleftYa +
    764                 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomTopleftYb\
    765                      - pC->m_pPto3GPPparams->PanZoomTopleftYa) *
    766                 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
    767 
    768             Params.m_inputSize.m_width =
    769                  (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_width *
    770                 (pC->m_pPto3GPPparams->PanZoomXa +
    771                 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb\
    772                      - pC->m_pPto3GPPparams->PanZoomXa) *
    773                 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
    774 
    775             Params.m_inputSize.m_height =
    776                  (M4OSA_UInt32)((((M4OSA_Double)pC->m_pDecodedPlane->u_height *
    777                 (pC->m_pPto3GPPparams->PanZoomXa +
    778                 (M4OSA_Double)((M4OSA_Double)(pC->m_pPto3GPPparams->PanZoomXb \
    779                     - pC->m_pPto3GPPparams->PanZoomXa) *
    780                 pC->m_ImageCounter) / (M4OSA_Double)pC->m_NbImage-1)) / 1000));
    781 
    782             if((Params.m_inputSize.m_width + Params.m_inputCoord.m_x)\
    783                  > pC->m_pDecodedPlane->u_width)
    784             {
    785                 Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width \
    786                     - Params.m_inputCoord.m_x;
    787             }
    788 
    789             if((Params.m_inputSize.m_height + Params.m_inputCoord.m_y)\
    790                  > pC->m_pDecodedPlane->u_height)
    791             {
    792                 Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height\
    793                      - Params.m_inputCoord.m_y;
    794             }
    795 
    796 
    797 
    798             Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
    799             Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
    800         }
    801 
    802 
    803 
    804     /**
    805         Picture rendering: Black borders*/
    806 
    807         if(pC->m_mediaRendering == M4xVSS_kBlackBorders)
    808         {
    809             memset((void *)pImagePlanes[0].pac_data,Y_PLANE_BORDER_VALUE,
    810                 (pImagePlanes[0].u_height*pImagePlanes[0].u_stride));
    811             memset((void *)pImagePlanes[1].pac_data,U_PLANE_BORDER_VALUE,
    812                 (pImagePlanes[1].u_height*pImagePlanes[1].u_stride));
    813             memset((void *)pImagePlanes[2].pac_data,V_PLANE_BORDER_VALUE,
    814                 (pImagePlanes[2].u_height*pImagePlanes[2].u_stride));
    815 
    816             /**
    817             First without pan&zoom*/
    818             if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
    819             {
    820                 switch(pBasicTags.orientation)
    821                 {
    822                 default:
    823                 case M4COMMON_kOrientationUnknown:
    824                     Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
    825                 case M4COMMON_kOrientationTopLeft:
    826                 case M4COMMON_kOrientationTopRight:
    827                 case M4COMMON_kOrientationBottomRight:
    828                 case M4COMMON_kOrientationBottomLeft:
    829                     if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\
    830                          /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height)
    831                          //Params.m_inputSize.m_height < Params.m_inputSize.m_width)
    832                     {
    833                         /*it is height so black borders will be on the top and on the bottom side*/
    834                         Params.m_outputSize.m_width = pImagePlanes->u_width;
    835                         Params.m_outputSize.m_height =
    836                              (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height \
    837                                 * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width);
    838                         /*number of lines at the top*/
    839                         pImagePlanes[0].u_topleft =
    840                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
    841                                 -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride;
    842                         pImagePlanes[0].u_height = Params.m_outputSize.m_height;
    843                         pImagePlanes[1].u_topleft =
    844                              (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
    845                                 -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[1].u_stride;
    846                         pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1;
    847                         pImagePlanes[2].u_topleft =
    848                              (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
    849                                 -(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanes[2].u_stride;
    850                         pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1;
    851                     }
    852                     else
    853                     {
    854                         /*it is width so black borders will be on the left and right side*/
    855                         Params.m_outputSize.m_height = pImagePlanes->u_height;
    856                         Params.m_outputSize.m_width =
    857                              (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
    858                                 * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height);
    859 
    860                         pImagePlanes[0].u_topleft =
    861                             (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
    862                                 -Params.m_outputSize.m_width)>>1));
    863                         pImagePlanes[0].u_width = Params.m_outputSize.m_width;
    864                         pImagePlanes[1].u_topleft =
    865                              (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
    866                                 -(Params.m_outputSize.m_width>>1)))>>1);
    867                         pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1;
    868                         pImagePlanes[2].u_topleft =
    869                              (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
    870                                 -(Params.m_outputSize.m_width>>1)))>>1);
    871                         pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1;
    872                     }
    873                     break;
    874                 case M4COMMON_kOrientationLeftTop:
    875                 case M4COMMON_kOrientationLeftBottom:
    876                 case M4COMMON_kOrientationRightTop:
    877                 case M4COMMON_kOrientationRightBottom:
    878                         if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
    879                              /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height)
    880                              //Params.m_inputSize.m_height > Params.m_inputSize.m_width)
    881                         {
    882                             /*it is height so black borders will be on the top and on
    883                              the bottom side*/
    884                             Params.m_outputSize.m_height = pImagePlanes->u_width;
    885                             Params.m_outputSize.m_width =
    886                                  (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
    887                                     * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_height);
    888                             /*number of lines at the top*/
    889                             pImagePlanes[0].u_topleft =
    890                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
    891                                     -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1;
    892                             pImagePlanes[0].u_height = Params.m_outputSize.m_width;
    893                             pImagePlanes[1].u_topleft =
    894                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
    895                                     -(Params.m_outputSize.m_width>>1)))>>1)\
    896                                         *pImagePlanes[1].u_stride)+1;
    897                             pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1;
    898                             pImagePlanes[2].u_topleft =
    899                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
    900                                     -(Params.m_outputSize.m_width>>1)))>>1)\
    901                                         *pImagePlanes[2].u_stride)+1;
    902                             pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1;
    903                         }
    904                         else
    905                         {
    906                             /*it is width so black borders will be on the left and right side*/
    907                             Params.m_outputSize.m_width = pImagePlanes->u_height;
    908                             Params.m_outputSize.m_height =
    909                                  (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\
    910                                      * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_width);
    911 
    912                             pImagePlanes[0].u_topleft =
    913                                  ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
    914                                     -Params.m_outputSize.m_height))>>1))+1;
    915                             pImagePlanes[0].u_width = Params.m_outputSize.m_height;
    916                             pImagePlanes[1].u_topleft =
    917                                  ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
    918                                     -(Params.m_outputSize.m_height>>1)))>>1))+1;
    919                             pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1;
    920                             pImagePlanes[2].u_topleft =
    921                                  ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
    922                                     -(Params.m_outputSize.m_height>>1)))>>1))+1;
    923                             pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1;
    924                         }
    925                     break;
    926                 }
    927             }
    928 
    929             /**
    930             Secondly with pan&zoom*/
    931             else
    932             {
    933                 switch(pBasicTags.orientation)
    934                 {
    935                 default:
    936                 case M4COMMON_kOrientationUnknown:
    937                     Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
    938                 case M4COMMON_kOrientationTopLeft:
    939                 case M4COMMON_kOrientationTopRight:
    940                 case M4COMMON_kOrientationBottomRight:
    941                 case M4COMMON_kOrientationBottomLeft:
    942                     /*NO ROTATION*/
    943                     if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_width)\
    944                          /pC->m_pDecodedPlane->u_width) <= pImagePlanes->u_height)
    945                             //Params.m_inputSize.m_height < Params.m_inputSize.m_width)
    946                     {
    947                         /*Black borders will be on the top and bottom of the output video*/
    948                         /*Maximum output height if the input image aspect ratio is kept and if
    949                         the output width is the screen width*/
    950                         M4OSA_UInt32 tempOutputSizeHeight =
    951                             (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height\
    952                                  * pImagePlanes->u_width) /pC->m_pDecodedPlane->u_width);
    953                         M4OSA_UInt32 tempInputSizeHeightMax = 0;
    954                         M4OSA_UInt32 tempFinalInputHeight = 0;
    955                         /*The output width is the screen width*/
    956                         Params.m_outputSize.m_width = pImagePlanes->u_width;
    957                         tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1;
    958 
    959                         /*Maximum input height according to the maximum output height
    960                         (proportional to the maximum output height)*/
    961                         tempInputSizeHeightMax = (pImagePlanes->u_height\
    962                             *Params.m_inputSize.m_height)/tempOutputSizeHeight;
    963                         tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1;
    964 
    965                         /*Check if the maximum possible input height is contained into the
    966                         input image height*/
    967                         if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_height)
    968                         {
    969                             /*The maximum possible input height is contained in the input
    970                             image height,
    971                             that means no black borders, the input pan zoom area will be extended
    972                             so that the input AIR height will be the maximum possible*/
    973                             if(((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\
    974                                  <= Params.m_inputCoord.m_y
    975                                 && ((tempInputSizeHeightMax - Params.m_inputSize.m_height)>>1)\
    976                                      <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y\
    977                                          + Params.m_inputSize.m_height))
    978                             {
    979                                 /*The input pan zoom area can be extended symmetrically on the
    980                                 top and bottom side*/
    981                                 Params.m_inputCoord.m_y -= ((tempInputSizeHeightMax \
    982                                     - Params.m_inputSize.m_height)>>1);
    983                             }
    984                             else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\
    985                                 -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height))
    986                             {
    987                                 /*There is not enough place above the input pan zoom area to
    988                                 extend it symmetrically,
    989                                 so extend it to the maximum on the top*/
    990                                 Params.m_inputCoord.m_y = 0;
    991                             }
    992                             else
    993                             {
    994                                 /*There is not enough place below the input pan zoom area to
    995                                 extend it symmetrically,
    996                                 so extend it to the maximum on the bottom*/
    997                                 Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height \
    998                                     - tempInputSizeHeightMax;
    999                             }
   1000                             /*The input height of the AIR is the maximum possible height*/
   1001                             Params.m_inputSize.m_height = tempInputSizeHeightMax;
   1002                         }
   1003                         else
   1004                         {
   1005                             /*The maximum possible input height is greater than the input
   1006                             image height,
   1007                             that means black borders are necessary to keep aspect ratio
   1008                             The input height of the AIR is all the input image height*/
   1009                             Params.m_outputSize.m_height =
   1010                                 (tempOutputSizeHeight*pC->m_pDecodedPlane->u_height)\
   1011                                     /Params.m_inputSize.m_height;
   1012                             Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
   1013                             Params.m_inputCoord.m_y = 0;
   1014                             Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
   1015                             pImagePlanes[0].u_topleft =
   1016                                  (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
   1017                                     -Params.m_outputSize.m_height)>>1))*pImagePlanes[0].u_stride;
   1018                             pImagePlanes[0].u_height = Params.m_outputSize.m_height;
   1019                             pImagePlanes[1].u_topleft =
   1020                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
   1021                                     -(Params.m_outputSize.m_height>>1)))>>1)\
   1022                                         *pImagePlanes[1].u_stride);
   1023                             pImagePlanes[1].u_height = Params.m_outputSize.m_height>>1;
   1024                             pImagePlanes[2].u_topleft =
   1025                                  ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
   1026                                     -(Params.m_outputSize.m_height>>1)))>>1)\
   1027                                         *pImagePlanes[2].u_stride);
   1028                             pImagePlanes[2].u_height = Params.m_outputSize.m_height>>1;
   1029                         }
   1030                     }
   1031                     else
   1032                     {
   1033                         /*Black borders will be on the left and right side of the output video*/
   1034                         /*Maximum output width if the input image aspect ratio is kept and if the
   1035                          output height is the screen height*/
   1036                         M4OSA_UInt32 tempOutputSizeWidth =
   1037                              (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width \
   1038                                 * pImagePlanes->u_height) /pC->m_pDecodedPlane->u_height);
   1039                         M4OSA_UInt32 tempInputSizeWidthMax = 0;
   1040                         M4OSA_UInt32 tempFinalInputWidth = 0;
   1041                         /*The output height is the screen height*/
   1042                         Params.m_outputSize.m_height = pImagePlanes->u_height;
   1043                         tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1;
   1044 
   1045                         /*Maximum input width according to the maximum output width
   1046                         (proportional to the maximum output width)*/
   1047                         tempInputSizeWidthMax =
   1048                              (pImagePlanes->u_width*Params.m_inputSize.m_width)\
   1049                                 /tempOutputSizeWidth;
   1050                         tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1;
   1051 
   1052                         /*Check if the maximum possible input width is contained into the input
   1053                          image width*/
   1054                         if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_width)
   1055                         {
   1056                             /*The maximum possible input width is contained in the input
   1057                             image width,
   1058                             that means no black borders, the input pan zoom area will be extended
   1059                             so that the input AIR width will be the maximum possible*/
   1060                             if(((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1) \
   1061                                 <= Params.m_inputCoord.m_x
   1062                                 && ((tempInputSizeWidthMax - Params.m_inputSize.m_width)>>1)\
   1063                                      <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \
   1064                                         + Params.m_inputSize.m_width))
   1065                             {
   1066                                 /*The input pan zoom area can be extended symmetrically on the
   1067                                      right and left side*/
   1068                                 Params.m_inputCoord.m_x -= ((tempInputSizeWidthMax\
   1069                                      - Params.m_inputSize.m_width)>>1);
   1070                             }
   1071                             else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\
   1072                                 -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width))
   1073                             {
   1074                                 /*There is not enough place above the input pan zoom area to
   1075                                     extend it symmetrically,
   1076                                 so extend it to the maximum on the left*/
   1077                                 Params.m_inputCoord.m_x = 0;
   1078                             }
   1079                             else
   1080                             {
   1081                                 /*There is not enough place below the input pan zoom area
   1082                                     to extend it symmetrically,
   1083                                 so extend it to the maximum on the right*/
   1084                                 Params.m_inputCoord.m_x = pC->m_pDecodedPlane->u_width \
   1085                                     - tempInputSizeWidthMax;
   1086                             }
   1087                             /*The input width of the AIR is the maximum possible width*/
   1088                             Params.m_inputSize.m_width = tempInputSizeWidthMax;
   1089                         }
   1090                         else
   1091                         {
   1092                             /*The maximum possible input width is greater than the input
   1093                             image width,
   1094                             that means black borders are necessary to keep aspect ratio
   1095                             The input width of the AIR is all the input image width*/
   1096                             Params.m_outputSize.m_width =\
   1097                                  (tempOutputSizeWidth*pC->m_pDecodedPlane->u_width)\
   1098                                     /Params.m_inputSize.m_width;
   1099                             Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
   1100                             Params.m_inputCoord.m_x = 0;
   1101                             Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
   1102                             pImagePlanes[0].u_topleft =
   1103                                  (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
   1104                                     -Params.m_outputSize.m_width)>>1));
   1105                             pImagePlanes[0].u_width = Params.m_outputSize.m_width;
   1106                             pImagePlanes[1].u_topleft =
   1107                                  (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
   1108                                     -(Params.m_outputSize.m_width>>1)))>>1);
   1109                             pImagePlanes[1].u_width = Params.m_outputSize.m_width>>1;
   1110                             pImagePlanes[2].u_topleft =
   1111                                  (M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
   1112                                     -(Params.m_outputSize.m_width>>1)))>>1);
   1113                             pImagePlanes[2].u_width = Params.m_outputSize.m_width>>1;
   1114                         }
   1115                     }
   1116                     break;
   1117                 case M4COMMON_kOrientationLeftTop:
   1118                 case M4COMMON_kOrientationLeftBottom:
   1119                 case M4COMMON_kOrientationRightTop:
   1120                 case M4COMMON_kOrientationRightBottom:
   1121                     /*ROTATION*/
   1122                     if((M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
   1123                          /pC->m_pDecodedPlane->u_height) < pImagePlanes->u_height)
   1124                          //Params.m_inputSize.m_height > Params.m_inputSize.m_width)
   1125                     {
   1126                         /*Black borders will be on the left and right side of the output video*/
   1127                         /*Maximum output height if the input image aspect ratio is kept and if
   1128                         the output height is the screen width*/
   1129                         M4OSA_UInt32 tempOutputSizeHeight =
   1130                         (M4OSA_UInt32)((pC->m_pDecodedPlane->u_width * pImagePlanes->u_width)\
   1131                              /pC->m_pDecodedPlane->u_height);
   1132                         M4OSA_UInt32 tempInputSizeHeightMax = 0;
   1133                         M4OSA_UInt32 tempFinalInputHeight = 0;
   1134                         /*The output width is the screen height*/
   1135                         Params.m_outputSize.m_height = pImagePlanes->u_width;
   1136                         Params.m_outputSize.m_width= pImagePlanes->u_height;
   1137                         tempOutputSizeHeight = (tempOutputSizeHeight>>1)<<1;
   1138 
   1139                         /*Maximum input height according to the maximum output height
   1140                              (proportional to the maximum output height)*/
   1141                         tempInputSizeHeightMax =
   1142                             (pImagePlanes->u_height*Params.m_inputSize.m_width)\
   1143                                 /tempOutputSizeHeight;
   1144                         tempInputSizeHeightMax = (tempInputSizeHeightMax>>1)<<1;
   1145 
   1146                         /*Check if the maximum possible input height is contained into the
   1147                              input image width (rotation included)*/
   1148                         if(tempInputSizeHeightMax <= pC->m_pDecodedPlane->u_width)
   1149                         {
   1150                             /*The maximum possible input height is contained in the input
   1151                             image width (rotation included),
   1152                             that means no black borders, the input pan zoom area will be extended
   1153                             so that the input AIR width will be the maximum possible*/
   1154                             if(((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1) \
   1155                                 <= Params.m_inputCoord.m_x
   1156                                 && ((tempInputSizeHeightMax - Params.m_inputSize.m_width)>>1)\
   1157                                      <= pC->m_pDecodedPlane->u_width -(Params.m_inputCoord.m_x \
   1158                                         + Params.m_inputSize.m_width))
   1159                             {
   1160                                 /*The input pan zoom area can be extended symmetrically on the
   1161                                  right and left side*/
   1162                                 Params.m_inputCoord.m_x -= ((tempInputSizeHeightMax \
   1163                                     - Params.m_inputSize.m_width)>>1);
   1164                             }
   1165                             else if(Params.m_inputCoord.m_x < pC->m_pDecodedPlane->u_width\
   1166                                 -(Params.m_inputCoord.m_x + Params.m_inputSize.m_width))
   1167                             {
   1168                                 /*There is not enough place on the left of the input pan
   1169                                 zoom area to extend it symmetrically,
   1170                                 so extend it to the maximum on the left*/
   1171                                 Params.m_inputCoord.m_x = 0;
   1172                             }
   1173                             else
   1174                             {
   1175                                 /*There is not enough place on the right of the input pan zoom
   1176                                  area to extend it symmetrically,
   1177                                 so extend it to the maximum on the right*/
   1178                                 Params.m_inputCoord.m_x =
   1179                                      pC->m_pDecodedPlane->u_width - tempInputSizeHeightMax;
   1180                             }
   1181                             /*The input width of the AIR is the maximum possible width*/
   1182                             Params.m_inputSize.m_width = tempInputSizeHeightMax;
   1183                         }
   1184                         else
   1185                         {
   1186                             /*The maximum possible input height is greater than the input
   1187                             image width (rotation included),
   1188                             that means black borders are necessary to keep aspect ratio
   1189                             The input width of the AIR is all the input image width*/
   1190                             Params.m_outputSize.m_width =
   1191                             (tempOutputSizeHeight*pC->m_pDecodedPlane->u_width)\
   1192                                 /Params.m_inputSize.m_width;
   1193                             Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
   1194                             Params.m_inputCoord.m_x = 0;
   1195                             Params.m_inputSize.m_width = pC->m_pDecodedPlane->u_width;
   1196                             pImagePlanes[0].u_topleft =
   1197                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_height\
   1198                                     -Params.m_outputSize.m_width))>>1)*pImagePlanes[0].u_stride)+1;
   1199                             pImagePlanes[0].u_height = Params.m_outputSize.m_width;
   1200                             pImagePlanes[1].u_topleft =
   1201                             ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_height\
   1202                                 -(Params.m_outputSize.m_width>>1)))>>1)\
   1203                                     *pImagePlanes[1].u_stride)+1;
   1204                             pImagePlanes[1].u_height = Params.m_outputSize.m_width>>1;
   1205                             pImagePlanes[2].u_topleft =
   1206                             ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_height\
   1207                                 -(Params.m_outputSize.m_width>>1)))>>1)\
   1208                                     *pImagePlanes[2].u_stride)+1;
   1209                             pImagePlanes[2].u_height = Params.m_outputSize.m_width>>1;
   1210                         }
   1211                     }
   1212                     else
   1213                     {
   1214                         /*Black borders will be on the top and bottom of the output video*/
   1215                         /*Maximum output width if the input image aspect ratio is kept and if
   1216                          the output width is the screen height*/
   1217                         M4OSA_UInt32 tempOutputSizeWidth =
   1218                         (M4OSA_UInt32)((pC->m_pDecodedPlane->u_height * pImagePlanes->u_height)\
   1219                              /pC->m_pDecodedPlane->u_width);
   1220                         M4OSA_UInt32 tempInputSizeWidthMax = 0;
   1221                         M4OSA_UInt32 tempFinalInputWidth = 0, tempFinalOutputWidth = 0;
   1222                         /*The output height is the screen width*/
   1223                         Params.m_outputSize.m_width = pImagePlanes->u_height;
   1224                         Params.m_outputSize.m_height= pImagePlanes->u_width;
   1225                         tempOutputSizeWidth = (tempOutputSizeWidth>>1)<<1;
   1226 
   1227                         /*Maximum input width according to the maximum output width
   1228                          (proportional to the maximum output width)*/
   1229                         tempInputSizeWidthMax =
   1230                         (pImagePlanes->u_width*Params.m_inputSize.m_height)/tempOutputSizeWidth;
   1231                         tempInputSizeWidthMax = (tempInputSizeWidthMax>>1)<<1;
   1232 
   1233                         /*Check if the maximum possible input width is contained into the input
   1234                          image height (rotation included)*/
   1235                         if(tempInputSizeWidthMax <= pC->m_pDecodedPlane->u_height)
   1236                         {
   1237                             /*The maximum possible input width is contained in the input
   1238                              image height (rotation included),
   1239                             that means no black borders, the input pan zoom area will be extended
   1240                             so that the input AIR height will be the maximum possible*/
   1241                             if(((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1) \
   1242                                 <= Params.m_inputCoord.m_y
   1243                                 && ((tempInputSizeWidthMax - Params.m_inputSize.m_height)>>1)\
   1244                                      <= pC->m_pDecodedPlane->u_height -(Params.m_inputCoord.m_y \
   1245                                         + Params.m_inputSize.m_height))
   1246                             {
   1247                                 /*The input pan zoom area can be extended symmetrically on
   1248                                 the right and left side*/
   1249                                 Params.m_inputCoord.m_y -= ((tempInputSizeWidthMax \
   1250                                     - Params.m_inputSize.m_height)>>1);
   1251                             }
   1252                             else if(Params.m_inputCoord.m_y < pC->m_pDecodedPlane->u_height\
   1253                                 -(Params.m_inputCoord.m_y + Params.m_inputSize.m_height))
   1254                             {
   1255                                 /*There is not enough place on the top of the input pan zoom
   1256                                 area to extend it symmetrically,
   1257                                 so extend it to the maximum on the top*/
   1258                                 Params.m_inputCoord.m_y = 0;
   1259                             }
   1260                             else
   1261                             {
   1262                                 /*There is not enough place on the bottom of the input pan zoom
   1263                                  area to extend it symmetrically,
   1264                                 so extend it to the maximum on the bottom*/
   1265                                 Params.m_inputCoord.m_y = pC->m_pDecodedPlane->u_height\
   1266                                      - tempInputSizeWidthMax;
   1267                             }
   1268                             /*The input height of the AIR is the maximum possible height*/
   1269                             Params.m_inputSize.m_height = tempInputSizeWidthMax;
   1270                         }
   1271                         else
   1272                         {
   1273                             /*The maximum possible input width is greater than the input\
   1274                              image height (rotation included),
   1275                             that means black borders are necessary to keep aspect ratio
   1276                             The input height of the AIR is all the input image height*/
   1277                             Params.m_outputSize.m_height =
   1278                                 (tempOutputSizeWidth*pC->m_pDecodedPlane->u_height)\
   1279                                     /Params.m_inputSize.m_height;
   1280                             Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
   1281                             Params.m_inputCoord.m_y = 0;
   1282                             Params.m_inputSize.m_height = pC->m_pDecodedPlane->u_height;
   1283                             pImagePlanes[0].u_topleft =
   1284                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[0].u_width\
   1285                                     -Params.m_outputSize.m_height))>>1))+1;
   1286                             pImagePlanes[0].u_width = Params.m_outputSize.m_height;
   1287                             pImagePlanes[1].u_topleft =
   1288                                 ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[1].u_width\
   1289                                     -(Params.m_outputSize.m_height>>1)))>>1))+1;
   1290                             pImagePlanes[1].u_width = Params.m_outputSize.m_height>>1;
   1291                             pImagePlanes[2].u_topleft =
   1292                                  ((M4xVSS_ABS((M4OSA_Int32)(pImagePlanes[2].u_width\
   1293                                     -(Params.m_outputSize.m_height>>1)))>>1))+1;
   1294                             pImagePlanes[2].u_width = Params.m_outputSize.m_height>>1;
   1295                         }
   1296                     }
   1297                     break;
   1298                 }
   1299             }
   1300 
   1301             /*Width and height have to be even*/
   1302             Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
   1303             Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
   1304             Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
   1305             Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
   1306             pImagePlanes[0].u_width = (pImagePlanes[0].u_width>>1)<<1;
   1307             pImagePlanes[1].u_width = (pImagePlanes[1].u_width>>1)<<1;
   1308             pImagePlanes[2].u_width = (pImagePlanes[2].u_width>>1)<<1;
   1309             pImagePlanes[0].u_height = (pImagePlanes[0].u_height>>1)<<1;
   1310             pImagePlanes[1].u_height = (pImagePlanes[1].u_height>>1)<<1;
   1311             pImagePlanes[2].u_height = (pImagePlanes[2].u_height>>1)<<1;
   1312 
   1313             /*Check that values are coherent*/
   1314             if(Params.m_inputSize.m_height == Params.m_outputSize.m_height)
   1315             {
   1316                 Params.m_inputSize.m_width = Params.m_outputSize.m_width;
   1317             }
   1318             else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width)
   1319             {
   1320                 Params.m_inputSize.m_height = Params.m_outputSize.m_height;
   1321             }
   1322         }
   1323 
   1324         /**
   1325         Picture rendering: Resizing and Cropping*/
   1326         if(pC->m_mediaRendering != M4xVSS_kBlackBorders)
   1327         {
   1328             switch(pBasicTags.orientation)
   1329             {
   1330             default:
   1331             case M4COMMON_kOrientationUnknown:
   1332                 Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
   1333             case M4COMMON_kOrientationTopLeft:
   1334             case M4COMMON_kOrientationTopRight:
   1335             case M4COMMON_kOrientationBottomRight:
   1336             case M4COMMON_kOrientationBottomLeft:
   1337                 Params.m_outputSize.m_height = pImagePlanes->u_height;
   1338                 Params.m_outputSize.m_width = pImagePlanes->u_width;
   1339                 break;
   1340             case M4COMMON_kOrientationLeftTop:
   1341             case M4COMMON_kOrientationLeftBottom:
   1342             case M4COMMON_kOrientationRightTop:
   1343             case M4COMMON_kOrientationRightBottom:
   1344                 Params.m_outputSize.m_height = pImagePlanes->u_width;
   1345                 Params.m_outputSize.m_width = pImagePlanes->u_height;
   1346                 break;
   1347             }
   1348         }
   1349 
   1350         /**
   1351         Picture rendering: Cropping*/
   1352         if(pC->m_mediaRendering == M4xVSS_kCropping)
   1353         {
   1354             if((Params.m_outputSize.m_height * Params.m_inputSize.m_width)\
   1355                  /Params.m_outputSize.m_width<Params.m_inputSize.m_height)
   1356             {
   1357                 M4OSA_UInt32 tempHeight = Params.m_inputSize.m_height;
   1358                 /*height will be cropped*/
   1359                 Params.m_inputSize.m_height =  (M4OSA_UInt32)((Params.m_outputSize.m_height \
   1360                     * Params.m_inputSize.m_width) /Params.m_outputSize.m_width);
   1361                 Params.m_inputSize.m_height =  (Params.m_inputSize.m_height>>1)<<1;
   1362                 if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
   1363                 {
   1364                     Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)\
   1365                         ((pC->m_pDecodedPlane->u_height - Params.m_inputSize.m_height))>>1);
   1366                 }
   1367                 else
   1368                 {
   1369                     Params.m_inputCoord.m_y += (M4OSA_Int32)((M4OSA_Int32)\
   1370                         ((tempHeight - Params.m_inputSize.m_height))>>1);
   1371                 }
   1372             }
   1373             else
   1374             {
   1375                 M4OSA_UInt32 tempWidth= Params.m_inputSize.m_width;
   1376                 /*width will be cropped*/
   1377                 Params.m_inputSize.m_width =  (M4OSA_UInt32)((Params.m_outputSize.m_width \
   1378                     * Params.m_inputSize.m_height) /Params.m_outputSize.m_height);
   1379                 Params.m_inputSize.m_width =  (Params.m_inputSize.m_width>>1)<<1;
   1380                 if(M4OSA_FALSE == pC->m_pPto3GPPparams->isPanZoom)
   1381                 {
   1382                     Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)\
   1383                         ((pC->m_pDecodedPlane->u_width - Params.m_inputSize.m_width))>>1);
   1384                 }
   1385                 else
   1386                 {
   1387                     Params.m_inputCoord.m_x += (M4OSA_Int32)\
   1388                         (((M4OSA_Int32)(tempWidth - Params.m_inputSize.m_width))>>1);
   1389                 }
   1390             }
   1391         }
   1392 
   1393 
   1394 
   1395         /**
   1396          * Call AIR functions */
   1397         if(M4OSA_NULL == pC->m_air_context)
   1398         {
   1399             err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P);
   1400             if(err != M4NO_ERROR)
   1401             {
   1402                 free(pC->m_pDecodedPlane[0].pac_data);
   1403                 free(pC->m_pDecodedPlane);
   1404                 pC->m_pDecodedPlane = M4OSA_NULL;
   1405                 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\
   1406                      Error when initializing AIR: 0x%x", err);
   1407                 return err;
   1408             }
   1409         }
   1410 
   1411         err = M4AIR_configure(pC->m_air_context, &Params);
   1412         if(err != M4NO_ERROR)
   1413         {
   1414             M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct:\
   1415                  Error when configuring AIR: 0x%x", err);
   1416             M4AIR_cleanUp(pC->m_air_context);
   1417             free(pC->m_pDecodedPlane[0].pac_data);
   1418             free(pC->m_pDecodedPlane);
   1419             pC->m_pDecodedPlane = M4OSA_NULL;
   1420             return err;
   1421         }
   1422 
   1423         err = M4AIR_get(pC->m_air_context, pC->m_pDecodedPlane, pImagePlanes);
   1424         if(err != M4NO_ERROR)
   1425         {
   1426             M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when getting AIR plane: 0x%x", err);
   1427             M4AIR_cleanUp(pC->m_air_context);
   1428             free(pC->m_pDecodedPlane[0].pac_data);
   1429             free(pC->m_pDecodedPlane);
   1430             pC->m_pDecodedPlane = M4OSA_NULL;
   1431             return err;
   1432         }
   1433         pImagePlanes[0] = pImagePlanes1;
   1434         pImagePlanes[1] = pImagePlanes2;
   1435         pImagePlanes[2] = pImagePlanes3;
   1436     }
   1437 
   1438 
   1439     /**
   1440      * Increment the image counter */
   1441     pC->m_ImageCounter++;
   1442 
   1443     /**
   1444      * Check end of sequence */
   1445     last_frame_flag    = (pC->m_ImageCounter >= pC->m_NbImage);
   1446 
   1447     /**
   1448      * Keep the picture duration */
   1449     *pPictureDuration = pC->m_timeDuration;
   1450 
   1451     if (1 == last_frame_flag)
   1452     {
   1453         if(M4OSA_NULL != pC->m_air_context)
   1454         {
   1455             err = M4AIR_cleanUp(pC->m_air_context);
   1456             if(err != M4NO_ERROR)
   1457             {
   1458                 M4OSA_TRACE1_1("M4xVSS_PictureCallbackFct: Error when cleaning AIR: 0x%x", err);
   1459                 return err;
   1460             }
   1461         }
   1462         if(M4OSA_NULL != pC->m_pDecodedPlane)
   1463         {
   1464             free(pC->m_pDecodedPlane[0].pac_data);
   1465             free(pC->m_pDecodedPlane);
   1466             pC->m_pDecodedPlane = M4OSA_NULL;
   1467         }
   1468         return M4PTO3GPP_WAR_LAST_PICTURE;
   1469     }
   1470 
   1471     M4OSA_TRACE1_0("M4xVSS_PictureCallbackFct: Leaving ");
   1472     return M4NO_ERROR;
   1473 }
   1474 
   1475 /**
   1476  ******************************************************************************
   1477  * M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext)
   1478  * @brief    This function initializes Pto3GPP with the given parameters
   1479  * @note    The "Pictures to 3GPP" parameters are given by the internal xVSS
   1480  *            context. This context contains a pointer on the current element
   1481  *            of the chained list of Pto3GPP parameters.
   1482  * @param    pContext    (IN) The integrator own context
   1483  *
   1484  * @return    M4NO_ERROR:    No error
   1485  * @return    M4PTO3GPP_WAR_LAST_PICTURE: The returned image is the last one
   1486  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   1487  ******************************************************************************
   1488  */
   1489 M4OSA_ERR M4xVSS_internalStartConvertPictureTo3gp(M4OSA_Context pContext)
   1490 {
   1491     /************************************************************************/
   1492     /* Definitions to generate dummy AMR file used to add AMR silence in files generated
   1493      by Pto3GPP */
   1494     #define M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE     13
   1495     /* This constant is defined in M4VSS3GPP_InternalConfig.h */
   1496     extern const M4OSA_UInt8\
   1497          M4VSS3GPP_AMR_AU_SILENCE_FRAME_048[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE];
   1498 
   1499     /* AMR silent frame used to compute dummy AMR silence file */
   1500     #define M4VSS3GPP_AMR_HEADER_SIZE 6
   1501     const M4OSA_UInt8 M4VSS3GPP_AMR_HEADER[M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE] =
   1502     { 0x23, 0x21, 0x41, 0x4d, 0x52, 0x0a };
   1503     /************************************************************************/
   1504 
   1505     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   1506     M4OSA_ERR err;
   1507     M4PTO3GPP_Context pM4PTO3GPP_Ctxt = M4OSA_NULL;
   1508     M4PTO3GPP_Params Params;
   1509      M4xVSS_PictureCallbackCtxt*    pCallBackCtxt;
   1510     M4OSA_Bool cmpResult=M4OSA_FALSE;
   1511     M4OSA_Context pDummyAMRFile;
   1512     M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
   1513     /*UTF conversion support*/
   1514     M4OSA_Char* pDecodedPath = M4OSA_NULL;
   1515     M4OSA_UInt32 i;
   1516 
   1517     /**
   1518      * Create a M4PTO3GPP instance */
   1519     err = M4PTO3GPP_Init( &pM4PTO3GPP_Ctxt, xVSS_context->pFileReadPtr,
   1520          xVSS_context->pFileWritePtr);
   1521     if (err != M4NO_ERROR)
   1522     {
   1523         M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp returned %ld\n",err);
   1524         return err;
   1525     }
   1526 
   1527     pCallBackCtxt = (M4xVSS_PictureCallbackCtxt*)M4OSA_32bitAlignedMalloc(sizeof(M4xVSS_PictureCallbackCtxt),
   1528          M4VS,(M4OSA_Char *) "Pto3gpp callback struct");
   1529     if(pCallBackCtxt == M4OSA_NULL)
   1530     {
   1531         M4OSA_TRACE1_0("Allocation error in M4xVSS_internalStartConvertPictureTo3gp");
   1532         return M4ERR_ALLOC;
   1533     }
   1534 
   1535     Params.OutputVideoFrameSize = xVSS_context->pSettings->xVSS.outputVideoSize;
   1536     Params.OutputVideoFormat = xVSS_context->pSettings->xVSS.outputVideoFormat;
   1537     Params.videoProfile = xVSS_context->pSettings->xVSS.outputVideoProfile;
   1538     Params.videoLevel = xVSS_context->pSettings->xVSS.outputVideoLevel;
   1539 
   1540     /**
   1541      * Generate "dummy" amr file containing silence in temporary folder */
   1542     M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
   1543     strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);
   1544 
   1545     /**
   1546      * UTF conversion: convert the temporary path into the customer format*/
   1547     pDecodedPath = out_amr;
   1548 
   1549     if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
   1550             && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
   1551     {
   1552         M4OSA_UInt32 length = 0;
   1553         err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
   1554              (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
   1555         if(err != M4NO_ERROR)
   1556         {
   1557             M4OSA_TRACE1_1("M4xVSS_internalStartConvertPictureTo3gp:\
   1558                  M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
   1559             return err;
   1560         }
   1561         pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
   1562     }
   1563 
   1564     /**
   1565     * End of the conversion, now use the converted path*/
   1566 
   1567     err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, pDecodedPath, M4OSA_kFileWrite);
   1568 
   1569     /*Commented because of the use of the UTF conversion see above*/
   1570 /*    err = xVSS_context->pFileWritePtr->openWrite(&pDummyAMRFile, out_amr, M4OSA_kFileWrite);
   1571  */
   1572     if(err != M4NO_ERROR)
   1573     {
   1574         M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't open output dummy amr file %s,\
   1575              error: 0x%x\n",out_amr, err);
   1576         return err;
   1577     }
   1578 
   1579     err =  xVSS_context->pFileWritePtr->writeData(pDummyAMRFile,
   1580         (M4OSA_Int8*)M4VSS3GPP_AMR_HEADER, M4VSS3GPP_AMR_HEADER_SIZE);
   1581     if(err != M4NO_ERROR)
   1582     {
   1583         M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: Can't write output dummy amr file %s,\
   1584              error: 0x%x\n",out_amr, err);
   1585         return err;
   1586     }
   1587 
   1588     err =  xVSS_context->pFileWritePtr->writeData(pDummyAMRFile,
   1589          (M4OSA_Int8*)M4VSS3GPP_AMR_AU_SILENCE_FRAME_048, M4VSS3GPP_AMR_AU_SILENCE_FRAME_048_SIZE);
   1590     if(err != M4NO_ERROR)
   1591     {
   1592         M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \
   1593             Can't write output dummy amr file %s, error: 0x%x\n",out_amr, err);
   1594         return err;
   1595     }
   1596 
   1597     err =  xVSS_context->pFileWritePtr->closeWrite(pDummyAMRFile);
   1598     if(err != M4NO_ERROR)
   1599     {
   1600         M4OSA_TRACE1_2("M4xVSS_internalConvertPictureTo3gp: \
   1601             Can't close output dummy amr file %s, error: 0x%x\n",out_amr, err);
   1602         return err;
   1603     }
   1604 
   1605     /**
   1606      * Fill parameters for Pto3GPP with the parameters contained in the current element of the
   1607      * Pto3GPP parameters chained list and with default parameters */
   1608 /*+ New Encoder bitrates */
   1609     if(xVSS_context->pSettings->xVSS.outputVideoBitrate == 0) {
   1610         Params.OutputVideoBitrate    = M4VIDEOEDITING_kVARIABLE_KBPS;
   1611     }
   1612     else {
   1613           Params.OutputVideoBitrate = xVSS_context->pSettings->xVSS.outputVideoBitrate;
   1614     }
   1615     M4OSA_TRACE1_1("M4xVSS_internalStartConvertPicTo3GP: video bitrate = %d",
   1616         Params.OutputVideoBitrate);
   1617 /*- New Encoder bitrates */
   1618     Params.OutputFileMaxSize    = M4PTO3GPP_kUNLIMITED;
   1619     Params.pPictureCallbackFct    = M4xVSS_PictureCallbackFct;
   1620     Params.pPictureCallbackCtxt    = pCallBackCtxt;
   1621     /*FB: change to use the converted path (UTF conversion) see the conversion above*/
   1622     /*Fix :- Adding Audio Track in Image as input :AudioTarckFile Setting to NULL */
   1623     Params.pInputAudioTrackFile    = M4OSA_NULL;//(M4OSA_Void*)pDecodedPath;//out_amr;
   1624     Params.AudioPaddingMode        = M4PTO3GPP_kAudioPaddingMode_Loop;
   1625     Params.AudioFileFormat        = M4VIDEOEDITING_kFileType_AMR;
   1626     Params.pOutput3gppFile        = xVSS_context->pPTo3GPPcurrentParams->pFileOut;
   1627     Params.pTemporaryFile        = xVSS_context->pPTo3GPPcurrentParams->pFileTemp;
   1628     /*+PR No:  blrnxpsw#223*/
   1629     /*Increasing frequency of Frame, calculating Nos of Frame = duration /FPS */
   1630     /*Other changes made is @ M4xVSS_API.c @ line 3841 in M4xVSS_SendCommand*/
   1631     /*If case check for PanZoom removed */
   1632     Params.NbVideoFrames            = (M4OSA_UInt32)
   1633         (xVSS_context->pPTo3GPPcurrentParams->duration \
   1634             / xVSS_context->pPTo3GPPcurrentParams->framerate); /* */
   1635     pCallBackCtxt->m_timeDuration    = xVSS_context->pPTo3GPPcurrentParams->framerate;
   1636     /*-PR No:  blrnxpsw#223*/
   1637     pCallBackCtxt->m_ImageCounter    = 0;
   1638     pCallBackCtxt->m_FileIn            = xVSS_context->pPTo3GPPcurrentParams->pFileIn;
   1639     pCallBackCtxt->m_NbImage        = Params.NbVideoFrames;
   1640     pCallBackCtxt->m_pFileReadPtr    = xVSS_context->pFileReadPtr;
   1641     pCallBackCtxt->m_pDecodedPlane    = M4OSA_NULL;
   1642     pCallBackCtxt->m_pPto3GPPparams    = xVSS_context->pPTo3GPPcurrentParams;
   1643     pCallBackCtxt->m_air_context    = M4OSA_NULL;
   1644     pCallBackCtxt->m_mediaRendering = xVSS_context->pPTo3GPPcurrentParams->MediaRendering;
   1645 
   1646     /**
   1647      * Set the input and output files */
   1648     err = M4PTO3GPP_Open(pM4PTO3GPP_Ctxt, &Params);
   1649     if (err != M4NO_ERROR)
   1650     {
   1651         M4OSA_TRACE1_1("M4PTO3GPP_Open returned: 0x%x\n",err);
   1652         if(pCallBackCtxt != M4OSA_NULL)
   1653         {
   1654             free(pCallBackCtxt);
   1655             pCallBackCtxt = M4OSA_NULL;
   1656         }
   1657         M4PTO3GPP_CleanUp(pM4PTO3GPP_Ctxt);
   1658         return err;
   1659     }
   1660 
   1661     /**
   1662      * Save context to be able to call Pto3GPP step function in M4xVSS_step function */
   1663     xVSS_context->pM4PTO3GPP_Ctxt = pM4PTO3GPP_Ctxt;
   1664     xVSS_context->pCallBackCtxt = pCallBackCtxt;
   1665 
   1666     return M4NO_ERROR;
   1667 }
   1668 
   1669 /**
   1670  ******************************************************************************
   1671  * M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
   1672  * @brief    This function cleans up Pto3GPP
   1673  * @note
   1674  * @param    pContext    (IN) The integrator own context
   1675  *
   1676  * @return    M4NO_ERROR:    No error
   1677  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   1678  ******************************************************************************
   1679  */
   1680 M4OSA_ERR M4xVSS_internalStopConvertPictureTo3gp(M4OSA_Context pContext)
   1681 {
   1682     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   1683     M4OSA_ERR err;
   1684     M4OSA_Char out_amr[M4XVSS_MAX_PATH_LEN];
   1685     /*UTF conversion support*/
   1686     M4OSA_Char* pDecodedPath = M4OSA_NULL;
   1687 
   1688     /**
   1689     * Free the PTO3GPP callback context */
   1690     if(M4OSA_NULL != xVSS_context->pCallBackCtxt)
   1691     {
   1692         free(xVSS_context->pCallBackCtxt);
   1693         xVSS_context->pCallBackCtxt = M4OSA_NULL;
   1694     }
   1695 
   1696     /**
   1697      * Finalize the output file */
   1698     err = M4PTO3GPP_Close(xVSS_context->pM4PTO3GPP_Ctxt);
   1699     if (err != M4NO_ERROR)
   1700     {
   1701         M4OSA_TRACE1_1("M4PTO3GPP_Close returned 0x%x\n",err);
   1702         M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
   1703         return err;
   1704     }
   1705 
   1706     /**
   1707      * Free this M4PTO3GPP instance */
   1708     err = M4PTO3GPP_CleanUp(xVSS_context->pM4PTO3GPP_Ctxt);
   1709     if (err != M4NO_ERROR)
   1710     {
   1711         M4OSA_TRACE1_1("M4PTO3GPP_CleanUp returned 0x%x\n",err);
   1712         return err;
   1713     }
   1714 
   1715     /**
   1716      * Remove dummy.amr file */
   1717     M4OSA_chrNCopy(out_amr, xVSS_context->pTempPath, M4XVSS_MAX_PATH_LEN - 1);
   1718     strncat((char *)out_amr, (const char *)"dummy.amr\0", 10);
   1719 
   1720     /**
   1721      * UTF conversion: convert the temporary path into the customer format*/
   1722     pDecodedPath = out_amr;
   1723 
   1724     if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
   1725             && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
   1726     {
   1727         M4OSA_UInt32 length = 0;
   1728         err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) out_amr,
   1729              (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
   1730         if(err != M4NO_ERROR)
   1731         {
   1732             M4OSA_TRACE1_1("M4xVSS_internalStopConvertPictureTo3gp:\
   1733                  M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
   1734             return err;
   1735         }
   1736         pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
   1737     }
   1738     /**
   1739     * End of the conversion, now use the decoded path*/
   1740     remove((const char *)pDecodedPath);
   1741 
   1742     /*Commented because of the use of the UTF conversion*/
   1743 /*    remove(out_amr);
   1744  */
   1745 
   1746     xVSS_context->pM4PTO3GPP_Ctxt = M4OSA_NULL;
   1747     xVSS_context->pCallBackCtxt = M4OSA_NULL;
   1748 
   1749     return M4NO_ERROR;
   1750 }
   1751 
   1752 /**
   1753  ******************************************************************************
   1754  * prototype    M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
   1755  * @brief    This function converts an RGB565 plane to YUV420 planar
   1756  * @note    It is used only for framing effect
   1757  *            It allocates output YUV planes
   1758  * @param    framingCtx    (IN) The framing struct containing input RGB565 plane
   1759  *
   1760  * @return    M4NO_ERROR:    No error
   1761  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   1762  * @return    M4ERR_ALLOC: Allocation error (no more memory)
   1763  ******************************************************************************
   1764  */
   1765 M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
   1766 {
   1767     M4OSA_ERR err;
   1768 
   1769     /**
   1770      * Allocate output YUV planes */
   1771     framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
   1772          M4VS, (M4OSA_Char *)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
   1773     if(framingCtx->FramingYuv == M4OSA_NULL)
   1774     {
   1775         M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
   1776         return M4ERR_ALLOC;
   1777     }
   1778     framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
   1779     framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
   1780     framingCtx->FramingYuv[0].u_topleft = 0;
   1781     framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
   1782     framingCtx->FramingYuv[0].pac_data =
   1783          (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width\
   1784             *framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char *)\
   1785                 "Alloc for the Convertion output YUV");;
   1786     if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
   1787     {
   1788         M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
   1789         return M4ERR_ALLOC;
   1790     }
   1791     framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
   1792     framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
   1793     framingCtx->FramingYuv[1].u_topleft = 0;
   1794     framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
   1795     framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data \
   1796         + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
   1797     framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
   1798     framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
   1799     framingCtx->FramingYuv[2].u_topleft = 0;
   1800     framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
   1801     framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data \
   1802         + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;
   1803 
   1804     /**
   1805      * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing
   1806       effect */
   1807     err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);
   1808     if(err != M4NO_ERROR)
   1809     {
   1810         M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV:\
   1811              error when converting from RGB to YUV: 0x%x\n", err);
   1812     }
   1813 
   1814     framingCtx->duration = 0;
   1815     framingCtx->previousClipTime = -1;
   1816     framingCtx->previewOffsetClipTime = -1;
   1817 
   1818     /**
   1819      * Only one element in the chained list (no animated image with RGB buffer...) */
   1820     framingCtx->pCurrent = framingCtx;
   1821     framingCtx->pNext = framingCtx;
   1822 
   1823     return M4NO_ERROR;
   1824 }
   1825 
   1826 M4OSA_ERR M4xVSS_internalSetPlaneTransparent(M4OSA_UInt8* planeIn, M4OSA_UInt32 size)
   1827 {
   1828     M4OSA_UInt32 i;
   1829     M4OSA_UInt8* plane = planeIn;
   1830     M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
   1831     M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
   1832 
   1833     for(i=0; i<(size>>1); i++)
   1834     {
   1835         *plane++ = transparent1;
   1836         *plane++ = transparent2;
   1837     }
   1838 
   1839     return M4NO_ERROR;
   1840 }
   1841 
   1842 
   1843 /**
   1844  ******************************************************************************
   1845  * prototype M4OSA_ERR M4xVSS_internalConvertARBG888toYUV420_FrammingEffect(M4OSA_Context pContext,
   1846  *                                                M4VSS3GPP_EffectSettings* pEffect,
   1847  *                                                M4xVSS_FramingStruct* framingCtx,
   1848                                                   M4VIDEOEDITING_VideoFrameSize OutputVideoResolution)
   1849  *
   1850  * @brief    This function converts ARGB8888 input file  to YUV420 whenused for framming effect
   1851  * @note    The input ARGB8888 file path is contained in the pEffect structure
   1852  *            If the ARGB8888 must be resized to fit output video size, this function
   1853  *            will do it.
   1854  * @param    pContext    (IN) The integrator own context
   1855  * @param    pEffect        (IN) The effect structure containing all informations on
   1856  *                        the file to decode, resizing ...
   1857  * @param    framingCtx    (IN/OUT) Structure in which the output RGB will be stored
   1858  *
   1859  * @return    M4NO_ERROR:    No error
   1860  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   1861  * @return    M4ERR_ALLOC: Allocation error (no more memory)
   1862  * @return    M4ERR_FILE_NOT_FOUND: File not found.
   1863  ******************************************************************************
   1864  */
   1865 
   1866 
   1867 M4OSA_ERR M4xVSS_internalConvertARGB888toYUV420_FrammingEffect(M4OSA_Context pContext,
   1868                                                                M4VSS3GPP_EffectSettings* pEffect,
   1869                                                                M4xVSS_FramingStruct* framingCtx,
   1870                                                                M4VIDEOEDITING_VideoFrameSize\
   1871                                                                OutputVideoResolution)
   1872 {
   1873     M4OSA_ERR err = M4NO_ERROR;
   1874     M4OSA_Context pARGBIn;
   1875     M4OSA_UInt32 file_size;
   1876     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   1877     M4OSA_UInt32 width, height, width_out, height_out;
   1878     M4OSA_Void* pFile = pEffect->xVSS.pFramingFilePath;
   1879     M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
   1880     M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
   1881     /*UTF conversion support*/
   1882     M4OSA_Char* pDecodedPath = M4OSA_NULL;
   1883     M4OSA_UInt32 i = 0,j = 0;
   1884     M4VIFI_ImagePlane rgbPlane;
   1885     M4OSA_UInt32 frameSize_argb=(framingCtx->width * framingCtx->height * 4);
   1886     M4OSA_UInt32 frameSize;
   1887     M4OSA_UInt32 tempAlphaPercent = 0;
   1888     M4VIFI_UInt8* TempPacData = M4OSA_NULL;
   1889     M4OSA_UInt16 *ptr = M4OSA_NULL;
   1890     M4OSA_UInt32 z = 0;
   1891 
   1892     M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: Entering ");
   1893 
   1894     M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect width and height %d %d ",
   1895         framingCtx->width,framingCtx->height);
   1896 
   1897     M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, M4VS, (M4OSA_Char*)\
   1898         "Image argb data");
   1899     if(pTmpData == M4OSA_NULL) {
   1900         M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
   1901         return M4ERR_ALLOC;
   1902     }
   1903     /**
   1904      * UTF conversion: convert the file path into the customer format*/
   1905     pDecodedPath = pFile;
   1906 
   1907     if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
   1908             && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
   1909     {
   1910         M4OSA_UInt32 length = 0;
   1911         err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void*) pFile,
   1912              (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer, &length);
   1913         if(err != M4NO_ERROR)
   1914         {
   1915             M4OSA_TRACE1_1("M4xVSS_internalDecodePNG:\
   1916                  M4xVSS_internalConvertFromUTF8 returns err: 0x%x",err);
   1917             free(pTmpData);
   1918             pTmpData = M4OSA_NULL;
   1919             return err;
   1920         }
   1921         pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
   1922     }
   1923 
   1924     /**
   1925     * End of the conversion, now use the decoded path*/
   1926 
   1927      /* Open input ARGB8888 file and store it into memory */
   1928     err = xVSS_context->pFileReadPtr->openRead(&pARGBIn, pDecodedPath, M4OSA_kFileRead);
   1929 
   1930     if(err != M4NO_ERROR)
   1931     {
   1932         M4OSA_TRACE1_2("Can't open input ARGB8888 file %s, error: 0x%x\n",pFile, err);
   1933         free(pTmpData);
   1934         pTmpData = M4OSA_NULL;
   1935         return err;
   1936     }
   1937 
   1938     err = xVSS_context->pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pTmpData, &frameSize_argb);
   1939     if(err != M4NO_ERROR)
   1940     {
   1941         xVSS_context->pFileReadPtr->closeRead(pARGBIn);
   1942         free(pTmpData);
   1943         pTmpData = M4OSA_NULL;
   1944         return err;
   1945     }
   1946 
   1947 
   1948     err =  xVSS_context->pFileReadPtr->closeRead(pARGBIn);
   1949     if(err != M4NO_ERROR)
   1950     {
   1951         M4OSA_TRACE1_2("Can't close input png file %s, error: 0x%x\n",pFile, err);
   1952         free(pTmpData);
   1953         pTmpData = M4OSA_NULL;
   1954         return err;
   1955     }
   1956 
   1957 
   1958     rgbPlane.u_height = framingCtx->height;
   1959     rgbPlane.u_width = framingCtx->width;
   1960     rgbPlane.u_stride = rgbPlane.u_width*3;
   1961     rgbPlane.u_topleft = 0;
   1962 
   1963     frameSize = (rgbPlane.u_width * rgbPlane.u_height * 3); //Size of RGB888 data
   1964     rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(((frameSize)+ (2 * framingCtx->width)),
   1965          M4VS, (M4OSA_Char*)"Image clip RGB888 data");
   1966     if(rgbPlane.pac_data == M4OSA_NULL)
   1967     {
   1968         M4OSA_TRACE1_0("Failed to allocate memory for Image clip");
   1969         free(pTmpData);
   1970         return M4ERR_ALLOC;
   1971     }
   1972 
   1973     M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
   1974           Remove the alpha channel  ");
   1975 
   1976     /* premultiplied alpha % on RGB */
   1977     for (i=0, j = 0; i < frameSize_argb; i += 4) {
   1978         /* this is alpha value */
   1979         if ((i % 4) == 0)
   1980         {
   1981             tempAlphaPercent = pTmpData[i];
   1982         }
   1983 
   1984         /* R */
   1985         rgbPlane.pac_data[j] = pTmpData[i+1];
   1986         j++;
   1987 
   1988         /* G */
   1989         if (tempAlphaPercent > 0) {
   1990             rgbPlane.pac_data[j] = pTmpData[i+2];
   1991             j++;
   1992         } else {/* In case of alpha value 0, make GREEN to 255 */
   1993             rgbPlane.pac_data[j] = 255; //pTmpData[i+2];
   1994             j++;
   1995         }
   1996 
   1997         /* B */
   1998         rgbPlane.pac_data[j] = pTmpData[i+3];
   1999         j++;
   2000     }
   2001 
   2002     free(pTmpData);
   2003     pTmpData = M4OSA_NULL;
   2004 
   2005     /* convert RGB888 to RGB565 */
   2006 
   2007     /* allocate temp RGB 565 buffer */
   2008     TempPacData = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize +
   2009                        (4 * (framingCtx->width + framingCtx->height + 1)),
   2010                         M4VS, (M4OSA_Char*)"Image clip RGB565 data");
   2011     if (TempPacData == M4OSA_NULL) {
   2012         M4OSA_TRACE1_0("Failed to allocate memory for Image clip RGB565 data");
   2013         free(rgbPlane.pac_data);
   2014         return M4ERR_ALLOC;
   2015     }
   2016 
   2017     ptr = (M4OSA_UInt16 *)TempPacData;
   2018     z = 0;
   2019 
   2020     for (i = 0; i < j ; i += 3)
   2021     {
   2022         ptr[z++] = PACK_RGB565(0,   rgbPlane.pac_data[i],
   2023                                     rgbPlane.pac_data[i+1],
   2024                                     rgbPlane.pac_data[i+2]);
   2025     }
   2026 
   2027     /* free the RBG888 and assign RGB565 */
   2028     free(rgbPlane.pac_data);
   2029     rgbPlane.pac_data = TempPacData;
   2030 
   2031     /**
   2032      * Check if output sizes are odd */
   2033     if(rgbPlane.u_height % 2 != 0)
   2034     {
   2035         M4VIFI_UInt8* output_pac_data = rgbPlane.pac_data;
   2036         M4OSA_UInt32 i;
   2037         M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
   2038              output height is odd  ");
   2039         output_pac_data +=rgbPlane.u_width * rgbPlane.u_height*2;
   2040 
   2041         for(i=0;i<rgbPlane.u_width;i++)
   2042         {
   2043             *output_pac_data++ = transparent1;
   2044             *output_pac_data++ = transparent2;
   2045         }
   2046 
   2047         /**
   2048          * We just add a white line to the PNG that will be transparent */
   2049         rgbPlane.u_height++;
   2050     }
   2051     if(rgbPlane.u_width % 2 != 0)
   2052     {
   2053         /**
   2054          * We add a new column of white (=transparent), but we need to parse all RGB lines ... */
   2055         M4OSA_UInt32 i;
   2056         M4VIFI_UInt8* newRGBpac_data;
   2057         M4VIFI_UInt8* output_pac_data, *input_pac_data;
   2058 
   2059         rgbPlane.u_width++;
   2060         M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
   2061              output width is odd  ");
   2062         /**
   2063          * We need to allocate a new RGB output buffer in which all decoded data
   2064           + white line will be copied */
   2065         newRGBpac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(rgbPlane.u_height*rgbPlane.u_width*2\
   2066             *sizeof(M4VIFI_UInt8), M4VS, (M4OSA_Char *)"New Framing GIF Output pac_data RGB");
   2067 
   2068         if(newRGBpac_data == M4OSA_NULL)
   2069         {
   2070             M4OSA_TRACE1_0("Allocation error in \
   2071                 M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
   2072             free(rgbPlane.pac_data);
   2073             return M4ERR_ALLOC;
   2074         }
   2075 
   2076         output_pac_data= newRGBpac_data;
   2077         input_pac_data = rgbPlane.pac_data;
   2078 
   2079         for(i=0;i<rgbPlane.u_height;i++)
   2080         {
   2081             memcpy((void *)output_pac_data, (void *)input_pac_data,
   2082                  (rgbPlane.u_width-1)*2);
   2083 
   2084             output_pac_data += ((rgbPlane.u_width-1)*2);
   2085             /* Put the pixel to transparency color */
   2086             *output_pac_data++ = transparent1;
   2087             *output_pac_data++ = transparent2;
   2088 
   2089             input_pac_data += ((rgbPlane.u_width-1)*2);
   2090         }
   2091         free(rgbPlane.pac_data);
   2092         rgbPlane.pac_data = newRGBpac_data;
   2093     }
   2094 
   2095     /* reset stride */
   2096     rgbPlane.u_stride = rgbPlane.u_width*2;
   2097 
   2098     /**
   2099      * Initialize chained list parameters */
   2100     framingCtx->duration = 0;
   2101     framingCtx->previousClipTime = -1;
   2102     framingCtx->previewOffsetClipTime = -1;
   2103 
   2104     /**
   2105      * Only one element in the chained list (no animated image ...) */
   2106     framingCtx->pCurrent = framingCtx;
   2107     framingCtx->pNext = framingCtx;
   2108 
   2109     /**
   2110      * Get output width/height */
   2111      switch(OutputVideoResolution)
   2112     //switch(xVSS_context->pSettings->xVSS.outputVideoSize)
   2113     {
   2114     case M4VIDEOEDITING_kSQCIF:
   2115         width_out = 128;
   2116         height_out = 96;
   2117         break;
   2118     case M4VIDEOEDITING_kQQVGA:
   2119         width_out = 160;
   2120         height_out = 120;
   2121         break;
   2122     case M4VIDEOEDITING_kQCIF:
   2123         width_out = 176;
   2124         height_out = 144;
   2125         break;
   2126     case M4VIDEOEDITING_kQVGA:
   2127         width_out = 320;
   2128         height_out = 240;
   2129         break;
   2130     case M4VIDEOEDITING_kCIF:
   2131         width_out = 352;
   2132         height_out = 288;
   2133         break;
   2134     case M4VIDEOEDITING_kVGA:
   2135         width_out = 640;
   2136         height_out = 480;
   2137         break;
   2138     case M4VIDEOEDITING_kWVGA:
   2139         width_out = 800;
   2140         height_out = 480;
   2141         break;
   2142     case M4VIDEOEDITING_kNTSC:
   2143         width_out = 720;
   2144         height_out = 480;
   2145         break;
   2146     case M4VIDEOEDITING_k640_360:
   2147         width_out = 640;
   2148         height_out = 360;
   2149         break;
   2150     case M4VIDEOEDITING_k854_480:
   2151         // StageFright encoders require %16 resolution
   2152         width_out = M4ENCODER_854_480_Width;
   2153         height_out = 480;
   2154         break;
   2155     case M4VIDEOEDITING_k1280_720:
   2156         width_out = 1280;
   2157         height_out = 720;
   2158         break;
   2159     case M4VIDEOEDITING_k1080_720:
   2160         // StageFright encoders require %16 resolution
   2161         width_out = M4ENCODER_1080_720_Width;
   2162         height_out = 720;
   2163         break;
   2164     case M4VIDEOEDITING_k960_720:
   2165         width_out = 960;
   2166         height_out = 720;
   2167         break;
   2168     case M4VIDEOEDITING_k1920_1080:
   2169         width_out = 1920;
   2170         height_out = M4ENCODER_1920_1080_Height;
   2171         break;
   2172     /**
   2173      * If output video size is not given, we take QCIF size,
   2174      * should not happen, because already done in M4xVSS_sendCommand */
   2175     default:
   2176         width_out = 176;
   2177         height_out = 144;
   2178         break;
   2179     }
   2180 
   2181     /**
   2182      * Allocate output planes structures */
   2183     framingCtx->FramingRgb = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(sizeof(M4VIFI_ImagePlane), M4VS,
   2184          (M4OSA_Char *)"Framing Output plane RGB");
   2185     if(framingCtx->FramingRgb == M4OSA_NULL)
   2186     {
   2187         M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
   2188         return M4ERR_ALLOC;
   2189     }
   2190     /**
   2191      * Resize RGB if needed */
   2192     if((pEffect->xVSS.bResize) &&
   2193          (rgbPlane.u_width != width_out || rgbPlane.u_height != height_out))
   2194     {
   2195         width = width_out;
   2196         height = height_out;
   2197 
   2198         M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect: \
   2199              New Width and height %d %d  ",width,height);
   2200 
   2201         framingCtx->FramingRgb->u_height = height_out;
   2202         framingCtx->FramingRgb->u_width = width_out;
   2203         framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
   2204         framingCtx->FramingRgb->u_topleft = 0;
   2205 
   2206         framingCtx->FramingRgb->pac_data =
   2207              (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(framingCtx->FramingRgb->u_height*framingCtx->\
   2208                 FramingRgb->u_width*2*sizeof(M4VIFI_UInt8), M4VS,
   2209                   (M4OSA_Char *)"Framing Output pac_data RGB");
   2210 
   2211         if(framingCtx->FramingRgb->pac_data == M4OSA_NULL)
   2212         {
   2213             M4OSA_TRACE1_0("Allocation error in \
   2214                 M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
   2215             free(framingCtx->FramingRgb);
   2216             free(rgbPlane.pac_data);
   2217             return M4ERR_ALLOC;
   2218         }
   2219 
   2220         M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Resizing Needed ");
   2221         M4OSA_TRACE1_2("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
   2222               rgbPlane.u_height & rgbPlane.u_width %d %d",rgbPlane.u_height,rgbPlane.u_width);
   2223 
   2224         //err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
   2225         err = M4VIFI_ResizeBilinearRGB565toRGB565(M4OSA_NULL, &rgbPlane,framingCtx->FramingRgb);
   2226 
   2227         if(err != M4NO_ERROR)
   2228         {
   2229             M4OSA_TRACE1_1("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect :\
   2230                 when resizing RGB plane: 0x%x\n", err);
   2231             return err;
   2232         }
   2233 
   2234         if(rgbPlane.pac_data != M4OSA_NULL)
   2235         {
   2236             free(rgbPlane.pac_data);
   2237             rgbPlane.pac_data = M4OSA_NULL;
   2238         }
   2239     }
   2240     else
   2241     {
   2242 
   2243         M4OSA_TRACE1_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
   2244               Resizing Not Needed ");
   2245 
   2246         width = rgbPlane.u_width;
   2247         height = rgbPlane.u_height;
   2248         framingCtx->FramingRgb->u_height = height;
   2249         framingCtx->FramingRgb->u_width = width;
   2250         framingCtx->FramingRgb->u_stride = framingCtx->FramingRgb->u_width*2;
   2251         framingCtx->FramingRgb->u_topleft = 0;
   2252         framingCtx->FramingRgb->pac_data = rgbPlane.pac_data;
   2253     }
   2254 
   2255 
   2256     if(pEffect->xVSS.bResize)
   2257     {
   2258         /**
   2259          * Force topleft to 0 for pure framing effect */
   2260         framingCtx->topleft_x = 0;
   2261         framingCtx->topleft_y = 0;
   2262     }
   2263 
   2264 
   2265     /**
   2266      * Convert  RGB output to YUV 420 to be able to merge it with output video in framing
   2267      effect */
   2268     framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), M4VS,
   2269          (M4OSA_Char *)"Framing Output plane YUV");
   2270     if(framingCtx->FramingYuv == M4OSA_NULL)
   2271     {
   2272         M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
   2273         free(framingCtx->FramingRgb->pac_data);
   2274         return M4ERR_ALLOC;
   2275     }
   2276 
   2277     // Alloc for Y, U and V planes
   2278     framingCtx->FramingYuv[0].u_width = ((width+1)>>1)<<1;
   2279     framingCtx->FramingYuv[0].u_height = ((height+1)>>1)<<1;
   2280     framingCtx->FramingYuv[0].u_topleft = 0;
   2281     framingCtx->FramingYuv[0].u_stride = ((width+1)>>1)<<1;
   2282     framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc
   2283         ((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height), M4VS,
   2284             (M4OSA_Char *)"Alloc for the output Y");
   2285     if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
   2286     {
   2287         M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertARGB888toYUV420_FrammingEffect");
   2288         free(framingCtx->FramingYuv);
   2289         free(framingCtx->FramingRgb->pac_data);
   2290         return M4ERR_ALLOC;
   2291     }
   2292     framingCtx->FramingYuv[1].u_width = (((width+1)>>1)<<1)>>1;
   2293     framingCtx->FramingYuv[1].u_height = (((height+1)>>1)<<1)>>1;
   2294     framingCtx->FramingYuv[1].u_topleft = 0;
   2295     framingCtx->FramingYuv[1].u_stride = (((width+1)>>1)<<1)>>1;
   2296 
   2297 
   2298     framingCtx->FramingYuv[1].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
   2299         framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height, M4VS,
   2300         (M4OSA_Char *)"Alloc for the output U");
   2301     if (framingCtx->FramingYuv[1].pac_data == M4OSA_NULL) {
   2302         free(framingCtx->FramingYuv[0].pac_data);
   2303         free(framingCtx->FramingYuv);
   2304         free(framingCtx->FramingRgb->pac_data);
   2305         return M4ERR_ALLOC;
   2306     }
   2307 
   2308     framingCtx->FramingYuv[2].u_width = (((width+1)>>1)<<1)>>1;
   2309     framingCtx->FramingYuv[2].u_height = (((height+1)>>1)<<1)>>1;
   2310     framingCtx->FramingYuv[2].u_topleft = 0;
   2311     framingCtx->FramingYuv[2].u_stride = (((width+1)>>1)<<1)>>1;
   2312 
   2313 
   2314     framingCtx->FramingYuv[2].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
   2315         framingCtx->FramingYuv[2].u_width * framingCtx->FramingYuv[0].u_height, M4VS,
   2316         (M4OSA_Char *)"Alloc for the  output V");
   2317     if (framingCtx->FramingYuv[2].pac_data == M4OSA_NULL) {
   2318         free(framingCtx->FramingYuv[1].pac_data);
   2319         free(framingCtx->FramingYuv[0].pac_data);
   2320         free(framingCtx->FramingYuv);
   2321         free(framingCtx->FramingRgb->pac_data);
   2322         return M4ERR_ALLOC;
   2323     }
   2324 
   2325     M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:\
   2326         convert RGB to YUV ");
   2327 
   2328     //err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb,  framingCtx->FramingYuv);
   2329     err = M4VIFI_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb,  framingCtx->FramingYuv);
   2330 
   2331     if (err != M4NO_ERROR)
   2332     {
   2333         M4OSA_TRACE1_1("SPS png: error when converting from RGB to YUV: 0x%x\n", err);
   2334     }
   2335     M4OSA_TRACE3_0("M4xVSS_internalConvertARGB888toYUV420_FrammingEffect:  Leaving ");
   2336     return err;
   2337 }
   2338 
   2339 /**
   2340  ******************************************************************************
   2341  * prototype    M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
   2342  *
   2343  * @brief    This function prepares VSS for editing
   2344  * @note    It also set special xVSS effect as external effects for the VSS
   2345  * @param    pContext    (IN) The integrator own context
   2346  *
   2347  * @return    M4NO_ERROR:    No error
   2348  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   2349  * @return    M4ERR_ALLOC: Allocation error (no more memory)
   2350  ******************************************************************************
   2351  */
   2352 M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext)
   2353 {
   2354     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   2355     M4VSS3GPP_EditContext pVssCtxt;
   2356     M4OSA_UInt32 i,j;
   2357     M4OSA_ERR err;
   2358 
   2359     /**
   2360      * Create a VSS 3GPP edition instance */
   2361     err = M4VSS3GPP_editInit( &pVssCtxt, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
   2362     if (err != M4NO_ERROR)
   2363     {
   2364         M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile: M4VSS3GPP_editInit returned 0x%x\n",
   2365             err);
   2366         M4VSS3GPP_editCleanUp(pVssCtxt);
   2367         /**
   2368          * Set the VSS context to NULL */
   2369         xVSS_context->pCurrentEditContext = M4OSA_NULL;
   2370         return err;
   2371     }
   2372 
   2373         M4VSS3GPP_InternalEditContext* pVSSContext =
   2374             (M4VSS3GPP_InternalEditContext*)pVssCtxt;
   2375         pVSSContext->xVSS.outputVideoFormat =
   2376             xVSS_context->pSettings->xVSS.outputVideoFormat;
   2377         pVSSContext->xVSS.outputVideoSize =
   2378             xVSS_context->pSettings->xVSS.outputVideoSize ;
   2379         pVSSContext->xVSS.outputAudioFormat =
   2380             xVSS_context->pSettings->xVSS.outputAudioFormat;
   2381         pVSSContext->xVSS.outputAudioSamplFreq =
   2382             xVSS_context->pSettings->xVSS.outputAudioSamplFreq;
   2383         pVSSContext->xVSS.outputVideoBitrate =
   2384             xVSS_context->pSettings->xVSS.outputVideoBitrate ;
   2385         pVSSContext->xVSS.outputAudioBitrate =
   2386             xVSS_context->pSettings->xVSS.outputAudioBitrate ;
   2387         pVSSContext->xVSS.bAudioMono =
   2388             xVSS_context->pSettings->xVSS.bAudioMono;
   2389         pVSSContext->xVSS.outputVideoProfile =
   2390             xVSS_context->pSettings->xVSS.outputVideoProfile;
   2391         pVSSContext->xVSS.outputVideoLevel =
   2392             xVSS_context->pSettings->xVSS.outputVideoLevel;
   2393     /* In case of MMS use case, we fill directly into the VSS context the targeted bitrate */
   2394     if(xVSS_context->targetedBitrate != 0)
   2395     {
   2396         M4VSS3GPP_InternalEditContext* pVSSContext = (M4VSS3GPP_InternalEditContext*)pVssCtxt;
   2397 
   2398         pVSSContext->bIsMMS = M4OSA_TRUE;
   2399         pVSSContext->uiMMSVideoBitrate = xVSS_context->targetedBitrate;
   2400         pVSSContext->MMSvideoFramerate = xVSS_context->pSettings->videoFrameRate;
   2401     }
   2402 
   2403     /*Warning: since the adding of the UTF conversion, pSettings has been changed in the next
   2404     part in  pCurrentEditSettings (there is a specific current editing structure for the saving,
   2405      as for the preview)*/
   2406 
   2407     /**
   2408      * Set the external video effect functions, for saving mode (to be moved to
   2409       M4xVSS_saveStart() ?)*/
   2410     for (i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
   2411     {
   2412         for (j=0; j<xVSS_context->pCurrentEditSettings->nbEffects; j++)
   2413         {
   2414             if (M4xVSS_kVideoEffectType_BlackAndWhite ==
   2415             xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
   2416             {
   2417                 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
   2418                  M4VSS3GPP_externalVideoEffectColor;
   2419                 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
   2420                 // (M4OSA_Void*)M4xVSS_kVideoEffectType_BlackAndWhite;
   2421                 /*commented FB*/
   2422                 /**
   2423                  * We do not need to set the color context, it is already set
   2424                  during sendCommand function */
   2425             }
   2426             if (M4xVSS_kVideoEffectType_Pink ==
   2427                 xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
   2428             {
   2429                 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
   2430                  M4VSS3GPP_externalVideoEffectColor;
   2431                 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
   2432                 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Pink; /**< we don't
   2433                 // use any function context */
   2434                 /*commented FB*/
   2435                 /**
   2436                  * We do not need to set the color context,
   2437                   it is already set during sendCommand function */
   2438             }
   2439             if (M4xVSS_kVideoEffectType_Green ==
   2440                  xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
   2441             {
   2442                 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
   2443                     M4VSS3GPP_externalVideoEffectColor;
   2444                 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
   2445                     // (M4OSA_Void*)M4xVSS_kVideoEffectType_Green;
   2446                      /**< we don't use any function context */
   2447                 /*commented FB*/
   2448                 /**
   2449                  * We do not need to set the color context, it is already set during
   2450                   sendCommand function */
   2451             }
   2452             if (M4xVSS_kVideoEffectType_Sepia ==
   2453                  xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
   2454             {
   2455                 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
   2456                  M4VSS3GPP_externalVideoEffectColor;
   2457                 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
   2458                 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Sepia;
   2459                 /**< we don't use any function context */
   2460                 /*commented FB*/
   2461                 /**
   2462                  * We do not need to set the color context, it is already set during
   2463                  sendCommand function */
   2464             }
   2465             if (M4xVSS_kVideoEffectType_Fifties ==
   2466              xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
   2467             {
   2468                 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
   2469                  M4VSS3GPP_externalVideoEffectFifties;
   2470                 /**
   2471                  * We do not need to set the framing context, it is already set during
   2472                  sendCommand function */
   2473             }
   2474             if (M4xVSS_kVideoEffectType_Negative ==
   2475              xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
   2476             {
   2477                 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
   2478                  M4VSS3GPP_externalVideoEffectColor;
   2479                 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
   2480                 // (M4OSA_Void*)M4xVSS_kVideoEffectType_Negative;
   2481                  /**< we don't use any function context */
   2482                 /*commented FB*/
   2483                 /**
   2484                  * We do not need to set the color context, it is already set during
   2485                   sendCommand function */
   2486             }
   2487             if (M4xVSS_kVideoEffectType_Framing ==
   2488              xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
   2489             {
   2490                 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
   2491                  M4VSS3GPP_externalVideoEffectFraming;
   2492                 /**
   2493                  * We do not need to set the framing context, it is already set during
   2494                  sendCommand function */
   2495             }
   2496             if (M4xVSS_kVideoEffectType_ZoomIn ==
   2497              xVSS_context->pSettings->Effects[j].VideoEffectType)
   2498             {
   2499                 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
   2500                  M4VSS3GPP_externalVideoEffectZoom;
   2501                 xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt =
   2502                  (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomIn; /**< we don't use any
   2503                  function context */
   2504             }
   2505             if (M4xVSS_kVideoEffectType_ZoomOut ==
   2506              xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
   2507             {
   2508                 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
   2509                  M4VSS3GPP_externalVideoEffectZoom;
   2510                 xVSS_context->pCurrentEditSettings->Effects[j].pExtVideoEffectFctCtxt =
   2511                  (M4OSA_Void*)M4xVSS_kVideoEffectType_ZoomOut; /**< we don't use any
   2512                  function context */
   2513             }
   2514             if (M4xVSS_kVideoEffectType_ColorRGB16 ==
   2515              xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
   2516             {
   2517                 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
   2518                  M4VSS3GPP_externalVideoEffectColor;
   2519                 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
   2520                 // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16;
   2521                 /**< we don't use any function context */
   2522                 /**
   2523                  * We do not need to set the color context, it is already set during
   2524                  sendCommand function */
   2525             }
   2526             if (M4xVSS_kVideoEffectType_Gradient ==
   2527              xVSS_context->pCurrentEditSettings->Effects[j].VideoEffectType)
   2528             {
   2529                 xVSS_context->pCurrentEditSettings->Effects[j].ExtVideoEffectFct =
   2530                  M4VSS3GPP_externalVideoEffectColor;
   2531                 //xVSS_context->pSettings->Effects[j].pExtVideoEffectFctCtxt =
   2532                 // (M4OSA_Void*)M4xVSS_kVideoEffectType_ColorRGB16;
   2533                 /**< we don't use any function context */
   2534                 /**
   2535                  * We do not need to set the color context, it is already set during
   2536                  sendCommand function */
   2537             }
   2538 
   2539         }
   2540     }
   2541 
   2542     /**
   2543      * Open the VSS 3GPP */
   2544     err = M4VSS3GPP_editOpen(pVssCtxt, xVSS_context->pCurrentEditSettings);
   2545     if (err != M4NO_ERROR)
   2546     {
   2547         M4OSA_TRACE1_1("M4xVSS_internalGenerateEditedFile:\
   2548              M4VSS3GPP_editOpen returned 0x%x\n",err);
   2549         M4VSS3GPP_editCleanUp(pVssCtxt);
   2550         /**
   2551          * Set the VSS context to NULL */
   2552         xVSS_context->pCurrentEditContext = M4OSA_NULL;
   2553         return err;
   2554     }
   2555 
   2556     /**
   2557      * Save VSS context to be able to close / free VSS later */
   2558     xVSS_context->pCurrentEditContext = pVssCtxt;
   2559 
   2560     return M4NO_ERROR;
   2561 }
   2562 
   2563 /**
   2564  ******************************************************************************
   2565  * prototype    M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
   2566  *
   2567  * @brief    This function cleans up VSS
   2568  * @note
   2569  * @param    pContext    (IN) The integrator own context
   2570  *
   2571  * @return    M4NO_ERROR:    No error
   2572  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   2573  ******************************************************************************
   2574  */
   2575 M4OSA_ERR M4xVSS_internalCloseEditedFile(M4OSA_Context pContext)
   2576 {
   2577     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   2578     M4VSS3GPP_EditContext pVssCtxt = xVSS_context->pCurrentEditContext;
   2579     M4OSA_ERR err;
   2580 
   2581     if(xVSS_context->pCurrentEditContext != M4OSA_NULL)
   2582     {
   2583         /**
   2584          * Close the VSS 3GPP */
   2585         err = M4VSS3GPP_editClose(pVssCtxt);
   2586         if (err != M4NO_ERROR)
   2587         {
   2588             M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile:\
   2589                  M4VSS3GPP_editClose returned 0x%x\n",err);
   2590             M4VSS3GPP_editCleanUp(pVssCtxt);
   2591             /**
   2592              * Set the VSS context to NULL */
   2593             xVSS_context->pCurrentEditContext = M4OSA_NULL;
   2594             return err;
   2595         }
   2596 
   2597         /**
   2598          * Free this VSS3GPP edition instance */
   2599         err = M4VSS3GPP_editCleanUp(pVssCtxt);
   2600         /**
   2601          * Set the VSS context to NULL */
   2602         xVSS_context->pCurrentEditContext = M4OSA_NULL;
   2603         if (err != M4NO_ERROR)
   2604         {
   2605             M4OSA_TRACE1_1("M4xVSS_internalCloseEditedFile: \
   2606                 M4VSS3GPP_editCleanUp returned 0x%x\n",err);
   2607             return err;
   2608         }
   2609     }
   2610 
   2611     return M4NO_ERROR;
   2612 }
   2613 
   2614 /**
   2615  ******************************************************************************
   2616  * prototype    M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
   2617  *
   2618  * @brief    This function prepares VSS for audio mixing
   2619  * @note    It takes its parameters from the BGM settings in the xVSS internal context
   2620  * @param    pContext    (IN) The integrator own context
   2621  *
   2622  * @return    M4NO_ERROR:    No error
   2623  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   2624  * @return    M4ERR_ALLOC: Allocation error (no more memory)
   2625  ******************************************************************************
   2626  */
   2627 /***
   2628  * FB: the function has been modified since the structure used for the saving is now the
   2629  *  pCurrentEditSettings and not the pSettings
   2630  * This change has been added for the UTF support
   2631  * All the "xVSS_context->pSettings" has been replaced by "xVSS_context->pCurrentEditSettings"
   2632  ***/
   2633 M4OSA_ERR M4xVSS_internalGenerateAudioMixFile(M4OSA_Context pContext)
   2634 {
   2635     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   2636     M4VSS3GPP_AudioMixingSettings* pAudioMixSettings;
   2637     M4VSS3GPP_AudioMixingContext pAudioMixingCtxt;
   2638     M4OSA_ERR err;
   2639     M4VIDEOEDITING_ClipProperties fileProperties;
   2640 
   2641     /**
   2642      * Allocate audio mixing settings structure and fill it with BGM parameters */
   2643     pAudioMixSettings = (M4VSS3GPP_AudioMixingSettings*)M4OSA_32bitAlignedMalloc
   2644         (sizeof(M4VSS3GPP_AudioMixingSettings), M4VS, (M4OSA_Char *)"pAudioMixSettings");
   2645     if(pAudioMixSettings == M4OSA_NULL)
   2646     {
   2647         M4OSA_TRACE1_0("Allocation error in M4xVSS_internalGenerateAudioMixFile");
   2648         return M4ERR_ALLOC;
   2649     }
   2650 
   2651     if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType ==
   2652          M4VIDEOEDITING_kFileType_3GPP)
   2653     {
   2654         err = M4xVSS_internalGetProperties((M4OSA_Context)xVSS_context,
   2655              (M4OSA_Char*)xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile,
   2656                  &fileProperties);
   2657         if(err != M4NO_ERROR)
   2658         {
   2659             M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
   2660                  impossible to retrieve audio BGM properties ->\
   2661                      reencoding audio background music", err);
   2662             fileProperties.AudioStreamType =
   2663                  xVSS_context->pCurrentEditSettings->xVSS.outputAudioFormat+1;
   2664                   /* To force BGM encoding */
   2665         }
   2666     }
   2667 
   2668     pAudioMixSettings->bRemoveOriginal = M4OSA_FALSE;
   2669     pAudioMixSettings->AddedAudioFileType =
   2670      xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->FileType;
   2671     pAudioMixSettings->pAddedAudioTrackFile =
   2672      xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile;
   2673     pAudioMixSettings->uiAddVolume =
   2674      xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume;
   2675 
   2676     pAudioMixSettings->outputAudioFormat = xVSS_context->pSettings->xVSS.outputAudioFormat;
   2677     pAudioMixSettings->outputASF = xVSS_context->pSettings->xVSS.outputAudioSamplFreq;
   2678     pAudioMixSettings->outputAudioBitrate = xVSS_context->pSettings->xVSS.outputAudioBitrate;
   2679     pAudioMixSettings->uiSamplingFrequency =
   2680      xVSS_context->pSettings->xVSS.pBGMtrack->uiSamplingFrequency;
   2681     pAudioMixSettings->uiNumChannels = xVSS_context->pSettings->xVSS.pBGMtrack->uiNumChannels;
   2682 
   2683     pAudioMixSettings->b_DuckingNeedeed =
   2684      xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->b_DuckingNeedeed;
   2685     pAudioMixSettings->fBTVolLevel =
   2686      (M4OSA_Float )xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddVolume/100;
   2687     pAudioMixSettings->InDucking_threshold =
   2688      xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->InDucking_threshold;
   2689     pAudioMixSettings->InDucking_lowVolume =
   2690      xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->lowVolume/100;
   2691     pAudioMixSettings->fPTVolLevel =
   2692      (M4OSA_Float)xVSS_context->pSettings->PTVolLevel/100;
   2693     pAudioMixSettings->bLoop = xVSS_context->pSettings->xVSS.pBGMtrack->bLoop;
   2694 
   2695     if(xVSS_context->pSettings->xVSS.bAudioMono)
   2696     {
   2697         pAudioMixSettings->outputNBChannels = 1;
   2698     }
   2699     else
   2700     {
   2701         pAudioMixSettings->outputNBChannels = 2;
   2702     }
   2703 
   2704     /**
   2705      * Fill audio mix settings with BGM parameters */
   2706     pAudioMixSettings->uiBeginLoop =
   2707      xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiBeginLoop;
   2708     pAudioMixSettings->uiEndLoop =
   2709      xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiEndLoop;
   2710     pAudioMixSettings->uiAddCts =
   2711      xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->uiAddCts;
   2712 
   2713     /**
   2714      * Output file of the audio mixer will be final file (audio mixing is the last step) */
   2715     pAudioMixSettings->pOutputClipFile = xVSS_context->pOutputFile;
   2716     pAudioMixSettings->pTemporaryFile = xVSS_context->pTemporaryFile;
   2717 
   2718     /**
   2719      * Input file of the audio mixer is a temporary file containing all audio/video editions */
   2720     pAudioMixSettings->pOriginalClipFile = xVSS_context->pCurrentEditSettings->pOutputFile;
   2721 
   2722     /**
   2723      * Save audio mixing settings pointer to be able to free it in
   2724      M4xVSS_internalCloseAudioMixedFile function */
   2725     xVSS_context->pAudioMixSettings = pAudioMixSettings;
   2726 
   2727     /**
   2728      * Create a VSS 3GPP audio mixing instance */
   2729     err = M4VSS3GPP_audioMixingInit(&pAudioMixingCtxt, pAudioMixSettings,
   2730          xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
   2731 
   2732     /**
   2733      * Save audio mixing context to be able to call audio mixing step function in
   2734       M4xVSS_step function */
   2735     xVSS_context->pAudioMixContext = pAudioMixingCtxt;
   2736 
   2737     if (err != M4NO_ERROR)
   2738     {
   2739         M4OSA_TRACE1_1("M4xVSS_internalGenerateAudioMixFile:\
   2740              M4VSS3GPP_audioMixingInit returned 0x%x\n",err);
   2741         //M4VSS3GPP_audioMixingCleanUp(pAudioMixingCtxt);
   2742         return err;
   2743     }
   2744 
   2745     return M4NO_ERROR;
   2746 }
   2747 
   2748 /**
   2749  ******************************************************************************
   2750  * prototype    M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
   2751  *
   2752  * @brief    This function cleans up VSS for audio mixing
   2753  * @note
   2754  * @param    pContext    (IN) The integrator own context
   2755  *
   2756  * @return    M4NO_ERROR:    No error
   2757  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   2758  ******************************************************************************
   2759  */
   2760 M4OSA_ERR M4xVSS_internalCloseAudioMixedFile(M4OSA_Context pContext)
   2761 {
   2762     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   2763     M4OSA_ERR err;
   2764 
   2765     /**
   2766      * Free this VSS3GPP audio mixing instance */
   2767     if(xVSS_context->pAudioMixContext != M4OSA_NULL)
   2768     {
   2769         err = M4VSS3GPP_audioMixingCleanUp(xVSS_context->pAudioMixContext);
   2770         if (err != M4NO_ERROR)
   2771         {
   2772             M4OSA_TRACE1_1("M4xVSS_internalCloseAudioMixedFile:\
   2773                  M4VSS3GPP_audioMixingCleanUp returned 0x%x\n",err);
   2774             return err;
   2775         }
   2776     }
   2777 
   2778     /**
   2779      * Free VSS audio mixing settings */
   2780     if(xVSS_context->pAudioMixSettings != M4OSA_NULL)
   2781     {
   2782         free(xVSS_context->pAudioMixSettings);
   2783         xVSS_context->pAudioMixSettings = M4OSA_NULL;
   2784     }
   2785 
   2786     return M4NO_ERROR;
   2787 }
   2788 
   2789 /**
   2790  ******************************************************************************
   2791  * prototype    M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
   2792  *
   2793  * @brief    This function cleans up preview edition structure used to generate
   2794  *            preview.3gp file given to the VPS
   2795  * @note    It also free the preview structure given to the VPS
   2796  * @param    pContext    (IN) The integrator own context
   2797  *
   2798  * @return    M4NO_ERROR:    No error
   2799  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   2800  ******************************************************************************
   2801  */
   2802 M4OSA_ERR M4xVSS_internalFreePreview(M4OSA_Context pContext)
   2803 {
   2804     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   2805     M4OSA_UInt8 i;
   2806 
   2807     /**
   2808      * Free clip/transition settings */
   2809     for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
   2810     {
   2811         M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);
   2812 
   2813         free((xVSS_context->pCurrentEditSettings->pClipList[i]));
   2814         xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;
   2815 
   2816         /**
   2817          * Because there is 1 less transition than clip number */
   2818         if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
   2819         {
   2820             free((xVSS_context->pCurrentEditSettings->pTransitionList[i]));
   2821             xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
   2822         }
   2823     }
   2824 
   2825     /**
   2826      * Free clip/transition list */
   2827     if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
   2828     {
   2829         free((xVSS_context->pCurrentEditSettings->pClipList));
   2830         xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
   2831     }
   2832     if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
   2833     {
   2834         free((xVSS_context->pCurrentEditSettings->pTransitionList));
   2835         xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
   2836     }
   2837 
   2838     /**
   2839      * Free output preview file path */
   2840     if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
   2841     {
   2842         free(xVSS_context->pCurrentEditSettings->pOutputFile);
   2843         xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
   2844     }
   2845 
   2846     /**
   2847      * Free temporary preview file path */
   2848     if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
   2849     {
   2850         remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
   2851         free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
   2852         xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
   2853     }
   2854 
   2855     /**
   2856      * Free "local" BGM settings */
   2857     if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
   2858     {
   2859         if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
   2860         {
   2861             free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
   2862             xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
   2863         }
   2864         free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
   2865         xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
   2866     }
   2867 
   2868     /**
   2869      * Free current edit settings structure */
   2870     if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
   2871     {
   2872         free(xVSS_context->pCurrentEditSettings);
   2873         xVSS_context->pCurrentEditSettings = M4OSA_NULL;
   2874     }
   2875 
   2876     /**
   2877      * Free preview effects given to application */
   2878     if(M4OSA_NULL != xVSS_context->pPreviewSettings->Effects)
   2879     {
   2880         free(xVSS_context->pPreviewSettings->Effects);
   2881         xVSS_context->pPreviewSettings->Effects = M4OSA_NULL;
   2882         xVSS_context->pPreviewSettings->nbEffects = 0;
   2883     }
   2884 
   2885     return M4NO_ERROR;
   2886 }
   2887 
   2888 
   2889 /**
   2890  ******************************************************************************
   2891  * prototype    M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
   2892  *
   2893  * @brief    This function cleans up saving edition structure used to generate
   2894  *            output.3gp file given to the VPS
   2895  * @note
   2896  * @param    pContext    (IN) The integrator own context
   2897  *
   2898  * @return    M4NO_ERROR:    No error
   2899  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   2900  ******************************************************************************
   2901  */
   2902 M4OSA_ERR M4xVSS_internalFreeSaving(M4OSA_Context pContext)
   2903 {
   2904     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   2905     M4OSA_UInt8 i;
   2906 
   2907     if(xVSS_context->pCurrentEditSettings != M4OSA_NULL)
   2908     {
   2909         /**
   2910          * Free clip/transition settings */
   2911         for(i=0; i<xVSS_context->pCurrentEditSettings->uiClipNumber; i++)
   2912         {
   2913             M4xVSS_FreeClipSettings(xVSS_context->pCurrentEditSettings->pClipList[i]);
   2914 
   2915             free((xVSS_context->pCurrentEditSettings->pClipList[i]));
   2916             xVSS_context->pCurrentEditSettings->pClipList[i] = M4OSA_NULL;
   2917 
   2918             /**
   2919              * Because there is 1 less transition than clip number */
   2920             if(i != xVSS_context->pCurrentEditSettings->uiClipNumber-1)
   2921             {
   2922                 free(\
   2923                     (xVSS_context->pCurrentEditSettings->pTransitionList[i]));
   2924                 xVSS_context->pCurrentEditSettings->pTransitionList[i] = M4OSA_NULL;
   2925             }
   2926         }
   2927 
   2928         /**
   2929          * Free clip/transition list */
   2930         if(xVSS_context->pCurrentEditSettings->pClipList != M4OSA_NULL)
   2931         {
   2932             free((xVSS_context->pCurrentEditSettings->pClipList));
   2933             xVSS_context->pCurrentEditSettings->pClipList = M4OSA_NULL;
   2934         }
   2935         if(xVSS_context->pCurrentEditSettings->pTransitionList != M4OSA_NULL)
   2936         {
   2937             free((xVSS_context->pCurrentEditSettings->pTransitionList));
   2938             xVSS_context->pCurrentEditSettings->pTransitionList = M4OSA_NULL;
   2939         }
   2940 
   2941         if(xVSS_context->pCurrentEditSettings->Effects != M4OSA_NULL)
   2942         {
   2943             free((xVSS_context->pCurrentEditSettings->Effects));
   2944             xVSS_context->pCurrentEditSettings->Effects = M4OSA_NULL;
   2945             xVSS_context->pCurrentEditSettings->nbEffects = 0;
   2946         }
   2947 
   2948         /**
   2949          * Free output saving file path */
   2950         if(xVSS_context->pCurrentEditSettings->pOutputFile != M4OSA_NULL)
   2951         {
   2952             if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
   2953             {
   2954                 remove((const char *)xVSS_context->pCurrentEditSettings->pOutputFile);
   2955                 free(xVSS_context->pCurrentEditSettings->pOutputFile);
   2956             }
   2957             if(xVSS_context->pOutputFile != M4OSA_NULL)
   2958             {
   2959                 free(xVSS_context->pOutputFile);
   2960                 xVSS_context->pOutputFile = M4OSA_NULL;
   2961             }
   2962             xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
   2963             xVSS_context->pCurrentEditSettings->pOutputFile = M4OSA_NULL;
   2964         }
   2965 
   2966         /**
   2967          * Free temporary saving file path */
   2968         if(xVSS_context->pCurrentEditSettings->pTemporaryFile != M4OSA_NULL)
   2969         {
   2970             remove((const char *)xVSS_context->pCurrentEditSettings->pTemporaryFile);
   2971             free(xVSS_context->pCurrentEditSettings->pTemporaryFile);
   2972             xVSS_context->pCurrentEditSettings->pTemporaryFile = M4OSA_NULL;
   2973         }
   2974 
   2975         /**
   2976          * Free "local" BGM settings */
   2977         if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack != M4OSA_NULL)
   2978         {
   2979             if(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
   2980             {
   2981                 free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile);
   2982                 xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
   2983             }
   2984             free(xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack);
   2985             xVSS_context->pCurrentEditSettings->xVSS.pBGMtrack = M4OSA_NULL;
   2986         }
   2987 
   2988         /**
   2989          * Free current edit settings structure */
   2990         free(xVSS_context->pCurrentEditSettings);
   2991         xVSS_context->pCurrentEditSettings = M4OSA_NULL;
   2992     }
   2993 
   2994     return M4NO_ERROR;
   2995 }
   2996 
   2997 
   2998 /**
   2999  ******************************************************************************
   3000  * prototype    M4OSA_ERR M4xVSS_freeSettings(M4OSA_Context pContext)
   3001  *
   3002  * @brief    This function cleans up an M4VSS3GPP_EditSettings structure
   3003  * @note
   3004  * @param    pSettings    (IN) Pointer on M4VSS3GPP_EditSettings structure to free
   3005  *
   3006  * @return    M4NO_ERROR:    No error
   3007  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   3008  ******************************************************************************
   3009  */
   3010 M4OSA_ERR M4xVSS_freeSettings(M4VSS3GPP_EditSettings* pSettings)
   3011 {
   3012     M4OSA_UInt8 i,j;
   3013 
   3014     /**
   3015      * For each clip ... */
   3016     for(i=0; i<pSettings->uiClipNumber; i++)
   3017     {
   3018         /**
   3019          * ... free clip settings */
   3020         if(pSettings->pClipList[i] != M4OSA_NULL)
   3021         {
   3022             M4xVSS_FreeClipSettings(pSettings->pClipList[i]);
   3023 
   3024             free((pSettings->pClipList[i]));
   3025             pSettings->pClipList[i] = M4OSA_NULL;
   3026         }
   3027 
   3028         /**
   3029          * ... free transition settings */
   3030         if(i < pSettings->uiClipNumber-1) /* Because there is 1 less transition than clip number */
   3031         {
   3032             if(pSettings->pTransitionList[i] != M4OSA_NULL)
   3033             {
   3034                 switch (pSettings->pTransitionList[i]->VideoTransitionType)
   3035                 {
   3036                     case M4xVSS_kVideoTransitionType_AlphaMagic:
   3037 
   3038                         /**
   3039                          * In case of Alpha Magic transition,
   3040                           some extra parameters need to be freed */
   3041                         if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt\
   3042                              != M4OSA_NULL)
   3043                         {
   3044                             free((((M4xVSS_internal_AlphaMagicSettings*)\
   3045                                 pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt)->\
   3046                                     pPlane->pac_data));
   3047                             ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i\
   3048                                 ]->pExtVideoTransitionFctCtxt)->pPlane->pac_data = M4OSA_NULL;
   3049 
   3050                             free((((M4xVSS_internal_AlphaMagicSettings*)\
   3051                                 pSettings->pTransitionList[i]->\
   3052                                     pExtVideoTransitionFctCtxt)->pPlane));
   3053                             ((M4xVSS_internal_AlphaMagicSettings*)pSettings->pTransitionList[i]\
   3054                                 ->pExtVideoTransitionFctCtxt)->pPlane = M4OSA_NULL;
   3055 
   3056                             free((pSettings->pTransitionList[i]->\
   3057                                 pExtVideoTransitionFctCtxt));
   3058                             pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
   3059 
   3060                             for(j=i+1;j<pSettings->uiClipNumber-1;j++)
   3061                             {
   3062                                 if(pSettings->pTransitionList[j] != M4OSA_NULL)
   3063                                 {
   3064                                     if(pSettings->pTransitionList[j]->VideoTransitionType ==
   3065                                      M4xVSS_kVideoTransitionType_AlphaMagic)
   3066                                     {
   3067                                         M4OSA_UInt32 pCmpResult=0;
   3068                                         pCmpResult = strcmp((const char *)pSettings->pTransitionList[i]->\
   3069                                             xVSS.transitionSpecific.pAlphaMagicSettings->\
   3070                                                 pAlphaFilePath,
   3071                                                 (const char *)pSettings->pTransitionList[j]->\
   3072                                                 xVSS.transitionSpecific.pAlphaMagicSettings->\
   3073                                                 pAlphaFilePath);
   3074                                         if(pCmpResult == 0)
   3075                                         {
   3076                                             /* Free extra internal alpha magic structure and put
   3077                                             it to NULL to avoid refreeing it */
   3078                                             free((pSettings->\
   3079                                                 pTransitionList[j]->pExtVideoTransitionFctCtxt));
   3080                                             pSettings->pTransitionList[j]->\
   3081                                                 pExtVideoTransitionFctCtxt = M4OSA_NULL;
   3082                                         }
   3083                                     }
   3084                                 }
   3085                             }
   3086                         }
   3087 
   3088                         if(pSettings->pTransitionList[i]->\
   3089                             xVSS.transitionSpecific.pAlphaMagicSettings != M4OSA_NULL)
   3090                         {
   3091                             if(pSettings->pTransitionList[i]->\
   3092                                 xVSS.transitionSpecific.pAlphaMagicSettings->\
   3093                                     pAlphaFilePath != M4OSA_NULL)
   3094                             {
   3095                                 free(pSettings->\
   3096                                     pTransitionList[i]->\
   3097                                         xVSS.transitionSpecific.pAlphaMagicSettings->\
   3098                                             pAlphaFilePath);
   3099                                 pSettings->pTransitionList[i]->\
   3100                                     xVSS.transitionSpecific.pAlphaMagicSettings->\
   3101                                         pAlphaFilePath = M4OSA_NULL;
   3102                             }
   3103                             free(pSettings->pTransitionList[i]->\
   3104                                 xVSS.transitionSpecific.pAlphaMagicSettings);
   3105                             pSettings->pTransitionList[i]->\
   3106                                 xVSS.transitionSpecific.pAlphaMagicSettings = M4OSA_NULL;
   3107 
   3108                         }
   3109 
   3110                     break;
   3111 
   3112 
   3113                     case M4xVSS_kVideoTransitionType_SlideTransition:
   3114                         if (M4OSA_NULL != pSettings->pTransitionList[i]->\
   3115                             xVSS.transitionSpecific.pSlideTransitionSettings)
   3116                         {
   3117                             free(pSettings->pTransitionList[i]->\
   3118                                 xVSS.transitionSpecific.pSlideTransitionSettings);
   3119                             pSettings->pTransitionList[i]->\
   3120                                 xVSS.transitionSpecific.pSlideTransitionSettings = M4OSA_NULL;
   3121                         }
   3122                         if(pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt != M4OSA_NULL)
   3123                         {
   3124                             free((pSettings->pTransitionList[i]->\
   3125                                 pExtVideoTransitionFctCtxt));
   3126                             pSettings->pTransitionList[i]->pExtVideoTransitionFctCtxt = M4OSA_NULL;
   3127                         }
   3128                     break;
   3129                                         default:
   3130                     break;
   3131 
   3132                 }
   3133                 /**
   3134                  * Free transition settings structure */
   3135                 free((pSettings->pTransitionList[i]));
   3136                 pSettings->pTransitionList[i] = M4OSA_NULL;
   3137             }
   3138         }
   3139     }
   3140 
   3141     /**
   3142      * Free clip list */
   3143     if(pSettings->pClipList != M4OSA_NULL)
   3144     {
   3145         free((pSettings->pClipList));
   3146         pSettings->pClipList = M4OSA_NULL;
   3147     }
   3148 
   3149     /**
   3150      * Free transition list */
   3151     if(pSettings->pTransitionList != M4OSA_NULL)
   3152     {
   3153         free((pSettings->pTransitionList));
   3154         pSettings->pTransitionList = M4OSA_NULL;
   3155     }
   3156 
   3157     /**
   3158      * RC: Free effects list */
   3159     if(pSettings->Effects != M4OSA_NULL)
   3160     {
   3161         for(i=0; i<pSettings->nbEffects; i++)
   3162         {
   3163             /**
   3164              * For each clip, free framing structure if needed */
   3165             if(pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Framing
   3166                 || pSettings->Effects[i].VideoEffectType == M4xVSS_kVideoEffectType_Text)
   3167             {
   3168 #ifdef DECODE_GIF_ON_SAVING
   3169                 M4xVSS_FramingContext* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
   3170 #else
   3171                 M4xVSS_FramingStruct* framingCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
   3172                 M4xVSS_FramingStruct* framingCtx_save;
   3173                 M4xVSS_Framing3102Struct* framingCtx_first = framingCtx;
   3174 #endif
   3175 
   3176 #ifdef DECODE_GIF_ON_SAVING
   3177                 if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash, trying to free non existant
   3178                  pointer */
   3179                 {
   3180                     if(framingCtx->aFramingCtx != M4OSA_NULL)
   3181                     {
   3182                         {
   3183                             if(framingCtx->aFramingCtx->FramingRgb != M4OSA_NULL)
   3184                             {
   3185                                 free(framingCtx->aFramingCtx->\
   3186                                     FramingRgb->pac_data);
   3187                                 framingCtx->aFramingCtx->FramingRgb->pac_data = M4OSA_NULL;
   3188                                 free(framingCtx->aFramingCtx->FramingRgb);
   3189                                 framingCtx->aFramingCtx->FramingRgb = M4OSA_NULL;
   3190                             }
   3191                         }
   3192                         if(framingCtx->aFramingCtx->FramingYuv != M4OSA_NULL)
   3193                         {
   3194                             free(framingCtx->aFramingCtx->\
   3195                                 FramingYuv[0].pac_data);
   3196                             framingCtx->aFramingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
   3197                            free(framingCtx->aFramingCtx->\
   3198                                 FramingYuv[1].pac_data);
   3199                             framingCtx->aFramingCtx->FramingYuv[1].pac_data = M4OSA_NULL;
   3200                            free(framingCtx->aFramingCtx->\
   3201                                 FramingYuv[2].pac_data);
   3202                             framingCtx->aFramingCtx->FramingYuv[2].pac_data = M4OSA_NULL;
   3203                             free(framingCtx->aFramingCtx->FramingYuv);
   3204                             framingCtx->aFramingCtx->FramingYuv = M4OSA_NULL;
   3205                         }
   3206                         free(framingCtx->aFramingCtx);
   3207                         framingCtx->aFramingCtx = M4OSA_NULL;
   3208                     }
   3209                     if(framingCtx->aFramingCtx_last != M4OSA_NULL)
   3210                     {
   3211                         if(framingCtx->aFramingCtx_last->FramingRgb != M4OSA_NULL)
   3212                         {
   3213                             free(framingCtx->aFramingCtx_last->\
   3214                                 FramingRgb->pac_data);
   3215                             framingCtx->aFramingCtx_last->FramingRgb->pac_data = M4OSA_NULL;
   3216                             free(framingCtx->aFramingCtx_last->\
   3217                                 FramingRgb);
   3218                             framingCtx->aFramingCtx_last->FramingRgb = M4OSA_NULL;
   3219                         }
   3220                         if(framingCtx->aFramingCtx_last->FramingYuv != M4OSA_NULL)
   3221                         {
   3222                             free(framingCtx->aFramingCtx_last->\
   3223                                 FramingYuv[0].pac_data);
   3224                             framingCtx->aFramingCtx_last->FramingYuv[0].pac_data = M4OSA_NULL;
   3225                             free(framingCtx->aFramingCtx_last->FramingYuv);
   3226                             framingCtx->aFramingCtx_last->FramingYuv = M4OSA_NULL;
   3227                         }
   3228                         free(framingCtx->aFramingCtx_last);
   3229                         framingCtx->aFramingCtx_last = M4OSA_NULL;
   3230                     }
   3231                     if(framingCtx->pEffectFilePath != M4OSA_NULL)
   3232                     {
   3233                         free(framingCtx->pEffectFilePath);
   3234                         framingCtx->pEffectFilePath = M4OSA_NULL;
   3235                     }
   3236                     /*In case there are still allocated*/
   3237                     if(framingCtx->pSPSContext != M4OSA_NULL)
   3238                     {
   3239                     //    M4SPS_destroy(framingCtx->pSPSContext);
   3240                         framingCtx->pSPSContext = M4OSA_NULL;
   3241                     }
   3242                     /*Alpha blending structure*/
   3243                     if(framingCtx->alphaBlendingStruct  != M4OSA_NULL)
   3244                     {
   3245                         free(framingCtx->alphaBlendingStruct);
   3246                         framingCtx->alphaBlendingStruct = M4OSA_NULL;
   3247                     }
   3248 
   3249                     free(framingCtx);
   3250                     framingCtx = M4OSA_NULL;
   3251                 }
   3252 #else
   3253                 do
   3254                 {
   3255                     if(framingCtx != M4OSA_NULL) /* Bugfix 1.2.0: crash, trying to free non
   3256                     existant pointer */
   3257                     {
   3258                         if(framingCtx->FramingRgb != M4OSA_NULL)
   3259                         {
   3260                             free(framingCtx->FramingRgb->pac_data);
   3261                             framingCtx->FramingRgb->pac_data = M4OSA_NULL;
   3262                             free(framingCtx->FramingRgb);
   3263                             framingCtx->FramingRgb = M4OSA_NULL;
   3264                         }
   3265                         if(framingCtx->FramingYuv != M4OSA_NULL)
   3266                         {
   3267                             free(framingCtx->FramingYuv[0].pac_data);
   3268                             framingCtx->FramingYuv[0].pac_data = M4OSA_NULL;
   3269                             free(framingCtx->FramingYuv);
   3270                             framingCtx->FramingYuv = M4OSA_NULL;
   3271                         }
   3272                         framingCtx_save = framingCtx->pNext;
   3273                         free(framingCtx);
   3274                         framingCtx = M4OSA_NULL;
   3275                         framingCtx = framingCtx_save;
   3276                     }
   3277                     else
   3278                     {
   3279                         /*FB: bug fix P4ME00003002*/
   3280                         break;
   3281                     }
   3282                 } while(framingCtx_first != framingCtx);
   3283 #endif
   3284             }
   3285             else if( M4xVSS_kVideoEffectType_Fifties == pSettings->Effects[i].VideoEffectType)
   3286             {
   3287                 /* Free Fifties context */
   3288                 M4xVSS_FiftiesStruct* FiftiesCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
   3289 
   3290                 if(FiftiesCtx != M4OSA_NULL)
   3291                 {
   3292                     free(FiftiesCtx);
   3293                     FiftiesCtx = M4OSA_NULL;
   3294                 }
   3295 
   3296             }
   3297             else if( M4xVSS_kVideoEffectType_ColorRGB16 == pSettings->Effects[i].VideoEffectType
   3298                 || M4xVSS_kVideoEffectType_BlackAndWhite == pSettings->Effects[i].VideoEffectType
   3299                 || M4xVSS_kVideoEffectType_Pink == pSettings->Effects[i].VideoEffectType
   3300                 || M4xVSS_kVideoEffectType_Green == pSettings->Effects[i].VideoEffectType
   3301                 || M4xVSS_kVideoEffectType_Sepia == pSettings->Effects[i].VideoEffectType
   3302                 || M4xVSS_kVideoEffectType_Negative== pSettings->Effects[i].VideoEffectType
   3303                 || M4xVSS_kVideoEffectType_Gradient== pSettings->Effects[i].VideoEffectType)
   3304             {
   3305                 /* Free Color context */
   3306                 M4xVSS_ColorStruct* ColorCtx = pSettings->Effects[i].pExtVideoEffectFctCtxt;
   3307 
   3308                 if(ColorCtx != M4OSA_NULL)
   3309                 {
   3310                     free(ColorCtx);
   3311                     ColorCtx = M4OSA_NULL;
   3312                 }
   3313             }
   3314 
   3315             /* Free simple fields */
   3316             if(pSettings->Effects[i].xVSS.pFramingFilePath != M4OSA_NULL)
   3317             {
   3318                 free(pSettings->Effects[i].xVSS.pFramingFilePath);
   3319                 pSettings->Effects[i].xVSS.pFramingFilePath = M4OSA_NULL;
   3320             }
   3321             if(pSettings->Effects[i].xVSS.pFramingBuffer != M4OSA_NULL)
   3322             {
   3323                 free(pSettings->Effects[i].xVSS.pFramingBuffer);
   3324                 pSettings->Effects[i].xVSS.pFramingBuffer = M4OSA_NULL;
   3325             }
   3326             if(pSettings->Effects[i].xVSS.pTextBuffer != M4OSA_NULL)
   3327             {
   3328                 free(pSettings->Effects[i].xVSS.pTextBuffer);
   3329                 pSettings->Effects[i].xVSS.pTextBuffer = M4OSA_NULL;
   3330             }
   3331         }
   3332         free(pSettings->Effects);
   3333         pSettings->Effects = M4OSA_NULL;
   3334     }
   3335 
   3336     return M4NO_ERROR;
   3337 }
   3338 
   3339 M4OSA_ERR M4xVSS_freeCommand(M4OSA_Context pContext)
   3340 {
   3341     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   3342 //    M4OSA_UInt8 i,j;
   3343 
   3344     /* Free "local" BGM settings */
   3345     if(xVSS_context->pSettings->xVSS.pBGMtrack != M4OSA_NULL)
   3346     {
   3347         if(xVSS_context->pSettings->xVSS.pBGMtrack->pFile != M4OSA_NULL)
   3348         {
   3349             free(xVSS_context->pSettings->xVSS.pBGMtrack->pFile);
   3350             xVSS_context->pSettings->xVSS.pBGMtrack->pFile = M4OSA_NULL;
   3351         }
   3352         free(xVSS_context->pSettings->xVSS.pBGMtrack);
   3353         xVSS_context->pSettings->xVSS.pBGMtrack = M4OSA_NULL;
   3354     }
   3355 
   3356     M4xVSS_freeSettings(xVSS_context->pSettings);
   3357 
   3358     if(xVSS_context->pPTo3GPPparamsList != M4OSA_NULL)
   3359     {
   3360         M4xVSS_Pto3GPP_params* pParams = xVSS_context->pPTo3GPPparamsList;
   3361         M4xVSS_Pto3GPP_params* pParams_sauv;
   3362 
   3363         while(pParams != M4OSA_NULL)
   3364         {
   3365             if(pParams->pFileIn != M4OSA_NULL)
   3366             {
   3367                 free(pParams->pFileIn);
   3368                 pParams->pFileIn = M4OSA_NULL;
   3369             }
   3370             if(pParams->pFileOut != M4OSA_NULL)
   3371             {
   3372                 /* Delete temporary file */
   3373                 remove((const char *)pParams->pFileOut);
   3374                 free(pParams->pFileOut);
   3375                 pParams->pFileOut = M4OSA_NULL;
   3376             }
   3377             if(pParams->pFileTemp != M4OSA_NULL)
   3378             {
   3379                 /* Delete temporary file */
   3380 #ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
   3381                 remove((const char *)pParams->pFileTemp);
   3382                 free(pParams->pFileTemp);
   3383 #endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
   3384                 pParams->pFileTemp = M4OSA_NULL;
   3385             }
   3386             pParams_sauv = pParams;
   3387             pParams = pParams->pNext;
   3388             free(pParams_sauv);
   3389             pParams_sauv = M4OSA_NULL;
   3390         }
   3391     }
   3392 
   3393     if(xVSS_context->pMCSparamsList != M4OSA_NULL)
   3394     {
   3395         M4xVSS_MCS_params* pParams = xVSS_context->pMCSparamsList;
   3396         M4xVSS_MCS_params* pParams_sauv;
   3397 
   3398         while(pParams != M4OSA_NULL)
   3399         {
   3400             if(pParams->pFileIn != M4OSA_NULL)
   3401             {
   3402                 free(pParams->pFileIn);
   3403                 pParams->pFileIn = M4OSA_NULL;
   3404             }
   3405             if(pParams->pFileOut != M4OSA_NULL)
   3406             {
   3407                 /* Delete temporary file */
   3408                 remove((const char *)pParams->pFileOut);
   3409                 free(pParams->pFileOut);
   3410                 pParams->pFileOut = M4OSA_NULL;
   3411             }
   3412             if(pParams->pFileTemp != M4OSA_NULL)
   3413             {
   3414                 /* Delete temporary file */
   3415 #ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE
   3416                 remove((const char *)pParams->pFileTemp);
   3417                 free(pParams->pFileTemp);
   3418 #endif/*M4xVSS_RESERVED_MOOV_DISK_SPACE*/
   3419                 pParams->pFileTemp = M4OSA_NULL;
   3420             }
   3421             pParams_sauv = pParams;
   3422             pParams = pParams->pNext;
   3423             free(pParams_sauv);
   3424             pParams_sauv = M4OSA_NULL;
   3425         }
   3426     }
   3427 
   3428     if(xVSS_context->pcmPreviewFile != M4OSA_NULL)
   3429     {
   3430         free(xVSS_context->pcmPreviewFile);
   3431         xVSS_context->pcmPreviewFile = M4OSA_NULL;
   3432     }
   3433     if(xVSS_context->pSettings->pOutputFile != M4OSA_NULL
   3434         && xVSS_context->pOutputFile != M4OSA_NULL)
   3435     {
   3436         free(xVSS_context->pSettings->pOutputFile);
   3437         xVSS_context->pSettings->pOutputFile = M4OSA_NULL;
   3438         xVSS_context->pOutputFile = M4OSA_NULL;
   3439     }
   3440 
   3441     /* Reinit all context variables */
   3442     xVSS_context->previousClipNumber = 0;
   3443     xVSS_context->editingStep = M4xVSS_kMicroStateEditing;
   3444     xVSS_context->analyseStep = M4xVSS_kMicroStateAnalysePto3GPP;
   3445     xVSS_context->pPTo3GPPparamsList = M4OSA_NULL;
   3446     xVSS_context->pPTo3GPPcurrentParams = M4OSA_NULL;
   3447     xVSS_context->pMCSparamsList = M4OSA_NULL;
   3448     xVSS_context->pMCScurrentParams = M4OSA_NULL;
   3449     xVSS_context->tempFileIndex = 0;
   3450     xVSS_context->targetedTimescale = 0;
   3451 
   3452     return M4NO_ERROR;
   3453 }
   3454 
   3455 /**
   3456  ******************************************************************************
   3457  * prototype    M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext,
   3458  *                                    M4OSA_Char* pFile,
   3459  *                                    M4VIDEOEDITING_ClipProperties *pFileProperties)
   3460  *
   3461  * @brief    This function retrieve properties of an input 3GP file using MCS
   3462  * @note
   3463  * @param    pContext        (IN) The integrator own context
   3464  * @param    pFile            (IN) 3GP file to analyse
   3465  * @param    pFileProperties    (IN/OUT) Pointer on a structure that will contain
   3466  *                            the 3GP file properties
   3467  *
   3468  * @return    M4NO_ERROR:    No error
   3469  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   3470  ******************************************************************************
   3471  */
   3472 M4OSA_ERR M4xVSS_internalGetProperties(M4OSA_Context pContext, M4OSA_Char* pFile,
   3473                                        M4VIDEOEDITING_ClipProperties *pFileProperties)
   3474 {
   3475     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   3476     M4OSA_ERR err;
   3477     M4MCS_Context mcs_context;
   3478 
   3479     err = M4MCS_init(&mcs_context, xVSS_context->pFileReadPtr, xVSS_context->pFileWritePtr);
   3480     if(err != M4NO_ERROR)
   3481     {
   3482         M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_init: 0x%x", err);
   3483         return err;
   3484     }
   3485 
   3486     /*open the MCS in the "normal opening" mode to retrieve the exact duration*/
   3487     err = M4MCS_open_normalMode(mcs_context, pFile, M4VIDEOEDITING_kFileType_3GPP,
   3488         M4OSA_NULL, M4OSA_NULL);
   3489     if (err != M4NO_ERROR)
   3490     {
   3491         M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_open: 0x%x", err);
   3492         M4MCS_abort(mcs_context);
   3493         return err;
   3494     }
   3495 
   3496     err = M4MCS_getInputFileProperties(mcs_context, pFileProperties);
   3497     if(err != M4NO_ERROR)
   3498     {
   3499         M4OSA_TRACE1_1("Error in M4MCS_getInputFileProperties: 0x%x", err);
   3500         M4MCS_abort(mcs_context);
   3501         return err;
   3502     }
   3503 
   3504     err = M4MCS_abort(mcs_context);
   3505     if (err != M4NO_ERROR)
   3506     {
   3507         M4OSA_TRACE1_1("M4xVSS_internalGetProperties: Error in M4MCS_abort: 0x%x", err);
   3508         return err;
   3509     }
   3510 
   3511     return M4NO_ERROR;
   3512 }
   3513 
   3514 
   3515 /**
   3516  ******************************************************************************
   3517  * prototype    M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
   3518  *                                                M4OSA_UInt32* pTargetedTimeScale)
   3519  *
   3520  * @brief    This function retrieve targeted time scale
   3521  * @note
   3522  * @param    pContext            (IN)    The integrator own context
   3523  * @param    pTargetedTimeScale    (OUT)    Targeted time scale
   3524  *
   3525  * @return    M4NO_ERROR:    No error
   3526  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   3527  ******************************************************************************
   3528  */
   3529 M4OSA_ERR M4xVSS_internalGetTargetedTimeScale(M4OSA_Context pContext,
   3530                                                  M4VSS3GPP_EditSettings* pSettings,
   3531                                                   M4OSA_UInt32* pTargetedTimeScale)
   3532 {
   3533     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   3534     M4OSA_ERR err;
   3535     M4OSA_UInt32 totalDuration = 0;
   3536     M4OSA_UInt8 i = 0;
   3537     M4OSA_UInt32 tempTimeScale = 0, tempDuration = 0;
   3538 
   3539     for(i=0;i<pSettings->uiClipNumber;i++)
   3540     {
   3541         /*search timescale only in mpeg4 case*/
   3542         if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_3GPP
   3543             || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_MP4
   3544             || pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_M4V)
   3545         {
   3546             M4VIDEOEDITING_ClipProperties fileProperties;
   3547 
   3548             /*UTF conversion support*/
   3549             M4OSA_Char* pDecodedPath = M4OSA_NULL;
   3550 
   3551             /**
   3552             * UTF conversion: convert into the customer format, before being used*/
   3553             pDecodedPath = pSettings->pClipList[i]->pFile;
   3554 
   3555             if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
   3556                 && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
   3557             {
   3558                 M4OSA_UInt32 length = 0;
   3559                 err = M4xVSS_internalConvertFromUTF8(xVSS_context,
   3560                      (M4OSA_Void*) pSettings->pClipList[i]->pFile,
   3561                         (M4OSA_Void*) xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
   3562                              &length);
   3563                 if(err != M4NO_ERROR)
   3564                 {
   3565                     M4OSA_TRACE1_1("M4xVSS_Init:\
   3566                          M4xVSS_internalConvertToUTF8 returns err: 0x%x",err);
   3567                     return err;
   3568                 }
   3569                 pDecodedPath = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
   3570             }
   3571 
   3572             /*End of the conversion: use the decoded path*/
   3573             err = M4xVSS_internalGetProperties(xVSS_context, pDecodedPath, &fileProperties);
   3574 
   3575             /*get input file properties*/
   3576             /*err = M4xVSS_internalGetProperties(xVSS_context, pSettings->\
   3577                 pClipList[i]->pFile, &fileProperties);*/
   3578             if(M4NO_ERROR != err)
   3579             {
   3580                 M4OSA_TRACE1_1("M4xVSS_internalGetTargetedTimeScale:\
   3581                      M4xVSS_internalGetProperties returned: 0x%x", err);
   3582                 return err;
   3583             }
   3584             if(fileProperties.VideoStreamType == M4VIDEOEDITING_kMPEG4)
   3585             {
   3586                 if(pSettings->pClipList[i]->uiEndCutTime > 0)
   3587                 {
   3588                     if(tempDuration < (pSettings->pClipList[i]->uiEndCutTime \
   3589                         - pSettings->pClipList[i]->uiBeginCutTime))
   3590                     {
   3591                         tempTimeScale = fileProperties.uiVideoTimeScale;
   3592                         tempDuration = (pSettings->pClipList[i]->uiEndCutTime\
   3593                              - pSettings->pClipList[i]->uiBeginCutTime);
   3594                     }
   3595                 }
   3596                 else
   3597                 {
   3598                     if(tempDuration < (fileProperties.uiClipDuration\
   3599                          - pSettings->pClipList[i]->uiBeginCutTime))
   3600                     {
   3601                         tempTimeScale = fileProperties.uiVideoTimeScale;
   3602                         tempDuration = (fileProperties.uiClipDuration\
   3603                              - pSettings->pClipList[i]->uiBeginCutTime);
   3604                     }
   3605                 }
   3606             }
   3607         }
   3608         if(pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_ARGB8888)
   3609         {
   3610             /*the timescale is 30 for PTO3GP*/
   3611             *pTargetedTimeScale = 30;
   3612             return M4NO_ERROR;
   3613 
   3614         }
   3615     }
   3616 
   3617     if(tempTimeScale >= 30)/*Define a minimum time scale, otherwise if the timescale is not
   3618     enough, there will be an infinite loop in the shell encoder*/
   3619     {
   3620         *pTargetedTimeScale = tempTimeScale;
   3621     }
   3622     else
   3623     {
   3624         *pTargetedTimeScale = 30;
   3625     }
   3626 
   3627     return M4NO_ERROR;
   3628 }
   3629 
   3630 
   3631 /**
   3632  ******************************************************************************
   3633  * prototype    M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
   3634  *                                                    M4VIFI_ImagePlane *PlaneIn,
   3635  *                                                    M4VIFI_ImagePlane *PlaneOut,
   3636  *                                                    M4VSS3GPP_ExternalProgress *pProgress,
   3637  *                                                    M4OSA_UInt32 uiEffectKind)
   3638  *
   3639  * @brief    This function apply a color effect on an input YUV420 planar frame
   3640  * @note
   3641  * @param    pFunctionContext(IN) Contains which color to apply (not very clean ...)
   3642  * @param    PlaneIn            (IN) Input YUV420 planar
   3643  * @param    PlaneOut        (IN/OUT) Output YUV420 planar
   3644  * @param    pProgress        (IN/OUT) Progress indication (0-100)
   3645  * @param    uiEffectKind    (IN) Unused
   3646  *
   3647  * @return    M4VIFI_OK:    No error
   3648  ******************************************************************************
   3649  */
   3650 M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
   3651                                              M4VIFI_ImagePlane *PlaneIn,
   3652                                              M4VIFI_ImagePlane *PlaneOut,
   3653                                              M4VSS3GPP_ExternalProgress *pProgress,
   3654                                              M4OSA_UInt32 uiEffectKind)
   3655 {
   3656     M4VIFI_Int32 plane_number;
   3657     M4VIFI_UInt32 i,j;
   3658     M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
   3659     M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext;
   3660 
   3661     for (plane_number = 0; plane_number < 3; plane_number++)
   3662     {
   3663         p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
   3664         p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
   3665         for (i = 0; i < PlaneOut[plane_number].u_height; i++)
   3666         {
   3667             /**
   3668              * Chrominance */
   3669             if(plane_number==1 || plane_number==2)
   3670             {
   3671                 //switch ((M4OSA_UInt32)pFunctionContext)
   3672                 // commented because a structure for the effects context exist
   3673                 switch (ColorContext->colorEffectType)
   3674                 {
   3675                     case M4xVSS_kVideoEffectType_BlackAndWhite:
   3676                         memset((void *)p_buf_dest,128,
   3677                          PlaneIn[plane_number].u_width);
   3678                         break;
   3679                     case M4xVSS_kVideoEffectType_Pink:
   3680                         memset((void *)p_buf_dest,255,
   3681                          PlaneIn[plane_number].u_width);
   3682                         break;
   3683                     case M4xVSS_kVideoEffectType_Green:
   3684                         memset((void *)p_buf_dest,0,
   3685                          PlaneIn[plane_number].u_width);
   3686                         break;
   3687                     case M4xVSS_kVideoEffectType_Sepia:
   3688                         if(plane_number==1)
   3689                         {
   3690                             memset((void *)p_buf_dest,117,
   3691                              PlaneIn[plane_number].u_width);
   3692                         }
   3693                         else
   3694                         {
   3695                             memset((void *)p_buf_dest,139,
   3696                              PlaneIn[plane_number].u_width);
   3697                         }
   3698                         break;
   3699                     case M4xVSS_kVideoEffectType_Negative:
   3700                         memcpy((void *)p_buf_dest,
   3701                          (void *)p_buf_src ,PlaneOut[plane_number].u_width);
   3702                         break;
   3703 
   3704                     case M4xVSS_kVideoEffectType_ColorRGB16:
   3705                         {
   3706                             M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
   3707 
   3708                             /*first get the r, g, b*/
   3709                             b = (ColorContext->rgb16ColorData &  0x001f);
   3710                             g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
   3711                             r = (ColorContext->rgb16ColorData &  0xf800)>>11;
   3712 
   3713                             /*keep y, but replace u and v*/
   3714                             if(plane_number==1)
   3715                             {
   3716                                 /*then convert to u*/
   3717                                 u = U16(r, g, b);
   3718                                 memset((void *)p_buf_dest,(M4OSA_UInt8)u,
   3719                                  PlaneIn[plane_number].u_width);
   3720                             }
   3721                             if(plane_number==2)
   3722                             {
   3723                                 /*then convert to v*/
   3724                                 v = V16(r, g, b);
   3725                                 memset((void *)p_buf_dest, (M4OSA_UInt8)v,
   3726                                  PlaneIn[plane_number].u_width);
   3727                             }
   3728                         }
   3729                         break;
   3730                     case M4xVSS_kVideoEffectType_Gradient:
   3731                         {
   3732                             M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
   3733 
   3734                             /*first get the r, g, b*/
   3735                             b = (ColorContext->rgb16ColorData &  0x001f);
   3736                             g = (ColorContext->rgb16ColorData &  0x07e0)>>5;
   3737                             r = (ColorContext->rgb16ColorData &  0xf800)>>11;
   3738 
   3739                             /*for color gradation*/
   3740                             b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height));
   3741                             g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));
   3742                             r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));
   3743 
   3744                             /*keep y, but replace u and v*/
   3745                             if(plane_number==1)
   3746                             {
   3747                                 /*then convert to u*/
   3748                                 u = U16(r, g, b);
   3749                                 memset((void *)p_buf_dest,(M4OSA_UInt8)u,
   3750                                  PlaneIn[plane_number].u_width);
   3751                             }
   3752                             if(plane_number==2)
   3753                             {
   3754                                 /*then convert to v*/
   3755                                 v = V16(r, g, b);
   3756                                 memset((void *)p_buf_dest,(M4OSA_UInt8)v,
   3757                                  PlaneIn[plane_number].u_width);
   3758                             }
   3759                         }
   3760                         break;
   3761                         default:
   3762                         break;
   3763                 }
   3764             }
   3765             /**
   3766              * Luminance */
   3767             else
   3768             {
   3769                 //switch ((M4OSA_UInt32)pFunctionContext)
   3770                 // commented because a structure for the effects context exist
   3771                 switch (ColorContext->colorEffectType)
   3772                 {
   3773                 case M4xVSS_kVideoEffectType_Negative:
   3774                     for(j=0;j<PlaneOut[plane_number].u_width;j++)
   3775                     {
   3776                             p_buf_dest[j] = 255 - p_buf_src[j];
   3777                     }
   3778                     break;
   3779                 default:
   3780                     memcpy((void *)p_buf_dest,
   3781                      (void *)p_buf_src ,PlaneOut[plane_number].u_width);
   3782                     break;
   3783                 }
   3784             }
   3785             p_buf_src += PlaneIn[plane_number].u_stride;
   3786             p_buf_dest += PlaneOut[plane_number].u_stride;
   3787         }
   3788     }
   3789 
   3790     return M4VIFI_OK;
   3791 }
   3792 
   3793 /**
   3794  ******************************************************************************
   3795  * prototype    M4VSS3GPP_externalVideoEffectFraming(M4OSA_Void *pFunctionContext,
   3796  *                                                    M4VIFI_ImagePlane *PlaneIn,
   3797  *                                                    M4VIFI_ImagePlane *PlaneOut,
   3798  *                                                    M4VSS3GPP_ExternalProgress *pProgress,
   3799  *                                                    M4OSA_UInt32 uiEffectKind)
   3800  *
   3801  * @brief    This function add a fixed or animated image on an input YUV420 planar frame
   3802  * @note
   3803  * @param    pFunctionContext(IN) Contains which color to apply (not very clean ...)
   3804  * @param    PlaneIn            (IN) Input YUV420 planar
   3805  * @param    PlaneOut        (IN/OUT) Output YUV420 planar
   3806  * @param    pProgress        (IN/OUT) Progress indication (0-100)
   3807  * @param    uiEffectKind    (IN) Unused
   3808  *
   3809  * @return    M4VIFI_OK:    No error
   3810  ******************************************************************************
   3811  */
   3812 M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData,
   3813                                                 M4VIFI_ImagePlane PlaneIn[3],
   3814                                                 M4VIFI_ImagePlane *PlaneOut,
   3815                                                 M4VSS3GPP_ExternalProgress *pProgress,
   3816                                                 M4OSA_UInt32 uiEffectKind )
   3817 {
   3818     M4VIFI_UInt32 x,y;
   3819 
   3820     M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;
   3821     M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;
   3822     M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;
   3823 
   3824     M4xVSS_FramingStruct* Framing = M4OSA_NULL;
   3825     M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;
   3826     M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;
   3827 
   3828     M4VIFI_UInt8 *p_out0;
   3829     M4VIFI_UInt8 *p_out1;
   3830     M4VIFI_UInt8 *p_out2;
   3831 
   3832     M4VIFI_UInt32 topleft[2];
   3833 
   3834     M4OSA_UInt8 transparent1 = (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
   3835     M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
   3836 
   3837 #ifndef DECODE_GIF_ON_SAVING
   3838     Framing = (M4xVSS_FramingStruct *)userData;
   3839     currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;
   3840     FramingRGB = Framing->FramingRgb->pac_data;
   3841 #endif /*DECODE_GIF_ON_SAVING*/
   3842 
   3843     /*FB*/
   3844 #ifdef DECODE_GIF_ON_SAVING
   3845     M4OSA_ERR err;
   3846     Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
   3847     currentFraming = (M4xVSS_FramingStruct *)Framing;
   3848     FramingRGB = Framing->FramingRgb->pac_data;
   3849 #endif /*DECODE_GIF_ON_SAVING*/
   3850     /*end FB*/
   3851 
   3852     /**
   3853      * Initialize input / output plane pointers */
   3854     p_in_Y += PlaneIn[0].u_topleft;
   3855     p_in_U += PlaneIn[1].u_topleft;
   3856     p_in_V += PlaneIn[2].u_topleft;
   3857 
   3858     p_out0 = PlaneOut[0].pac_data;
   3859     p_out1 = PlaneOut[1].pac_data;
   3860     p_out2 = PlaneOut[2].pac_data;
   3861 
   3862     /**
   3863      * Depending on time, initialize Framing frame to use */
   3864     if(Framing->previousClipTime == -1)
   3865     {
   3866         Framing->previousClipTime = pProgress->uiOutputTime;
   3867     }
   3868 
   3869     /**
   3870      * If the current clip time has reach the duration of one frame of the framing picture
   3871      * we need to step to next framing picture */
   3872 
   3873     Framing->previousClipTime = pProgress->uiOutputTime;
   3874     FramingRGB = currentFraming->FramingRgb->pac_data;
   3875     topleft[0] = currentFraming->topleft_x;
   3876     topleft[1] = currentFraming->topleft_y;
   3877 
   3878     for( x=0 ;x < PlaneIn[0].u_height ; x++)
   3879     {
   3880         for( y=0 ;y < PlaneIn[0].u_width ; y++)
   3881         {
   3882             /**
   3883              * To handle framing with input size != output size
   3884              * Framing is applyed if coordinates matches between framing/topleft and input plane */
   3885             if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width)  &&
   3886                 y >= topleft[0] &&
   3887                 x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&
   3888                 x >= topleft[1])
   3889             {
   3890                 /*Alpha blending support*/
   3891                 M4OSA_Float alphaBlending = 1;
   3892                 M4xVSS_internalEffectsAlphaBlending*  alphaBlendingStruct =\
   3893                  (M4xVSS_internalEffectsAlphaBlending*)\
   3894                     ((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;
   3895 
   3896                 if(alphaBlendingStruct != M4OSA_NULL)
   3897                 {
   3898                     if(pProgress->uiProgress \
   3899                     < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
   3900                     {
   3901                         if(alphaBlendingStruct->m_fadeInTime == 0) {
   3902                             alphaBlending = alphaBlendingStruct->m_start / 100;
   3903                         } else {
   3904                             alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle\
   3905                              - alphaBlendingStruct->m_start)\
   3906                                 *pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
   3907                             alphaBlending += alphaBlendingStruct->m_start;
   3908                             alphaBlending /= 100;
   3909                         }
   3910                     }
   3911                     else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->\
   3912                     m_fadeInTime*10) && pProgress->uiProgress < 1000\
   3913                      - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
   3914                     {
   3915                         alphaBlending = (M4OSA_Float)\
   3916                         ((M4OSA_Float)alphaBlendingStruct->m_middle/100);
   3917                     }
   3918                     else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)\
   3919                     (alphaBlendingStruct->m_fadeOutTime*10))
   3920                     {
   3921                         if(alphaBlendingStruct->m_fadeOutTime == 0) {
   3922                             alphaBlending = alphaBlendingStruct->m_end / 100;
   3923                         } else {
   3924                             alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle \
   3925                             - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)\
   3926                             /(alphaBlendingStruct->m_fadeOutTime*10);
   3927                             alphaBlending += alphaBlendingStruct->m_end;
   3928                             alphaBlending /= 100;
   3929                         }
   3930                     }
   3931                 }
   3932                 /**/
   3933 
   3934                 if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))
   3935                 {
   3936                     *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));
   3937                     *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
   3938                         (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));
   3939                     *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
   3940                         (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));
   3941                 }
   3942                 else
   3943                 {
   3944                     *( p_out0+y+x*PlaneOut[0].u_stride)=
   3945                         (*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])\
   3946                             +(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;
   3947                     *( p_out0+y+x*PlaneOut[0].u_stride)+=
   3948                         (*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);
   3949                     *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
   3950                         (*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)\
   3951                             +((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))\
   3952                                 *alphaBlending;
   3953                     *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=
   3954                         (*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);
   3955                     *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
   3956                         (*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)\
   3957                             +((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))\
   3958                                 *alphaBlending;
   3959                     *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=
   3960                         (*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);
   3961                 }
   3962                 if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&
   3963                     y == PlaneIn[0].u_width-1)
   3964                 {
   3965                     FramingRGB = FramingRGB + 2 \
   3966                         * (topleft[0] + currentFraming->FramingYuv[0].u_width \
   3967                             - PlaneIn[0].u_width + 1);
   3968                 }
   3969                 else
   3970                 {
   3971                     FramingRGB = FramingRGB + 2;
   3972                 }
   3973             }
   3974             /**
   3975              * Just copy input plane to output plane */
   3976             else
   3977             {
   3978                 *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
   3979                 *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=
   3980                     *(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride);
   3981                 *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=
   3982                     *(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride);
   3983             }
   3984         }
   3985     }
   3986 
   3987 
   3988     return M4VIFI_OK;
   3989 }
   3990 
   3991 
   3992 /**
   3993  ******************************************************************************
   3994  * prototype    M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext,
   3995  *                                                    M4VIFI_ImagePlane *PlaneIn,
   3996  *                                                    M4VIFI_ImagePlane *PlaneOut,
   3997  *                                                    M4VSS3GPP_ExternalProgress *pProgress,
   3998  *                                                    M4OSA_UInt32 uiEffectKind)
   3999  *
   4000  * @brief    This function make a video look as if it was taken in the fifties
   4001  * @note
   4002  * @param    pUserData       (IN) Context
   4003  * @param    pPlaneIn        (IN) Input YUV420 planar
   4004  * @param    pPlaneOut        (IN/OUT) Output YUV420 planar
   4005  * @param    pProgress        (IN/OUT) Progress indication (0-100)
   4006  * @param    uiEffectKind    (IN) Unused
   4007  *
   4008  * @return    M4VIFI_OK:            No error
   4009  * @return  M4ERR_PARAMETER:    pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only)
   4010  ******************************************************************************
   4011  */
   4012 M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData,
   4013                                                 M4VIFI_ImagePlane *pPlaneIn,
   4014                                                 M4VIFI_ImagePlane *pPlaneOut,
   4015                                                 M4VSS3GPP_ExternalProgress *pProgress,
   4016                                                 M4OSA_UInt32 uiEffectKind )
   4017 {
   4018     M4VIFI_UInt32 x, y, xShift;
   4019     M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data;
   4020     M4VIFI_UInt8 *pOutY, *pInYbegin;
   4021     M4VIFI_UInt8 *pInCr,* pOutCr;
   4022     M4VIFI_Int32 plane_number;
   4023 
   4024     /* Internal context*/
   4025     M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData;
   4026 
   4027     /* Check the inputs (debug only) */
   4028     M4OSA_DEBUG_IF2((pFiftiesData == M4OSA_NULL),M4ERR_PARAMETER,
   4029          "xVSS: p_FiftiesData is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
   4030     M4OSA_DEBUG_IF2((pPlaneOut == M4OSA_NULL),M4ERR_PARAMETER,
   4031          "xVSS: p_PlaneOut is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
   4032     M4OSA_DEBUG_IF2((pProgress == M4OSA_NULL),M4ERR_PARAMETER,
   4033         "xVSS: p_Progress is M4OSA_NULL in M4VSS3GPP_externalVideoEffectFifties");
   4034 
   4035     /* Initialize input / output plane pointers */
   4036     pInY += pPlaneIn[0].u_topleft;
   4037     pOutY = pPlaneOut[0].pac_data;
   4038     pInYbegin  = pInY;
   4039 
   4040     /* Initialize the random */
   4041     if(p_FiftiesData->previousClipTime < 0)
   4042     {
   4043         M4OSA_randInit();
   4044         M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
   4045         M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
   4046         p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
   4047     }
   4048 
   4049     /* Choose random values if we have reached the duration of a partial effect */
   4050     else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime)\
   4051          > p_FiftiesData->fiftiesEffectDuration)
   4052     {
   4053         M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
   4054         M4OSA_rand((M4OSA_Int32 *)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
   4055         p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
   4056     }
   4057 
   4058     /* Put in Sepia the chrominance */
   4059     for (plane_number = 1; plane_number < 3; plane_number++)
   4060     {
   4061         pInCr  = pPlaneIn[plane_number].pac_data  + pPlaneIn[plane_number].u_topleft;
   4062         pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft;
   4063 
   4064         for (x = 0; x < pPlaneOut[plane_number].u_height; x++)
   4065         {
   4066             if (1 == plane_number)
   4067                 memset((void *)pOutCr, 117,pPlaneIn[plane_number].u_width); /* U value */
   4068             else
   4069                 memset((void *)pOutCr, 139,pPlaneIn[plane_number].u_width); /* V value */
   4070 
   4071             pInCr  += pPlaneIn[plane_number].u_stride;
   4072             pOutCr += pPlaneOut[plane_number].u_stride;
   4073         }
   4074     }
   4075 
   4076     /* Compute the new pixels values */
   4077     for( x = 0 ; x < pPlaneIn[0].u_height ; x++)
   4078     {
   4079         M4VIFI_UInt8 *p_outYtmp, *p_inYtmp;
   4080 
   4081         /* Compute the xShift (random value) */
   4082         if (0 == (p_FiftiesData->shiftRandomValue % 5 ))
   4083             xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1);
   4084         else
   4085             xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) \
   4086                 % (pPlaneIn[0].u_height - 1);
   4087 
   4088         /* Initialize the pointers */
   4089         p_outYtmp = pOutY + 1;                                    /* yShift of 1 pixel */
   4090         p_inYtmp  = pInYbegin + (xShift * pPlaneIn[0].u_stride);  /* Apply the xShift */
   4091 
   4092         for( y = 0 ; y < pPlaneIn[0].u_width ; y++)
   4093         {
   4094             /* Set Y value */
   4095             if (xShift > (pPlaneIn[0].u_height - 4))
   4096                 *p_outYtmp = 40;        /* Add some horizontal black lines between the
   4097                                         two parts of the image */
   4098             else if ( y == p_FiftiesData->stripeRandomValue)
   4099                 *p_outYtmp = 90;        /* Add a random vertical line for the bulk */
   4100             else
   4101                 *p_outYtmp = *p_inYtmp;
   4102 
   4103 
   4104             /* Go to the next pixel */
   4105             p_outYtmp++;
   4106             p_inYtmp++;
   4107 
   4108             /* Restart at the beginning of the line for the last pixel*/
   4109             if (y == (pPlaneIn[0].u_width - 2))
   4110                 p_outYtmp = pOutY;
   4111         }
   4112 
   4113         /* Go to the next line */
   4114         pOutY += pPlaneOut[0].u_stride;
   4115     }
   4116 
   4117     return M4VIFI_OK;
   4118 }
   4119 
   4120 /**
   4121  ******************************************************************************
   4122  * M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom( )
   4123  * @brief    Zoom in/out video effect functions.
   4124  * @note    The external video function is used only if VideoEffectType is set to
   4125  * M4VSS3GPP_kVideoEffectType_ZoomIn or M4VSS3GPP_kVideoEffectType_ZoomOut.
   4126  *
   4127  * @param   pFunctionContext    (IN) The function context, previously set by the integrator
   4128  * @param    pInputPlanes        (IN) Input YUV420 image: pointer to an array of three valid
   4129  *                                    image planes (Y, U and V)
   4130  * @param    pOutputPlanes        (IN/OUT) Output (filtered) YUV420 image: pointer to an array of
   4131  *                                        three valid image planes (Y, U and V)
   4132  * @param    pProgress            (IN) Set of information about the video transition progress.
   4133  * @return    M4NO_ERROR:            No error
   4134  * @return    M4ERR_PARAMETER:    At least one parameter is M4OSA_NULL (debug only)
   4135  ******************************************************************************
   4136  */
   4137 
   4138 M4OSA_ERR M4VSS3GPP_externalVideoEffectZoom(
   4139     M4OSA_Void *pFunctionContext,
   4140     M4VIFI_ImagePlane *pInputPlanes,
   4141     M4VIFI_ImagePlane *pOutputPlanes,
   4142     M4VSS3GPP_ExternalProgress *pProgress,
   4143     M4OSA_UInt32 uiEffectKind
   4144 )
   4145 {
   4146     M4OSA_UInt32 boxWidth;
   4147     M4OSA_UInt32 boxHeight;
   4148     M4OSA_UInt32 boxPosX;
   4149     M4OSA_UInt32 boxPosY;
   4150     M4OSA_UInt32 ratio = 0;
   4151     /*  * 1.189207 between ratio */
   4152     /* zoom between x1 and x16 */
   4153     M4OSA_UInt32 ratiotab[17] ={1024,1218,1448,1722,2048,2435,2896,3444,4096,4871,5793,\
   4154                                 6889,8192,9742,11585,13777,16384};
   4155     M4OSA_UInt32 ik;
   4156 
   4157     M4VIFI_ImagePlane boxPlane[3];
   4158 
   4159     if(M4xVSS_kVideoEffectType_ZoomOut == (M4OSA_UInt32)pFunctionContext)
   4160     {
   4161         //ratio = 16 - (15 * pProgress->uiProgress)/1000;
   4162         ratio = 16 - pProgress->uiProgress / 66 ;
   4163     }
   4164     else if(M4xVSS_kVideoEffectType_ZoomIn == (M4OSA_UInt32)pFunctionContext)
   4165     {
   4166         //ratio = 1 + (15 * pProgress->uiProgress)/1000;
   4167         ratio = 1 + pProgress->uiProgress / 66 ;
   4168     }
   4169 
   4170     for(ik=0;ik<3;ik++){
   4171 
   4172         boxPlane[ik].u_stride = pInputPlanes[ik].u_stride;
   4173         boxPlane[ik].pac_data = pInputPlanes[ik].pac_data;
   4174 
   4175         boxHeight = ( pInputPlanes[ik].u_height << 10 ) / ratiotab[ratio];
   4176         boxWidth = ( pInputPlanes[ik].u_width << 10 ) / ratiotab[ratio];
   4177         boxPlane[ik].u_height = (boxHeight)&(~1);
   4178         boxPlane[ik].u_width = (boxWidth)&(~1);
   4179 
   4180         boxPosY = (pInputPlanes[ik].u_height >> 1) - (boxPlane[ik].u_height >> 1);
   4181         boxPosX = (pInputPlanes[ik].u_width >> 1) - (boxPlane[ik].u_width >> 1);
   4182         boxPlane[ik].u_topleft = boxPosY * boxPlane[ik].u_stride + boxPosX;
   4183     }
   4184 
   4185     M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, (M4VIFI_ImagePlane*)&boxPlane, pOutputPlanes);
   4186 
   4187     /**
   4188      * Return */
   4189     return(M4NO_ERROR);
   4190 }
   4191 
   4192 /**
   4193  ******************************************************************************
   4194  * prototype    M4xVSS_AlphaMagic( M4OSA_Void *userData,
   4195  *                                    M4VIFI_ImagePlane PlaneIn1[3],
   4196  *                                    M4VIFI_ImagePlane PlaneIn2[3],
   4197  *                                    M4VIFI_ImagePlane *PlaneOut,
   4198  *                                    M4VSS3GPP_ExternalProgress *pProgress,
   4199  *                                    M4OSA_UInt32 uiTransitionKind)
   4200  *
   4201  * @brief    This function apply a color effect on an input YUV420 planar frame
   4202  * @note
   4203  * @param    userData        (IN) Contains a pointer on a settings structure
   4204  * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
   4205  * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
   4206  * @param    PlaneOut        (IN/OUT) Output YUV420 planar
   4207  * @param    pProgress        (IN/OUT) Progress indication (0-100)
   4208  * @param    uiTransitionKind(IN) Unused
   4209  *
   4210  * @return    M4VIFI_OK:    No error
   4211  ******************************************************************************
   4212  */
   4213 M4OSA_ERR M4xVSS_AlphaMagic( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
   4214                              M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
   4215                              M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiTransitionKind)
   4216 {
   4217 
   4218     M4OSA_ERR err;
   4219 
   4220     M4xVSS_internal_AlphaMagicSettings* alphaContext;
   4221     M4VIFI_Int32 alphaProgressLevel;
   4222 
   4223     M4VIFI_ImagePlane* planeswap;
   4224     M4VIFI_UInt32 x,y;
   4225 
   4226     M4VIFI_UInt8 *p_out0;
   4227     M4VIFI_UInt8 *p_out1;
   4228     M4VIFI_UInt8 *p_out2;
   4229     M4VIFI_UInt8 *alphaMask;
   4230     /* "Old image" */
   4231     M4VIFI_UInt8 *p_in1_Y;
   4232     M4VIFI_UInt8 *p_in1_U;
   4233     M4VIFI_UInt8 *p_in1_V;
   4234     /* "New image" */
   4235     M4VIFI_UInt8 *p_in2_Y;
   4236     M4VIFI_UInt8 *p_in2_U;
   4237     M4VIFI_UInt8 *p_in2_V;
   4238 
   4239     err = M4NO_ERROR;
   4240 
   4241     alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
   4242 
   4243     alphaProgressLevel = (pProgress->uiProgress * 128)/1000;
   4244 
   4245     if( alphaContext->isreverse != M4OSA_FALSE)
   4246     {
   4247         alphaProgressLevel = 128 - alphaProgressLevel;
   4248         planeswap = PlaneIn1;
   4249         PlaneIn1 = PlaneIn2;
   4250         PlaneIn2 = planeswap;
   4251     }
   4252 
   4253     p_out0 = PlaneOut[0].pac_data;
   4254     p_out1 = PlaneOut[1].pac_data;
   4255     p_out2 = PlaneOut[2].pac_data;
   4256 
   4257     alphaMask = alphaContext->pPlane->pac_data;
   4258 
   4259     /* "Old image" */
   4260     p_in1_Y = PlaneIn1[0].pac_data;
   4261     p_in1_U = PlaneIn1[1].pac_data;
   4262     p_in1_V = PlaneIn1[2].pac_data;
   4263     /* "New image" */
   4264     p_in2_Y = PlaneIn2[0].pac_data;
   4265     p_in2_U = PlaneIn2[1].pac_data;
   4266     p_in2_V = PlaneIn2[2].pac_data;
   4267 
   4268      /**
   4269      * For each column ... */
   4270     for( y=0; y<PlaneOut->u_height; y++ )
   4271     {
   4272         /**
   4273          * ... and each row of the alpha mask */
   4274         for( x=0; x<PlaneOut->u_width; x++ )
   4275         {
   4276             /**
   4277              * If the value of the current pixel of the alpha mask is > to the current time
   4278              * ( current time is normalized on [0-255] ) */
   4279             if( alphaProgressLevel < alphaMask[x+y*PlaneOut->u_width] )
   4280             {
   4281                 /* We keep "old image" in output plane */
   4282                 *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
   4283                 *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
   4284                     *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
   4285                 *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
   4286                     *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
   4287             }
   4288             else
   4289             {
   4290                 /* We take "new image" in output plane */
   4291                 *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
   4292                 *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
   4293                     *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
   4294                 *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
   4295                     *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
   4296             }
   4297         }
   4298     }
   4299 
   4300     return(err);
   4301 }
   4302 
   4303 /**
   4304  ******************************************************************************
   4305  * prototype    M4xVSS_AlphaMagicBlending( M4OSA_Void *userData,
   4306  *                                    M4VIFI_ImagePlane PlaneIn1[3],
   4307  *                                    M4VIFI_ImagePlane PlaneIn2[3],
   4308  *                                    M4VIFI_ImagePlane *PlaneOut,
   4309  *                                    M4VSS3GPP_ExternalProgress *pProgress,
   4310  *                                    M4OSA_UInt32 uiTransitionKind)
   4311  *
   4312  * @brief    This function apply a color effect on an input YUV420 planar frame
   4313  * @note
   4314  * @param    userData        (IN) Contains a pointer on a settings structure
   4315  * @param    PlaneIn1        (IN) Input YUV420 planar from video 1
   4316  * @param    PlaneIn2        (IN) Input YUV420 planar from video 2
   4317  * @param    PlaneOut        (IN/OUT) Output YUV420 planar
   4318  * @param    pProgress        (IN/OUT) Progress indication (0-100)
   4319  * @param    uiTransitionKind(IN) Unused
   4320  *
   4321  * @return    M4VIFI_OK:    No error
   4322  ******************************************************************************
   4323  */
   4324 M4OSA_ERR M4xVSS_AlphaMagicBlending( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
   4325                                      M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
   4326                                      M4VSS3GPP_ExternalProgress *pProgress,
   4327                                      M4OSA_UInt32 uiTransitionKind)
   4328 {
   4329     M4OSA_ERR err;
   4330 
   4331     M4xVSS_internal_AlphaMagicSettings* alphaContext;
   4332     M4VIFI_Int32 alphaProgressLevel;
   4333     M4VIFI_Int32 alphaBlendLevelMin;
   4334     M4VIFI_Int32 alphaBlendLevelMax;
   4335     M4VIFI_Int32 alphaBlendRange;
   4336 
   4337     M4VIFI_ImagePlane* planeswap;
   4338     M4VIFI_UInt32 x,y;
   4339     M4VIFI_Int32 alphaMaskValue;
   4340 
   4341     M4VIFI_UInt8 *p_out0;
   4342     M4VIFI_UInt8 *p_out1;
   4343     M4VIFI_UInt8 *p_out2;
   4344     M4VIFI_UInt8 *alphaMask;
   4345     /* "Old image" */
   4346     M4VIFI_UInt8 *p_in1_Y;
   4347     M4VIFI_UInt8 *p_in1_U;
   4348     M4VIFI_UInt8 *p_in1_V;
   4349     /* "New image" */
   4350     M4VIFI_UInt8 *p_in2_Y;
   4351     M4VIFI_UInt8 *p_in2_U;
   4352     M4VIFI_UInt8 *p_in2_V;
   4353 
   4354 
   4355     err = M4NO_ERROR;
   4356 
   4357     alphaContext = (M4xVSS_internal_AlphaMagicSettings*)userData;
   4358 
   4359     alphaProgressLevel = (pProgress->uiProgress * 128)/1000;
   4360 
   4361     if( alphaContext->isreverse != M4OSA_FALSE)
   4362     {
   4363         alphaProgressLevel = 128 - alphaProgressLevel;
   4364         planeswap = PlaneIn1;
   4365         PlaneIn1 = PlaneIn2;
   4366         PlaneIn2 = planeswap;
   4367     }
   4368 
   4369     alphaBlendLevelMin = alphaProgressLevel-alphaContext->blendingthreshold;
   4370 
   4371     alphaBlendLevelMax = alphaProgressLevel+alphaContext->blendingthreshold;
   4372 
   4373     alphaBlendRange = (alphaContext->blendingthreshold)*2;
   4374 
   4375     p_out0 = PlaneOut[0].pac_data;
   4376     p_out1 = PlaneOut[1].pac_data;
   4377     p_out2 = PlaneOut[2].pac_data;
   4378 
   4379     alphaMask = alphaContext->pPlane->pac_data;
   4380 
   4381     /* "Old image" */
   4382     p_in1_Y = PlaneIn1[0].pac_data;
   4383     p_in1_U = PlaneIn1[1].pac_data;
   4384     p_in1_V = PlaneIn1[2].pac_data;
   4385     /* "New image" */
   4386     p_in2_Y = PlaneIn2[0].pac_data;
   4387     p_in2_U = PlaneIn2[1].pac_data;
   4388     p_in2_V = PlaneIn2[2].pac_data;
   4389 
   4390     /* apply Alpha Magic on each pixel */
   4391        for( y=0; y<PlaneOut->u_height; y++ )
   4392     {
   4393         for( x=0; x<PlaneOut->u_width; x++ )
   4394         {
   4395             alphaMaskValue = alphaMask[x+y*PlaneOut->u_width];
   4396             if( alphaBlendLevelMax < alphaMaskValue )
   4397             {
   4398                 /* We keep "old image" in output plane */
   4399                 *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in1_Y+x+y*PlaneIn1[0].u_stride);
   4400                 *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
   4401                     *(p_in1_U+(x>>1)+(y>>1)*PlaneIn1[1].u_stride);
   4402                 *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
   4403                     *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride);
   4404             }
   4405             else if( (alphaBlendLevelMin < alphaMaskValue)&&
   4406                     (alphaMaskValue <= alphaBlendLevelMax ) )
   4407             {
   4408                 /* We blend "old and new image" in output plane */
   4409                 *( p_out0+x+y*PlaneOut[0].u_stride)=(M4VIFI_UInt8)
   4410                     (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_Y+x+y*PlaneIn1[0].u_stride))
   4411                         +(alphaBlendLevelMax-alphaMaskValue)\
   4412                             *( *(p_in2_Y+x+y*PlaneIn2[0].u_stride)) )/alphaBlendRange );
   4413 
   4414                 *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=(M4VIFI_UInt8)\
   4415                     (( (alphaMaskValue-alphaBlendLevelMin)*( *(p_in1_U+(x>>1)+(y>>1)\
   4416                         *PlaneIn1[1].u_stride))
   4417                             +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_U+(x>>1)+(y>>1)\
   4418                                 *PlaneIn2[1].u_stride)) )/alphaBlendRange );
   4419 
   4420                 *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
   4421                     (M4VIFI_UInt8)(( (alphaMaskValue-alphaBlendLevelMin)\
   4422                         *( *(p_in1_V+(x>>1)+(y>>1)*PlaneIn1[2].u_stride))
   4423                                 +(alphaBlendLevelMax-alphaMaskValue)*( *(p_in2_V+(x>>1)+(y>>1)\
   4424                                     *PlaneIn2[2].u_stride)) )/alphaBlendRange );
   4425 
   4426             }
   4427             else
   4428             {
   4429                 /* We take "new image" in output plane */
   4430                 *( p_out0+x+y*PlaneOut[0].u_stride)=*(p_in2_Y+x+y*PlaneIn2[0].u_stride);
   4431                 *( p_out1+(x>>1)+(y>>1)*PlaneOut[1].u_stride)=
   4432                     *(p_in2_U+(x>>1)+(y>>1)*PlaneIn2[1].u_stride);
   4433                 *( p_out2+(x>>1)+(y>>1)*PlaneOut[2].u_stride)=
   4434                     *(p_in2_V+(x>>1)+(y>>1)*PlaneIn2[2].u_stride);
   4435             }
   4436         }
   4437     }
   4438 
   4439     return(err);
   4440 }
   4441 
   4442 #define M4XXX_SampleAddress(plane, x, y)  ( (plane).pac_data + (plane).u_topleft + (y)\
   4443      * (plane).u_stride + (x) )
   4444 
   4445 static void M4XXX_CopyPlane(M4VIFI_ImagePlane* dest, M4VIFI_ImagePlane* source)
   4446 {
   4447     M4OSA_UInt32    height, width, sourceStride, destStride, y;
   4448     M4OSA_MemAddr8    sourceWalk, destWalk;
   4449 
   4450     /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
   4451      recomputed from memory. */
   4452     height = dest->u_height;
   4453     width = dest->u_width;
   4454 
   4455     sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*source, 0, 0);
   4456     sourceStride = source->u_stride;
   4457 
   4458     destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(*dest, 0, 0);
   4459     destStride = dest->u_stride;
   4460 
   4461     for (y=0; y<height; y++)
   4462     {
   4463         memcpy((void *)destWalk, (void *)sourceWalk, width);
   4464         destWalk += destStride;
   4465         sourceWalk += sourceStride;
   4466     }
   4467 }
   4468 
   4469 static M4OSA_ERR M4xVSS_VerticalSlideTransition(M4VIFI_ImagePlane* topPlane,
   4470                                                 M4VIFI_ImagePlane* bottomPlane,
   4471                                                 M4VIFI_ImagePlane *PlaneOut,
   4472                                                 M4OSA_UInt32    shiftUV)
   4473 {
   4474     M4OSA_UInt32 i;
   4475 
   4476     /* Do three loops, one for each plane type, in order to avoid having too many buffers
   4477     "hot" at the same time (better for cache). */
   4478     for (i=0; i<3; i++)
   4479     {
   4480         M4OSA_UInt32    topPartHeight, bottomPartHeight, width, sourceStride, destStride, y;
   4481         M4OSA_MemAddr8    sourceWalk, destWalk;
   4482 
   4483         /* cache the vars used in the loop so as to avoid them being repeatedly fetched and
   4484          recomputed from memory. */
   4485         if (0 == i) /* Y plane */
   4486         {
   4487             bottomPartHeight = 2*shiftUV;
   4488         }
   4489         else /* U and V planes */
   4490         {
   4491             bottomPartHeight = shiftUV;
   4492         }
   4493         topPartHeight = PlaneOut[i].u_height - bottomPartHeight;
   4494         width = PlaneOut[i].u_width;
   4495 
   4496         sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(topPlane[i], 0, bottomPartHeight);
   4497         sourceStride = topPlane[i].u_stride;
   4498 
   4499         destWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
   4500         destStride = PlaneOut[i].u_stride;
   4501 
   4502         /* First the part from the top source clip frame. */
   4503         for (y=0; y<topPartHeight; y++)
   4504         {
   4505             memcpy((void *)destWalk, (void *)sourceWalk, width);
   4506             destWalk += destStride;
   4507             sourceWalk += sourceStride;
   4508         }
   4509 
   4510         /* and now change the vars to copy the part from the bottom source clip frame. */
   4511         sourceWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(bottomPlane[i], 0, 0);
   4512         sourceStride = bottomPlane[i].u_stride;
   4513 
   4514         /* destWalk is already at M4XXX_SampleAddress(PlaneOut[i], 0, topPartHeight) */
   4515 
   4516         for (y=0; y<bottomPartHeight; y++)
   4517         {
   4518             memcpy((void *)destWalk, (void *)sourceWalk, width);
   4519             destWalk += destStride;
   4520             sourceWalk += sourceStride;
   4521         }
   4522     }
   4523     return M4NO_ERROR;
   4524 }
   4525 
   4526 static M4OSA_ERR M4xVSS_HorizontalSlideTransition(M4VIFI_ImagePlane* leftPlane,
   4527                                                   M4VIFI_ImagePlane* rightPlane,
   4528                                                   M4VIFI_ImagePlane *PlaneOut,
   4529                                                   M4OSA_UInt32    shiftUV)
   4530 {
   4531     M4OSA_UInt32 i, y;
   4532     /* If we shifted by exactly 0, or by the width of the target image, then we would get the left
   4533     frame or the right frame, respectively. These cases aren't handled too well by the general
   4534     handling, since they result in 0-size memcopies, so might as well particularize them. */
   4535 
   4536     if (0 == shiftUV)    /* output left frame */
   4537     {
   4538         for (i = 0; i<3; i++) /* for each YUV plane */
   4539         {
   4540             M4XXX_CopyPlane(&(PlaneOut[i]), &(leftPlane[i]));
   4541         }
   4542 
   4543         return M4NO_ERROR;
   4544     }
   4545 
   4546     if (PlaneOut[1].u_width == shiftUV) /* output right frame */
   4547     {
   4548         for (i = 0; i<3; i++) /* for each YUV plane */
   4549         {
   4550             M4XXX_CopyPlane(&(PlaneOut[i]), &(rightPlane[i]));
   4551         }
   4552 
   4553         return M4NO_ERROR;
   4554     }
   4555 
   4556 
   4557     /* Do three loops, one for each plane type, in order to avoid having too many buffers
   4558     "hot" at the same time (better for cache). */
   4559     for (i=0; i<3; i++)
   4560     {
   4561         M4OSA_UInt32    height, leftPartWidth, rightPartWidth;
   4562         M4OSA_UInt32    leftStride,    rightStride,    destStride;
   4563         M4OSA_MemAddr8    leftWalk,    rightWalk,    destWalkLeft, destWalkRight;
   4564 
   4565         /* cache the vars used in the loop so as to avoid them being repeatedly fetched
   4566         and recomputed from memory. */
   4567         height = PlaneOut[i].u_height;
   4568 
   4569         if (0 == i) /* Y plane */
   4570         {
   4571             rightPartWidth = 2*shiftUV;
   4572         }
   4573         else /* U and V planes */
   4574         {
   4575             rightPartWidth = shiftUV;
   4576         }
   4577         leftPartWidth = PlaneOut[i].u_width - rightPartWidth;
   4578 
   4579         leftWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(leftPlane[i], rightPartWidth, 0);
   4580         leftStride = leftPlane[i].u_stride;
   4581 
   4582         rightWalk = (M4OSA_MemAddr8)M4XXX_SampleAddress(rightPlane[i], 0, 0);
   4583         rightStride = rightPlane[i].u_stride;
   4584 
   4585         destWalkLeft = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], 0, 0);
   4586         destWalkRight = (M4OSA_MemAddr8)M4XXX_SampleAddress(PlaneOut[i], leftPartWidth, 0);
   4587         destStride = PlaneOut[i].u_stride;
   4588 
   4589         for (y=0; y<height; y++)
   4590         {
   4591             memcpy((void *)destWalkLeft, (void *)leftWalk, leftPartWidth);
   4592             leftWalk += leftStride;
   4593 
   4594             memcpy((void *)destWalkRight, (void *)rightWalk, rightPartWidth);
   4595             rightWalk += rightStride;
   4596 
   4597             destWalkLeft += destStride;
   4598             destWalkRight += destStride;
   4599         }
   4600     }
   4601 
   4602     return M4NO_ERROR;
   4603 }
   4604 
   4605 
   4606 M4OSA_ERR M4xVSS_SlideTransition( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
   4607                                   M4VIFI_ImagePlane PlaneIn2[3], M4VIFI_ImagePlane *PlaneOut,
   4608                                   M4VSS3GPP_ExternalProgress *pProgress,
   4609                                   M4OSA_UInt32 uiTransitionKind)
   4610 {
   4611     M4xVSS_internal_SlideTransitionSettings* settings =
   4612          (M4xVSS_internal_SlideTransitionSettings*)userData;
   4613     M4OSA_UInt32    shiftUV;
   4614 
   4615     M4OSA_TRACE1_0("inside M4xVSS_SlideTransition");
   4616     if ((M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
   4617         || (M4xVSS_SlideTransition_LeftOutRightIn == settings->direction) )
   4618     {
   4619         /* horizontal slide */
   4620         shiftUV = ((PlaneOut[1]).u_width * pProgress->uiProgress)/1000;
   4621         M4OSA_TRACE1_2("M4xVSS_SlideTransition upper: shiftUV = %d,progress = %d",
   4622             shiftUV,pProgress->uiProgress );
   4623         if (M4xVSS_SlideTransition_RightOutLeftIn == settings->direction)
   4624         {
   4625             /* Put the previous clip frame right, the next clip frame left, and reverse shiftUV
   4626             (since it's a shift from the left frame) so that we start out on the right
   4627             (i.e. not left) frame, it
   4628             being from the previous clip. */
   4629             return M4xVSS_HorizontalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
   4630                  (PlaneOut[1]).u_width - shiftUV);
   4631         }
   4632         else /* Left out, right in*/
   4633         {
   4634             return M4xVSS_HorizontalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
   4635         }
   4636     }
   4637     else
   4638     {
   4639         /* vertical slide */
   4640         shiftUV = ((PlaneOut[1]).u_height * pProgress->uiProgress)/1000;
   4641         M4OSA_TRACE1_2("M4xVSS_SlideTransition bottom: shiftUV = %d,progress = %d",shiftUV,
   4642             pProgress->uiProgress );
   4643         if (M4xVSS_SlideTransition_TopOutBottomIn == settings->direction)
   4644         {
   4645             /* Put the previous clip frame top, the next clip frame bottom. */
   4646             return M4xVSS_VerticalSlideTransition(PlaneIn1, PlaneIn2, PlaneOut, shiftUV);
   4647         }
   4648         else /* Bottom out, top in */
   4649         {
   4650             return M4xVSS_VerticalSlideTransition(PlaneIn2, PlaneIn1, PlaneOut,
   4651                 (PlaneOut[1]).u_height - shiftUV);
   4652         }
   4653     }
   4654 
   4655     /* Note: it might be worthwhile to do some parameter checking, see if dimensions match, etc.,
   4656     at least in debug mode. */
   4657 }
   4658 
   4659 
   4660 /**
   4661  ******************************************************************************
   4662  * prototype    M4xVSS_FadeBlackTransition(M4OSA_Void *pFunctionContext,
   4663  *                                                    M4VIFI_ImagePlane *PlaneIn,
   4664  *                                                    M4VIFI_ImagePlane *PlaneOut,
   4665  *                                                    M4VSS3GPP_ExternalProgress *pProgress,
   4666  *                                                    M4OSA_UInt32 uiEffectKind)
   4667  *
   4668  * @brief    This function apply a fade to black and then a fade from black
   4669  * @note
   4670  * @param    pFunctionContext(IN) Contains which color to apply (not very clean ...)
   4671  * @param    PlaneIn            (IN) Input YUV420 planar
   4672  * @param    PlaneOut        (IN/OUT) Output YUV420 planar
   4673  * @param    pProgress        (IN/OUT) Progress indication (0-100)
   4674  * @param    uiEffectKind    (IN) Unused
   4675  *
   4676  * @return    M4VIFI_OK:    No error
   4677  ******************************************************************************
   4678  */
   4679 M4OSA_ERR M4xVSS_FadeBlackTransition(M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn1[3],
   4680                                      M4VIFI_ImagePlane PlaneIn2[3],
   4681                                      M4VIFI_ImagePlane *PlaneOut,
   4682                                      M4VSS3GPP_ExternalProgress *pProgress,
   4683                                      M4OSA_UInt32 uiTransitionKind)
   4684 {
   4685     M4OSA_Int32 tmp = 0;
   4686     M4OSA_ERR err = M4NO_ERROR;
   4687 
   4688 
   4689     if((pProgress->uiProgress) < 500)
   4690     {
   4691         /**
   4692          * Compute where we are in the effect (scale is 0->1024) */
   4693         tmp = (M4OSA_Int32)((1.0 - ((M4OSA_Float)(pProgress->uiProgress*2)/1000)) * 1024 );
   4694 
   4695         /**
   4696          * Apply the darkening effect */
   4697         err = M4VFL_modifyLumaWithScale( (M4ViComImagePlane*)PlaneIn1,
   4698              (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
   4699         if (M4NO_ERROR != err)
   4700         {
   4701             M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition: M4VFL_modifyLumaWithScale returns\
   4702                  error 0x%x, returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
   4703             return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
   4704         }
   4705     }
   4706     else
   4707     {
   4708         /**
   4709          * Compute where we are in the effect (scale is 0->1024). */
   4710         tmp = (M4OSA_Int32)( (((M4OSA_Float)(((pProgress->uiProgress-500)*2))/1000)) * 1024 );
   4711 
   4712         /**
   4713          * Apply the darkening effect */
   4714         err = M4VFL_modifyLumaWithScale((M4ViComImagePlane*)PlaneIn2,
   4715              (M4ViComImagePlane*)PlaneOut, tmp, M4OSA_NULL);
   4716         if (M4NO_ERROR != err)
   4717         {
   4718             M4OSA_TRACE1_1("M4xVSS_FadeBlackTransition:\
   4719                  M4VFL_modifyLumaWithScale returns error 0x%x,\
   4720                      returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", err);
   4721             return M4VSS3GPP_ERR_LUMA_FILTER_ERROR;
   4722         }
   4723     }
   4724 
   4725 
   4726     return M4VIFI_OK;
   4727 }
   4728 
   4729 
   4730 /**
   4731  ******************************************************************************
   4732  * prototype    M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext,
   4733  *                                                        M4OSA_Void* pBufferIn,
   4734  *                                                        M4OSA_Void* pBufferOut,
   4735  *                                                        M4OSA_UInt32* convertedSize)
   4736  *
   4737  * @brief    This function convert from the customer format to UTF8
   4738  * @note
   4739  * @param    pContext        (IN)    The integrator own context
   4740  * @param    pBufferIn        (IN)    Buffer to convert
   4741  * @param    pBufferOut        (OUT)    Converted buffer
   4742  * @param    convertedSize    (OUT)    Size of the converted buffer
   4743  *
   4744  * @return    M4NO_ERROR:    No error
   4745  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   4746  ******************************************************************************
   4747  */
   4748 M4OSA_ERR M4xVSS_internalConvertToUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
   4749                                        M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
   4750 {
   4751     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   4752     M4OSA_ERR err;
   4753 
   4754     pBufferOut = pBufferIn;
   4755     if(xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL
   4756         && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
   4757     {
   4758         M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;
   4759 
   4760         memset((void *)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,0
   4761             ,(M4OSA_UInt32)xVSS_context->UTFConversionContext.m_TempOutConversionSize);
   4762 
   4763         err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
   4764             (M4OSA_UInt8*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
   4765                  (M4OSA_UInt32*)&ConvertedSize);
   4766         if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
   4767         {
   4768             M4OSA_TRACE2_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
   4769 
   4770             /*free too small buffer*/
   4771             free(xVSS_context->\
   4772                 UTFConversionContext.pTempOutConversionBuffer);
   4773 
   4774             /*re-allocate the buffer*/
   4775             xVSS_context->UTFConversionContext.pTempOutConversionBuffer    =
   4776                  (M4OSA_Void*)M4OSA_32bitAlignedMalloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
   4777                      (M4OSA_Char *)"M4xVSS_internalConvertToUTF8: UTF conversion buffer");
   4778             if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
   4779             {
   4780                 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertToUTF8");
   4781                 return M4ERR_ALLOC;
   4782             }
   4783             xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;
   4784 
   4785             memset((void *)xVSS_context->\
   4786                 UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
   4787                     UTFConversionContext.m_TempOutConversionSize);
   4788 
   4789             err = xVSS_context->UTFConversionContext.pConvToUTF8Fct((M4OSA_Void*)pBufferIn,
   4790                 (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
   4791                     (M4OSA_UInt32*)&ConvertedSize);
   4792             if(err != M4NO_ERROR)
   4793             {
   4794                 M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
   4795                 return err;
   4796             }
   4797         }
   4798         else if(err != M4NO_ERROR)
   4799         {
   4800             M4OSA_TRACE1_1("M4xVSS_internalConvertToUTF8: pConvToUTF8Fct return 0x%x",err);
   4801             return err;
   4802         }
   4803         /*decoded path*/
   4804         pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
   4805         (*convertedSize) = ConvertedSize;
   4806     }
   4807     return M4NO_ERROR;
   4808 }
   4809 
   4810 
   4811 /**
   4812  ******************************************************************************
   4813  * prototype    M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext)
   4814  *
   4815  * @brief    This function convert from UTF8 to the customer format
   4816  * @note
   4817  * @param    pContext    (IN) The integrator own context
   4818  * @param    pBufferIn        (IN)    Buffer to convert
   4819  * @param    pBufferOut        (OUT)    Converted buffer
   4820  * @param    convertedSize    (OUT)    Size of the converted buffer
   4821  *
   4822  * @return    M4NO_ERROR:    No error
   4823  * @return    M4ERR_PARAMETER: At least one of the function parameters is null
   4824  ******************************************************************************
   4825  */
   4826 M4OSA_ERR M4xVSS_internalConvertFromUTF8(M4OSA_Context pContext, M4OSA_Void* pBufferIn,
   4827                                         M4OSA_Void* pBufferOut, M4OSA_UInt32* convertedSize)
   4828 {
   4829     M4xVSS_Context* xVSS_context = (M4xVSS_Context*)pContext;
   4830     M4OSA_ERR err;
   4831 
   4832     pBufferOut = pBufferIn;
   4833     if(xVSS_context->UTFConversionContext.pConvFromUTF8Fct != M4OSA_NULL
   4834         && xVSS_context->UTFConversionContext.pTempOutConversionBuffer != M4OSA_NULL)
   4835     {
   4836         M4OSA_UInt32 ConvertedSize = xVSS_context->UTFConversionContext.m_TempOutConversionSize;
   4837 
   4838         memset((void *)xVSS_context->\
   4839             UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
   4840                 UTFConversionContext.m_TempOutConversionSize);
   4841 
   4842         err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct\
   4843             ((M4OSA_Void*)pBufferIn,(M4OSA_UInt8*)xVSS_context->\
   4844                 UTFConversionContext.pTempOutConversionBuffer, (M4OSA_UInt32*)&ConvertedSize);
   4845         if(err == M4xVSSWAR_BUFFER_OUT_TOO_SMALL)
   4846         {
   4847             M4OSA_TRACE2_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
   4848 
   4849             /*free too small buffer*/
   4850             free(xVSS_context->\
   4851                 UTFConversionContext.pTempOutConversionBuffer);
   4852 
   4853             /*re-allocate the buffer*/
   4854             xVSS_context->UTFConversionContext.pTempOutConversionBuffer    =
   4855                 (M4OSA_Void*)M4OSA_32bitAlignedMalloc(ConvertedSize*sizeof(M4OSA_UInt8), M4VA,
   4856                      (M4OSA_Char *)"M4xVSS_internalConvertFromUTF8: UTF conversion buffer");
   4857             if(M4OSA_NULL == xVSS_context->UTFConversionContext.pTempOutConversionBuffer)
   4858             {
   4859                 M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertFromUTF8");
   4860                 return M4ERR_ALLOC;
   4861             }
   4862             xVSS_context->UTFConversionContext.m_TempOutConversionSize = ConvertedSize;
   4863 
   4864             memset((void *)xVSS_context->\
   4865                 UTFConversionContext.pTempOutConversionBuffer,0,(M4OSA_UInt32)xVSS_context->\
   4866                     UTFConversionContext.m_TempOutConversionSize);
   4867 
   4868             err = xVSS_context->UTFConversionContext.pConvFromUTF8Fct((M4OSA_Void*)pBufferIn,
   4869                 (M4OSA_Void*)xVSS_context->UTFConversionContext.pTempOutConversionBuffer,
   4870                      (M4OSA_UInt32*)&ConvertedSize);
   4871             if(err != M4NO_ERROR)
   4872             {
   4873                 M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
   4874                 return err;
   4875             }
   4876         }
   4877         else if(err != M4NO_ERROR)
   4878         {
   4879             M4OSA_TRACE1_1("M4xVSS_internalConvertFromUTF8: pConvFromUTF8Fct return 0x%x",err);
   4880             return err;
   4881         }
   4882         /*decoded path*/
   4883         pBufferOut = xVSS_context->UTFConversionContext.pTempOutConversionBuffer;
   4884         (*convertedSize) = ConvertedSize;
   4885     }
   4886 
   4887 
   4888     return M4NO_ERROR;
   4889 }
   4890