1 /* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 /** 17 ****************************************************************************** 18 * @file M4VSS3GPP_EditVideo.c 19 * @brief Video Studio Service 3GPP edit API implementation. 20 * @note 21 ****************************************************************************** 22 */ 23 24 /****************/ 25 /*** Includes ***/ 26 /****************/ 27 28 #include "NXPSW_CompilerSwitches.h" 29 /** 30 * Our header */ 31 #include "M4VSS3GPP_API.h" 32 #include "M4VSS3GPP_InternalTypes.h" 33 #include "M4VSS3GPP_InternalFunctions.h" 34 #include "M4VSS3GPP_InternalConfig.h" 35 #include "M4VSS3GPP_ErrorCodes.h" 36 37 // StageFright encoders require %16 resolution 38 #include "M4ENCODER_common.h" 39 /** 40 * OSAL headers */ 41 #include "M4OSA_Memory.h" /**< OSAL memory management */ 42 #include "M4OSA_Debug.h" /**< OSAL debug management */ 43 44 /** 45 * component includes */ 46 #include "M4VFL_transition.h" /**< video effects */ 47 48 /*for transition behaviour*/ 49 #include <math.h> 50 #include "M4AIR_API.h" 51 #include "M4VSS3GPP_Extended_API.h" 52 /** Determine absolute value of a. */ 53 #define M4xVSS_ABS(a) ( ( (a) < (0) ) ? 
(-(a)) : (a) ) 54 #define Y_PLANE_BORDER_VALUE 0x00 55 #define U_PLANE_BORDER_VALUE 0x80 56 #define V_PLANE_BORDER_VALUE 0x80 57 58 /************************************************************************/ 59 /* Static local functions */ 60 /************************************************************************/ 61 62 static M4OSA_ERR M4VSS3GPP_intCheckVideoMode( 63 M4VSS3GPP_InternalEditContext *pC ); 64 static M4OSA_Void 65 M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC, 66 M4OSA_UInt8 uiClipNumber ); 67 static M4OSA_ERR M4VSS3GPP_intApplyVideoEffect( 68 M4VSS3GPP_InternalEditContext *pC, M4VIFI_ImagePlane *pPlaneIn, 69 M4VIFI_ImagePlane *pPlaneOut, M4OSA_Bool bSkipFramingEffect); 70 71 static M4OSA_ERR 72 M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC, 73 M4VIFI_ImagePlane *pPlaneOut ); 74 75 static M4OSA_Void 76 M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC, 77 M4SYS_AccessUnit *pAU ); 78 static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer, 79 M4OSA_UInt8 uiCts ); 80 static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, 81 M4OSA_UInt32 uiCtsSec ); 82 static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, 83 M4OSA_UInt32 *pCtsSec ); 84 static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes, 85 M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight ); 86 static M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420( 87 M4OSA_Void* pFileIn, M4OSA_FileReadPointer* pFileReadPtr, 88 M4VIFI_ImagePlane* pImagePlanes, 89 M4OSA_UInt32 width,M4OSA_UInt32 height); 90 static M4OSA_ERR M4VSS3GPP_intApplyRenderingMode( 91 M4VSS3GPP_InternalEditContext *pC, 92 M4xVSS_MediaRendering renderingMode, 93 M4VIFI_ImagePlane* pInplane, 94 M4VIFI_ImagePlane* pOutplane); 95 96 static M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888 ( 97 M4VSS3GPP_InternalEditContext *pC, 98 M4VSS3GPP_ClipContext* pClipCtxt); 99 static M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect( 100 M4VSS3GPP_InternalEditContext *pC, 101 M4VSS3GPP_ClipContext* pClipCtxt, 102 M4_MediaTime ts, 103 M4OSA_Bool bIsClip1, 104 M4VIFI_ImagePlane *pResizePlane, 105 M4VIFI_ImagePlane *pPlaneNoResize, 106 M4VIFI_ImagePlane *pPlaneOut); 107 108 static M4OSA_ERR M4VSS3GPP_intRotateVideo(M4VIFI_ImagePlane* pPlaneIn, 109 M4OSA_UInt32 rotationDegree); 110 111 static M4OSA_ERR M4VSS3GPP_intSetYUV420Plane(M4VIFI_ImagePlane* planeIn, 112 M4OSA_UInt32 width, M4OSA_UInt32 height); 113 114 static M4OSA_ERR M4VSS3GPP_intApplyVideoOverlay ( 115 M4VSS3GPP_InternalEditContext *pC, 116 M4VIFI_ImagePlane *pPlaneIn, 117 M4VIFI_ImagePlane *pPlaneOut); 118 119 /** 120 ****************************************************************************** 121 * M4OSA_ERR M4VSS3GPP_intEditStepVideo() 122 * @brief One step of video processing 123 * @param pC (IN/OUT) Internal edit context 124 ****************************************************************************** 125 */ 126 M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC ) 127 { 128 M4OSA_ERR err; 129 M4OSA_Int32 iCts, iNextCts; 130 M4ENCODER_FrameMode FrameMode; 131 M4OSA_Bool bSkipFrame; 132 M4OSA_UInt16 offset; 133 134 /** 135 * Check if we reached end cut. 
Decorrelate input and output encoding 136 * timestamp to handle encoder prefetch 137 */ 138 if ( ((M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset 139 + pC->iInOutTimeOffset) >= pC->pC1->iEndTime ) 140 { 141 /* Re-adjust video to precise cut time */ 142 pC->iInOutTimeOffset = ((M4OSA_Int32)(pC->ewc.dInputVidCts)) 143 - pC->pC1->iVoffset + pC->iInOutTimeOffset - pC->pC1->iEndTime; 144 if ( pC->iInOutTimeOffset < 0 ) { 145 pC->iInOutTimeOffset = 0; 146 } 147 148 /** 149 * Video is done for this clip */ 150 err = M4VSS3GPP_intReachedEndOfVideo(pC); 151 152 /* RC: to know when a file has been processed */ 153 if (M4NO_ERROR != err && err != M4VSS3GPP_WAR_SWITCH_CLIP) 154 { 155 M4OSA_TRACE1_1( 156 "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intReachedEndOfVideo returns 0x%x", 157 err); 158 } 159 160 return err; 161 } 162 163 /* Don't change the states if we are in decodeUpTo() */ 164 if ( (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus) 165 && (( pC->pC2 == M4OSA_NULL) 166 || (M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus)) ) 167 { 168 /** 169 * Check Video Mode, depending on the current output CTS */ 170 err = M4VSS3GPP_intCheckVideoMode( 171 pC); /**< This function change the pC->Vstate variable! */ 172 173 if (M4NO_ERROR != err) 174 { 175 M4OSA_TRACE1_1( 176 "M4VSS3GPP_intEditStepVideo: M4VSS3GPP_intCheckVideoMode returns 0x%x!", 177 err); 178 return err; 179 } 180 } 181 182 183 switch( pC->Vstate ) 184 { 185 /* _________________ */ 186 /*| |*/ 187 /*| READ_WRITE MODE |*/ 188 /*|_________________|*/ 189 190 case M4VSS3GPP_kEditVideoState_READ_WRITE: 191 case M4VSS3GPP_kEditVideoState_AFTER_CUT: 192 { 193 M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo READ_WRITE"); 194 195 bSkipFrame = M4OSA_FALSE; 196 197 /** 198 * If we were decoding the clip, we must jump to be sure 199 * to get to the good position. */ 200 if( M4VSS3GPP_kClipStatus_READ != pC->pC1->Vstatus ) 201 { 202 /** 203 * Jump to target video time (tc = to-T) */ 204 // Decorrelate input and output encoding timestamp to handle encoder prefetch 205 iCts = (M4OSA_Int32)(pC->ewc.dInputVidCts) - pC->pC1->iVoffset; 206 err = pC->pC1->ShellAPI.m_pReader->m_pFctJump( 207 pC->pC1->pReaderContext, 208 (M4_StreamHandler *)pC->pC1->pVideoStream, &iCts); 209 210 if( M4NO_ERROR != err ) 211 { 212 M4OSA_TRACE1_1( 213 "M4VSS3GPP_intEditStepVideo:\ 214 READ_WRITE: m_pReader->m_pFctJump(V1) returns 0x%x!", 215 err); 216 return err; 217 } 218 219 err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu( 220 pC->pC1->pReaderContext, 221 (M4_StreamHandler *)pC->pC1->pVideoStream, 222 &pC->pC1->VideoAU); 223 224 if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) ) 225 { 226 M4OSA_TRACE1_1( 227 "M4VSS3GPP_intEditStepVideo:\ 228 READ_WRITE: m_pReader->m_pFctGetNextAu returns 0x%x!", 229 err); 230 return err; 231 } 232 233 M4OSA_TRACE2_3("A .... 
read : cts = %.0f + %ld [ 0x%x ]", 234 pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset, 235 pC->pC1->VideoAU.m_size); 236 237 /* This frame has been already written in BEGIN CUT step -> skip it */ 238 if( pC->pC1->VideoAU.m_CTS == iCts 239 && pC->pC1->iVideoRenderCts >= iCts ) 240 { 241 bSkipFrame = M4OSA_TRUE; 242 } 243 } 244 245 /* This frame has been already written in BEGIN CUT step -> skip it */ 246 if( ( pC->Vstate == M4VSS3GPP_kEditVideoState_AFTER_CUT) 247 && (pC->pC1->VideoAU.m_CTS 248 + pC->pC1->iVoffset <= pC->ewc.WriterVideoAU.CTS) ) 249 { 250 bSkipFrame = M4OSA_TRUE; 251 } 252 253 /** 254 * Remember the clip reading state */ 255 pC->pC1->Vstatus = M4VSS3GPP_kClipStatus_READ; 256 // Decorrelate input and output encoding timestamp to handle encoder prefetch 257 // Rounding is to compensate reader imprecision (m_CTS is actually an integer) 258 iCts = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pC->pC1->iVoffset - 1; 259 iNextCts = iCts + ((M4OSA_Int32)pC->dOutputFrameDuration) + 1; 260 /* Avoid to write a last frame of duration 0 */ 261 if( iNextCts > pC->pC1->iEndTime ) 262 iNextCts = pC->pC1->iEndTime; 263 264 /** 265 * If the AU is good to be written, write it, else just skip it */ 266 if( ( M4OSA_FALSE == bSkipFrame) 267 && (( pC->pC1->VideoAU.m_CTS >= iCts) 268 && (pC->pC1->VideoAU.m_CTS < iNextCts) 269 && (pC->pC1->VideoAU.m_size > 0)) ) 270 { 271 /** 272 * Get the output AU to write into */ 273 err = pC->ShellAPI.pWriterDataFcts->pStartAU( 274 pC->ewc.p3gpWriterContext, 275 M4VSS3GPP_WRITER_VIDEO_STREAM_ID, 276 &pC->ewc.WriterVideoAU); 277 278 if( M4NO_ERROR != err ) 279 { 280 M4OSA_TRACE1_1( 281 "M4VSS3GPP_intEditStepVideo: READ_WRITE:\ 282 pWriterDataFcts->pStartAU(Video) returns 0x%x!", 283 err); 284 return err; 285 } 286 287 /** 288 * Copy the input AU to the output AU */ 289 pC->ewc.WriterVideoAU.attribute = pC->pC1->VideoAU.m_attribute; 290 // Decorrelate input and output encoding timestamp to handle encoder prefetch 291 pC->ewc.WriterVideoAU.CTS = (M4OSA_Time)pC->pC1->VideoAU.m_CTS + 292 (M4OSA_Time)pC->pC1->iVoffset; 293 pC->ewc.dInputVidCts += pC->dOutputFrameDuration; 294 offset = 0; 295 /* for h.264 stream do not read the 1st 4 bytes as they are header 296 indicators */ 297 if( pC->pC1->pVideoStream->m_basicProperties.m_streamType 298 == M4DA_StreamTypeVideoMpeg4Avc ) 299 offset = 4; 300 301 pC->ewc.WriterVideoAU.size = pC->pC1->VideoAU.m_size - offset; 302 if( pC->ewc.WriterVideoAU.size > pC->ewc.uiVideoMaxAuSize ) 303 { 304 M4OSA_TRACE1_2( 305 "M4VSS3GPP_intEditStepVideo: READ_WRITE: AU size greater than\ 306 MaxAuSize (%d>%d)! 
returning M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE", 307 pC->ewc.WriterVideoAU.size, pC->ewc.uiVideoMaxAuSize); 308 return M4VSS3GPP_ERR_INPUT_VIDEO_AU_TOO_LARGE; 309 } 310 311 memcpy((void *)pC->ewc.WriterVideoAU.dataAddress, 312 (void *)(pC->pC1->VideoAU.m_dataAddress + offset), 313 (pC->ewc.WriterVideoAU.size)); 314 315 /** 316 * Update time info for the Counter Time System to be equal to the bit 317 -stream time*/ 318 M4VSS3GPP_intUpdateTimeInfo(pC, &pC->ewc.WriterVideoAU); 319 M4OSA_TRACE2_2("B ---- write : cts = %lu [ 0x%x ]", 320 pC->ewc.WriterVideoAU.CTS, pC->ewc.WriterVideoAU.size); 321 322 /** 323 * Write the AU */ 324 err = pC->ShellAPI.pWriterDataFcts->pProcessAU( 325 pC->ewc.p3gpWriterContext, 326 M4VSS3GPP_WRITER_VIDEO_STREAM_ID, 327 &pC->ewc.WriterVideoAU); 328 329 if( M4NO_ERROR != err ) 330 { 331 /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output 332 file size is reached 333 The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE 334 is returned*/ 335 if( M4WAR_WRITER_STOP_REQ == err ) 336 { 337 M4OSA_TRACE1_0( 338 "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize"); 339 return M4VSS3GPP_WAR_EDITING_DONE; 340 } 341 else 342 { 343 M4OSA_TRACE1_1( 344 "M4VSS3GPP_intEditStepVideo: READ_WRITE:\ 345 pWriterDataFcts->pProcessAU(Video) returns 0x%x!", 346 err); 347 return err; 348 } 349 } 350 351 /** 352 * Read next AU for next step */ 353 err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu( 354 pC->pC1->pReaderContext, 355 (M4_StreamHandler *)pC->pC1->pVideoStream, 356 &pC->pC1->VideoAU); 357 358 if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) ) 359 { 360 M4OSA_TRACE1_1( 361 "M4VSS3GPP_intEditStepVideo: READ_WRITE:\ 362 m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!", 363 err); 364 return err; 365 } 366 367 M4OSA_TRACE2_3("C .... read : cts = %.0f + %ld [ 0x%x ]", 368 pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset, 369 pC->pC1->VideoAU.m_size); 370 } 371 else 372 { 373 /** 374 * Decide wether to read or to increment time increment */ 375 if( ( pC->pC1->VideoAU.m_size == 0) 376 || (pC->pC1->VideoAU.m_CTS >= iNextCts) ) 377 { 378 /*Increment time by the encoding period (NO_MORE_AU or reader in advance */ 379 // Decorrelate input and output encoding timestamp to handle encoder prefetch 380 pC->ewc.dInputVidCts += pC->dOutputFrameDuration; 381 382 /* Switch (from AFTER_CUT) to normal mode because time is 383 no more frozen */ 384 pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE; 385 } 386 else 387 { 388 /* In other cases (reader late), just let the reader catch up 389 pC->ewc.dVTo */ 390 err = pC->pC1->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu( 391 pC->pC1->pReaderContext, 392 (M4_StreamHandler *)pC->pC1->pVideoStream, 393 &pC->pC1->VideoAU); 394 395 if( ( M4NO_ERROR != err) && (M4WAR_NO_MORE_AU != err) ) 396 { 397 M4OSA_TRACE1_1( 398 "M4VSS3GPP_intEditStepVideo: READ_WRITE:\ 399 m_pReaderDataIt->m_pFctGetNextAu returns 0x%x!", 400 err); 401 return err; 402 } 403 404 M4OSA_TRACE2_3("D .... 
read : cts = %.0f + %ld [ 0x%x ]", 405 pC->pC1->VideoAU.m_CTS, pC->pC1->iVoffset, 406 pC->pC1->VideoAU.m_size); 407 } 408 } 409 } 410 break; 411 412 /* ____________________ */ 413 /*| |*/ 414 /*| DECODE_ENCODE MODE |*/ 415 /*| BEGIN_CUT MODE |*/ 416 /*|____________________|*/ 417 418 case M4VSS3GPP_kEditVideoState_DECODE_ENCODE: 419 case M4VSS3GPP_kEditVideoState_BEGIN_CUT: 420 { 421 M4OSA_TRACE3_0( 422 "M4VSS3GPP_intEditStepVideo DECODE_ENCODE / BEGIN_CUT"); 423 424 if ((pC->pC1->pSettings->FileType == 425 M4VIDEOEDITING_kFileType_ARGB8888) && 426 (M4OSA_FALSE == 427 pC->pC1->pSettings->ClipProperties.bSetImageData)) { 428 429 err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC1); 430 if( M4NO_ERROR != err ) { 431 M4OSA_TRACE1_1( 432 "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\ 433 M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err); 434 return err; 435 } 436 } 437 /** 438 * Decode the video up to the target time 439 (will jump to the previous RAP if needed ) */ 440 // Decorrelate input and output encoding timestamp to handle encoder prefetch 441 err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1, (M4OSA_Int32)pC->ewc.dInputVidCts); 442 if( M4NO_ERROR != err ) 443 { 444 M4OSA_TRACE1_1( 445 "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\ 446 M4VSS3GPP_intDecodeVideoUpToCts returns err=0x%x", 447 err); 448 return err; 449 } 450 451 /* If the decoding is not completed, do one more step with time frozen */ 452 if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus ) 453 { 454 return M4NO_ERROR; 455 } 456 457 /** 458 * Reset the video pre-processing error before calling the encoder */ 459 pC->ewc.VppError = M4NO_ERROR; 460 461 M4OSA_TRACE2_0("E ++++ encode AU"); 462 463 /** 464 * Encode the frame(rendering,filtering and writing will be done 465 in encoder callbacks)*/ 466 if( pC->Vstate == M4VSS3GPP_kEditVideoState_BEGIN_CUT ) 467 FrameMode = M4ENCODER_kIFrame; 468 else 469 FrameMode = M4ENCODER_kNormalFrame; 470 471 // Decorrelate input and output encoding timestamp to handle encoder prefetch 472 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL, 473 pC->ewc.dInputVidCts, FrameMode); 474 /** 475 * Check if we had a VPP error... 
*/ 476 if( M4NO_ERROR != pC->ewc.VppError ) 477 { 478 M4OSA_TRACE1_1( 479 "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\ 480 pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x", 481 pC->ewc.VppError); 482 #ifdef M4VSS_SUPPORT_OMX_CODECS 483 484 if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError ) 485 { 486 #endif //M4VSS_SUPPORT_OMX_CODECS 487 488 return pC->ewc.VppError; 489 #ifdef M4VSS_SUPPORT_OMX_CODECS 490 491 } 492 493 #endif //M4VSS_SUPPORT_OMX_CODECS 494 495 } 496 else if( M4NO_ERROR != err ) /**< ...or an encoder error */ 497 { 498 if( ((M4OSA_UInt32)M4ERR_ALLOC) == err ) 499 { 500 M4OSA_TRACE1_0( 501 "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\ 502 returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR"); 503 return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR; 504 } 505 /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output 506 file size is reached 507 The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE 508 is returned*/ 509 else if( M4WAR_WRITER_STOP_REQ == err ) 510 { 511 M4OSA_TRACE1_0( 512 "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize"); 513 return M4VSS3GPP_WAR_EDITING_DONE; 514 } 515 else 516 { 517 M4OSA_TRACE1_1( 518 "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\ 519 pVideoEncoderGlobalFcts->pFctEncode returns 0x%x", 520 err); 521 return err; 522 } 523 } 524 525 /** 526 * Increment time by the encoding period (for begin cut, do not increment to not 527 loose P-frames) */ 528 if( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate ) 529 { 530 // Decorrelate input and output encoding timestamp to handle encoder prefetch 531 pC->ewc.dInputVidCts += pC->dOutputFrameDuration; 532 } 533 } 534 break; 535 536 /* _________________ */ 537 /*| |*/ 538 /*| TRANSITION MODE |*/ 539 /*|_________________|*/ 540 541 case M4VSS3GPP_kEditVideoState_TRANSITION: 542 { 543 M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo TRANSITION"); 544 545 /* Don't decode more than needed */ 546 if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC1->Vstatus) 547 && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus)) ) 548 { 549 /** 550 * Decode the clip1 video up to the target time 551 (will jump to the previous RAP if needed */ 552 if ((pC->pC1->pSettings->FileType == 553 M4VIDEOEDITING_kFileType_ARGB8888) && 554 (M4OSA_FALSE == 555 pC->pC1->pSettings->ClipProperties.bSetImageData)) { 556 557 err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC1); 558 if( M4NO_ERROR != err ) { 559 M4OSA_TRACE1_1( 560 "M4VSS3GPP_intEditStepVideo: TRANSITION:\ 561 M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err); 562 return err; 563 } 564 } 565 // Decorrelate input and output encoding timestamp to handle encoder prefetch 566 err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1, 567 (M4OSA_Int32)pC->ewc.dInputVidCts); 568 if( M4NO_ERROR != err ) 569 { 570 M4OSA_TRACE1_1( 571 "M4VSS3GPP_intEditStepVideo: TRANSITION:\ 572 M4VSS3GPP_intDecodeVideoUpToCts(C1) returns err=0x%x", 573 err); 574 return err; 575 } 576 577 /* If the decoding is not completed, do one more step with time frozen */ 578 if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus ) 579 { 580 return M4NO_ERROR; 581 } 582 } 583 584 /* Don't decode more than needed */ 585 if( !(( M4VSS3GPP_kClipStatus_DECODE_UP_TO != pC->pC2->Vstatus) 586 && (M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC1->Vstatus)) ) 587 { 588 /** 589 * Decode the clip2 video up to the target time 590 (will jump to the previous RAP if needed) */ 591 if ((pC->pC2->pSettings->FileType == 592 M4VIDEOEDITING_kFileType_ARGB8888) && 593 
(M4OSA_FALSE == 594 pC->pC2->pSettings->ClipProperties.bSetImageData)) { 595 596 err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC2); 597 if( M4NO_ERROR != err ) { 598 M4OSA_TRACE1_1( 599 "M4VSS3GPP_intEditStepVideo: TRANSITION:\ 600 M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err); 601 return err; 602 } 603 } 604 605 // Decorrelate input and output encoding timestamp to handle encoder prefetch 606 err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC2, 607 (M4OSA_Int32)pC->ewc.dInputVidCts); 608 if( M4NO_ERROR != err ) 609 { 610 M4OSA_TRACE1_1( 611 "M4VSS3GPP_intEditStepVideo: TRANSITION:\ 612 M4VSS3GPP_intDecodeVideoUpToCts(C2) returns err=0x%x", 613 err); 614 return err; 615 } 616 617 /* If the decoding is not completed, do one more step with time frozen */ 618 if( M4VSS3GPP_kClipStatus_DECODE_UP_TO == pC->pC2->Vstatus ) 619 { 620 return M4NO_ERROR; 621 } 622 } 623 624 /** 625 * Reset the video pre-processing error before calling the encoder */ 626 pC->ewc.VppError = M4NO_ERROR; 627 628 M4OSA_TRACE2_0("F **** blend AUs"); 629 630 /** 631 * Encode the frame (rendering, filtering and writing will be done 632 in encoder callbacks */ 633 // Decorrelate input and output encoding timestamp to handle encoder prefetch 634 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctEncode(pC->ewc.pEncContext, M4OSA_NULL, 635 pC->ewc.dInputVidCts, M4ENCODER_kNormalFrame); 636 637 /** 638 * If encode returns a process frame error, it is likely to be a VPP error */ 639 if( M4NO_ERROR != pC->ewc.VppError ) 640 { 641 M4OSA_TRACE1_1( 642 "M4VSS3GPP_intEditStepVideo: TRANSITION:\ 643 pVideoEncoderGlobalFcts->pFctEncode, returning VppErr=0x%x", 644 pC->ewc.VppError); 645 #ifdef M4VSS_SUPPORT_OMX_CODECS 646 647 if( M4WAR_VIDEORENDERER_NO_NEW_FRAME != pC->ewc.VppError ) 648 { 649 650 #endif //M4VSS_SUPPORT_OMX_CODECS 651 652 return pC->ewc.VppError; 653 #ifdef M4VSS_SUPPORT_OMX_CODECS 654 655 } 656 657 #endif //M4VSS_SUPPORT_OMX_CODECS 658 659 } 660 else if( M4NO_ERROR != err ) /**< ...or an encoder error */ 661 { 662 if( ((M4OSA_UInt32)M4ERR_ALLOC) == err ) 663 { 664 M4OSA_TRACE1_0( 665 "M4VSS3GPP_intEditStepVideo: TRANSITION:\ 666 returning M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR"); 667 return M4VSS3GPP_ERR_ENCODER_ACCES_UNIT_ERROR; 668 } 669 670 /* the warning M4WAR_WRITER_STOP_REQ is returned when the targeted output 671 file size is reached 672 The editing is then finished, the warning M4VSS3GPP_WAR_EDITING_DONE is 673 returned*/ 674 else if( M4WAR_WRITER_STOP_REQ == err ) 675 { 676 M4OSA_TRACE1_0( 677 "M4VSS3GPP_intEditStepVideo: File was cut to avoid oversize"); 678 return M4VSS3GPP_WAR_EDITING_DONE; 679 } 680 else 681 { 682 M4OSA_TRACE1_1( 683 "M4VSS3GPP_intEditStepVideo: TRANSITION:\ 684 pVideoEncoderGlobalFcts->pFctEncode returns 0x%x", 685 err); 686 return err; 687 } 688 } 689 690 /** 691 * Increment time by the encoding period */ 692 // Decorrelate input and output encoding timestamp to handle encoder prefetch 693 pC->ewc.dInputVidCts += pC->dOutputFrameDuration; 694 } 695 break; 696 697 /* ____________ */ 698 /*| |*/ 699 /*| ERROR CASE |*/ 700 /*|____________|*/ 701 702 default: 703 M4OSA_TRACE1_1( 704 "M4VSS3GPP_intEditStepVideo: invalid internal state (0x%x),\ 705 returning M4VSS3GPP_ERR_INTERNAL_STATE", 706 pC->Vstate); 707 return M4VSS3GPP_ERR_INTERNAL_STATE; 708 } 709 710 /** 711 * Return with no error */ 712 M4OSA_TRACE3_0("M4VSS3GPP_intEditStepVideo: returning M4NO_ERROR"); 713 return M4NO_ERROR; 714 } 715 716 /** 717 
****************************************************************************** 718 * M4OSA_ERR M4VSS3GPP_intCheckVideoMode() 719 * @brief Check which video process mode we must use, depending on the output CTS. 720 * @param pC (IN/OUT) Internal edit context 721 ****************************************************************************** 722 */ 723 static M4OSA_ERR M4VSS3GPP_intCheckVideoMode( 724 M4VSS3GPP_InternalEditContext *pC ) 725 { 726 M4OSA_ERR err; 727 // Decorrelate input and output encoding timestamp to handle encoder prefetch 728 const M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts; 729 /**< Transition duration */ 730 const M4OSA_Int32 TD = pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration; 731 732 M4OSA_Int32 iTmp; 733 734 const M4VSS3GPP_EditVideoState previousVstate = pC->Vstate; 735 736 /** 737 * Check if Clip1 is on its begin cut, or in an effect zone */ 738 M4VSS3GPP_intCheckVideoEffects(pC, 1); 739 740 /** 741 * Check if we are in the transition with next clip */ 742 if( ( TD > 0) && (( t - pC->pC1->iVoffset) >= (pC->pC1->iEndTime - TD)) ) 743 { 744 /** 745 * We are in a transition */ 746 pC->Vstate = M4VSS3GPP_kEditVideoState_TRANSITION; 747 pC->bTransitionEffect = M4OSA_TRUE; 748 749 /** 750 * Open second clip for transition, if not yet opened */ 751 if( M4OSA_NULL == pC->pC2 ) 752 { 753 pC->pC1->bGetYuvDataFromDecoder = M4OSA_TRUE; 754 755 err = M4VSS3GPP_intOpenClip(pC, &pC->pC2, 756 &pC->pClipList[pC->uiCurrentClip + 1]); 757 758 if( M4NO_ERROR != err ) 759 { 760 M4OSA_TRACE1_1( 761 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_editOpenClip returns 0x%x!", 762 err); 763 return err; 764 } 765 766 /** 767 * Add current video output CTS to the clip offset 768 * (audio output CTS is not yet at the transition, so audio 769 * offset can't be updated yet). */ 770 // Decorrelate input and output encoding timestamp to handle encoder prefetch 771 pC->pC2->iVoffset += (M4OSA_UInt32)pC->ewc.dInputVidCts; 772 773 /** 774 * 2005-03-24: BugFix for audio-video synchro: 775 * Update transition duration due to the actual video transition beginning time. 776 * It will avoid desynchronization when doing the audio transition. */ 777 // Decorrelate input and output encoding timestamp to handle encoder prefetch 778 iTmp = ((M4OSA_Int32)pC->ewc.dInputVidCts)\ 779 - (pC->pC1->iEndTime - TD + pC->pC1->iVoffset); 780 if (iTmp < (M4OSA_Int32)pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration) 781 /**< Test in case of a very short transition */ 782 { 783 pC->pTransitionList[pC-> 784 uiCurrentClip].uiTransitionDuration -= iTmp; 785 786 /** 787 * Don't forget to also correct the total duration used for the progress bar 788 * (it was computed with the original transition duration). 
*/
789 pC->ewc.iOutputDuration += iTmp;
790 }
791 /**< No "else" here because it's hard to predict the effect of a 0 duration transition...*/
792 }
793
794 /**
795 * Check effects for clip2 */
796 M4VSS3GPP_intCheckVideoEffects(pC, 2);
797 }
798 else
799 {
800 /**
801 * We are not in a transition */
802 pC->bTransitionEffect = M4OSA_FALSE;
803
804 /* If there is an effect we go to decode/encode mode */
805 if((pC->nbActiveEffects > 0) || (pC->nbActiveEffects1 > 0) ||
806 (pC->pC1->pSettings->FileType ==
807 M4VIDEOEDITING_kFileType_ARGB8888) ||
808 (pC->pC1->pSettings->bTranscodingRequired == M4OSA_TRUE)) {
809 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
810 }
811 /* We do a begin cut, except if already done (time is not progressing because we want
812 to catch all P-frames after the cut) */
813 else if( M4OSA_TRUE == pC->bClip1AtBeginCut )
814 {
815 if(pC->pC1->pSettings->ClipProperties.VideoStreamType == M4VIDEOEDITING_kH264) {
816 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
817 pC->bEncodeTillEoF = M4OSA_TRUE;
818 } else if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
819 || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) ) {
820 pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT;
821 } else {
822 pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT;
823 }
824 }
825 /* Else we are in default copy/paste mode */
826 else
827 {
828 if( ( M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate)
829 || (M4VSS3GPP_kEditVideoState_AFTER_CUT == previousVstate) )
830 {
831 pC->Vstate = M4VSS3GPP_kEditVideoState_AFTER_CUT;
832 }
833 else if( pC->bIsMMS == M4OSA_TRUE )
834 {
835 M4OSA_UInt32 currentBitrate;
836 M4OSA_ERR err = M4NO_ERROR;
837
838 /* Do we need to re-encode the video to downgrade the bitrate or not? */
839 /* Let's compute the current bitrate of the current edited clip */
840 err = pC->pC1->ShellAPI.m_pReader->m_pFctGetOption(
841 pC->pC1->pReaderContext,
842 M4READER_kOptionID_Bitrate, &currentBitrate);
843
844 if( err != M4NO_ERROR )
845 {
846 M4OSA_TRACE1_1(
847 "M4VSS3GPP_intCheckVideoMode:\
848 Error when getting bitrate of edited clip: 0x%x",
849 err);
850 return err;
851 }
852
853 /* Remove audio bitrate */
854 currentBitrate -= 12200;
855
856 /* Test if we go into copy/paste mode or into decode/encode mode */
857 if( currentBitrate > pC->uiMMSVideoBitrate )
858 {
859 pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE;
860 }
861 else
862 {
863 pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE;
864 }
865 }
866 else if(!((pC->m_bClipExternalHasStarted == M4OSA_TRUE) &&
867 (pC->Vstate == M4VSS3GPP_kEditVideoState_DECODE_ENCODE)) &&
868 pC->bEncodeTillEoF == M4OSA_FALSE)
869 {
870 /**
871 * Test if we go into copy/paste mode or into decode/encode mode
872 * If an external effect has been applied on the current clip
873 * then continue to be in decode/encode mode till end of
874 * clip to avoid H.264 distortion.
875 */ 876 pC->Vstate = M4VSS3GPP_kEditVideoState_READ_WRITE; 877 } 878 } 879 } 880 881 /** 882 * Check if we create an encoder */ 883 if( ( ( M4VSS3GPP_kEditVideoState_READ_WRITE == previousVstate) 884 || (M4VSS3GPP_kEditVideoState_AFTER_CUT 885 == previousVstate)) /**< read mode */ 886 && (( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == pC->Vstate) 887 || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == pC->Vstate) 888 || (M4VSS3GPP_kEditVideoState_TRANSITION 889 == pC->Vstate)) /**< encode mode */ 890 && pC->bIsMMS == M4OSA_FALSE ) 891 { 892 /** 893 * Create the encoder, if not created already*/ 894 if (pC->ewc.encoderState == M4VSS3GPP_kNoEncoder) { 895 err = M4VSS3GPP_intCreateVideoEncoder(pC); 896 897 if( M4NO_ERROR != err ) 898 { 899 M4OSA_TRACE1_1( 900 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder \ 901 returns 0x%x!", err); 902 return err; 903 } 904 } 905 } 906 else if( pC->bIsMMS == M4OSA_TRUE && pC->ewc.pEncContext == M4OSA_NULL ) 907 { 908 /** 909 * Create the encoder */ 910 err = M4VSS3GPP_intCreateVideoEncoder(pC); 911 912 if( M4NO_ERROR != err ) 913 { 914 M4OSA_TRACE1_1( 915 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intCreateVideoEncoder returns 0x%x!", 916 err); 917 return err; 918 } 919 } 920 921 /** 922 * When we go from filtering to read/write, we must act like a begin cut, 923 * because the last filtered image may be different than the original image. */ 924 else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate) 925 || (M4VSS3GPP_kEditVideoState_TRANSITION 926 == previousVstate)) /**< encode mode */ 927 && (M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) /**< read mode */ 928 && (pC->bEncodeTillEoF == M4OSA_FALSE) ) 929 { 930 pC->Vstate = M4VSS3GPP_kEditVideoState_BEGIN_CUT; 931 } 932 933 /** 934 * Check if we destroy an encoder */ 935 else if( ( ( M4VSS3GPP_kEditVideoState_DECODE_ENCODE == previousVstate) 936 || (M4VSS3GPP_kEditVideoState_BEGIN_CUT == previousVstate) 937 || (M4VSS3GPP_kEditVideoState_TRANSITION 938 == previousVstate)) /**< encode mode */ 939 && (( M4VSS3GPP_kEditVideoState_READ_WRITE == pC->Vstate) 940 || (M4VSS3GPP_kEditVideoState_AFTER_CUT 941 == pC->Vstate)) /**< read mode */ 942 && pC->bIsMMS == M4OSA_FALSE ) 943 { 944 /** 945 * Destroy the previously created encoder */ 946 err = M4VSS3GPP_intDestroyVideoEncoder(pC); 947 948 if( M4NO_ERROR != err ) 949 { 950 M4OSA_TRACE1_1( 951 "M4VSS3GPP_intCheckVideoMode: M4VSS3GPP_intDestroyVideoEncoder returns 0x%x!", 952 err); 953 return err; 954 } 955 } 956 957 /** 958 * Return with no error */ 959 M4OSA_TRACE3_0("M4VSS3GPP_intCheckVideoMode: returning M4NO_ERROR"); 960 return M4NO_ERROR; 961 } 962 963 /****************************************************************************** 964 * M4OSA_ERR M4VSS3GPP_intStartAU() 965 * @brief StartAU writer-like interface used for the VSS 3GPP only 966 * @note 967 * @param pContext: (IN) It is the VSS 3GPP context in our case 968 * @param streamID: (IN) Id of the stream to which the Access Unit is related. 969 * @param pAU: (IN/OUT) Access Unit to be prepared. 
970 * @return M4NO_ERROR: there is no error 971 ****************************************************************************** 972 */ 973 M4OSA_ERR M4VSS3GPP_intStartAU( M4WRITER_Context pContext, 974 M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU ) 975 { 976 M4OSA_ERR err; 977 M4OSA_UInt32 uiMaxAuSize; 978 979 /** 980 * Given context is actually the VSS3GPP context */ 981 M4VSS3GPP_InternalEditContext *pC = 982 (M4VSS3GPP_InternalEditContext *)pContext; 983 984 /** 985 * Get the output AU to write into */ 986 err = pC->ShellAPI.pWriterDataFcts->pStartAU(pC->ewc.p3gpWriterContext, 987 M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU); 988 989 if( M4NO_ERROR != err ) 990 { 991 M4OSA_TRACE1_1( 992 "M4VSS3GPP_intStartAU: pWriterDataFcts->pStartAU(Video) returns 0x%x!", 993 err); 994 return err; 995 } 996 997 /** 998 * Return */ 999 M4OSA_TRACE3_0("M4VSS3GPP_intStartAU: returning M4NO_ERROR"); 1000 return M4NO_ERROR; 1001 } 1002 1003 /****************************************************************************** 1004 * M4OSA_ERR M4VSS3GPP_intProcessAU() 1005 * @brief ProcessAU writer-like interface used for the VSS 3GPP only 1006 * @note 1007 * @param pContext: (IN) It is the VSS 3GPP context in our case 1008 * @param streamID: (IN) Id of the stream to which the Access Unit is related. 1009 * @param pAU: (IN/OUT) Access Unit to be written 1010 * @return M4NO_ERROR: there is no error 1011 ****************************************************************************** 1012 */ 1013 M4OSA_ERR M4VSS3GPP_intProcessAU( M4WRITER_Context pContext, 1014 M4SYS_StreamID streamID, M4SYS_AccessUnit *pAU ) 1015 { 1016 M4OSA_ERR err; 1017 1018 /** 1019 * Given context is actually the VSS3GPP context */ 1020 M4VSS3GPP_InternalEditContext *pC = 1021 (M4VSS3GPP_InternalEditContext *)pContext; 1022 1023 /** 1024 * Fix the encoded AU time */ 1025 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1026 pC->ewc.dOutputVidCts = pAU->CTS; 1027 /** 1028 * Update time info for the Counter Time System to be equal to the bit-stream time */ 1029 M4VSS3GPP_intUpdateTimeInfo(pC, pAU); 1030 1031 /** 1032 * Write the AU */ 1033 err = pC->ShellAPI.pWriterDataFcts->pProcessAU(pC->ewc.p3gpWriterContext, 1034 M4VSS3GPP_WRITER_VIDEO_STREAM_ID, pAU); 1035 1036 if( M4NO_ERROR != err ) 1037 { 1038 M4OSA_TRACE1_1( 1039 "M4VSS3GPP_intProcessAU: pWriterDataFcts->pProcessAU(Video) returns 0x%x!", 1040 err); 1041 return err; 1042 } 1043 1044 /** 1045 * Return */ 1046 M4OSA_TRACE3_0("M4VSS3GPP_intProcessAU: returning M4NO_ERROR"); 1047 return M4NO_ERROR; 1048 } 1049 1050 /** 1051 ****************************************************************************** 1052 * M4OSA_ERR M4VSS3GPP_intVPP() 1053 * @brief We implement our own VideoPreProcessing function 1054 * @note It is called by the video encoder 1055 * @param pContext (IN) VPP context, which actually is the VSS 3GPP context in our case 1056 * @param pPlaneIn (IN) 1057 * @param pPlaneOut (IN/OUT) Pointer to an array of 3 planes that will contain the output 1058 * YUV420 image 1059 * @return M4NO_ERROR: No error 1060 ****************************************************************************** 1061 */ 1062 M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn, 1063 M4VIFI_ImagePlane *pPlaneOut ) 1064 { 1065 M4OSA_ERR err = M4NO_ERROR; 1066 M4_MediaTime ts; 1067 M4VIFI_ImagePlane *pTmp = M4OSA_NULL; 1068 M4VIFI_ImagePlane *pLastDecodedFrame = M4OSA_NULL ; 1069 M4VIFI_ImagePlane *pDecoderRenderFrame = M4OSA_NULL; 1070 M4VIFI_ImagePlane 
pTemp1[3],pTemp2[3]; 1071 M4VIFI_ImagePlane pTempPlaneClip1[3],pTempPlaneClip2[3]; 1072 M4OSA_UInt32 i = 0, yuvFrameWidth = 0, yuvFrameHeight = 0; 1073 M4OSA_Bool bSkipFrameEffect = M4OSA_FALSE; 1074 /** 1075 * VPP context is actually the VSS3GPP context */ 1076 M4VSS3GPP_InternalEditContext *pC = 1077 (M4VSS3GPP_InternalEditContext *)pContext; 1078 1079 memset((void *)pTemp1, 0, 3*sizeof(M4VIFI_ImagePlane)); 1080 memset((void *)pTemp2, 0, 3*sizeof(M4VIFI_ImagePlane)); 1081 memset((void *)pTempPlaneClip1, 0, 3*sizeof(M4VIFI_ImagePlane)); 1082 memset((void *)pTempPlaneClip2, 0, 3*sizeof(M4VIFI_ImagePlane)); 1083 1084 /** 1085 * Reset VPP error remembered in context */ 1086 pC->ewc.VppError = M4NO_ERROR; 1087 1088 /** 1089 * At the end of the editing, we may be called when no more clip is loaded. 1090 * (because to close the encoder properly it must be stepped one or twice...) */ 1091 if( M4OSA_NULL == pC->pC1 ) 1092 { 1093 /** 1094 * We must fill the input of the encoder with a dummy image, because 1095 * encoding noise leads to a huge video AU, and thus a writer buffer overflow. */ 1096 memset((void *)pPlaneOut[0].pac_data,0, 1097 pPlaneOut[0].u_stride * pPlaneOut[0].u_height); 1098 memset((void *)pPlaneOut[1].pac_data,0, 1099 pPlaneOut[1].u_stride * pPlaneOut[1].u_height); 1100 memset((void *)pPlaneOut[2].pac_data,0, 1101 pPlaneOut[2].u_stride * pPlaneOut[2].u_height); 1102 1103 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR (abort)"); 1104 return M4NO_ERROR; 1105 } 1106 1107 /** 1108 **************** Transition case ****************/ 1109 if( M4OSA_TRUE == pC->bTransitionEffect ) 1110 { 1111 1112 err = M4VSS3GPP_intAllocateYUV420(pTemp1, pC->ewc.uiVideoWidth, 1113 pC->ewc.uiVideoHeight); 1114 if (M4NO_ERROR != err) 1115 { 1116 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(1) returns 0x%x, \ 1117 returning M4NO_ERROR", err); 1118 pC->ewc.VppError = err; 1119 return M4NO_ERROR; /**< Return no error to the encoder core 1120 (else it may leak in some situations...) */ 1121 } 1122 1123 err = M4VSS3GPP_intAllocateYUV420(pTemp2, pC->ewc.uiVideoWidth, 1124 pC->ewc.uiVideoHeight); 1125 if (M4NO_ERROR != err) 1126 { 1127 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(2) returns 0x%x, \ 1128 returning M4NO_ERROR", err); 1129 pC->ewc.VppError = err; 1130 return M4NO_ERROR; /**< Return no error to the encoder core 1131 (else it may leak in some situations...) */ 1132 } 1133 1134 err = M4VSS3GPP_intAllocateYUV420(pC->yuv1, pC->ewc.uiVideoWidth, 1135 pC->ewc.uiVideoHeight); 1136 if( M4NO_ERROR != err ) 1137 { 1138 M4OSA_TRACE1_1( 1139 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\ 1140 returning M4NO_ERROR", 1141 err); 1142 pC->ewc.VppError = err; 1143 return 1144 M4NO_ERROR; /**< Return no error to the encoder core 1145 (else it may leak in some situations...) */ 1146 } 1147 1148 err = M4VSS3GPP_intAllocateYUV420(pC->yuv2, pC->ewc.uiVideoWidth, 1149 pC->ewc.uiVideoHeight); 1150 if( M4NO_ERROR != err ) 1151 { 1152 M4OSA_TRACE1_1( 1153 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\ 1154 returning M4NO_ERROR", 1155 err); 1156 pC->ewc.VppError = err; 1157 return 1158 M4NO_ERROR; /**< Return no error to the encoder core 1159 (else it may leak in some situations...) 
*/ 1160 } 1161 1162 err = M4VSS3GPP_intAllocateYUV420(pC->yuv3, pC->ewc.uiVideoWidth, 1163 pC->ewc.uiVideoHeight); 1164 if( M4NO_ERROR != err ) 1165 { 1166 M4OSA_TRACE1_1( 1167 "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420(3) returns 0x%x,\ 1168 returning M4NO_ERROR", 1169 err); 1170 pC->ewc.VppError = err; 1171 return 1172 M4NO_ERROR; /**< Return no error to the encoder core 1173 (else it may leak in some situations...) */ 1174 } 1175 1176 /** 1177 * Compute the time in the clip1 base: ts = to - Offset */ 1178 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1179 ts = pC->ewc.dInputVidCts - pC->pC1->iVoffset; 1180 1181 /** 1182 * Render Clip1 */ 1183 if( pC->pC1->isRenderDup == M4OSA_FALSE ) 1184 { 1185 pC->bIssecondClip = M4OSA_FALSE; 1186 1187 err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC1, ts, M4OSA_TRUE, 1188 pTempPlaneClip1, pTemp1, 1189 pPlaneOut); 1190 if ((M4NO_ERROR != err) && 1191 (M4WAR_VIDEORENDERER_NO_NEW_FRAME != err)) { 1192 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1193 M4VSS3GPP_intRenderFrameWithEffect returns 0x%x", err); 1194 pC->ewc.VppError = err; 1195 /** Return no error to the encoder core 1196 * else it may leak in some situations.*/ 1197 return M4NO_ERROR; 1198 } 1199 } 1200 if ((pC->pC1->isRenderDup == M4OSA_TRUE) || 1201 (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) { 1202 pTmp = pC->yuv1; 1203 if (pC->pC1->lastDecodedPlane != M4NO_ERROR) { 1204 /* Copy last decoded plane to output plane */ 1205 memcpy((void *)pTmp[0].pac_data, 1206 (void *)pC->pC1->lastDecodedPlane[0].pac_data, 1207 (pTmp[0].u_height * pTmp[0].u_width)); 1208 memcpy((void *)pTmp[1].pac_data, 1209 (void *)pC->pC1->lastDecodedPlane[1].pac_data, 1210 (pTmp[1].u_height * pTmp[1].u_width)); 1211 memcpy((void *)pTmp[2].pac_data, 1212 (void *)pC->pC1->lastDecodedPlane[2].pac_data, 1213 (pTmp[2].u_height * pTmp[2].u_width)); 1214 } 1215 pC->pC1->lastDecodedPlane = pTmp; 1216 } 1217 1218 /** 1219 * Compute the time in the clip2 base: ts = to - Offset */ 1220 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1221 ts = pC->ewc.dInputVidCts - pC->pC2->iVoffset; 1222 /** 1223 * Render Clip2 */ 1224 if( pC->pC2->isRenderDup == M4OSA_FALSE ) 1225 { 1226 1227 err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC2, ts, M4OSA_FALSE, 1228 pTempPlaneClip2, pTemp2, 1229 pPlaneOut); 1230 if ((M4NO_ERROR != err) && 1231 (M4WAR_VIDEORENDERER_NO_NEW_FRAME != err)) { 1232 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1233 M4VSS3GPP_intRenderFrameWithEffect returns 0x%x", err); 1234 pC->ewc.VppError = err; 1235 /** Return no error to the encoder core 1236 * else it may leak in some situations.*/ 1237 return M4NO_ERROR; 1238 } 1239 } 1240 if ((pC->pC2->isRenderDup == M4OSA_TRUE) || 1241 (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) { 1242 pTmp = pC->yuv2; 1243 if (pC->pC2->lastDecodedPlane != M4NO_ERROR) { 1244 /* Copy last decoded plane to output plane */ 1245 memcpy((void *)pTmp[0].pac_data, 1246 (void *)pC->pC2->lastDecodedPlane[0].pac_data, 1247 (pTmp[0].u_height * pTmp[0].u_width)); 1248 memcpy((void *)pTmp[1].pac_data, 1249 (void *)pC->pC2->lastDecodedPlane[1].pac_data, 1250 (pTmp[1].u_height * pTmp[1].u_width)); 1251 memcpy((void *)pTmp[2].pac_data, 1252 (void *)pC->pC2->lastDecodedPlane[2].pac_data, 1253 (pTmp[2].u_height * pTmp[2].u_width)); 1254 } 1255 pC->pC2->lastDecodedPlane = pTmp; 1256 } 1257 1258 1259 pTmp = pPlaneOut; 1260 err = M4VSS3GPP_intVideoTransition(pC, pTmp); 1261 1262 if( M4NO_ERROR != err ) 1263 { 1264 M4OSA_TRACE1_1( 1265 "M4VSS3GPP_intVPP: 
M4VSS3GPP_intVideoTransition returns 0x%x,\ 1266 returning M4NO_ERROR", 1267 err); 1268 pC->ewc.VppError = err; 1269 return M4NO_ERROR; /**< Return no error to the encoder core 1270 (else it may leak in some situations...) */ 1271 } 1272 for (i=0; i < 3; i++) 1273 { 1274 if(pTempPlaneClip2[i].pac_data != M4OSA_NULL) { 1275 free(pTempPlaneClip2[i].pac_data); 1276 pTempPlaneClip2[i].pac_data = M4OSA_NULL; 1277 } 1278 1279 if(pTempPlaneClip1[i].pac_data != M4OSA_NULL) { 1280 free(pTempPlaneClip1[i].pac_data); 1281 pTempPlaneClip1[i].pac_data = M4OSA_NULL; 1282 } 1283 1284 if (pTemp2[i].pac_data != M4OSA_NULL) { 1285 free(pTemp2[i].pac_data); 1286 pTemp2[i].pac_data = M4OSA_NULL; 1287 } 1288 1289 if (pTemp1[i].pac_data != M4OSA_NULL) { 1290 free(pTemp1[i].pac_data); 1291 pTemp1[i].pac_data = M4OSA_NULL; 1292 } 1293 } 1294 } 1295 /** 1296 **************** No Transition case ****************/ 1297 else 1298 { 1299 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: NO transition case"); 1300 /** 1301 * Compute the time in the clip base: ts = to - Offset */ 1302 ts = pC->ewc.dInputVidCts - pC->pC1->iVoffset; 1303 pC->bIssecondClip = M4OSA_FALSE; 1304 /** 1305 * Render */ 1306 if (pC->pC1->isRenderDup == M4OSA_FALSE) { 1307 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: renderdup false"); 1308 /** 1309 * Check if resizing is needed */ 1310 if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) { 1311 if ((pC->pC1->pSettings->FileType == 1312 M4VIDEOEDITING_kFileType_ARGB8888) && 1313 (pC->nbActiveEffects == 0) && 1314 (pC->pC1->bGetYuvDataFromDecoder == M4OSA_FALSE)) { 1315 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption( 1316 pC->pC1->pViDecCtxt, 1317 M4DECODER_kOptionID_EnableYuvWithEffect, 1318 (M4OSA_DataOption)M4OSA_TRUE); 1319 if (M4NO_ERROR == err ) { 1320 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender( 1321 pC->pC1->pViDecCtxt, &ts, 1322 pPlaneOut, M4OSA_TRUE); 1323 } 1324 } else { 1325 if (pC->pC1->pSettings->FileType == 1326 M4VIDEOEDITING_kFileType_ARGB8888) { 1327 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption( 1328 pC->pC1->pViDecCtxt, 1329 M4DECODER_kOptionID_EnableYuvWithEffect, 1330 (M4OSA_DataOption)M4OSA_FALSE); 1331 } 1332 if (M4NO_ERROR == err) { 1333 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender( 1334 pC->pC1->pViDecCtxt, &ts, 1335 pC->pC1->m_pPreResizeFrame, M4OSA_TRUE); 1336 } 1337 } 1338 if (M4NO_ERROR != err) { 1339 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1340 m_pFctRender() returns error 0x%x", err); 1341 pC->ewc.VppError = err; 1342 return M4NO_ERROR; 1343 } 1344 if (pC->pC1->pSettings->FileType != 1345 M4VIDEOEDITING_kFileType_ARGB8888) { 1346 if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) { 1347 // Save width and height of un-rotated frame 1348 yuvFrameWidth = pC->pC1->m_pPreResizeFrame[0].u_width; 1349 yuvFrameHeight = pC->pC1->m_pPreResizeFrame[0].u_height; 1350 err = M4VSS3GPP_intRotateVideo(pC->pC1->m_pPreResizeFrame, 1351 pC->pC1->pSettings->ClipProperties.videoRotationDegrees); 1352 if (M4NO_ERROR != err) { 1353 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1354 rotateVideo() returns error 0x%x", err); 1355 pC->ewc.VppError = err; 1356 return M4NO_ERROR; 1357 } 1358 } 1359 } 1360 1361 if (pC->nbActiveEffects > 0) { 1362 pC->pC1->bGetYuvDataFromDecoder = M4OSA_TRUE; 1363 /** 1364 * If we do modify the image, we need an intermediate 1365 * image plane */ 1366 err = M4VSS3GPP_intAllocateYUV420(pTemp1, 1367 pC->pC1->m_pPreResizeFrame[0].u_width, 1368 pC->pC1->m_pPreResizeFrame[0].u_height); 1369 if (M4NO_ERROR != err) { 1370 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1371 
M4VSS3GPP_intAllocateYUV420 error 0x%x", err); 1372 pC->ewc.VppError = err; 1373 return M4NO_ERROR; 1374 } 1375 /* If video frame need to be resized, then apply the overlay after 1376 * the frame was rendered with rendering mode. 1377 * Here skip the framing(overlay) effect when applying video Effect. */ 1378 bSkipFrameEffect = M4OSA_TRUE; 1379 err = M4VSS3GPP_intApplyVideoEffect(pC, 1380 pC->pC1->m_pPreResizeFrame, pTemp1, bSkipFrameEffect); 1381 if (M4NO_ERROR != err) { 1382 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1383 M4VSS3GPP_intApplyVideoEffect() error 0x%x", err); 1384 pC->ewc.VppError = err; 1385 return M4NO_ERROR; 1386 } 1387 pDecoderRenderFrame= pTemp1; 1388 1389 } else { 1390 pDecoderRenderFrame = pC->pC1->m_pPreResizeFrame; 1391 } 1392 /* Prepare overlay temporary buffer if overlay exist */ 1393 if (pC->bClip1ActiveFramingEffect) { 1394 err = M4VSS3GPP_intAllocateYUV420(pTemp2, 1395 pPlaneOut[0].u_width, pPlaneOut[0].u_height); 1396 if (M4NO_ERROR != err) { 1397 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420 \ 1398 returns 0x%x, returning M4NO_ERROR", err); 1399 pC->ewc.VppError = err; 1400 return M4NO_ERROR; 1401 } 1402 pTmp = pTemp2; 1403 } else { 1404 pTmp = pPlaneOut; 1405 } 1406 1407 /* Do rendering mode. */ 1408 if ((pC->pC1->bGetYuvDataFromDecoder == M4OSA_TRUE) || 1409 (pC->pC1->pSettings->FileType != 1410 M4VIDEOEDITING_kFileType_ARGB8888)) { 1411 1412 err = M4VSS3GPP_intApplyRenderingMode(pC, 1413 pC->pC1->pSettings->xVSS.MediaRendering, 1414 pDecoderRenderFrame, pTmp); 1415 if (M4NO_ERROR != err) { 1416 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1417 M4VSS3GPP_intApplyRenderingMode) error 0x%x ", err); 1418 pC->ewc.VppError = err; 1419 return M4NO_ERROR; 1420 } 1421 } 1422 1423 /* Apply overlay if overlay is exist */ 1424 if (pC->bClip1ActiveFramingEffect) { 1425 pDecoderRenderFrame = pTmp; 1426 pTmp = pPlaneOut; 1427 err = M4VSS3GPP_intApplyVideoOverlay(pC, 1428 pDecoderRenderFrame, pTmp); 1429 if (M4NO_ERROR != err) { 1430 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ 1431 M4VSS3GPP_intApplyVideoOverlay) error 0x%x ", err); 1432 pC->ewc.VppError = err; 1433 return M4NO_ERROR; 1434 } 1435 } 1436 1437 if ((pC->pC1->pSettings->FileType == 1438 M4VIDEOEDITING_kFileType_ARGB8888) && 1439 (pC->nbActiveEffects == 0) && 1440 (pC->pC1->bGetYuvDataFromDecoder == M4OSA_TRUE)) { 1441 1442 err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption( 1443 pC->pC1->pViDecCtxt, 1444 M4DECODER_kOptionID_YuvWithEffectNonContiguous, 1445 (M4OSA_DataOption)pTmp); 1446 if (M4NO_ERROR != err) { 1447 pC->ewc.VppError = err; 1448 return M4NO_ERROR; 1449 } 1450 pC->pC1->bGetYuvDataFromDecoder = M4OSA_FALSE; 1451 } 1452 1453 // Reset original width and height for resize frame plane 1454 if (0 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees && 1455 180 != pC->pC1->pSettings->ClipProperties.videoRotationDegrees) { 1456 1457 M4VSS3GPP_intSetYUV420Plane(pC->pC1->m_pPreResizeFrame, 1458 yuvFrameWidth, yuvFrameHeight); 1459 } 1460 } 1461 else 1462 { 1463 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: NO resize required"); 1464 if (pC->nbActiveEffects > 0) { 1465 /** If we do modify the image, we need an 1466 * intermediate image plane */ 1467 err = M4VSS3GPP_intAllocateYUV420(pTemp1, 1468 pC->ewc.uiVideoWidth, 1469 pC->ewc.uiVideoHeight); 1470 if (M4NO_ERROR != err) { 1471 pC->ewc.VppError = err; 1472 return M4NO_ERROR; 1473 } 1474 pDecoderRenderFrame = pTemp1; 1475 } 1476 else { 1477 pDecoderRenderFrame = pPlaneOut; 1478 } 1479 1480 pTmp = pPlaneOut; 1481 err = 
pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender( 1482 pC->pC1->pViDecCtxt, &ts, 1483 pDecoderRenderFrame, M4OSA_TRUE); 1484 if (M4NO_ERROR != err) { 1485 pC->ewc.VppError = err; 1486 return M4NO_ERROR; 1487 } 1488 1489 if (pC->nbActiveEffects > 0) { 1490 /* Here we do not skip the overlay effect since 1491 * overlay and video frame are both of same resolution */ 1492 bSkipFrameEffect = M4OSA_FALSE; 1493 err = M4VSS3GPP_intApplyVideoEffect(pC, 1494 pDecoderRenderFrame,pPlaneOut,bSkipFrameEffect); 1495 } 1496 if (M4NO_ERROR != err) { 1497 pC->ewc.VppError = err; 1498 return M4NO_ERROR; 1499 } 1500 } 1501 pC->pC1->lastDecodedPlane = pTmp; 1502 pC->pC1->iVideoRenderCts = (M4OSA_Int32)ts; 1503 1504 } else { 1505 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: renderdup true"); 1506 1507 if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) { 1508 /** 1509 * Copy last decoded plane to output plane */ 1510 memcpy((void *)pC->pC1->m_pPreResizeFrame[0].pac_data, 1511 (void *)pC->pC1->lastDecodedPlane[0].pac_data, 1512 (pC->pC1->m_pPreResizeFrame[0].u_height * pC->pC1->m_pPreResizeFrame[0].u_width)); 1513 1514 memcpy((void *)pC->pC1->m_pPreResizeFrame[1].pac_data, 1515 (void *)pC->pC1->lastDecodedPlane[1].pac_data, 1516 (pC->pC1->m_pPreResizeFrame[1].u_height * pC->pC1->m_pPreResizeFrame[1].u_width)); 1517 1518 memcpy((void *)pC->pC1->m_pPreResizeFrame[2].pac_data, 1519 (void *)pC->pC1->lastDecodedPlane[2].pac_data, 1520 (pC->pC1->m_pPreResizeFrame[2].u_height * pC->pC1->m_pPreResizeFrame[2].u_width)); 1521 1522 if(pC->nbActiveEffects > 0) { 1523 /** 1524 * If we do modify the image, we need an 1525 * intermediate image plane */ 1526 err = M4VSS3GPP_intAllocateYUV420(pTemp1, 1527 pC->pC1->m_pPreResizeFrame[0].u_width, 1528 pC->pC1->m_pPreResizeFrame[0].u_height); 1529 if (M4NO_ERROR != err) { 1530 pC->ewc.VppError = err; 1531 return M4NO_ERROR; 1532 } 1533 /* If video frame need to be resized, then apply the overlay after 1534 * the frame was rendered with rendering mode. 1535 * Here skip the framing(overlay) effect when applying video Effect. 
*/
1536 bSkipFrameEffect = M4OSA_TRUE;
1537 err = M4VSS3GPP_intApplyVideoEffect(pC,
1538 pC->pC1->m_pPreResizeFrame,pTemp1, bSkipFrameEffect);
1539 if (M4NO_ERROR != err) {
1540 pC->ewc.VppError = err;
1541 return M4NO_ERROR;
1542 }
1543 pDecoderRenderFrame= pTemp1;
1544 } else {
1545 pDecoderRenderFrame = pC->pC1->m_pPreResizeFrame;
1546 }
1547 /* Prepare overlay temporary buffer if an overlay exists */
1548 if (pC->bClip1ActiveFramingEffect) {
1549 err = M4VSS3GPP_intAllocateYUV420(
1550 pTemp2, pC->ewc.uiVideoWidth, pC->ewc.uiVideoHeight);
1551 if (M4NO_ERROR != err) {
1552 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420 \
1553 returns 0x%x, returning M4NO_ERROR", err);
1554 pC->ewc.VppError = err;
1555 return M4NO_ERROR;
1556 }
1557 pTmp = pTemp2;
1558 } else {
1559 pTmp = pPlaneOut;
1560 }
1561 /* Do rendering mode */
1562 err = M4VSS3GPP_intApplyRenderingMode(pC,
1563 pC->pC1->pSettings->xVSS.MediaRendering,
1564 pDecoderRenderFrame, pTmp);
1565 if (M4NO_ERROR != err) {
1566 pC->ewc.VppError = err;
1567 return M4NO_ERROR;
1568 }
1569 /* Apply overlay if an overlay exists */
1570 pTmp = pPlaneOut;
1571 if (pC->bClip1ActiveFramingEffect) {
1572 err = M4VSS3GPP_intApplyVideoOverlay(pC,
1573 pTemp2, pTmp);
1574 if (M4NO_ERROR != err) {
1575 M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \
1576 M4VSS3GPP_intApplyVideoOverlay() error 0x%x ", err);
1577 pC->ewc.VppError = err;
1578 return M4NO_ERROR;
1579 }
1580 }
1581 } else {
1582
1583 err = M4VSS3GPP_intAllocateYUV420(pTemp1,
1584 pC->ewc.uiVideoWidth,
1585 pC->ewc.uiVideoHeight);
1586 if (M4NO_ERROR != err) {
1587 pC->ewc.VppError = err;
1588 return M4NO_ERROR;
1589 }
/* Copy into the planes just allocated in pTemp1 (pLastDecodedFrame would otherwise still be M4OSA_NULL) */
pLastDecodedFrame = pTemp1;
1590 /**
1591 * Copy last decoded plane to output plane */
1592 memcpy((void *)pLastDecodedFrame[0].pac_data,
1593 (void *)pC->pC1->lastDecodedPlane[0].pac_data,
1594 (pLastDecodedFrame[0].u_height * pLastDecodedFrame[0].u_width));
1595
1596 memcpy((void *)pLastDecodedFrame[1].pac_data,
1597 (void *)pC->pC1->lastDecodedPlane[1].pac_data,
1598 (pLastDecodedFrame[1].u_height * pLastDecodedFrame[1].u_width));
1599
1600 memcpy((void *)pLastDecodedFrame[2].pac_data,
1601 (void *)pC->pC1->lastDecodedPlane[2].pac_data,
1602 (pLastDecodedFrame[2].u_height * pLastDecodedFrame[2].u_width));
1603
1604 pTmp = pPlaneOut;
1605 /**
1606 * Check if there is an effect */
1607 if(pC->nbActiveEffects > 0) {
1608 /* Here we do not skip the overlay effect since
1609 * overlay and video are both of the same resolution */
1610 bSkipFrameEffect = M4OSA_FALSE;
1611 err = M4VSS3GPP_intApplyVideoEffect(pC,
1612 pLastDecodedFrame, pTmp,bSkipFrameEffect);
1613 if (M4NO_ERROR != err) {
1614 pC->ewc.VppError = err;
1615 return M4NO_ERROR;
1616 }
1617 }
1618 }
1619 pC->pC1->lastDecodedPlane = pTmp;
1620 }
1621
1622 M4OSA_TRACE3_1("M4VSS3GPP_intVPP: Rendered at CTS %.3f", ts);
1623
1624 for (i=0; i<3; i++) {
1625 if (pTemp1[i].pac_data != M4OSA_NULL) {
1626 free(pTemp1[i].pac_data);
1627 pTemp1[i].pac_data = M4OSA_NULL;
1628 }
1629 }
1630 for (i=0; i<3; i++) {
1631 if (pTemp2[i].pac_data != M4OSA_NULL) {
1632 free(pTemp2[i].pac_data);
1633 pTemp2[i].pac_data = M4OSA_NULL;
1634 }
1635 }
1636 }
1637
1638 /**
1639 * Return */
1640 M4OSA_TRACE3_0("M4VSS3GPP_intVPP: returning M4NO_ERROR");
1641 return M4NO_ERROR;
1642 }
1643 /**
1644 ******************************************************************************
1645 * M4OSA_ERR M4VSS3GPP_intApplyVideoOverlay()
1646 * @brief Apply video overlay from pPlaneIn to pPlaneOut
1647 * @param pC (IN/OUT) Internal edit context
1648 * @param pInputPlanes (IN) Input raw YUV420 image
1649 * @param pOutputPlanes (IN/OUT) Output raw YUV420 image
1650 * @return M4NO_ERROR: No error
1651 ******************************************************************************
1652 */
1653 static M4OSA_ERR
1654 M4VSS3GPP_intApplyVideoOverlay (M4VSS3GPP_InternalEditContext *pC,
1655 M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) {
1656
1657 M4VSS3GPP_ClipContext *pClip;
1658 M4VSS3GPP_EffectSettings *pFx;
1659 M4VSS3GPP_ExternalProgress extProgress;
1660 M4OSA_Double VideoEffectTime;
1661 M4OSA_Double PercentageDone;
1662 M4OSA_UInt8 NumActiveEffects = 0;
1663 M4OSA_UInt32 Cts = 0;
1664 M4OSA_Int32 nextEffectTime;
1665 M4OSA_Int32 tmp;
1666 M4OSA_UInt8 i;
1667 M4OSA_ERR err;
1668
1669 pClip = pC->pC1;
1670 if (pC->bIssecondClip == M4OSA_TRUE) {
1671 NumActiveEffects = pC->nbActiveEffects1;
1672 } else {
1673 NumActiveEffects = pC->nbActiveEffects;
1674 }
1675 for (i=0; i<NumActiveEffects; i++) {
1676 if (pC->bIssecondClip == M4OSA_TRUE) {
1677 pFx = &(pC->pEffectsList[pC->pActiveEffectsList1[i]]);
1678 /* Compute how far from the beginning of the effect we are, in clip-base time. */
1679 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1680 VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) +
1681 pC->pTransitionList[pC->uiCurrentClip].uiTransitionDuration - pFx->uiStartTime;
1682 } else {
1683 pFx = &(pC->pEffectsList[pC->pActiveEffectsList[i]]);
1684 /* Compute how far from the beginning of the effect we are, in clip-base time. */
1685 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1686 VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pFx->uiStartTime;
1687 }
1688 /* Do the framing (overlay) effect only;
1689 * skip other color effects, which have already been applied */
1690 if (pFx->xVSS.pFramingBuffer == M4OSA_NULL) {
1691 continue;
1692 }
1693
1694 /* To calculate %, subtract timeIncrement because the effect should finish
1695 * on the last frame, which is presented from CTS = eof-timeIncrement till CTS = eof */
1696 PercentageDone = VideoEffectTime / ((M4OSA_Float)pFx->uiDuration);
1697
1698 if (PercentageDone < 0.0) {
1699 PercentageDone = 0.0;
1700 }
1701 if (PercentageDone > 1.0) {
1702 PercentageDone = 1.0;
1703 }
1704 /**
1705 * Compute where we are in the effect (scale is 0->1000) */
1706 tmp = (M4OSA_Int32)(PercentageDone * 1000);
1707
1708 /**
1709 * Set the progress info provided to the external function */
1710 extProgress.uiProgress = (M4OSA_UInt32)tmp;
1711 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1712 extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts;
1713 extProgress.uiClipTime = extProgress.uiOutputTime - pClip->iVoffset;
1714 extProgress.bIsLast = M4OSA_FALSE;
1715 // Decorrelate input and output encoding timestamp to handle encoder prefetch
1716 nextEffectTime = (M4OSA_Int32)(pC->ewc.dInputVidCts \
1717 + pC->dOutputFrameDuration);
1718 if (nextEffectTime >= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) {
1719 extProgress.bIsLast = M4OSA_TRUE;
1720 }
1721 err = pFx->ExtVideoEffectFct(pFx->pExtVideoEffectFctCtxt,
1722 pPlaneIn, pPlaneOut, &extProgress,
1723 pFx->VideoEffectType - M4VSS3GPP_kVideoEffectType_External);
1724
1725 if (M4NO_ERROR != err) {
1726 M4OSA_TRACE1_1(
1727 "M4VSS3GPP_intApplyVideoOverlay: \
1728 External video effect function returns 0x%x!",
1729 err);
1730 return err;
1731 }
1732 }
1733
1734 /**
1735 * Return */
1736 M4OSA_TRACE3_0("M4VSS3GPP_intApplyVideoOverlay: returning M4NO_ERROR");
1737 return M4NO_ERROR;
1738 }
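/*
 * Illustrative example of the progress computation above (the millisecond
 * values are assumed for the sake of the example, not taken from any real
 * clip): for an external framing effect with uiStartTime = 4000 and
 * uiDuration = 2000, an input CTS of 5000 gives
 *
 *     VideoEffectTime        = 5000 - 4000 = 1000
 *     PercentageDone         = 1000 / 2000 = 0.5   (clamped to [0.0, 1.0])
 *     extProgress.uiProgress = 0.5 * 1000  = 500
 *
 * So the external effect callback receives uiProgress on a 0..1000 scale,
 * and extProgress.bIsLast is raised once the next frame time
 * (dInputVidCts + dOutputFrameDuration) reaches uiStartTime + uiDuration.
 */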
1739 /** 1740 ****************************************************************************** 1741 * M4OSA_ERR M4VSS3GPP_intApplyVideoEffect() 1742 * @brief Apply video effect from pPlaneIn to pPlaneOut 1743 * @param pC (IN/OUT) Internal edit context 1744 * @param uiClip1orClip2 (IN/OUT) 1 for first clip, 2 for second clip 1745 * @param pInputPlanes (IN) Input raw YUV420 image 1746 * @param pOutputPlanes (IN/OUT) Output raw YUV420 image 1747 * @param bSkipFramingEffect (IN) skip framing effect flag 1748 * @return M4NO_ERROR: No error 1749 ****************************************************************************** 1750 */ 1751 static M4OSA_ERR 1752 M4VSS3GPP_intApplyVideoEffect (M4VSS3GPP_InternalEditContext *pC, 1753 M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut, 1754 M4OSA_Bool bSkipFramingEffect) { 1755 1756 M4OSA_ERR err; 1757 1758 M4VSS3GPP_ClipContext *pClip; 1759 M4VSS3GPP_EffectSettings *pFx; 1760 M4VSS3GPP_ExternalProgress extProgress; 1761 1762 M4OSA_Double VideoEffectTime; 1763 M4OSA_Double PercentageDone; 1764 M4OSA_Int32 tmp; 1765 1766 M4VIFI_ImagePlane *pPlaneTempIn; 1767 M4VIFI_ImagePlane *pPlaneTempOut; 1768 M4VIFI_ImagePlane pTempYuvPlane[3]; 1769 M4OSA_UInt8 i; 1770 M4OSA_UInt8 NumActiveEffects =0; 1771 1772 1773 pClip = pC->pC1; 1774 if (pC->bIssecondClip == M4OSA_TRUE) 1775 { 1776 NumActiveEffects = pC->nbActiveEffects1; 1777 } 1778 else 1779 { 1780 NumActiveEffects = pC->nbActiveEffects; 1781 } 1782 1783 memset((void *)pTempYuvPlane, 0, 3*sizeof(M4VIFI_ImagePlane)); 1784 1785 /** 1786 * Allocate temporary plane if needed RC */ 1787 if (NumActiveEffects > 1) { 1788 err = M4VSS3GPP_intAllocateYUV420(pTempYuvPlane, pPlaneOut->u_width, 1789 pPlaneOut->u_height); 1790 1791 if( M4NO_ERROR != err ) 1792 { 1793 M4OSA_TRACE1_1( 1794 "M4VSS3GPP_intApplyVideoEffect: M4VSS3GPP_intAllocateYUV420(4) returns 0x%x,\ 1795 returning M4NO_ERROR", 1796 err); 1797 pC->ewc.VppError = err; 1798 return 1799 M4NO_ERROR; /**< Return no error to the encoder core 1800 (else it may leak in some situations...) */ 1801 } 1802 } 1803 1804 if (NumActiveEffects % 2 == 0) 1805 { 1806 pPlaneTempIn = pPlaneIn; 1807 pPlaneTempOut = pTempYuvPlane; 1808 } 1809 else 1810 { 1811 pPlaneTempIn = pPlaneIn; 1812 pPlaneTempOut = pPlaneOut; 1813 } 1814 1815 for (i=0; i<NumActiveEffects; i++) 1816 { 1817 if (pC->bIssecondClip == M4OSA_TRUE) 1818 { 1819 1820 1821 pFx = &(pC->pEffectsList[pC->pActiveEffectsList1[i]]); 1822 /* Compute how far from the beginning of the effect we are, in clip-base time. */ 1823 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1824 VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) + 1825 pC->pTransitionList[pC->uiCurrentClip]. 1826 uiTransitionDuration- pFx->uiStartTime; 1827 } 1828 else 1829 { 1830 pFx = &(pC->pEffectsList[pC->pActiveEffectsList[i]]); 1831 /* Compute how far from the beginning of the effect we are, in clip-base time. 
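For this first-clip branch it is simply the input video CTS minus the effect start
time; in the second-clip branch above, the transition duration is added to the
input CTS as well.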
*/ 1832 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1833 VideoEffectTime = ((M4OSA_Int32)pC->ewc.dInputVidCts) - pFx->uiStartTime; 1834 } 1835 1836 1837 1838 /* To calculate %, substract timeIncrement because effect should finish on the last frame*/ 1839 /* which is presented from CTS = eof-timeIncrement till CTS = eof */ 1840 PercentageDone = VideoEffectTime 1841 / ((M4OSA_Float)pFx->uiDuration/*- pC->dOutputFrameDuration*/); 1842 1843 if( PercentageDone < 0.0 ) 1844 PercentageDone = 0.0; 1845 1846 if( PercentageDone > 1.0 ) 1847 PercentageDone = 1.0; 1848 1849 switch( pFx->VideoEffectType ) 1850 { 1851 case M4VSS3GPP_kVideoEffectType_FadeFromBlack: 1852 /** 1853 * Compute where we are in the effect (scale is 0->1024). */ 1854 tmp = (M4OSA_Int32)(PercentageDone * 1024); 1855 1856 /** 1857 * Apply the darkening effect */ 1858 err = 1859 M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn, 1860 (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL); 1861 1862 if( M4NO_ERROR != err ) 1863 { 1864 M4OSA_TRACE1_1( 1865 "M4VSS3GPP_intApplyVideoEffect:\ 1866 M4VFL_modifyLumaWithScale returns error 0x%x,\ 1867 returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", 1868 err); 1869 return M4VSS3GPP_ERR_LUMA_FILTER_ERROR; 1870 } 1871 break; 1872 1873 case M4VSS3GPP_kVideoEffectType_FadeToBlack: 1874 /** 1875 * Compute where we are in the effect (scale is 0->1024) */ 1876 tmp = (M4OSA_Int32)(( 1.0 - PercentageDone) * 1024); 1877 1878 /** 1879 * Apply the darkening effect */ 1880 err = 1881 M4VFL_modifyLumaWithScale((M4ViComImagePlane *)pPlaneTempIn, 1882 (M4ViComImagePlane *)pPlaneTempOut, tmp, M4OSA_NULL); 1883 1884 if( M4NO_ERROR != err ) 1885 { 1886 M4OSA_TRACE1_1( 1887 "M4VSS3GPP_intApplyVideoEffect:\ 1888 M4VFL_modifyLumaWithScale returns error 0x%x,\ 1889 returning M4VSS3GPP_ERR_LUMA_FILTER_ERROR", 1890 err); 1891 return M4VSS3GPP_ERR_LUMA_FILTER_ERROR; 1892 } 1893 break; 1894 1895 default: 1896 if( pFx->VideoEffectType 1897 >= M4VSS3GPP_kVideoEffectType_External ) 1898 { 1899 M4OSA_UInt32 Cts = 0; 1900 M4OSA_Int32 nextEffectTime; 1901 1902 /** 1903 * Compute where we are in the effect (scale is 0->1000) */ 1904 tmp = (M4OSA_Int32)(PercentageDone * 1000); 1905 1906 /** 1907 * Set the progress info provided to the external function */ 1908 extProgress.uiProgress = (M4OSA_UInt32)tmp; 1909 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1910 extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts; 1911 extProgress.uiClipTime = extProgress.uiOutputTime - pClip->iVoffset; 1912 extProgress.bIsLast = M4OSA_FALSE; 1913 // Decorrelate input and output encoding timestamp to handle encoder prefetch 1914 nextEffectTime = (M4OSA_Int32)(pC->ewc.dInputVidCts \ 1915 + pC->dOutputFrameDuration); 1916 if(nextEffectTime >= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) 1917 { 1918 extProgress.bIsLast = M4OSA_TRUE; 1919 } 1920 /* Here skip the framing effect, 1921 * do the framing effect after apply rendering mode */ 1922 if ((pFx->xVSS.pFramingBuffer != M4OSA_NULL) && 1923 bSkipFramingEffect == M4OSA_TRUE) { 1924 memcpy(pPlaneTempOut[0].pac_data, pPlaneTempIn[0].pac_data, 1925 pPlaneTempIn[0].u_height * pPlaneTempIn[0].u_width); 1926 memcpy(pPlaneTempOut[1].pac_data, pPlaneTempIn[1].pac_data, 1927 pPlaneTempIn[1].u_height * pPlaneTempIn[1].u_width); 1928 memcpy(pPlaneTempOut[2].pac_data, pPlaneTempIn[2].pac_data, 1929 pPlaneTempIn[2].u_height * pPlaneTempIn[2].u_width); 1930 1931 } else { 1932 err = 
pFx->ExtVideoEffectFct(pFx->pExtVideoEffectFctCtxt, 1933 pPlaneTempIn, pPlaneTempOut, &extProgress, 1934 pFx->VideoEffectType 1935 - M4VSS3GPP_kVideoEffectType_External); 1936 } 1937 if( M4NO_ERROR != err ) 1938 { 1939 M4OSA_TRACE1_1( 1940 "M4VSS3GPP_intApplyVideoEffect: \ 1941 External video effect function returns 0x%x!", 1942 err); 1943 return err; 1944 } 1945 break; 1946 } 1947 else 1948 { 1949 M4OSA_TRACE1_1( 1950 "M4VSS3GPP_intApplyVideoEffect: unknown effect type (0x%x),\ 1951 returning M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE", 1952 pFx->VideoEffectType); 1953 return M4VSS3GPP_ERR_INVALID_VIDEO_EFFECT_TYPE; 1954 } 1955 } 1956 /** 1957 * RC Updates pTempPlaneIn and pTempPlaneOut depending on current effect */ 1958 if (((i % 2 == 0) && (NumActiveEffects % 2 == 0)) 1959 || ((i % 2 != 0) && (NumActiveEffects % 2 != 0))) 1960 { 1961 pPlaneTempIn = pTempYuvPlane; 1962 pPlaneTempOut = pPlaneOut; 1963 } 1964 else 1965 { 1966 pPlaneTempIn = pPlaneOut; 1967 pPlaneTempOut = pTempYuvPlane; 1968 } 1969 } 1970 1971 for(i=0; i<3; i++) { 1972 if(pTempYuvPlane[i].pac_data != M4OSA_NULL) { 1973 free(pTempYuvPlane[i].pac_data); 1974 pTempYuvPlane[i].pac_data = M4OSA_NULL; 1975 } 1976 } 1977 1978 /** 1979 * Return */ 1980 M4OSA_TRACE3_0("M4VSS3GPP_intApplyVideoEffect: returning M4NO_ERROR"); 1981 return M4NO_ERROR; 1982 } 1983 1984 /** 1985 ****************************************************************************** 1986 * M4OSA_ERR M4VSS3GPP_intVideoTransition() 1987 * @brief Apply video transition effect pC1+pC2->pPlaneOut 1988 * @param pC (IN/OUT) Internal edit context 1989 * @param pOutputPlanes (IN/OUT) Output raw YUV420 image 1990 * @return M4NO_ERROR: No error 1991 ****************************************************************************** 1992 */ 1993 static M4OSA_ERR 1994 M4VSS3GPP_intVideoTransition( M4VSS3GPP_InternalEditContext *pC, 1995 M4VIFI_ImagePlane *pPlaneOut ) 1996 { 1997 M4OSA_ERR err; 1998 M4OSA_Int32 iProgress; 1999 M4VSS3GPP_ExternalProgress extProgress; 2000 M4VIFI_ImagePlane *pPlane; 2001 M4OSA_Int32 i; 2002 const M4OSA_Int32 iDur = (M4OSA_Int32)pC-> 2003 pTransitionList[pC->uiCurrentClip].uiTransitionDuration; 2004 2005 /** 2006 * Compute how far from the end cut we are, in clip-base time. 2007 * It is done with integers because the offset and begin cut have been rounded already. */ 2008 // Decorrelate input and output encoding timestamp to handle encoder prefetch 2009 iProgress = (M4OSA_Int32)((M4OSA_Double)pC->pC1->iEndTime) - pC->ewc.dInputVidCts + 2010 ((M4OSA_Double)pC->pC1->iVoffset); 2011 /** 2012 * We must remove the duration of one frame, else we would almost never reach the end 2013 * (It's kind of a "pile and intervals" issue). 
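 * For example, with a 33 ms output frame duration the last transition frame is
 * presented at (end cut - 33 ms); removing one frame duration lets iProgress reach
 * zero on that frame, so the transition progress computed below really reaches 1000.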
*/ 2014 iProgress -= (M4OSA_Int32)pC->dOutputFrameDuration; 2015 2016 if( iProgress < 0 ) /**< Sanity checks */ 2017 { 2018 iProgress = 0; 2019 } 2020 2021 /** 2022 * Compute where we are in the transition, on a base 1000 */ 2023 iProgress = ( ( iDur - iProgress) * 1000) / iDur; 2024 2025 /** 2026 * Sanity checks */ 2027 if( iProgress < 0 ) 2028 { 2029 iProgress = 0; 2030 } 2031 else if( iProgress > 1000 ) 2032 { 2033 iProgress = 1000; 2034 } 2035 2036 switch( pC->pTransitionList[pC->uiCurrentClip].TransitionBehaviour ) 2037 { 2038 case M4VSS3GPP_TransitionBehaviour_SpeedUp: 2039 iProgress = ( iProgress * iProgress) / 1000; 2040 break; 2041 2042 case M4VSS3GPP_TransitionBehaviour_Linear: 2043 /*do nothing*/ 2044 break; 2045 2046 case M4VSS3GPP_TransitionBehaviour_SpeedDown: 2047 iProgress = (M4OSA_Int32)(sqrt(iProgress * 1000)); 2048 break; 2049 2050 case M4VSS3GPP_TransitionBehaviour_SlowMiddle: 2051 if( iProgress < 500 ) 2052 { 2053 iProgress = (M4OSA_Int32)(sqrt(iProgress * 500)); 2054 } 2055 else 2056 { 2057 iProgress = 2058 (M4OSA_Int32)(( ( ( iProgress - 500) * (iProgress - 500)) 2059 / 500) + 500); 2060 } 2061 break; 2062 2063 case M4VSS3GPP_TransitionBehaviour_FastMiddle: 2064 if( iProgress < 500 ) 2065 { 2066 iProgress = (M4OSA_Int32)(( iProgress * iProgress) / 500); 2067 } 2068 else 2069 { 2070 iProgress = (M4OSA_Int32)(sqrt(( iProgress - 500) * 500) + 500); 2071 } 2072 break; 2073 2074 default: 2075 /*do nothing*/ 2076 break; 2077 } 2078 2079 switch( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType ) 2080 { 2081 case M4VSS3GPP_kVideoTransitionType_CrossFade: 2082 /** 2083 * Apply the transition effect */ 2084 err = M4VIFI_ImageBlendingonYUV420(M4OSA_NULL, 2085 (M4ViComImagePlane *)pC->yuv1, 2086 (M4ViComImagePlane *)pC->yuv2, 2087 (M4ViComImagePlane *)pPlaneOut, iProgress); 2088 2089 if( M4NO_ERROR != err ) 2090 { 2091 M4OSA_TRACE1_1( 2092 "M4VSS3GPP_intVideoTransition:\ 2093 M4VIFI_ImageBlendingonYUV420 returns error 0x%x,\ 2094 returning M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR", 2095 err); 2096 return M4VSS3GPP_ERR_TRANSITION_FILTER_ERROR; 2097 } 2098 break; 2099 2100 case M4VSS3GPP_kVideoTransitionType_None: 2101 /** 2102 * This is a stupid-non optimized version of the None transition... 
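 * No blending is performed: the first half of the transition keeps the frame of
 * clip 1 (yuv1) and the second half uses the frame of clip 2 (yuv2).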
2103 * We copy the YUV frame */ 2104 if( iProgress < 500 ) /**< first half of transition */ 2105 { 2106 pPlane = pC->yuv1; 2107 } 2108 else /**< second half of transition */ 2109 { 2110 pPlane = pC->yuv2; 2111 } 2112 /** 2113 * Copy the input YUV frames */ 2114 i = 3; 2115 2116 while( i-- > 0 ) 2117 { 2118 memcpy((void *)pPlaneOut[i].pac_data, 2119 (void *)pPlane[i].pac_data, 2120 pPlaneOut[i].u_stride * pPlaneOut[i].u_height); 2121 } 2122 break; 2123 2124 default: 2125 if( pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType 2126 >= M4VSS3GPP_kVideoTransitionType_External ) 2127 { 2128 /** 2129 * Set the progress info provided to the external function */ 2130 extProgress.uiProgress = (M4OSA_UInt32)iProgress; 2131 // Decorrelate input and output encoding timestamp to handle encoder prefetch 2132 extProgress.uiOutputTime = (M4OSA_UInt32)pC->ewc.dInputVidCts; 2133 extProgress.uiClipTime = extProgress.uiOutputTime - pC->pC1->iVoffset; 2134 2135 err = pC->pTransitionList[pC-> 2136 uiCurrentClip].ExtVideoTransitionFct( 2137 pC->pTransitionList[pC-> 2138 uiCurrentClip].pExtVideoTransitionFctCtxt, 2139 pC->yuv1, pC->yuv2, pPlaneOut, &extProgress, 2140 pC->pTransitionList[pC-> 2141 uiCurrentClip].VideoTransitionType 2142 - M4VSS3GPP_kVideoTransitionType_External); 2143 2144 if( M4NO_ERROR != err ) 2145 { 2146 M4OSA_TRACE1_1( 2147 "M4VSS3GPP_intVideoTransition:\ 2148 External video transition function returns 0x%x!", 2149 err); 2150 return err; 2151 } 2152 break; 2153 } 2154 else 2155 { 2156 M4OSA_TRACE1_1( 2157 "M4VSS3GPP_intVideoTransition: unknown transition type (0x%x),\ 2158 returning M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE", 2159 pC->pTransitionList[pC->uiCurrentClip].VideoTransitionType); 2160 return M4VSS3GPP_ERR_INVALID_VIDEO_TRANSITION_TYPE; 2161 } 2162 } 2163 2164 /** 2165 * Return */ 2166 M4OSA_TRACE3_0("M4VSS3GPP_intVideoTransition: returning M4NO_ERROR"); 2167 return M4NO_ERROR; 2168 } 2169 2170 /** 2171 ****************************************************************************** 2172 * M4OSA_Void M4VSS3GPP_intUpdateTimeInfo() 2173 * @brief Update bit stream time info by Counter Time System to be compliant with 2174 * players using bit stream time info 2175 * @note H263 uses an absolute time counter unlike MPEG4 which uses Group Of Vops 2176 * (GOV, see the standard) 2177 * @param pC (IN/OUT) returns time updated video AU, 2178 * the offset between system and video time (MPEG4 only) 2179 * and the state of the current clip (MPEG4 only) 2180 * @return nothing 2181 ****************************************************************************** 2182 */ 2183 static M4OSA_Void 2184 M4VSS3GPP_intUpdateTimeInfo( M4VSS3GPP_InternalEditContext *pC, 2185 M4SYS_AccessUnit *pAU ) 2186 { 2187 M4OSA_UInt8 uiTmp; 2188 M4OSA_UInt32 uiCts = 0; 2189 M4OSA_MemAddr8 pTmp; 2190 M4OSA_UInt32 uiAdd; 2191 M4OSA_UInt32 uiCurrGov; 2192 M4OSA_Int8 iDiff; 2193 2194 M4VSS3GPP_ClipContext *pClipCtxt = pC->pC1; 2195 M4OSA_Int32 *pOffset = &(pC->ewc.iMpeg4GovOffset); 2196 2197 /** 2198 * Set H263 time counter from system time */ 2199 if( M4SYS_kH263 == pAU->stream->streamType ) 2200 { 2201 uiTmp = (M4OSA_UInt8)((M4OSA_UInt32)( ( pAU->CTS * 30) / 1001 + 0.5) 2202 % M4VSS3GPP_EDIT_H263_MODULO_TIME); 2203 M4VSS3GPP_intSetH263TimeCounter((M4OSA_MemAddr8)(pAU->dataAddress), 2204 uiTmp); 2205 } 2206 /* 2207 * Set MPEG4 GOV time counter regarding video and system time */ 2208 else if( M4SYS_kMPEG_4 == pAU->stream->streamType ) 2209 { 2210 /* 2211 * If GOV. 2212 * beware of little/big endian! 
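 * The 32-bit GOV start code is therefore rebuilt byte by byte just below, so the
 * comparison with M4VSS3GPP_EDIT_GOV_HEADER gives the same result whatever the
 * host byte order.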
*/ 2213 /* correction: read 8 bits block instead of one 32 bits block */ 2214 M4OSA_UInt8 *temp8 = (M4OSA_UInt8 *)(pAU->dataAddress); 2215 M4OSA_UInt32 temp32 = 0; 2216 2217 temp32 = ( 0x000000ff & (M4OSA_UInt32)(*temp8)) 2218 + (0x0000ff00 & ((M4OSA_UInt32)(*(temp8 + 1))) << 8) 2219 + (0x00ff0000 & ((M4OSA_UInt32)(*(temp8 + 2))) << 16) 2220 + (0xff000000 & ((M4OSA_UInt32)(*(temp8 + 3))) << 24); 2221 2222 M4OSA_TRACE3_2("RC: Temp32: 0x%x, dataAddress: 0x%x\n", temp32, 2223 *(pAU->dataAddress)); 2224 2225 if( M4VSS3GPP_EDIT_GOV_HEADER == temp32 ) 2226 { 2227 pTmp = 2228 (M4OSA_MemAddr8)(pAU->dataAddress 2229 + 1); /**< Jump to the time code (just after the 32 bits header) */ 2230 uiAdd = (M4OSA_UInt32)(pAU->CTS)+( *pOffset); 2231 2232 switch( pClipCtxt->bMpeg4GovState ) 2233 { 2234 case M4OSA_FALSE: /*< INIT */ 2235 { 2236 /* video time = ceil (system time + offset) */ 2237 uiCts = ( uiAdd + 999) / 1000; 2238 2239 /* offset update */ 2240 ( *pOffset) += (( uiCts * 1000) - uiAdd); 2241 2242 /* Save values */ 2243 pClipCtxt->uiMpeg4PrevGovValueSet = uiCts; 2244 2245 /* State to 'first' */ 2246 pClipCtxt->bMpeg4GovState = M4OSA_TRUE; 2247 } 2248 break; 2249 2250 case M4OSA_TRUE: /*< UPDATE */ 2251 { 2252 /* Get current Gov value */ 2253 M4VSS3GPP_intGetMPEG4Gov(pTmp, &uiCurrGov); 2254 2255 /* video time = floor or ceil (system time + offset) */ 2256 uiCts = (uiAdd / 1000); 2257 iDiff = (M4OSA_Int8)(uiCurrGov 2258 - pClipCtxt->uiMpeg4PrevGovValueGet - uiCts 2259 + pClipCtxt->uiMpeg4PrevGovValueSet); 2260 2261 /* ceiling */ 2262 if( iDiff > 0 ) 2263 { 2264 uiCts += (M4OSA_UInt32)(iDiff); 2265 2266 /* offset update */ 2267 ( *pOffset) += (( uiCts * 1000) - uiAdd); 2268 } 2269 2270 /* Save values */ 2271 pClipCtxt->uiMpeg4PrevGovValueGet = uiCurrGov; 2272 pClipCtxt->uiMpeg4PrevGovValueSet = uiCts; 2273 } 2274 break; 2275 } 2276 2277 M4VSS3GPP_intSetMPEG4Gov(pTmp, uiCts); 2278 } 2279 } 2280 return; 2281 } 2282 2283 /** 2284 ****************************************************************************** 2285 * M4OSA_Void M4VSS3GPP_intCheckVideoEffects() 2286 * @brief Check which video effect must be applied at the current time 2287 ****************************************************************************** 2288 */ 2289 static M4OSA_Void 2290 M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC, 2291 M4OSA_UInt8 uiClipNumber ) 2292 { 2293 M4OSA_UInt8 uiClipIndex; 2294 M4OSA_UInt8 uiFxIndex, i; 2295 M4VSS3GPP_ClipContext *pClip; 2296 M4VSS3GPP_EffectSettings *pFx; 2297 M4OSA_Int32 Off, BC, EC; 2298 // Decorrelate input and output encoding timestamp to handle encoder prefetch 2299 M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts; 2300 2301 uiClipIndex = pC->uiCurrentClip; 2302 if (uiClipNumber == 1) { 2303 pClip = pC->pC1; 2304 pC->bClip1ActiveFramingEffect = M4OSA_FALSE; 2305 } else { 2306 pClip = pC->pC2; 2307 pC->bClip2ActiveFramingEffect = M4OSA_FALSE; 2308 } 2309 /** 2310 * Shortcuts for code readability */ 2311 Off = pClip->iVoffset; 2312 BC = pClip->iActualVideoBeginCut; 2313 EC = pClip->iEndTime; 2314 2315 i = 0; 2316 2317 for ( uiFxIndex = 0; uiFxIndex < pC->nbEffects; uiFxIndex++ ) 2318 { 2319 /** Shortcut, reverse order because of priority between effects(EndEffect always clean )*/ 2320 pFx = &(pC->pEffectsList[pC->nbEffects - 1 - uiFxIndex]); 2321 2322 if( M4VSS3GPP_kVideoEffectType_None != pFx->VideoEffectType ) 2323 { 2324 /** 2325 * Check if there is actually a video effect */ 2326 2327 if(uiClipNumber ==1) 2328 { 2329 /**< Are we after the start time of the effect? 
2330 * or Are we into the effect duration? 2331 */ 2332 if ( (t >= (M4OSA_Int32)(pFx->uiStartTime)) && 2333 (t <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) { 2334 /** 2335 * Set the active effect(s) */ 2336 pC->pActiveEffectsList[i] = pC->nbEffects-1-uiFxIndex; 2337 2338 /** 2339 * Update counter of active effects */ 2340 i++; 2341 if (pFx->xVSS.pFramingBuffer != M4OSA_NULL) { 2342 pC->bClip1ActiveFramingEffect = M4OSA_TRUE; 2343 } 2344 2345 /** 2346 * For all external effects set this flag to true. */ 2347 if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External) 2348 { 2349 pC->m_bClipExternalHasStarted = M4OSA_TRUE; 2350 } 2351 } 2352 2353 } 2354 else 2355 { 2356 /**< Are we into the effect duration? */ 2357 if ( ((M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration) 2358 >= (M4OSA_Int32)(pFx->uiStartTime)) 2359 && ( (M4OSA_Int32)(t + pC->pTransitionList[uiClipIndex].uiTransitionDuration) 2360 <= (M4OSA_Int32)(pFx->uiStartTime + pFx->uiDuration)) ) { 2361 /** 2362 * Set the active effect(s) */ 2363 pC->pActiveEffectsList1[i] = pC->nbEffects-1-uiFxIndex; 2364 2365 /** 2366 * Update counter of active effects */ 2367 i++; 2368 if (pFx->xVSS.pFramingBuffer != M4OSA_NULL) { 2369 pC->bClip2ActiveFramingEffect = M4OSA_TRUE; 2370 } 2371 /** 2372 * For all external effects set this flag to true. */ 2373 if(pFx->VideoEffectType > M4VSS3GPP_kVideoEffectType_External) 2374 { 2375 pC->m_bClipExternalHasStarted = M4OSA_TRUE; 2376 } 2377 2378 /** 2379 * The third effect has the highest priority, then the second one, then the first one. 2380 * Hence, as soon as we found an active effect, we can get out of this loop */ 2381 } 2382 } 2383 if (M4VIDEOEDITING_kH264 != 2384 pC->pC1->pSettings->ClipProperties.VideoStreamType) { 2385 2386 // For Mpeg4 and H263 clips, full decode encode not required 2387 pC->m_bClipExternalHasStarted = M4OSA_FALSE; 2388 } 2389 } 2390 } 2391 if(1==uiClipNumber) 2392 { 2393 /** 2394 * Save number of active effects */ 2395 pC->nbActiveEffects = i; 2396 } 2397 else 2398 { 2399 pC->nbActiveEffects1 = i; 2400 } 2401 2402 /** 2403 * Change the absolut time to clip related time */ 2404 t -= Off; 2405 2406 /** 2407 * Check if we are on the begin cut (for clip1 only) */ 2408 if( ( 0 != BC) && (t == BC) && (1 == uiClipNumber) ) 2409 { 2410 pC->bClip1AtBeginCut = M4OSA_TRUE; 2411 } 2412 else 2413 { 2414 pC->bClip1AtBeginCut = M4OSA_FALSE; 2415 } 2416 2417 return; 2418 } 2419 2420 /** 2421 ****************************************************************************** 2422 * M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder() 2423 * @brief Creates the video encoder 2424 * @note 2425 ****************************************************************************** 2426 */ 2427 M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder( M4VSS3GPP_InternalEditContext *pC ) 2428 { 2429 M4OSA_ERR err; 2430 M4ENCODER_AdvancedParams EncParams; 2431 2432 /** 2433 * Simulate a writer interface with our specific function */ 2434 pC->ewc.OurWriterDataInterface.pProcessAU = 2435 M4VSS3GPP_intProcessAU; /**< This function is VSS 3GPP specific, 2436 but it follow the writer interface */ 2437 pC->ewc.OurWriterDataInterface.pStartAU = 2438 M4VSS3GPP_intStartAU; /**< This function is VSS 3GPP specific, 2439 but it follow the writer interface */ 2440 pC->ewc.OurWriterDataInterface.pWriterContext = 2441 (M4WRITER_Context) 2442 pC; /**< We give the internal context as writer context */ 2443 2444 /** 2445 * Get the encoder interface, if not already done */ 2446 if( M4OSA_NULL == 
pC->ShellAPI.pVideoEncoderGlobalFcts ) 2447 { 2448 err = M4VSS3GPP_setCurrentVideoEncoder(&pC->ShellAPI, 2449 pC->ewc.VideoStreamType); 2450 M4OSA_TRACE1_1( 2451 "M4VSS3GPP_intCreateVideoEncoder: setCurrentEncoder returns 0x%x", 2452 err); 2453 M4ERR_CHECK_RETURN(err); 2454 } 2455 2456 /** 2457 * Set encoder shell parameters according to VSS settings */ 2458 2459 /* Common parameters */ 2460 EncParams.InputFormat = M4ENCODER_kIYUV420; 2461 EncParams.FrameWidth = pC->ewc.uiVideoWidth; 2462 EncParams.FrameHeight = pC->ewc.uiVideoHeight; 2463 EncParams.uiTimeScale = pC->ewc.uiVideoTimeScale; 2464 2465 if( pC->bIsMMS == M4OSA_FALSE ) 2466 { 2467 /* No strict regulation in video editor */ 2468 /* Because of the effects and transitions we should allow more flexibility */ 2469 /* Also it prevents to drop important frames (with a bad result on sheduling and 2470 block effetcs) */ 2471 EncParams.bInternalRegulation = M4OSA_FALSE; 2472 // Variable framerate is not supported by StageFright encoders 2473 EncParams.FrameRate = M4ENCODER_k30_FPS; 2474 } 2475 else 2476 { 2477 /* In case of MMS mode, we need to enable bitrate regulation to be sure */ 2478 /* to reach the targeted output file size */ 2479 EncParams.bInternalRegulation = M4OSA_TRUE; 2480 EncParams.FrameRate = pC->MMSvideoFramerate; 2481 } 2482 2483 /** 2484 * Other encoder settings (defaults) */ 2485 EncParams.uiHorizontalSearchRange = 0; /* use default */ 2486 EncParams.uiVerticalSearchRange = 0; /* use default */ 2487 EncParams.bErrorResilience = M4OSA_FALSE; /* no error resilience */ 2488 EncParams.uiIVopPeriod = 0; /* use default */ 2489 EncParams.uiMotionEstimationTools = 0; /* M4V_MOTION_EST_TOOLS_ALL */ 2490 EncParams.bAcPrediction = M4OSA_TRUE; /* use AC prediction */ 2491 EncParams.uiStartingQuantizerValue = 10; /* initial QP = 10 */ 2492 EncParams.bDataPartitioning = M4OSA_FALSE; /* no data partitioning */ 2493 2494 /** 2495 * Set the video profile and level */ 2496 EncParams.videoProfile = pC->ewc.outputVideoProfile; 2497 EncParams.videoLevel= pC->ewc.outputVideoLevel; 2498 2499 switch ( pC->ewc.VideoStreamType ) 2500 { 2501 case M4SYS_kH263: 2502 2503 EncParams.Format = M4ENCODER_kH263; 2504 2505 EncParams.uiStartingQuantizerValue = 10; 2506 EncParams.uiRateFactor = 1; /* default */ 2507 2508 EncParams.bErrorResilience = M4OSA_FALSE; 2509 EncParams.bDataPartitioning = M4OSA_FALSE; 2510 break; 2511 2512 case M4SYS_kMPEG_4: 2513 2514 EncParams.Format = M4ENCODER_kMPEG4; 2515 2516 EncParams.uiStartingQuantizerValue = 8; 2517 EncParams.uiRateFactor = (M4OSA_UInt8)(( pC->dOutputFrameDuration 2518 * pC->ewc.uiVideoTimeScale) / 1000.0 + 0.5); 2519 2520 if( EncParams.uiRateFactor == 0 ) 2521 EncParams.uiRateFactor = 1; /* default */ 2522 2523 if( M4OSA_FALSE == pC->ewc.bVideoDataPartitioning ) 2524 { 2525 EncParams.bErrorResilience = M4OSA_FALSE; 2526 EncParams.bDataPartitioning = M4OSA_FALSE; 2527 } 2528 else 2529 { 2530 EncParams.bErrorResilience = M4OSA_TRUE; 2531 EncParams.bDataPartitioning = M4OSA_TRUE; 2532 } 2533 break; 2534 2535 case M4SYS_kH264: 2536 M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: M4SYS_H264"); 2537 2538 EncParams.Format = M4ENCODER_kH264; 2539 2540 EncParams.uiStartingQuantizerValue = 10; 2541 EncParams.uiRateFactor = 1; /* default */ 2542 2543 EncParams.bErrorResilience = M4OSA_FALSE; 2544 EncParams.bDataPartitioning = M4OSA_FALSE; 2545 //EncParams.FrameRate = M4VIDEOEDITING_k5_FPS; 2546 break; 2547 2548 default: 2549 M4OSA_TRACE1_1( 2550 "M4VSS3GPP_intCreateVideoEncoder: Unknown videoStreamType 0x%x", 2551 
pC->ewc.VideoStreamType); 2552 return M4VSS3GPP_ERR_EDITING_UNSUPPORTED_VIDEO_FORMAT; 2553 } 2554 2555 if( pC->bIsMMS == M4OSA_FALSE ) 2556 { 2557 EncParams.Bitrate = pC->xVSS.outputVideoBitrate; 2558 2559 } 2560 else 2561 { 2562 EncParams.Bitrate = pC->uiMMSVideoBitrate; /* RC */ 2563 EncParams.uiTimeScale = 0; /* We let the encoder choose the timescale */ 2564 } 2565 2566 M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctInit"); 2567 /** 2568 * Init the video encoder (advanced settings version of the encoder Open function) */ 2569 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctInit(&pC->ewc.pEncContext, 2570 &pC->ewc.OurWriterDataInterface, M4VSS3GPP_intVPP, pC, 2571 pC->ShellAPI.pCurrentVideoEncoderExternalAPI, 2572 pC->ShellAPI.pCurrentVideoEncoderUserData); 2573 2574 if( M4NO_ERROR != err ) 2575 { 2576 M4OSA_TRACE1_1( 2577 "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctInit returns 0x%x", 2578 err); 2579 return err; 2580 } 2581 2582 pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed; 2583 M4OSA_TRACE1_0("M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctOpen"); 2584 2585 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctOpen(pC->ewc.pEncContext, 2586 &pC->ewc.WriterVideoAU, &EncParams); 2587 2588 if( M4NO_ERROR != err ) 2589 { 2590 M4OSA_TRACE1_1( 2591 "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctOpen returns 0x%x", 2592 err); 2593 return err; 2594 } 2595 2596 pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped; 2597 M4OSA_TRACE1_0( 2598 "M4VSS3GPP_intCreateVideoEncoder: calling encoder pFctStart"); 2599 2600 if( M4OSA_NULL != pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart ) 2601 { 2602 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStart( 2603 pC->ewc.pEncContext); 2604 2605 if( M4NO_ERROR != err ) 2606 { 2607 M4OSA_TRACE1_1( 2608 "M4VSS3GPP_intCreateVideoEncoder: pVideoEncoderGlobalFcts->pFctStart returns 0x%x", 2609 err); 2610 return err; 2611 } 2612 } 2613 2614 pC->ewc.encoderState = M4VSS3GPP_kEncoderRunning; 2615 2616 /** 2617 * Return */ 2618 M4OSA_TRACE3_0("M4VSS3GPP_intCreateVideoEncoder: returning M4NO_ERROR"); 2619 return M4NO_ERROR; 2620 } 2621 2622 /** 2623 ****************************************************************************** 2624 * M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder() 2625 * @brief Destroy the video encoder 2626 * @note 2627 ****************************************************************************** 2628 */ 2629 M4OSA_ERR M4VSS3GPP_intDestroyVideoEncoder( M4VSS3GPP_InternalEditContext *pC ) 2630 { 2631 M4OSA_ERR err = M4NO_ERROR; 2632 2633 if( M4OSA_NULL != pC->ewc.pEncContext ) 2634 { 2635 if( M4VSS3GPP_kEncoderRunning == pC->ewc.encoderState ) 2636 { 2637 if( pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop != M4OSA_NULL ) 2638 { 2639 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctStop( 2640 pC->ewc.pEncContext); 2641 2642 if( M4NO_ERROR != err ) 2643 { 2644 M4OSA_TRACE1_1( 2645 "M4VSS3GPP_intDestroyVideoEncoder:\ 2646 pVideoEncoderGlobalFcts->pFctStop returns 0x%x", 2647 err); 2648 /* Well... how the heck do you handle a failed cleanup? */ 2649 } 2650 } 2651 2652 pC->ewc.encoderState = M4VSS3GPP_kEncoderStopped; 2653 } 2654 2655 /* Has the encoder actually been opened? Don't close it if that's not the case. 
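The teardown mirrors the creation sequence: pFctStop takes the encoder from the
Running to the Stopped state, pFctClose from Stopped to Closed, and pFctCleanup
below finally resets the context to kNoEncoder.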
*/ 2656 if( M4VSS3GPP_kEncoderStopped == pC->ewc.encoderState ) 2657 { 2658 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctClose( 2659 pC->ewc.pEncContext); 2660 2661 if( M4NO_ERROR != err ) 2662 { 2663 M4OSA_TRACE1_1( 2664 "M4VSS3GPP_intDestroyVideoEncoder:\ 2665 pVideoEncoderGlobalFcts->pFctClose returns 0x%x", 2666 err); 2667 /* Well... how the heck do you handle a failed cleanup? */ 2668 } 2669 2670 pC->ewc.encoderState = M4VSS3GPP_kEncoderClosed; 2671 } 2672 2673 err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctCleanup( 2674 pC->ewc.pEncContext); 2675 2676 if( M4NO_ERROR != err ) 2677 { 2678 M4OSA_TRACE1_1( 2679 "M4VSS3GPP_intDestroyVideoEncoder:\ 2680 pVideoEncoderGlobalFcts->pFctCleanup returns 0x%x!", 2681 err); 2682 /**< We do not return the error here because we still have stuff to free */ 2683 } 2684 2685 pC->ewc.encoderState = M4VSS3GPP_kNoEncoder; 2686 /** 2687 * Reset variable */ 2688 pC->ewc.pEncContext = M4OSA_NULL; 2689 } 2690 2691 M4OSA_TRACE3_1("M4VSS3GPP_intDestroyVideoEncoder: returning 0x%x", err); 2692 return err; 2693 } 2694 2695 /** 2696 ****************************************************************************** 2697 * M4OSA_Void M4VSS3GPP_intSetH263TimeCounter() 2698 * @brief Modify the time counter of the given H263 video AU 2699 * @note 2700 * @param pAuDataBuffer (IN/OUT) H263 Video AU to modify 2701 * @param uiCts (IN) New time counter value 2702 * @return nothing 2703 ****************************************************************************** 2704 */ 2705 static M4OSA_Void M4VSS3GPP_intSetH263TimeCounter( M4OSA_MemAddr8 pAuDataBuffer, 2706 M4OSA_UInt8 uiCts ) 2707 { 2708 /* 2709 * The H263 time counter is 8 bits located on the "x" below: 2710 * 2711 * |--------|--------|--------|--------| 2712 * ???????? ???????? ??????xx xxxxxx?? 2713 */ 2714 2715 /** 2716 * Write the 2 bits on the third byte */ 2717 pAuDataBuffer[2] = ( pAuDataBuffer[2] & 0xFC) | (( uiCts >> 6) & 0x3); 2718 2719 /** 2720 * Write the 6 bits on the fourth byte */ 2721 pAuDataBuffer[3] = ( ( uiCts << 2) & 0xFC) | (pAuDataBuffer[3] & 0x3); 2722 2723 return; 2724 } 2725 2726 /** 2727 ****************************************************************************** 2728 * M4OSA_Void M4VSS3GPP_intSetMPEG4Gov() 2729 * @brief Modify the time info from Group Of VOP video AU 2730 * @note 2731 * @param pAuDataBuffer (IN) MPEG4 Video AU to modify 2732 * @param uiCtsSec (IN) New GOV time info in second unit 2733 * @return nothing 2734 ****************************************************************************** 2735 */ 2736 static M4OSA_Void M4VSS3GPP_intSetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, 2737 M4OSA_UInt32 uiCtsSec ) 2738 { 2739 /* 2740 * The MPEG-4 time code length is 18 bits: 2741 * 2742 * hh mm marker ss 2743 * xxxxx|xxx xxx 1 xxxx xx ?????? 
2744 * |----- ---|--- - ----|-- ------| 2745 */ 2746 M4OSA_UInt8 uiHh; 2747 M4OSA_UInt8 uiMm; 2748 M4OSA_UInt8 uiSs; 2749 M4OSA_UInt8 uiTmp; 2750 2751 /** 2752 * Write the 2 last bits of ss */ 2753 uiSs = (M4OSA_UInt8)(uiCtsSec % 60); /**< modulo part */ 2754 pAuDataBuffer[2] = (( ( uiSs & 0x03) << 6) | (pAuDataBuffer[2] & 0x3F)); 2755 2756 if( uiCtsSec < 60 ) 2757 { 2758 /** 2759 * Write the marker bit (0x10) and the 4 upper bits of ss (mm is zero here) */ 2760 pAuDataBuffer[1] = (( 0x10) | (uiSs >> 2)); 2761 2762 /** 2763 * Write the 5 bits of hh and 3 of mm (out of 6), all zero here */ 2764 pAuDataBuffer[0] = 0; 2765 } 2766 else 2767 { 2768 /** 2769 * Write the 3 last bits of mm, the marker bit (0x10) and the 4 upper bits of ss */ 2770 uiTmp = (M4OSA_UInt8)(uiCtsSec / 60); /**< integer part */ 2771 uiMm = (M4OSA_UInt8)(uiTmp % 60); 2772 pAuDataBuffer[1] = (( uiMm << 5) | (0x10) | (uiSs >> 2)); 2773 2774 if( uiTmp < 60 ) 2775 { 2776 /** 2777 * Write the 5 bits of hh and 3 of mm (out of 6) */ 2778 pAuDataBuffer[0] = ((uiMm >> 3)); 2779 } 2780 else 2781 { 2782 /** 2783 * Write the 5 bits of hh and 3 of mm (out of 6) */ 2784 uiHh = (M4OSA_UInt8)(uiTmp / 60); 2785 pAuDataBuffer[0] = (( uiHh << 3) | (uiMm >> 3)); 2786 } 2787 } 2788 return; 2789 } 2790 2791 /** 2792 ****************************************************************************** 2793 * M4OSA_Void M4VSS3GPP_intGetMPEG4Gov() 2794 * @brief Get the time info from Group Of VOP video AU 2795 * @note 2796 * @param pAuDataBuffer (IN) MPEG4 Video AU to read 2797 * @param pCtsSec (OUT) Current GOV time info in second unit 2798 * @return nothing 2799 ****************************************************************************** 2800 */ 2801 static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, 2802 M4OSA_UInt32 *pCtsSec ) 2803 { 2804 /* 2805 * The MPEG-4 time code length is 18 bits: 2806 * 2807 * hh mm marker ss 2808 * xxxxx|xxx xxx 1 xxxx xx ??????
2809 * |----- ---|--- - ----|-- ------| 2810 */ 2811 M4OSA_UInt8 uiHh; 2812 M4OSA_UInt8 uiMm; 2813 M4OSA_UInt8 uiSs; 2814 M4OSA_UInt8 uiTmp; 2815 M4OSA_UInt32 uiCtsSec; 2816 2817 /** 2818 * Read ss */ 2819 uiSs = (( pAuDataBuffer[2] & 0xC0) >> 6); 2820 uiTmp = (( pAuDataBuffer[1] & 0x0F) << 2); 2821 uiCtsSec = uiSs + uiTmp; 2822 2823 /** 2824 * Read mm */ 2825 uiMm = (( pAuDataBuffer[1] & 0xE0) >> 5); 2826 uiTmp = (( pAuDataBuffer[0] & 0x07) << 3); 2827 uiMm = uiMm + uiTmp; 2828 uiCtsSec = ( uiMm * 60) + uiCtsSec; 2829 2830 /** 2831 * Read hh */ 2832 uiHh = (( pAuDataBuffer[0] & 0xF8) >> 3); 2833 2834 if( uiHh ) 2835 { 2836 uiCtsSec = ( uiHh * 3600) + uiCtsSec; 2837 } 2838 2839 /* 2840 * in sec */ 2841 *pCtsSec = uiCtsSec; 2842 2843 return; 2844 } 2845 2846 /** 2847 ****************************************************************************** 2848 * M4OSA_ERR M4VSS3GPP_intAllocateYUV420() 2849 * @brief Allocate the three YUV 4:2:0 planes 2850 * @note 2851 * @param pPlanes (IN/OUT) valid pointer to 3 M4VIFI_ImagePlane structures 2852 * @param uiWidth (IN) Image width 2853 * @param uiHeight(IN) Image height 2854 ****************************************************************************** 2855 */ 2856 static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes, 2857 M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight ) 2858 { 2859 if (pPlanes == M4OSA_NULL) { 2860 M4OSA_TRACE1_0("M4VSS3GPP_intAllocateYUV420: Invalid pPlanes pointer"); 2861 return M4ERR_PARAMETER; 2862 } 2863 /* if the buffer is not NULL and same size with target size, 2864 * do not malloc again*/ 2865 if (pPlanes[0].pac_data != M4OSA_NULL && 2866 pPlanes[0].u_width == uiWidth && 2867 pPlanes[0].u_height == uiHeight) { 2868 return M4NO_ERROR; 2869 } 2870 2871 pPlanes[0].u_width = uiWidth; 2872 pPlanes[0].u_height = uiHeight; 2873 pPlanes[0].u_stride = uiWidth; 2874 pPlanes[0].u_topleft = 0; 2875 2876 if (pPlanes[0].pac_data != M4OSA_NULL) { 2877 free(pPlanes[0].pac_data); 2878 pPlanes[0].pac_data = M4OSA_NULL; 2879 } 2880 pPlanes[0].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[0].u_stride 2881 * pPlanes[0].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[0].pac_data"); 2882 2883 if( M4OSA_NULL == pPlanes[0].pac_data ) 2884 { 2885 M4OSA_TRACE1_0( 2886 "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[0].pac_data,\ 2887 returning M4ERR_ALLOC"); 2888 return M4ERR_ALLOC; 2889 } 2890 2891 pPlanes[1].u_width = pPlanes[0].u_width >> 1; 2892 pPlanes[1].u_height = pPlanes[0].u_height >> 1; 2893 pPlanes[1].u_stride = pPlanes[1].u_width; 2894 pPlanes[1].u_topleft = 0; 2895 if (pPlanes[1].pac_data != M4OSA_NULL) { 2896 free(pPlanes[1].pac_data); 2897 pPlanes[1].pac_data = M4OSA_NULL; 2898 } 2899 pPlanes[1].pac_data = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(pPlanes[1].u_stride 2900 * pPlanes[1].u_height, M4VSS3GPP,(M4OSA_Char *) "pPlanes[1].pac_data"); 2901 2902 if( M4OSA_NULL == pPlanes[1].pac_data ) 2903 { 2904 M4OSA_TRACE1_0( 2905 "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[1].pac_data,\ 2906 returning M4ERR_ALLOC"); 2907 free((void *)pPlanes[0].pac_data); 2908 pPlanes[0].pac_data = M4OSA_NULL; 2909 return M4ERR_ALLOC; 2910 } 2911 2912 pPlanes[2].u_width = pPlanes[1].u_width; 2913 pPlanes[2].u_height = pPlanes[1].u_height; 2914 pPlanes[2].u_stride = pPlanes[2].u_width; 2915 pPlanes[2].u_topleft = 0; 2916 if (pPlanes[2].pac_data != M4OSA_NULL) { 2917 free(pPlanes[2].pac_data); 2918 pPlanes[2].pac_data = M4OSA_NULL; 2919 } 2920 pPlanes[2].pac_data = (M4VIFI_UInt8 
*)M4OSA_32bitAlignedMalloc(pPlanes[2].u_stride 2921 * pPlanes[2].u_height, M4VSS3GPP, (M4OSA_Char *)"pPlanes[2].pac_data"); 2922 2923 if( M4OSA_NULL == pPlanes[2].pac_data ) 2924 { 2925 M4OSA_TRACE1_0( 2926 "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[2].pac_data,\ 2927 returning M4ERR_ALLOC"); 2928 free((void *)pPlanes[0].pac_data); 2929 free((void *)pPlanes[1].pac_data); 2930 pPlanes[0].pac_data = M4OSA_NULL; 2931 pPlanes[1].pac_data = M4OSA_NULL; 2932 return M4ERR_ALLOC; 2933 } 2934 2935 memset((void *)pPlanes[0].pac_data, 0, pPlanes[0].u_stride*pPlanes[0].u_height); 2936 memset((void *)pPlanes[1].pac_data, 0, pPlanes[1].u_stride*pPlanes[1].u_height); 2937 memset((void *)pPlanes[2].pac_data, 0, pPlanes[2].u_stride*pPlanes[2].u_height); 2938 /** 2939 * Return */ 2940 M4OSA_TRACE3_0("M4VSS3GPP_intAllocateYUV420: returning M4NO_ERROR"); 2941 return M4NO_ERROR; 2942 } 2943 2944 /** 2945 ****************************************************************************** 2946 * M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn, 2947 * M4OSA_FileReadPointer* pFileReadPtr, 2948 * M4VIFI_ImagePlane* pImagePlanes, 2949 * M4OSA_UInt32 width, 2950 * M4OSA_UInt32 height); 2951 * @brief It Coverts and resizes a ARGB8888 image to YUV420 2952 * @note 2953 * @param pFileIn (IN) The ARGB888 input file 2954 * @param pFileReadPtr (IN) Pointer on filesystem functions 2955 * @param pImagePlanes (IN/OUT) Pointer on YUV420 output planes allocated by the user. 2956 * ARGB8888 image will be converted and resized to output 2957 * YUV420 plane size 2958 * @param width (IN) width of the ARGB8888 2959 * @param height (IN) height of the ARGB8888 2960 * @return M4NO_ERROR: No error 2961 * @return M4ERR_ALLOC: memory error 2962 * @return M4ERR_PARAMETER: At least one of the function parameters is null 2963 ****************************************************************************** 2964 */ 2965 2966 M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn, 2967 M4OSA_FileReadPointer* pFileReadPtr, 2968 M4VIFI_ImagePlane* pImagePlanes, 2969 M4OSA_UInt32 width,M4OSA_UInt32 height) { 2970 M4OSA_Context pARGBIn; 2971 M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2; 2972 M4OSA_UInt32 frameSize_argb = width * height * 4; 2973 M4OSA_UInt32 frameSize_rgb888 = width * height * 3; 2974 M4OSA_UInt32 i = 0,j= 0; 2975 M4OSA_ERR err = M4NO_ERROR; 2976 2977 M4OSA_UInt8 *pArgbPlane = 2978 (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, 2979 M4VS, (M4OSA_Char*)"argb data"); 2980 if (pArgbPlane == M4OSA_NULL) { 2981 M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420: \ 2982 Failed to allocate memory for ARGB plane"); 2983 return M4ERR_ALLOC; 2984 } 2985 2986 /* Get file size */ 2987 err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead); 2988 if (err != M4NO_ERROR) { 2989 M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 : \ 2990 Can not open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err); 2991 free(pArgbPlane); 2992 pArgbPlane = M4OSA_NULL; 2993 goto cleanup; 2994 } 2995 2996 err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pArgbPlane, 2997 &frameSize_argb); 2998 if (err != M4NO_ERROR) { 2999 M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \ 3000 Can not read ARGB8888 file %s, error: 0x%x\n",pFileIn, err); 3001 pFileReadPtr->closeRead(pARGBIn); 3002 free(pArgbPlane); 3003 pArgbPlane = M4OSA_NULL; 3004 goto cleanup; 3005 } 3006 3007 err = pFileReadPtr->closeRead(pARGBIn); 3008 if(err != M4NO_ERROR) { 
3009 M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \ 3010 Can not close ARGB8888 file %s, error: 0x%x\n",pFileIn, err); 3011 free(pArgbPlane); 3012 pArgbPlane = M4OSA_NULL; 3013 goto cleanup; 3014 } 3015 3016 rgbPlane1.pac_data = 3017 (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888, 3018 M4VS, (M4OSA_Char*)"RGB888 plane1"); 3019 if(rgbPlane1.pac_data == M4OSA_NULL) { 3020 M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \ 3021 Failed to allocate memory for rgb plane1"); 3022 free(pArgbPlane); 3023 return M4ERR_ALLOC; 3024 } 3025 3026 rgbPlane1.u_height = height; 3027 rgbPlane1.u_width = width; 3028 rgbPlane1.u_stride = width*3; 3029 rgbPlane1.u_topleft = 0; 3030 3031 3032 /** Remove the alpha channel */ 3033 for (i=0, j = 0; i < frameSize_argb; i++) { 3034 if ((i % 4) == 0) continue; 3035 rgbPlane1.pac_data[j] = pArgbPlane[i]; 3036 j++; 3037 } 3038 free(pArgbPlane); 3039 3040 /** 3041 * Check if resizing is required with color conversion */ 3042 if(width != pImagePlanes->u_width || height != pImagePlanes->u_height) { 3043 3044 frameSize_rgb888 = pImagePlanes->u_width * pImagePlanes->u_height * 3; 3045 rgbPlane2.pac_data = 3046 (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888, M4VS, 3047 (M4OSA_Char*)"rgb Plane2"); 3048 if(rgbPlane2.pac_data == M4OSA_NULL) { 3049 M4OSA_TRACE1_0("Failed to allocate memory for rgb plane2"); 3050 free(rgbPlane1.pac_data); 3051 return M4ERR_ALLOC; 3052 } 3053 rgbPlane2.u_height = pImagePlanes->u_height; 3054 rgbPlane2.u_width = pImagePlanes->u_width; 3055 rgbPlane2.u_stride = pImagePlanes->u_width*3; 3056 rgbPlane2.u_topleft = 0; 3057 3058 /* Resizing */ 3059 err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, 3060 &rgbPlane1, &rgbPlane2); 3061 free(rgbPlane1.pac_data); 3062 if(err != M4NO_ERROR) { 3063 M4OSA_TRACE1_1("error resizing RGB888 to RGB888: 0x%x\n", err); 3064 free(rgbPlane2.pac_data); 3065 return err; 3066 } 3067 3068 /*Converting Resized RGB888 to YUV420 */ 3069 err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes); 3070 free(rgbPlane2.pac_data); 3071 if(err != M4NO_ERROR) { 3072 M4OSA_TRACE1_1("error converting from RGB888 to YUV: 0x%x\n", err); 3073 return err; 3074 } 3075 } else { 3076 err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes); 3077 if(err != M4NO_ERROR) { 3078 M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err); 3079 } 3080 free(rgbPlane1.pac_data); 3081 } 3082 cleanup: 3083 M4OSA_TRACE3_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 exit"); 3084 return err; 3085 } 3086 3087 M4OSA_ERR M4VSS3GPP_intApplyRenderingMode(M4VSS3GPP_InternalEditContext *pC, 3088 M4xVSS_MediaRendering renderingMode, 3089 M4VIFI_ImagePlane* pInplane, 3090 M4VIFI_ImagePlane* pOutplane) { 3091 3092 M4OSA_ERR err = M4NO_ERROR; 3093 M4AIR_Params airParams; 3094 M4VIFI_ImagePlane pImagePlanesTemp[3]; 3095 M4OSA_UInt32 i = 0; 3096 3097 if (renderingMode == M4xVSS_kBlackBorders) { 3098 memset((void *)pOutplane[0].pac_data, Y_PLANE_BORDER_VALUE, 3099 (pOutplane[0].u_height*pOutplane[0].u_stride)); 3100 memset((void *)pOutplane[1].pac_data, U_PLANE_BORDER_VALUE, 3101 (pOutplane[1].u_height*pOutplane[1].u_stride)); 3102 memset((void *)pOutplane[2].pac_data, V_PLANE_BORDER_VALUE, 3103 (pOutplane[2].u_height*pOutplane[2].u_stride)); 3104 } 3105 3106 if (renderingMode == M4xVSS_kResizing) { 3107 /** 3108 * Call the resize filter. 
3109 * From the intermediate frame to the encoder image plane */ 3110 err = M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, 3111 pInplane, pOutplane); 3112 if (M4NO_ERROR != err) { 3113 M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ 3114 M4ViFilResizeBilinearYUV420toYUV420 returns 0x%x!", err); 3115 return err; 3116 } 3117 } else { 3118 M4VIFI_ImagePlane* pPlaneTemp = M4OSA_NULL; 3119 M4OSA_UInt8* pOutPlaneY = 3120 pOutplane[0].pac_data + pOutplane[0].u_topleft; 3121 M4OSA_UInt8* pOutPlaneU = 3122 pOutplane[1].pac_data + pOutplane[1].u_topleft; 3123 M4OSA_UInt8* pOutPlaneV = 3124 pOutplane[2].pac_data + pOutplane[2].u_topleft; 3125 M4OSA_UInt8* pInPlaneY = M4OSA_NULL; 3126 M4OSA_UInt8* pInPlaneU = M4OSA_NULL; 3127 M4OSA_UInt8* pInPlaneV = M4OSA_NULL; 3128 3129 /* To keep media aspect ratio*/ 3130 /* Initialize AIR Params*/ 3131 airParams.m_inputCoord.m_x = 0; 3132 airParams.m_inputCoord.m_y = 0; 3133 airParams.m_inputSize.m_height = pInplane->u_height; 3134 airParams.m_inputSize.m_width = pInplane->u_width; 3135 airParams.m_outputSize.m_width = pOutplane->u_width; 3136 airParams.m_outputSize.m_height = pOutplane->u_height; 3137 airParams.m_bOutputStripe = M4OSA_FALSE; 3138 airParams.m_outputOrientation = M4COMMON_kOrientationTopLeft; 3139 3140 /** 3141 Media rendering: Black borders*/ 3142 if (renderingMode == M4xVSS_kBlackBorders) { 3143 pImagePlanesTemp[0].u_width = pOutplane[0].u_width; 3144 pImagePlanesTemp[0].u_height = pOutplane[0].u_height; 3145 pImagePlanesTemp[0].u_stride = pOutplane[0].u_width; 3146 pImagePlanesTemp[0].u_topleft = 0; 3147 3148 pImagePlanesTemp[1].u_width = pOutplane[1].u_width; 3149 pImagePlanesTemp[1].u_height = pOutplane[1].u_height; 3150 pImagePlanesTemp[1].u_stride = pOutplane[1].u_width; 3151 pImagePlanesTemp[1].u_topleft = 0; 3152 3153 pImagePlanesTemp[2].u_width = pOutplane[2].u_width; 3154 pImagePlanesTemp[2].u_height = pOutplane[2].u_height; 3155 pImagePlanesTemp[2].u_stride = pOutplane[2].u_width; 3156 pImagePlanesTemp[2].u_topleft = 0; 3157 3158 /** 3159 * Allocates plan in local image plane structure */ 3160 pImagePlanesTemp[0].pac_data = 3161 (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( 3162 pImagePlanesTemp[0].u_width * pImagePlanesTemp[0].u_height, 3163 M4VS, (M4OSA_Char *)"pImagePlaneTemp Y") ; 3164 if (pImagePlanesTemp[0].pac_data == M4OSA_NULL) { 3165 M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error"); 3166 return M4ERR_ALLOC; 3167 } 3168 pImagePlanesTemp[1].pac_data = 3169 (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( 3170 pImagePlanesTemp[1].u_width * pImagePlanesTemp[1].u_height, 3171 M4VS, (M4OSA_Char *)"pImagePlaneTemp U") ; 3172 if (pImagePlanesTemp[1].pac_data == M4OSA_NULL) { 3173 M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error"); 3174 free(pImagePlanesTemp[0].pac_data); 3175 return M4ERR_ALLOC; 3176 } 3177 pImagePlanesTemp[2].pac_data = 3178 (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( 3179 pImagePlanesTemp[2].u_width * pImagePlanesTemp[2].u_height, 3180 M4VS, (M4OSA_Char *)"pImagePlaneTemp V") ; 3181 if (pImagePlanesTemp[2].pac_data == M4OSA_NULL) { 3182 M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error"); 3183 free(pImagePlanesTemp[0].pac_data); 3184 free(pImagePlanesTemp[1].pac_data); 3185 return M4ERR_ALLOC; 3186 } 3187 3188 pInPlaneY = pImagePlanesTemp[0].pac_data ; 3189 pInPlaneU = pImagePlanesTemp[1].pac_data ; 3190 pInPlaneV = pImagePlanesTemp[2].pac_data ; 3191 3192 memset((void *)pImagePlanesTemp[0].pac_data, Y_PLANE_BORDER_VALUE, 3193 (pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride)); 3194 
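/* U and V are filled with the neutral chroma border value; together with the Y
   border fill above this paints the whole temporary plane black, so any area the
   AIR copy below does not overwrite is left as a black border. */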
memset((void *)pImagePlanesTemp[1].pac_data, U_PLANE_BORDER_VALUE, 3195 (pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride)); 3196 memset((void *)pImagePlanesTemp[2].pac_data, V_PLANE_BORDER_VALUE, 3197 (pImagePlanesTemp[2].u_height*pImagePlanesTemp[2].u_stride)); 3198 3199 M4OSA_UInt32 height = 3200 (pInplane->u_height * pOutplane->u_width) /pInplane->u_width; 3201 3202 if (height <= pOutplane->u_height) { 3203 /** 3204 * Black borders will be on the top and the bottom side */ 3205 airParams.m_outputSize.m_width = pOutplane->u_width; 3206 airParams.m_outputSize.m_height = height; 3207 /** 3208 * Number of lines at the top */ 3209 pImagePlanesTemp[0].u_topleft = 3210 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_height - 3211 airParams.m_outputSize.m_height)>>1)) * 3212 pImagePlanesTemp[0].u_stride; 3213 pImagePlanesTemp[0].u_height = airParams.m_outputSize.m_height; 3214 pImagePlanesTemp[1].u_topleft = 3215 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height - 3216 (airParams.m_outputSize.m_height>>1)))>>1) * 3217 pImagePlanesTemp[1].u_stride; 3218 pImagePlanesTemp[1].u_height = 3219 airParams.m_outputSize.m_height>>1; 3220 pImagePlanesTemp[2].u_topleft = 3221 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_height - 3222 (airParams.m_outputSize.m_height>>1)))>>1) * 3223 pImagePlanesTemp[2].u_stride; 3224 pImagePlanesTemp[2].u_height = 3225 airParams.m_outputSize.m_height>>1; 3226 } else { 3227 /** 3228 * Black borders will be on the left and right side */ 3229 airParams.m_outputSize.m_height = pOutplane->u_height; 3230 airParams.m_outputSize.m_width = 3231 (M4OSA_UInt32)((pInplane->u_width * pOutplane->u_height)/pInplane->u_height); 3232 3233 pImagePlanesTemp[0].u_topleft = 3234 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width - 3235 airParams.m_outputSize.m_width)>>1)); 3236 pImagePlanesTemp[0].u_width = airParams.m_outputSize.m_width; 3237 pImagePlanesTemp[1].u_topleft = 3238 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width - 3239 (airParams.m_outputSize.m_width>>1)))>>1); 3240 pImagePlanesTemp[1].u_width = airParams.m_outputSize.m_width>>1; 3241 pImagePlanesTemp[2].u_topleft = 3242 (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_width - 3243 (airParams.m_outputSize.m_width>>1)))>>1); 3244 pImagePlanesTemp[2].u_width = airParams.m_outputSize.m_width>>1; 3245 } 3246 3247 /** 3248 * Width and height have to be even */ 3249 airParams.m_outputSize.m_width = 3250 (airParams.m_outputSize.m_width>>1)<<1; 3251 airParams.m_outputSize.m_height = 3252 (airParams.m_outputSize.m_height>>1)<<1; 3253 airParams.m_inputSize.m_width = 3254 (airParams.m_inputSize.m_width>>1)<<1; 3255 airParams.m_inputSize.m_height = 3256 (airParams.m_inputSize.m_height>>1)<<1; 3257 pImagePlanesTemp[0].u_width = 3258 (pImagePlanesTemp[0].u_width>>1)<<1; 3259 pImagePlanesTemp[1].u_width = 3260 (pImagePlanesTemp[1].u_width>>1)<<1; 3261 pImagePlanesTemp[2].u_width = 3262 (pImagePlanesTemp[2].u_width>>1)<<1; 3263 pImagePlanesTemp[0].u_height = 3264 (pImagePlanesTemp[0].u_height>>1)<<1; 3265 pImagePlanesTemp[1].u_height = 3266 (pImagePlanesTemp[1].u_height>>1)<<1; 3267 pImagePlanesTemp[2].u_height = 3268 (pImagePlanesTemp[2].u_height>>1)<<1; 3269 3270 /** 3271 * Check that values are coherent */ 3272 if (airParams.m_inputSize.m_height == 3273 airParams.m_outputSize.m_height) { 3274 airParams.m_inputSize.m_width = 3275 airParams.m_outputSize.m_width; 3276 } else if (airParams.m_inputSize.m_width == 3277 airParams.m_outputSize.m_width) { 3278 airParams.m_inputSize.m_height = 3279 
airParams.m_outputSize.m_height; 3280 } 3281 pPlaneTemp = pImagePlanesTemp; 3282 } 3283 3284 /** 3285 * Media rendering: Cropping*/ 3286 if (renderingMode == M4xVSS_kCropping) { 3287 airParams.m_outputSize.m_height = pOutplane->u_height; 3288 airParams.m_outputSize.m_width = pOutplane->u_width; 3289 if ((airParams.m_outputSize.m_height * 3290 airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width < 3291 airParams.m_inputSize.m_height) { 3292 /* Height will be cropped */ 3293 airParams.m_inputSize.m_height = 3294 (M4OSA_UInt32)((airParams.m_outputSize.m_height * 3295 airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width); 3296 airParams.m_inputSize.m_height = 3297 (airParams.m_inputSize.m_height>>1)<<1; 3298 airParams.m_inputCoord.m_y = 3299 (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_height - 3300 airParams.m_inputSize.m_height))>>1); 3301 } else { 3302 /* Width will be cropped */ 3303 airParams.m_inputSize.m_width = 3304 (M4OSA_UInt32)((airParams.m_outputSize.m_width * 3305 airParams.m_inputSize.m_height)/airParams.m_outputSize.m_height); 3306 airParams.m_inputSize.m_width = 3307 (airParams.m_inputSize.m_width>>1)<<1; 3308 airParams.m_inputCoord.m_x = 3309 (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_width - 3310 airParams.m_inputSize.m_width))>>1); 3311 } 3312 pPlaneTemp = pOutplane; 3313 } 3314 /** 3315 * Call AIR functions */ 3316 if (M4OSA_NULL == pC->m_air_context) { 3317 err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P); 3318 if(err != M4NO_ERROR) { 3319 M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ 3320 M4AIR_create returned error 0x%x", err); 3321 goto cleanUp; 3322 } 3323 } 3324 3325 err = M4AIR_configure(pC->m_air_context, &airParams); 3326 if (err != M4NO_ERROR) { 3327 M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ 3328 Error when configuring AIR: 0x%x", err); 3329 M4AIR_cleanUp(pC->m_air_context); 3330 goto cleanUp; 3331 } 3332 3333 err = M4AIR_get(pC->m_air_context, pInplane, pPlaneTemp); 3334 if (err != M4NO_ERROR) { 3335 M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ 3336 Error when getting AIR plane: 0x%x", err); 3337 M4AIR_cleanUp(pC->m_air_context); 3338 goto cleanUp; 3339 } 3340 3341 if (renderingMode == M4xVSS_kBlackBorders) { 3342 for (i=0; i<pOutplane[0].u_height; i++) { 3343 memcpy((void *)pOutPlaneY, (void *)pInPlaneY, 3344 pOutplane[0].u_width); 3345 pInPlaneY += pOutplane[0].u_width; 3346 pOutPlaneY += pOutplane[0].u_stride; 3347 } 3348 for (i=0; i<pOutplane[1].u_height; i++) { 3349 memcpy((void *)pOutPlaneU, (void *)pInPlaneU, 3350 pOutplane[1].u_width); 3351 pInPlaneU += pOutplane[1].u_width; 3352 pOutPlaneU += pOutplane[1].u_stride; 3353 } 3354 for (i=0; i<pOutplane[2].u_height; i++) { 3355 memcpy((void *)pOutPlaneV, (void *)pInPlaneV, 3356 pOutplane[2].u_width); 3357 pInPlaneV += pOutplane[2].u_width; 3358 pOutPlaneV += pOutplane[2].u_stride; 3359 } 3360 } 3361 } 3362 cleanUp: 3363 if (renderingMode == M4xVSS_kBlackBorders) { 3364 for (i=0; i<3; i++) { 3365 if (pImagePlanesTemp[i].pac_data != M4OSA_NULL) { 3366 free(pImagePlanesTemp[i].pac_data); 3367 pImagePlanesTemp[i].pac_data = M4OSA_NULL; 3368 } 3369 } 3370 } 3371 return err; 3372 } 3373 3374 M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888 ( 3375 M4VSS3GPP_InternalEditContext *pC, 3376 M4VSS3GPP_ClipContext* pClipCtxt) { 3377 3378 M4OSA_ERR err= M4NO_ERROR; 3379 3380 // Allocate memory for YUV plane 3381 pClipCtxt->pPlaneYuv = 3382 (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc( 3383 3*sizeof(M4VIFI_ImagePlane), M4VS, 3384 (M4OSA_Char*)"pPlaneYuv"); 3385 3386 if 
(pClipCtxt->pPlaneYuv == M4OSA_NULL) { 3387 return M4ERR_ALLOC; 3388 } 3389 3390 pClipCtxt->pPlaneYuv[0].u_height = 3391 pClipCtxt->pSettings->ClipProperties.uiStillPicHeight; 3392 pClipCtxt->pPlaneYuv[0].u_width = 3393 pClipCtxt->pSettings->ClipProperties.uiStillPicWidth; 3394 pClipCtxt->pPlaneYuv[0].u_stride = pClipCtxt->pPlaneYuv[0].u_width; 3395 pClipCtxt->pPlaneYuv[0].u_topleft = 0; 3396 3397 pClipCtxt->pPlaneYuv[0].pac_data = 3398 (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc( 3399 pClipCtxt->pPlaneYuv[0].u_height * pClipCtxt->pPlaneYuv[0].u_width * 1.5, 3400 M4VS, (M4OSA_Char*)"imageClip YUV data"); 3401 if (pClipCtxt->pPlaneYuv[0].pac_data == M4OSA_NULL) { 3402 free(pClipCtxt->pPlaneYuv); 3403 return M4ERR_ALLOC; 3404 } 3405 3406 pClipCtxt->pPlaneYuv[1].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1; 3407 pClipCtxt->pPlaneYuv[1].u_width = pClipCtxt->pPlaneYuv[0].u_width >> 1; 3408 pClipCtxt->pPlaneYuv[1].u_stride = pClipCtxt->pPlaneYuv[1].u_width; 3409 pClipCtxt->pPlaneYuv[1].u_topleft = 0; 3410 pClipCtxt->pPlaneYuv[1].pac_data = (M4VIFI_UInt8*)( 3411 pClipCtxt->pPlaneYuv[0].pac_data + 3412 pClipCtxt->pPlaneYuv[0].u_height * pClipCtxt->pPlaneYuv[0].u_width); 3413 3414 pClipCtxt->pPlaneYuv[2].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1; 3415 pClipCtxt->pPlaneYuv[2].u_width = pClipCtxt->pPlaneYuv[0].u_width >> 1; 3416 pClipCtxt->pPlaneYuv[2].u_stride = pClipCtxt->pPlaneYuv[2].u_width; 3417 pClipCtxt->pPlaneYuv[2].u_topleft = 0; 3418 pClipCtxt->pPlaneYuv[2].pac_data = (M4VIFI_UInt8*)( 3419 pClipCtxt->pPlaneYuv[1].pac_data + 3420 pClipCtxt->pPlaneYuv[1].u_height * pClipCtxt->pPlaneYuv[1].u_width); 3421 3422 err = M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 ( 3423 pClipCtxt->pSettings->pFile, 3424 pC->pOsaFileReadPtr, 3425 pClipCtxt->pPlaneYuv, 3426 pClipCtxt->pSettings->ClipProperties.uiStillPicWidth, 3427 pClipCtxt->pSettings->ClipProperties.uiStillPicHeight); 3428 if (M4NO_ERROR != err) { 3429 free(pClipCtxt->pPlaneYuv[0].pac_data); 3430 free(pClipCtxt->pPlaneYuv); 3431 return err; 3432 } 3433 3434 // Set the YUV data to the decoder using setoption 3435 err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption ( 3436 pClipCtxt->pViDecCtxt, 3437 M4DECODER_kOptionID_DecYuvData, 3438 (M4OSA_DataOption)pClipCtxt->pPlaneYuv); 3439 if (M4NO_ERROR != err) { 3440 free(pClipCtxt->pPlaneYuv[0].pac_data); 3441 free(pClipCtxt->pPlaneYuv); 3442 return err; 3443 } 3444 3445 pClipCtxt->pSettings->ClipProperties.bSetImageData = M4OSA_TRUE; 3446 3447 // Allocate Yuv plane with effect 3448 pClipCtxt->pPlaneYuvWithEffect = 3449 (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc( 3450 3*sizeof(M4VIFI_ImagePlane), M4VS, 3451 (M4OSA_Char*)"pPlaneYuvWithEffect"); 3452 if (pClipCtxt->pPlaneYuvWithEffect == M4OSA_NULL) { 3453 free(pClipCtxt->pPlaneYuv[0].pac_data); 3454 free(pClipCtxt->pPlaneYuv); 3455 return M4ERR_ALLOC; 3456 } 3457 3458 pClipCtxt->pPlaneYuvWithEffect[0].u_height = pC->ewc.uiVideoHeight; 3459 pClipCtxt->pPlaneYuvWithEffect[0].u_width = pC->ewc.uiVideoWidth; 3460 pClipCtxt->pPlaneYuvWithEffect[0].u_stride = pC->ewc.uiVideoWidth; 3461 pClipCtxt->pPlaneYuvWithEffect[0].u_topleft = 0; 3462 3463 pClipCtxt->pPlaneYuvWithEffect[0].pac_data = 3464 (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc( 3465 pC->ewc.uiVideoHeight * pC->ewc.uiVideoWidth * 1.5, 3466 M4VS, (M4OSA_Char*)"imageClip YUV data"); 3467 if (pClipCtxt->pPlaneYuvWithEffect[0].pac_data == M4OSA_NULL) { 3468 free(pClipCtxt->pPlaneYuv[0].pac_data); 3469 free(pClipCtxt->pPlaneYuv); 3470 free(pClipCtxt->pPlaneYuvWithEffect); 3471 return 
    // Allocate Yuv plane with effect
    pClipCtxt->pPlaneYuvWithEffect =
        (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(
            3*sizeof(M4VIFI_ImagePlane), M4VS,
            (M4OSA_Char*)"pPlaneYuvWithEffect");
    if (pClipCtxt->pPlaneYuvWithEffect == M4OSA_NULL) {
        free(pClipCtxt->pPlaneYuv[0].pac_data);
        free(pClipCtxt->pPlaneYuv);
        return M4ERR_ALLOC;
    }

    pClipCtxt->pPlaneYuvWithEffect[0].u_height = pC->ewc.uiVideoHeight;
    pClipCtxt->pPlaneYuvWithEffect[0].u_width = pC->ewc.uiVideoWidth;
    pClipCtxt->pPlaneYuvWithEffect[0].u_stride = pC->ewc.uiVideoWidth;
    pClipCtxt->pPlaneYuvWithEffect[0].u_topleft = 0;

    pClipCtxt->pPlaneYuvWithEffect[0].pac_data =
        (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
            pC->ewc.uiVideoHeight * pC->ewc.uiVideoWidth * 1.5,
            M4VS, (M4OSA_Char*)"imageClip YUV data");
    if (pClipCtxt->pPlaneYuvWithEffect[0].pac_data == M4OSA_NULL) {
        free(pClipCtxt->pPlaneYuv[0].pac_data);
        free(pClipCtxt->pPlaneYuv);
        free(pClipCtxt->pPlaneYuvWithEffect);
        return M4ERR_ALLOC;
    }

    pClipCtxt->pPlaneYuvWithEffect[1].u_height =
        pClipCtxt->pPlaneYuvWithEffect[0].u_height >> 1;
    pClipCtxt->pPlaneYuvWithEffect[1].u_width =
        pClipCtxt->pPlaneYuvWithEffect[0].u_width >> 1;
    pClipCtxt->pPlaneYuvWithEffect[1].u_stride =
        pClipCtxt->pPlaneYuvWithEffect[1].u_width;
    pClipCtxt->pPlaneYuvWithEffect[1].u_topleft = 0;
    pClipCtxt->pPlaneYuvWithEffect[1].pac_data = (M4VIFI_UInt8*)(
        pClipCtxt->pPlaneYuvWithEffect[0].pac_data +
        pClipCtxt->pPlaneYuvWithEffect[0].u_height *
        pClipCtxt->pPlaneYuvWithEffect[0].u_width);

    pClipCtxt->pPlaneYuvWithEffect[2].u_height =
        pClipCtxt->pPlaneYuvWithEffect[0].u_height >> 1;
    pClipCtxt->pPlaneYuvWithEffect[2].u_width =
        pClipCtxt->pPlaneYuvWithEffect[0].u_width >> 1;
    pClipCtxt->pPlaneYuvWithEffect[2].u_stride =
        pClipCtxt->pPlaneYuvWithEffect[2].u_width;
    pClipCtxt->pPlaneYuvWithEffect[2].u_topleft = 0;
    pClipCtxt->pPlaneYuvWithEffect[2].pac_data = (M4VIFI_UInt8*)(
        pClipCtxt->pPlaneYuvWithEffect[1].pac_data +
        pClipCtxt->pPlaneYuvWithEffect[1].u_height *
        pClipCtxt->pPlaneYuvWithEffect[1].u_width);

    err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
        pClipCtxt->pViDecCtxt, M4DECODER_kOptionID_YuvWithEffectContiguous,
        (M4OSA_DataOption)pClipCtxt->pPlaneYuvWithEffect);
    if (M4NO_ERROR != err) {
        free(pClipCtxt->pPlaneYuv[0].pac_data);
        free(pClipCtxt->pPlaneYuv);
        free(pClipCtxt->pPlaneYuvWithEffect);
        return err;
    }

    return M4NO_ERROR;
}

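/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect()
 * @brief    Render one decoded frame of a clip into the yuv1/yuv2 pipeline
 *           planes, applying the active video effects, the rendering mode
 *           (resize/crop/black borders) and the framing (overlay) effect.
 * @param    pC              (IN/OUT) Internal edit context
 * @param    pClipCtxt       (IN/OUT) Clip context of the clip being rendered
 * @param    ts              (IN) Target time of the frame to render
 * @param    bIsClip1        (IN) M4OSA_TRUE for clip 1, M4OSA_FALSE for clip 2
 * @param    pResizePlane    (IN) Intermediate plane used when resizing/rotating
 * @param    pPlaneNoResize  (IN) Intermediate plane used when no resize is needed
 * @param    pPlaneOut       (IN) Output plane (its size is used for temporary
 *                           allocations)
 ******************************************************************************
 */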
M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect(M4VSS3GPP_InternalEditContext *pC,
                                             M4VSS3GPP_ClipContext* pClipCtxt,
                                             M4_MediaTime ts,
                                             M4OSA_Bool bIsClip1,
                                             M4VIFI_ImagePlane *pResizePlane,
                                             M4VIFI_ImagePlane *pPlaneNoResize,
                                             M4VIFI_ImagePlane *pPlaneOut) {

    M4OSA_ERR err = M4NO_ERROR;
    M4OSA_UInt8 numEffects = 0;
    M4VIFI_ImagePlane *pDecoderRenderFrame = M4OSA_NULL;
    M4OSA_UInt32 yuvFrameWidth = 0, yuvFrameHeight = 0;
    M4VIFI_ImagePlane* pTmp = M4OSA_NULL;
    M4VIFI_ImagePlane pTemp[3];
    M4OSA_UInt8 i = 0;
    M4OSA_Bool bSkipFramingEffect = M4OSA_FALSE;

    memset((void *)pTemp, 0, 3*sizeof(M4VIFI_ImagePlane));

    /* Resize or rotate case */
    if (M4OSA_NULL != pClipCtxt->m_pPreResizeFrame) {
        /**
         * Since we modify the image, we need an intermediate image plane */
        err = M4VSS3GPP_intAllocateYUV420(pResizePlane,
            pClipCtxt->m_pPreResizeFrame[0].u_width,
            pClipCtxt->m_pPreResizeFrame[0].u_height);
        if (M4NO_ERROR != err) {
            M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                M4VSS3GPP_intAllocateYUV420 returns 0x%x", err);
            return err;
        }

        if ((pClipCtxt->pSettings->FileType ==
                M4VIDEOEDITING_kFileType_ARGB8888) &&
                (pC->nbActiveEffects == 0) &&
                (pClipCtxt->bGetYuvDataFromDecoder == M4OSA_FALSE)) {

            err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
                pClipCtxt->pViDecCtxt,
                M4DECODER_kOptionID_EnableYuvWithEffect,
                (M4OSA_DataOption)M4OSA_TRUE);
            if (M4NO_ERROR == err) {
                pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
                    pClipCtxt->pViDecCtxt, &ts,
                    pClipCtxt->pPlaneYuvWithEffect, M4OSA_TRUE);
            }
        } else {
            if (pClipCtxt->pSettings->FileType ==
                    M4VIDEOEDITING_kFileType_ARGB8888) {
                err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
                    pClipCtxt->pViDecCtxt,
                    M4DECODER_kOptionID_EnableYuvWithEffect,
                    (M4OSA_DataOption)M4OSA_FALSE);
            }
            if (M4NO_ERROR == err) {
                err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
                    pClipCtxt->pViDecCtxt, &ts,
                    pClipCtxt->m_pPreResizeFrame, M4OSA_TRUE);
            }
        }
        if (M4NO_ERROR != err) {
            M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                render returns error 0x%x", err);
            return err;
        }

        if (pClipCtxt->pSettings->FileType !=
                M4VIDEOEDITING_kFileType_ARGB8888) {
            if (0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) {
                // Save width and height of the un-rotated frame
                yuvFrameWidth = pClipCtxt->m_pPreResizeFrame[0].u_width;
                yuvFrameHeight = pClipCtxt->m_pPreResizeFrame[0].u_height;
                err = M4VSS3GPP_intRotateVideo(pClipCtxt->m_pPreResizeFrame,
                    pClipCtxt->pSettings->ClipProperties.videoRotationDegrees);
                if (M4NO_ERROR != err) {
                    M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                        rotateVideo() returns error 0x%x", err);
                    return err;
                }
                /* Set the new video size for the temporary buffer */
                M4VSS3GPP_intSetYUV420Plane(pResizePlane,
                    pClipCtxt->m_pPreResizeFrame[0].u_width,
                    pClipCtxt->m_pPreResizeFrame[0].u_height);
            }
        }

        if (bIsClip1 == M4OSA_TRUE) {
            numEffects = pC->nbActiveEffects;
        } else {
            numEffects = pC->nbActiveEffects1;
        }

        if (numEffects > 0) {
            pClipCtxt->bGetYuvDataFromDecoder = M4OSA_TRUE;
            /* If the video frame needs to be resized or rotated, the overlay is
             * applied after the frame has been rendered with the rendering
             * mode. So skip the framing (overlay) effect when applying the
             * video effects here. */
            bSkipFramingEffect = M4OSA_TRUE;
            err = M4VSS3GPP_intApplyVideoEffect(pC,
                pClipCtxt->m_pPreResizeFrame, pResizePlane, bSkipFramingEffect);
            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    M4VSS3GPP_intApplyVideoEffect() err 0x%x", err);
                return err;
            }
            pDecoderRenderFrame = pResizePlane;
        } else {
            pDecoderRenderFrame = pClipCtxt->m_pPreResizeFrame;
        }

        /* Do rendering mode */
        if ((pClipCtxt->bGetYuvDataFromDecoder == M4OSA_TRUE) ||
                (pClipCtxt->pSettings->FileType !=
                M4VIDEOEDITING_kFileType_ARGB8888)) {
            if (bIsClip1 == M4OSA_TRUE) {
                if (pC->bClip1ActiveFramingEffect == M4OSA_TRUE) {
                    err = M4VSS3GPP_intAllocateYUV420(pTemp,
                        pPlaneOut[0].u_width, pPlaneOut[0].u_height);
                    if (M4NO_ERROR != err) {
                        M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                            M4VSS3GPP_intAllocateYUV420 error 0x%x", err);
                        pC->ewc.VppError = err;
                        return M4NO_ERROR;
                    }
                    pTmp = pTemp;
                } else {
                    pTmp = pC->yuv1;
                }
                err = M4VSS3GPP_intApplyRenderingMode(pC,
                    pClipCtxt->pSettings->xVSS.MediaRendering,
                    pDecoderRenderFrame, pTmp);
            } else {
                if (pC->bClip2ActiveFramingEffect == M4OSA_TRUE) {
                    err = M4VSS3GPP_intAllocateYUV420(pTemp,
                        pPlaneOut[0].u_width, pPlaneOut[0].u_height);
                    if (M4NO_ERROR != err) {
                        M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                            M4VSS3GPP_intAllocateYUV420 error 0x%x", err);
                        pC->ewc.VppError = err;
                        return M4NO_ERROR;
                    }
                    pTmp = pTemp;
                } else {
                    pTmp = pC->yuv2;
                }
                err = M4VSS3GPP_intApplyRenderingMode(pC,
                    pClipCtxt->pSettings->xVSS.MediaRendering,
                    pDecoderRenderFrame, pTmp);
            }
            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    M4VSS3GPP_intApplyRenderingMode error 0x%x", err);
                for (i=0; i<3; i++) {
                    if (pTemp[i].pac_data != M4OSA_NULL) {
                        free(pTemp[i].pac_data);
                        pTemp[i].pac_data = M4OSA_NULL;
                    }
                }
                return err;
            }
            /* Apply the framing overlay if one is active */
            if (bIsClip1 == M4OSA_TRUE) {
                if (pC->bClip1ActiveFramingEffect == M4OSA_TRUE) {
                    err = M4VSS3GPP_intApplyVideoOverlay(pC,
                        pTemp, pC->yuv1);
                }
                pClipCtxt->lastDecodedPlane = pC->yuv1;
            } else {
                if (pC->bClip2ActiveFramingEffect == M4OSA_TRUE) {
                    err = M4VSS3GPP_intApplyVideoOverlay(pC,
                        pTemp, pC->yuv2);
                }
                pClipCtxt->lastDecodedPlane = pC->yuv2;
            }
            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    M4VSS3GPP_intApplyVideoOverlay() error 0x%x", err);
                pC->ewc.VppError = err;
                for (i=0; i<3; i++) {
                    if (pTemp[i].pac_data != M4OSA_NULL) {
                        free(pTemp[i].pac_data);
                        pTemp[i].pac_data = M4OSA_NULL;
                    }
                }
                return M4NO_ERROR;
            }
        } else {
            pClipCtxt->lastDecodedPlane = pClipCtxt->pPlaneYuvWithEffect;
        }

        // Free the temporary buffer
        for (i=0; i<3; i++) {
            if (pTemp[i].pac_data != M4OSA_NULL) {
                free(pTemp[i].pac_data);
                pTemp[i].pac_data = M4OSA_NULL;
            }
        }

        if ((pClipCtxt->pSettings->FileType ==
                M4VIDEOEDITING_kFileType_ARGB8888) &&
                (pC->nbActiveEffects == 0) &&
                (pClipCtxt->bGetYuvDataFromDecoder == M4OSA_TRUE)) {
            if (bIsClip1 == M4OSA_TRUE) {
                err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
                    pClipCtxt->pViDecCtxt,
                    M4DECODER_kOptionID_YuvWithEffectNonContiguous,
                    (M4OSA_DataOption)pC->yuv1);
            } else {
                err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption(
                    pClipCtxt->pViDecCtxt,
                    M4DECODER_kOptionID_YuvWithEffectNonContiguous,
                    (M4OSA_DataOption)pC->yuv2);
            }
            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    null decoder setOption error 0x%x", err);
                return err;
            }
            pClipCtxt->bGetYuvDataFromDecoder = M4OSA_FALSE;
        }

        // Reset the original width and height of the resize frame plane
        if (0 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees &&
                180 != pClipCtxt->pSettings->ClipProperties.videoRotationDegrees) {

            M4VSS3GPP_intSetYUV420Plane(pClipCtxt->m_pPreResizeFrame,
                yuvFrameWidth, yuvFrameHeight);
        }

    } else {
        /* No resize and no rotation case */
        if (bIsClip1 == M4OSA_TRUE) {
            numEffects = pC->nbActiveEffects;
        } else {
            numEffects = pC->nbActiveEffects1;
        }

        if (numEffects > 0) {
            err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
                pClipCtxt->pViDecCtxt, &ts, pPlaneNoResize, M4OSA_TRUE);
            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    Render returns error 0x%x", err);
                return err;
            }

            bSkipFramingEffect = M4OSA_FALSE;
            if (bIsClip1 == M4OSA_TRUE) {
                pC->bIssecondClip = M4OSA_FALSE;
                err = M4VSS3GPP_intApplyVideoEffect(pC, pPlaneNoResize,
                    pC->yuv1, bSkipFramingEffect);
                pClipCtxt->lastDecodedPlane = pC->yuv1;
            } else {
                pC->bIssecondClip = M4OSA_TRUE;
                err = M4VSS3GPP_intApplyVideoEffect(pC, pPlaneNoResize,
                    pC->yuv2, bSkipFramingEffect);
                pClipCtxt->lastDecodedPlane = pC->yuv2;
            }
            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    M4VSS3GPP_intApplyVideoEffect error 0x%x", err);
                return err;
            }
        } else {
            if (bIsClip1 == M4OSA_TRUE) {
                pTmp = pC->yuv1;
            } else {
                pTmp = pC->yuv2;
            }
            err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender(
                pClipCtxt->pViDecCtxt, &ts, pTmp, M4OSA_TRUE);
            if (M4NO_ERROR != err) {
                M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \
                    Render returns error 0x%x", err);
                return err;
            }
            pClipCtxt->lastDecodedPlane = pTmp;
        }
        pClipCtxt->iVideoRenderCts = (M4OSA_Int32)ts;
    }

    return err;
}

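/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intRotateVideo()
 * @brief    Rotate a YUV420 planar frame in place by 90, 180 or 270 degrees.
 * @note     For 90/270 degrees the rotation goes through temporary planes and
 *           the width/height of the input planes are swapped afterwards.
 * @param    pPlaneIn        (IN/OUT) Planes of the frame to rotate
 * @param    rotationDegree  (IN) Rotation angle in degrees (90, 180 or 270)
 ******************************************************************************
 */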
M4OSA_ERR M4VSS3GPP_intRotateVideo(M4VIFI_ImagePlane* pPlaneIn,
                                   M4OSA_UInt32 rotationDegree) {

    M4OSA_ERR err = M4NO_ERROR;
    M4VIFI_ImagePlane outPlane[3];

    if (rotationDegree != 180) {
        // Swap width and height of the input plane for the output planes
        outPlane[0].u_width = pPlaneIn[0].u_height;
        outPlane[0].u_height = pPlaneIn[0].u_width;
        outPlane[0].u_stride = outPlane[0].u_width;
        outPlane[0].u_topleft = 0;
        outPlane[0].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
            (outPlane[0].u_stride*outPlane[0].u_height), M4VS,
            (M4OSA_Char*)("out Y plane for rotation"));
        if (outPlane[0].pac_data == M4OSA_NULL) {
            return M4ERR_ALLOC;
        }

        outPlane[1].u_width = pPlaneIn[0].u_height/2;
        outPlane[1].u_height = pPlaneIn[0].u_width/2;
        outPlane[1].u_stride = outPlane[1].u_width;
        outPlane[1].u_topleft = 0;
        outPlane[1].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
            (outPlane[1].u_stride*outPlane[1].u_height), M4VS,
            (M4OSA_Char*)("out U plane for rotation"));
        if (outPlane[1].pac_data == M4OSA_NULL) {
            free((void *)outPlane[0].pac_data);
            return M4ERR_ALLOC;
        }

        outPlane[2].u_width = pPlaneIn[0].u_height/2;
        outPlane[2].u_height = pPlaneIn[0].u_width/2;
        outPlane[2].u_stride = outPlane[2].u_width;
        outPlane[2].u_topleft = 0;
        outPlane[2].pac_data = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
            (outPlane[2].u_stride*outPlane[2].u_height), M4VS,
            (M4OSA_Char*)("out V plane for rotation"));
        if (outPlane[2].pac_data == M4OSA_NULL) {
            free((void *)outPlane[0].pac_data);
            free((void *)outPlane[1].pac_data);
            return M4ERR_ALLOC;
        }
    }

    switch (rotationDegree) {
        case 90:
            M4VIFI_Rotate90RightYUV420toYUV420(M4OSA_NULL, pPlaneIn, outPlane);
            break;

        case 180:
            // In-place rotation, so planeOut = planeIn
            M4VIFI_Rotate180YUV420toYUV420(M4OSA_NULL, pPlaneIn, pPlaneIn);
            break;

        case 270:
            M4VIFI_Rotate90LeftYUV420toYUV420(M4OSA_NULL, pPlaneIn, outPlane);
            break;

        default:
            M4OSA_TRACE1_1("invalid rotation param %d", (int)rotationDegree);
            err = M4ERR_PARAMETER;
            break;
    }

    if (rotationDegree != 180) {
        memset((void *)pPlaneIn[0].pac_data, 0,
            (pPlaneIn[0].u_width*pPlaneIn[0].u_height));
        memset((void *)pPlaneIn[1].pac_data, 0,
            (pPlaneIn[1].u_width*pPlaneIn[1].u_height));
        memset((void *)pPlaneIn[2].pac_data, 0,
            (pPlaneIn[2].u_width*pPlaneIn[2].u_height));

        // Copy the rotated Y, U and V planes back into the input planes
        memcpy((void *)pPlaneIn[0].pac_data, (void *)outPlane[0].pac_data,
            (pPlaneIn[0].u_width*pPlaneIn[0].u_height));
        memcpy((void *)pPlaneIn[1].pac_data, (void *)outPlane[1].pac_data,
            (pPlaneIn[1].u_width*pPlaneIn[1].u_height));
        memcpy((void *)pPlaneIn[2].pac_data, (void *)outPlane[2].pac_data,
            (pPlaneIn[2].u_width*pPlaneIn[2].u_height));

        free((void *)outPlane[0].pac_data);
        free((void *)outPlane[1].pac_data);
        free((void *)outPlane[2].pac_data);

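        /* After a 90 or 270 degree rotation the frame geometry is transposed.
         * Illustrative example: a 640x480 luma plane becomes 480x640, and the
         * corresponding 320x240 U and V planes become 240x320, which is what
         * the width/height/stride swap below records. */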
        // Swap the width and height of the input planes
        M4OSA_UInt32 temp = 0;
        temp = pPlaneIn[0].u_width;
        pPlaneIn[0].u_width = pPlaneIn[0].u_height;
        pPlaneIn[0].u_height = temp;
        pPlaneIn[0].u_stride = pPlaneIn[0].u_width;

        temp = pPlaneIn[1].u_width;
        pPlaneIn[1].u_width = pPlaneIn[1].u_height;
        pPlaneIn[1].u_height = temp;
        pPlaneIn[1].u_stride = pPlaneIn[1].u_width;

        temp = pPlaneIn[2].u_width;
        pPlaneIn[2].u_width = pPlaneIn[2].u_height;
        pPlaneIn[2].u_height = temp;
        pPlaneIn[2].u_stride = pPlaneIn[2].u_width;
    }

    return err;
}

/**
 ******************************************************************************
 * M4OSA_ERR M4VSS3GPP_intSetYUV420Plane()
 * @brief    Update the width, height and stride fields of a YUV420 plane set
 *           (Y at full resolution, U and V at half resolution).
 * @param    planeIn   (IN/OUT) Planes to update
 * @param    width     (IN) New luma width in pixels
 * @param    height    (IN) New luma height in pixels
 ******************************************************************************
 */
M4OSA_ERR M4VSS3GPP_intSetYUV420Plane(M4VIFI_ImagePlane* planeIn,
                                      M4OSA_UInt32 width, M4OSA_UInt32 height) {

    M4OSA_ERR err = M4NO_ERROR;

    if (planeIn == M4OSA_NULL) {
        M4OSA_TRACE1_0("M4VSS3GPP_intSetYUV420Plane: NULL input plane, error");
        return M4ERR_PARAMETER;
    }

    planeIn[0].u_width = width;
    planeIn[0].u_height = height;
    planeIn[0].u_stride = planeIn[0].u_width;

    planeIn[1].u_width = width/2;
    planeIn[1].u_height = height/2;
    planeIn[1].u_stride = planeIn[1].u_width;

    planeIn[2].u_width = width/2;
    planeIn[2].u_height = height/2;
    planeIn[2].u_stride = planeIn[2].u_width;

    return err;
}