/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


package android.filterpacks.videosrc;

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.filterfw.core.Filter;
import android.filterfw.core.FilterContext;
import android.filterfw.core.Frame;
import android.filterfw.core.FrameFormat;
import android.filterfw.core.GenerateFieldPort;
import android.filterfw.core.GenerateFinalPort;
import android.filterfw.core.GLFrame;
import android.filterfw.core.MutableFrameFormat;
import android.filterfw.core.ShaderProgram;
import android.filterfw.format.ImageFormat;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.net.Uri;
import android.opengl.Matrix;
import android.view.Surface;

import java.io.IOException;
import java.lang.IllegalArgumentException;

import android.util.Log;

/**
 * A filter source that plays back a video with {@link MediaPlayer} and pushes
 * the decoded frames out of its "video" output port as GPU-backed RGBA frames.
 *
 * <p>The player renders into a {@link SurfaceTexture} bound to an external
 * GL texture; each processed frame is copied out of that texture with a small
 * identity shader (required because sampling a SurfaceTexture needs the
 * GL_OES_EGL_image_external extension) and optionally rotated to match the
 * requested output orientation.
 *
 * <p>Threading: MediaPlayer and SurfaceTexture callbacks run on other threads
 * and communicate with {@link #process} via the boolean state flags below,
 * using this object's monitor with wait/notify (process() is synchronized by
 * its caller in the Filter base class).
 *
 * @hide
 */
public class MediaSource extends Filter {

    /** User-visible parameters */

    /** The source URL for the media source. Can be an http: link to a remote
     * resource, or a file: link to a local media file
     */
    @GenerateFieldPort(name = "sourceUrl", hasDefault = true)
    private String mSourceUrl = "";

    /** An open asset file descriptor to a local media source. Default is null */
    @GenerateFieldPort(name = "sourceAsset", hasDefault = true)
    private AssetFileDescriptor mSourceAsset = null;

    /** The context for the MediaPlayer to resolve the sourceUrl.
     * Make sure this is set before the sourceUrl to avoid unexpected result.
     * If the sourceUrl is not a content URI, it is OK to keep this as null. */
    @GenerateFieldPort(name = "context", hasDefault = true)
    private Context mContext = null;

    /** Whether the media source is a URL or an asset file descriptor. Defaults
     * to false (i.e. asset file descriptor).
     */
    @GenerateFieldPort(name = "sourceIsUrl", hasDefault = true)
    private boolean mSelectedIsUrl = false;

    /** Whether the filter will always wait for a new video frame, or whether it
     * will output an old frame again if a new frame isn't available. Defaults
     * to true.
     */
    @GenerateFinalPort(name = "waitForNewFrame", hasDefault = true)
    private boolean mWaitForNewFrame = true;

    /** Whether the media source should loop automatically or not. Defaults to
     * true.
     */
    @GenerateFieldPort(name = "loop", hasDefault = true)
    private boolean mLooping = true;

    /** Volume control. Currently sound is piped directly to the speakers, so
     * this defaults to mute.
     */
    @GenerateFieldPort(name = "volume", hasDefault = true)
    private float mVolume = 0.f;

    /** Orientation. This controls the output orientation of the video. Valid
     * values are 0, 90, 180, 270
     */
    @GenerateFieldPort(name = "orientation", hasDefault = true)
    private int mOrientation = 0;

    // Decoder/GL plumbing. mMediaFrame wraps the external texture that
    // mSurfaceTexture (and hence MediaPlayer) renders into; mFrameExtractor
    // copies it into regular output frames.
    private MediaPlayer mMediaPlayer;
    private GLFrame mMediaFrame;
    private SurfaceTexture mSurfaceTexture;
    private ShaderProgram mFrameExtractor;
    private MutableFrameFormat mOutputFormat;
    // Raw video dimensions as reported by MediaPlayer's size-changed callback.
    private int mWidth, mHeight;

    // Total timeouts will be PREP_TIMEOUT*PREP_TIMEOUT_REPEAT
    private static final int PREP_TIMEOUT = 100; // ms
    private static final int PREP_TIMEOUT_REPEAT = 100;
    private static final int NEWFRAME_TIMEOUT = 100; //ms
    private static final int NEWFRAME_TIMEOUT_REPEAT = 10;

    // This is an identity shader; not using the default identity
    // shader because reading from a SurfaceTexture requires the
    // GL_OES_EGL_image_external extension.
    private final String mFrameShader =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "uniform samplerExternalOES tex_sampler_0;\n" +
            "varying vec2 v_texcoord;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(tex_sampler_0, v_texcoord);\n" +
            "}\n";

    // The following transforms enable rotation of the decoded source.
    // These are multiplied with the transform obtained from the
    // SurfaceTexture to get the final transform to be set on the media source.
    // Currently, given a device orientation, the MediaSource rotates in such a way
    // that the source is displayed upright. A particular use case
    // is "Background Replacement" feature in the Camera app
    // where the MediaSource rotates the source to align with the camera feed and pass it
    // on to the backdropper filter. The backdropper only does the blending
    // and does not have to do any rotation
    // (except for mirroring in case of front camera).
    // TODO: Currently the rotations are spread over a bunch of stages in the
    // pipeline. A cleaner design
    // could be to cast away all the rotation in a separate filter or attach a transform
    // to the frame so that MediaSource itself need not know about any rotation.
    //
    // Each table lists the four texture-corner coordinates (as homogeneous
    // columns) in the order that yields the named rotation once multiplied by
    // the SurfaceTexture transform in process().
    private static final float[] mSourceCoords_0 = { 1, 1, 0, 1,
                                                     0, 1, 0, 1,
                                                     1, 0, 0, 1,
                                                     0, 0, 0, 1 };
    private static final float[] mSourceCoords_270 = { 0, 1, 0, 1,
                                                       0, 0, 0, 1,
                                                       1, 1, 0, 1,
                                                       1, 0, 0, 1 };
    private static final float[] mSourceCoords_180 = { 0, 0, 0, 1,
                                                       1, 0, 0, 1,
                                                       0, 1, 0, 1,
                                                       1, 1, 0, 1 };
    private static final float[] mSourceCoords_90 = { 1, 0, 0, 1,
                                                      1, 1, 0, 1,
                                                      0, 0, 0, 1,
                                                      0, 1, 0, 1 };

    // Playback state flags. Set from MediaPlayer/SurfaceTexture callbacks
    // (inside synchronized(MediaSource.this) blocks) and read by process(),
    // which waits on this object's monitor for them to change.
    private boolean mGotSize;
    private boolean mPrepared;
    private boolean mPlaying;
    private boolean mNewFrameAvailable;
    private boolean mOrientationUpdated;
    private boolean mPaused;
    private boolean mCompleted;

    private final boolean mLogVerbose;
    private static final String TAG = "MediaSource";

    /** Constructs the filter; verbose logging is enabled via the log tag. */
    public MediaSource(String name) {
        super(name);
        mNewFrameAvailable = false;

        mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
    }

    @Override
    public void setupPorts() {
        // Add the single output port. (This is a source filter: it has no
        // input ports, only the "video" output.)
        addOutputPort("video", ImageFormat.create(ImageFormat.COLORSPACE_RGBA,
                                                  FrameFormat.TARGET_GPU));
    }

    // Creates the mutable output format; dimensions are filled in later when
    // the MediaPlayer reports the video size.
    private void createFormats() {
        mOutputFormat = ImageFormat.create(ImageFormat.COLORSPACE_RGBA,
                                           FrameFormat.TARGET_GPU);
    }

    @Override
    protected void prepare(FilterContext context) {
        if (mLogVerbose) Log.v(TAG, "Preparing MediaSource");

        mFrameExtractor = new ShaderProgram(context, mFrameShader);
        // SurfaceTexture defines (0,0) to be bottom-left. The filter framework
        // defines (0,0) as top-left, so do the flip here.
        mFrameExtractor.setSourceRect(0, 1, 1, -1);

        createFormats();
    }

    @Override
    public void open(FilterContext context) {
        if (mLogVerbose) {
            Log.v(TAG, "Opening MediaSource");
            if (mSelectedIsUrl) {
                Log.v(TAG, "Current URL is " + mSourceUrl);
            } else {
                Log.v(TAG, "Current source is Asset!");
            }
        }

        // Allocate an external-texture GL frame; the SurfaceTexture below is
        // bound to its texture id so MediaPlayer can decode into it.
        mMediaFrame = (GLFrame)context.getFrameManager().newBoundFrame(
                mOutputFormat,
                GLFrame.EXTERNAL_TEXTURE,
                0);

        mSurfaceTexture = new SurfaceTexture(mMediaFrame.getTextureId());

        if (!setupMediaPlayer(mSelectedIsUrl)) {
            throw new RuntimeException("Error setting up MediaPlayer!");
        }
    }

    @Override
    public void process(FilterContext context) {
        // Note: process is synchronized by its caller in the Filter base class
        if (mLogVerbose) Log.v(TAG, "Processing new frame");

        if (mMediaPlayer == null) {
            // Something went wrong in initialization or parameter updates
            throw new NullPointerException("Unexpected null media player!");
        }

        if (mCompleted) {
            // Video playback is done, so close us down
            closeOutputPort("video");
            return;
        }

        if (!mPlaying) {
            // First call since (re)setup: block until the callbacks report
            // both a prepared player and a known video size, polling in
            // PREP_TIMEOUT slices up to PREP_TIMEOUT_REPEAT times.
            int waitCount = 0;
            if (mLogVerbose) Log.v(TAG, "Waiting for preparation to complete");
            while (!mGotSize || !mPrepared) {
                try {
                    this.wait(PREP_TIMEOUT);
                } catch (InterruptedException e) {
                    // ignoring
                }
                if (mCompleted) {
                    // Video playback is done, so close us down
                    closeOutputPort("video");
                    return;
                }
                waitCount++;
                if (waitCount == PREP_TIMEOUT_REPEAT) {
                    mMediaPlayer.release();
                    throw new RuntimeException("MediaPlayer timed out while preparing!");
                }
            }
            if (mLogVerbose) Log.v(TAG, "Starting playback");
            mMediaPlayer.start();
        }

        // Use last frame if paused, unless just starting playback, in which case
        // we want at least one valid frame before pausing
        if (!mPaused || !mPlaying) {
            if (mWaitForNewFrame) {
                if (mLogVerbose) Log.v(TAG, "Waiting for new frame");

                // Wait for the SurfaceTexture's frame-available callback,
                // again in bounded timeout slices.
                int waitCount = 0;
                while (!mNewFrameAvailable) {
                    if (waitCount == NEWFRAME_TIMEOUT_REPEAT) {
                        if (mCompleted) {
                            // Video playback is done, so close us down
                            closeOutputPort("video");
                            return;
                        } else {
                            throw new RuntimeException("Timeout waiting for new frame!");
                        }
                    }
                    try {
                        this.wait(NEWFRAME_TIMEOUT);
                    } catch (InterruptedException e) {
                        if (mLogVerbose) Log.v(TAG, "interrupted");
                        // ignoring
                    }
                    waitCount++;
                }
                mNewFrameAvailable = false;
                if (mLogVerbose) Log.v(TAG, "Got new frame");
            }

            // Latch the most recent decoded image into our external texture;
            // its transform matrix may have changed, so re-derive the source
            // region below.
            mSurfaceTexture.updateTexImage();
            mOrientationUpdated = true;
        }
        if (mOrientationUpdated) {
            // Combine the SurfaceTexture's transform with the corner table for
            // the requested rotation to get the final source coordinates.
            float[] surfaceTransform = new float[16];
            mSurfaceTexture.getTransformMatrix(surfaceTransform);

            float[] sourceCoords = new float[16];
            switch (mOrientation) {
                default:
                case 0:
                    Matrix.multiplyMM(sourceCoords, 0,
                                      surfaceTransform, 0,
                                      mSourceCoords_0, 0);
                    break;
                case 90:
                    Matrix.multiplyMM(sourceCoords, 0,
                                      surfaceTransform, 0,
                                      mSourceCoords_90, 0);
                    break;
                case 180:
                    Matrix.multiplyMM(sourceCoords, 0,
                                      surfaceTransform, 0,
                                      mSourceCoords_180, 0);
                    break;
                case 270:
                    Matrix.multiplyMM(sourceCoords, 0,
                                      surfaceTransform, 0,
                                      mSourceCoords_270, 0);
                    break;
            }
            if (mLogVerbose) {
                Log.v(TAG, "OrientationHint = " + mOrientation);
                String temp = String.format("SetSourceRegion: %.2f, %.2f, %.2f, %.2f, %.2f, %.2f, %.2f, %.2f",
                        sourceCoords[4], sourceCoords[5],sourceCoords[0], sourceCoords[1],
                        sourceCoords[12], sourceCoords[13],sourceCoords[8], sourceCoords[9]);
                Log.v(TAG, temp);
            }
            // Extract the (x, y) of each transformed corner (columns of the
            // 4x4 result) and hand them to the extractor shader.
            mFrameExtractor.setSourceRegion(sourceCoords[4], sourceCoords[5],
                    sourceCoords[0], sourceCoords[1],
                    sourceCoords[12], sourceCoords[13],
                    sourceCoords[8], sourceCoords[9]);
            mOrientationUpdated = false;
        }

        // Copy the external texture into a regular GPU frame, stamp it with
        // the SurfaceTexture's timestamp (nanoseconds), and push it out.
        Frame output = context.getFrameManager().newFrame(mOutputFormat);
        mFrameExtractor.process(mMediaFrame, output);

        long timestamp = mSurfaceTexture.getTimestamp();
        if (mLogVerbose) Log.v(TAG, "Timestamp: " + (timestamp / 1000000000.0) + " s");
        output.setTimestamp(timestamp);

        pushOutput("video", output);
        output.release();

        mPlaying = true;
    }

    @Override
    public void close(FilterContext context) {
        // NOTE(review): assumes open() succeeded, i.e. mMediaPlayer and
        // mSurfaceTexture are non-null here.
        if (mMediaPlayer.isPlaying()) {
            mMediaPlayer.stop();
        }
        mPrepared = false;
        mGotSize = false;
        mPlaying = false;
        mPaused = false;
        mCompleted = false;
        mNewFrameAvailable = false;

        mMediaPlayer.release();
        mMediaPlayer = null;
        mSurfaceTexture.release();
        mSurfaceTexture = null;
        if (mLogVerbose) Log.v(TAG, "MediaSource closed");
    }

    @Override
    public void tearDown(FilterContext context) {
        if (mMediaFrame != null) {
            mMediaFrame.release();
        }
    }

    // When updating the port values of the filter, users can update sourceIsUrl to switch
    // between using URL objects or Assets.
    // If updating only sourceUrl/sourceAsset, MediaPlayer gets reset if the current player
    // uses Url objects/Asset.
    // Otherwise the new sourceUrl/sourceAsset is stored and will be used when users switch
    // sourceIsUrl next time.
377 @Override 378 public void fieldPortValueUpdated(String name, FilterContext context) { 379 if (mLogVerbose) Log.v(TAG, "Parameter update"); 380 if (name.equals("sourceUrl")) { 381 if (isOpen()) { 382 if (mLogVerbose) Log.v(TAG, "Opening new source URL"); 383 if (mSelectedIsUrl) { 384 setupMediaPlayer(mSelectedIsUrl); 385 } 386 } 387 } else if (name.equals("sourceAsset") ) { 388 if (isOpen()) { 389 if (mLogVerbose) Log.v(TAG, "Opening new source FD"); 390 if (!mSelectedIsUrl) { 391 setupMediaPlayer(mSelectedIsUrl); 392 } 393 } 394 } else if (name.equals("loop")) { 395 if (isOpen()) { 396 mMediaPlayer.setLooping(mLooping); 397 } 398 } else if (name.equals("sourceIsUrl")) { 399 if (isOpen()){ 400 if (mSelectedIsUrl){ 401 if (mLogVerbose) Log.v(TAG, "Opening new source URL"); 402 } else { 403 if (mLogVerbose) Log.v(TAG, "Opening new source Asset"); 404 } 405 setupMediaPlayer(mSelectedIsUrl); 406 } 407 } else if (name.equals("volume")) { 408 if (isOpen()) { 409 mMediaPlayer.setVolume(mVolume, mVolume); 410 } 411 } else if (name.equals("orientation") && mGotSize) { 412 if (mOrientation == 0 || mOrientation == 180) { 413 mOutputFormat.setDimensions(mWidth, mHeight); 414 } else { 415 mOutputFormat.setDimensions(mHeight, mWidth); 416 } 417 mOrientationUpdated = true; 418 } 419 } 420 421 synchronized public void pauseVideo(boolean pauseState) { 422 if (isOpen()) { 423 if (pauseState && !mPaused) { 424 mMediaPlayer.pause(); 425 } else if (!pauseState && mPaused) { 426 mMediaPlayer.start(); 427 } 428 } 429 mPaused = pauseState; 430 } 431 432 /** Creates a media player, sets it up, and calls prepare */ 433 synchronized private boolean setupMediaPlayer(boolean useUrl) { 434 mPrepared = false; 435 mGotSize = false; 436 mPlaying = false; 437 mPaused = false; 438 mCompleted = false; 439 mNewFrameAvailable = false; 440 441 if (mLogVerbose) Log.v(TAG, "Setting up playback."); 442 443 if (mMediaPlayer != null) { 444 // Clean up existing media players 445 if (mLogVerbose) Log.v(TAG, 
"Resetting existing MediaPlayer."); 446 mMediaPlayer.reset(); 447 } else { 448 // Create new media player 449 if (mLogVerbose) Log.v(TAG, "Creating new MediaPlayer."); 450 mMediaPlayer = new MediaPlayer(); 451 } 452 453 if (mMediaPlayer == null) { 454 throw new RuntimeException("Unable to create a MediaPlayer!"); 455 } 456 457 // Set up data sources, etc 458 try { 459 if (useUrl) { 460 if (mLogVerbose) Log.v(TAG, "Setting MediaPlayer source to URI " + mSourceUrl); 461 if (mContext == null) { 462 mMediaPlayer.setDataSource(mSourceUrl); 463 } else { 464 mMediaPlayer.setDataSource(mContext, Uri.parse(mSourceUrl.toString())); 465 } 466 } else { 467 if (mLogVerbose) Log.v(TAG, "Setting MediaPlayer source to asset " + mSourceAsset); 468 mMediaPlayer.setDataSource(mSourceAsset.getFileDescriptor(), mSourceAsset.getStartOffset(), mSourceAsset.getLength()); 469 } 470 } catch(IOException e) { 471 mMediaPlayer.release(); 472 mMediaPlayer = null; 473 if (useUrl) { 474 throw new RuntimeException(String.format("Unable to set MediaPlayer to URL %s!", mSourceUrl), e); 475 } else { 476 throw new RuntimeException(String.format("Unable to set MediaPlayer to asset %s!", mSourceAsset), e); 477 } 478 } catch(IllegalArgumentException e) { 479 mMediaPlayer.release(); 480 mMediaPlayer = null; 481 if (useUrl) { 482 throw new RuntimeException(String.format("Unable to set MediaPlayer to URL %s!", mSourceUrl), e); 483 } else { 484 throw new RuntimeException(String.format("Unable to set MediaPlayer to asset %s!", mSourceAsset), e); 485 } 486 } 487 488 mMediaPlayer.setLooping(mLooping); 489 mMediaPlayer.setVolume(mVolume, mVolume); 490 491 // Bind it to our media frame 492 Surface surface = new Surface(mSurfaceTexture); 493 mMediaPlayer.setSurface(surface); 494 surface.release(); 495 496 // Connect Media Player to callbacks 497 498 mMediaPlayer.setOnVideoSizeChangedListener(onVideoSizeChangedListener); 499 mMediaPlayer.setOnPreparedListener(onPreparedListener); 500 
mMediaPlayer.setOnCompletionListener(onCompletionListener); 501 502 // Connect SurfaceTexture to callback 503 mSurfaceTexture.setOnFrameAvailableListener(onMediaFrameAvailableListener); 504 505 if (mLogVerbose) Log.v(TAG, "Preparing MediaPlayer."); 506 mMediaPlayer.prepareAsync(); 507 508 return true; 509 } 510 511 private MediaPlayer.OnVideoSizeChangedListener onVideoSizeChangedListener = 512 new MediaPlayer.OnVideoSizeChangedListener() { 513 public void onVideoSizeChanged(MediaPlayer mp, int width, int height) { 514 if (mLogVerbose) Log.v(TAG, "MediaPlayer sent dimensions: " + width + " x " + height); 515 if (!mGotSize) { 516 if (mOrientation == 0 || mOrientation == 180) { 517 mOutputFormat.setDimensions(width, height); 518 } else { 519 mOutputFormat.setDimensions(height, width); 520 } 521 mWidth = width; 522 mHeight = height; 523 } else { 524 if (mOutputFormat.getWidth() != width || 525 mOutputFormat.getHeight() != height) { 526 Log.e(TAG, "Multiple video size change events received!"); 527 } 528 } 529 synchronized(MediaSource.this) { 530 mGotSize = true; 531 MediaSource.this.notify(); 532 } 533 } 534 }; 535 536 private MediaPlayer.OnPreparedListener onPreparedListener = 537 new MediaPlayer.OnPreparedListener() { 538 public void onPrepared(MediaPlayer mp) { 539 if (mLogVerbose) Log.v(TAG, "MediaPlayer is prepared"); 540 synchronized(MediaSource.this) { 541 mPrepared = true; 542 MediaSource.this.notify(); 543 } 544 } 545 }; 546 547 private MediaPlayer.OnCompletionListener onCompletionListener = 548 new MediaPlayer.OnCompletionListener() { 549 public void onCompletion(MediaPlayer mp) { 550 if (mLogVerbose) Log.v(TAG, "MediaPlayer has completed playback"); 551 synchronized(MediaSource.this) { 552 mCompleted = true; 553 } 554 } 555 }; 556 557 private SurfaceTexture.OnFrameAvailableListener onMediaFrameAvailableListener = 558 new SurfaceTexture.OnFrameAvailableListener() { 559 public void onFrameAvailable(SurfaceTexture surfaceTexture) { 560 if (mLogVerbose) 
Log.v(TAG, "New frame from media player"); 561 synchronized(MediaSource.this) { 562 if (mLogVerbose) Log.v(TAG, "New frame: notify"); 563 mNewFrameAvailable = true; 564 MediaSource.this.notify(); 565 if (mLogVerbose) Log.v(TAG, "New frame: notify done"); 566 } 567 } 568 }; 569 570 } 571