/frameworks/av/include/media/stagefright/
MediaSync.h
     97  size_t sizeInBytes, int64_t presentationTimeUs);
/frameworks/av/media/ndk/ |
NdkMediaCodec.cpp
    321  int64_t presentationTimeUs;
    322  status_t ret = mData->mCodec->dequeueOutputBuffer(&idx, &offset, &size, &presentationTimeUs,
    330  info->presentationTimeUs = presentationTimeUs;
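The NdkMediaCodec.cpp lines above show the NDK wrapper copying the timestamp reported by dequeueOutputBuffer into AMediaCodecBufferInfo.presentationTimeUs. For orientation, a minimal Java-side sketch of the same dequeue step; the `decoder` parameter and the 10 ms timeout are placeholders, not taken from the listing:

    import android.media.MediaCodec;

    // Pull one output buffer and return the presentation timestamp the codec
    // attached to it (microseconds, as supplied on the input side).
    static long drainOneOutputBuffer(MediaCodec decoder) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int index = decoder.dequeueOutputBuffer(info, 10_000 /* timeoutUs */);
        if (index < 0) {
            return -1;                          // nothing ready, or format/buffers changed
        }
        long ptsUs = info.presentationTimeUs;
        decoder.releaseOutputBuffer(index, false /* render */);
        return ptsUs;
    }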
/frameworks/base/media/jni/ |
android_media_MediaCodec.h
     85  int64_t presentationTimeUs,

android_media_MediaSync.cpp
    110  int sizeInBytes, int64_t presentationTimeUs) {
    111  return mSync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);
    258  JNIEnv *env, jobject thiz, jint sizeInBytes, jlong presentationTimeUs) {
    265  status_t err = sync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);

android_media_MediaCodec.cpp
    340  int64_t presentationTimeUs,
    345  presentationTimeUs, flags, errorDetailMsg);
    [all...]
/frameworks/base/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/ |
GpuVideoTrackDecoder.java
    110  mCurrentPresentationTimeUs = info.presentationTimeUs;

CpuVideoTrackDecoder.java
     83  mCurrentPresentationTimeUs = info.presentationTimeUs;
/packages/apps/Gallery2/src/com/android/gallery3d/app/ |
VideoUtils.java
    205  bufferInfo.presentationTimeUs = extractor.getSampleTime();
    206  if (endMs > 0 && bufferInfo.presentationTimeUs > (endMs * 1000)) {
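The two VideoUtils.java lines above sit in Gallery2's trim loop, which copies samples to a muxer without decoding. A hedged sketch of that kind of copy loop, assuming a MediaExtractor with the track already selected and a started MediaMuxer; method and parameter names are placeholders:

    import java.nio.ByteBuffer;
    import android.media.MediaCodec;
    import android.media.MediaExtractor;
    import android.media.MediaMuxer;

    // Copy encoded samples straight from the extractor to the muxer, stopping
    // once the sample time passes endMs (milliseconds, 0 meaning "no limit").
    static void copySamples(MediaExtractor extractor, MediaMuxer muxer,
                            int trackIndex, long endMs) {
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        ByteBuffer buffer = ByteBuffer.allocate(1 << 20);   // assumed max sample size
        while (true) {
            bufferInfo.offset = 0;
            bufferInfo.size = extractor.readSampleData(buffer, 0);
            if (bufferInfo.size < 0) {
                break;                                       // end of stream
            }
            bufferInfo.presentationTimeUs = extractor.getSampleTime();
            if (endMs > 0 && bufferInfo.presentationTimeUs > endMs * 1000) {
                break;                                       // past the requested end point
            }
            bufferInfo.flags = extractor.getSampleFlags();
            muxer.writeSampleData(trackIndex, buffer, bufferInfo);
            extractor.advance();
        }
    }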
/cts/tests/tests/media/src/android/media/cts/ |
ExtractDecodeEditEncodeMuxTest.java
    [all...]

MediaSyncTest.java
    328  mMediaSync.queueAudio(buffer1, INDEX_BEFORE_FLUSH, 0 /* presentationTimeUs */);
    330  mMediaSync.queueAudio(buffer2, INDEX_AFTER_FLUSH, 0 /* presentationTimeUs */);
    785  info.presentationTimeUs);
    787  && info.presentationTimeUs >= mStartingAudioTimestampUs) {
    792  codec.releaseOutputBuffer(index, info.presentationTimeUs * 1000);
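MediaSyncTest above calls MediaSync.queueAudio(buffer, bufferId, presentationTimeUs) with the timestamp in microseconds. An illustrative helper showing where that timestamp typically comes from (the audio decoder's BufferInfo); it assumes the MediaSync has already been given an AudioTrack via setAudioTrack(), and all names are made up for the example:

    import java.nio.ByteBuffer;
    import android.media.MediaCodec;
    import android.media.MediaSync;

    // Queue one decoded PCM buffer. bufferId is an app-chosen token that MediaSync
    // hands back in Callback.onAudioBufferConsumed(), which is where the codec
    // buffer can be released; reusing the codec's output index works well.
    static void queueDecodedAudio(MediaSync sync, MediaCodec audioDecoder,
                                  int outputIndex, MediaCodec.BufferInfo info) {
        ByteBuffer pcm = audioDecoder.getOutputBuffer(outputIndex);
        sync.queueAudio(pcm, outputIndex, info.presentationTimeUs);
    }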
MediaMuxerTest.java
    244  bufferInfo.presentationTimeUs = extractor.getSampleTime();
    255  "PresentationTimeUs:" + bufferInfo.presentationTimeUs +
DecodeEditEncodeTest.java
    328  output.addChunk(encodedData, info.flags, info.presentationTimeUs);
    526  outputData.addChunk(encodedData, info.flags, info.presentationTimeUs);
    577  inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
    [all...]
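The * 1000 at DecodeEditEncodeTest line 577 converts the codec's microsecond timestamp into the nanoseconds EGL expects (the same scaling shows up in MediaSyncTest line 792 for releaseOutputBuffer's render timestamp). A sketch of that conversion using EGLExt directly, assuming an EGL window surface bound to the encoder's input surface:

    import android.opengl.EGL14;
    import android.opengl.EGLDisplay;
    import android.opengl.EGLExt;
    import android.opengl.EGLSurface;

    // BufferInfo.presentationTimeUs is microseconds; eglPresentationTimeANDROID()
    // wants nanoseconds, so scale by 1000 before swapping the frame.
    static void presentFrame(EGLDisplay display, EGLSurface surface, long presentationTimeUs) {
        EGLExt.eglPresentationTimeANDROID(display, surface, presentationTimeUs * 1000);
        EGL14.eglSwapBuffers(display, surface);
    }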
DecoderTest.java
    275  info.presentationTimeUs > lastPresentationTimeUsFromDecoder);
    277  lastPresentationTimeUsFromDecoder = info.presentationTimeUs;
    [all...]
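DecoderTest lines 275-277 above assert that the decoder's output timestamps keep increasing. A compact sketch of the same check folded into a drain loop; for brevity, any non-buffer return code from dequeueOutputBuffer simply ends the loop:

    import android.media.MediaCodec;

    // Drain until end of stream, failing if a timestamp does not move forward.
    static void checkMonotonicOutput(MediaCodec decoder) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        long lastPtsUs = Long.MIN_VALUE;
        int index;
        while ((index = decoder.dequeueOutputBuffer(info, 10_000)) >= 0) {
            if (info.size > 0 && info.presentationTimeUs <= lastPtsUs) {
                throw new IllegalStateException("pts went backwards: " + info.presentationTimeUs);
            }
            lastPtsUs = Math.max(lastPtsUs, info.presentationTimeUs);
            boolean eos = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
            decoder.releaseOutputBuffer(index, false /* render */);
            if (eos) {
                break;
            }
        }
    }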
EncodeDecodeTest.java
    [all...]

Vp8CodecTestBase.java
    632  outPresentationTimeUs = bufferInfo.presentationTimeUs;
    [all...]

ImageReaderDecoderTest.java
    504  long presentationTimeUs = 0;
    511  presentationTimeUs = extractor.getSampleTime();
    518  presentationTimeUs,
    [all...]

NativeDecoderTest.java
    275  long presentationTimeUs = ex.getSampleTime();
    277  codec[t].queueInputBuffer(bufidx, 0, sampleSize, presentationTimeUs,
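NativeDecoderTest above forwards the container timestamp from getSampleTime() into queueInputBuffer() unchanged. A self-contained sketch of that feed step at the SDK level; names and the timeout value are illustrative:

    import java.nio.ByteBuffer;
    import android.media.MediaCodec;
    import android.media.MediaExtractor;

    // Feed one encoded sample, carrying the container timestamp through to the
    // codec; returns false once the end-of-stream buffer has been queued.
    static boolean queueOneSample(MediaExtractor extractor, MediaCodec decoder) {
        int inputIndex = decoder.dequeueInputBuffer(10_000 /* timeoutUs */);
        if (inputIndex < 0) {
            return true;                        // no input buffer free yet, try again
        }
        ByteBuffer input = decoder.getInputBuffer(inputIndex);
        int sampleSize = extractor.readSampleData(input, 0);
        if (sampleSize < 0) {
            decoder.queueInputBuffer(inputIndex, 0, 0, 0,
                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            return false;                       // no more samples
        }
        long presentationTimeUs = extractor.getSampleTime();
        decoder.queueInputBuffer(inputIndex, 0, sampleSize, presentationTimeUs, 0);
        extractor.advance();
        return true;
    }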
VideoDecoderPerfTest.java
    183  long presentationTimeUs = inputNum * 1000000L / frameRate;
    196  presentationTimeUs,
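VideoDecoderPerfTest above synthesizes timestamps instead of reading them from a container: frame N of a constant-rate stream is stamped N * 1000000 / frameRate microseconds. As a one-liner, typically used as queueInputBuffer(index, 0, size, timestampForFrame(n, fps), 0):

    // Synthetic timestamp (microseconds) for frame frameIndex of a fixed-rate stream.
    static long timestampForFrame(int frameIndex, int frameRate) {
        return frameIndex * 1_000_000L / frameRate;
    }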
EncodeVirtualDisplayTest.java
    432  info.presentationTimeUs, info.flags);
/frameworks/av/media/libstagefright/ |
MediaSync.cpp
    295  size_t sizeInBytes, int64_t presentationTimeUs) {
    308  int64_t maxMediaTimeUs = presentationTimeUs
    312  int64_t nowMediaUs = presentationTimeUs
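In MediaSync.cpp, updateQueuedAudioData() (lines 295-312 above) derives media-clock times from the presentationTimeUs of the PCM just queued. A rough Java illustration of the bytes-to-duration relationship involved, assuming 16-bit PCM; this is only a sketch of the idea, not the framework's actual arithmetic:

    // The queued buffer covers media time [presentationTimeUs, presentationTimeUs + duration],
    // so its upper edge bounds how far the audio clock can advance.
    static long maxMediaTimeUs(long presentationTimeUs, int sizeInBytes,
                               int sampleRate, int channelCount) {
        long frames = sizeInBytes / (2L * channelCount);   // 2 bytes per 16-bit sample
        return presentationTimeUs + frames * 1_000_000L / sampleRate;
    }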
MediaCodec.cpp
    692  int64_t presentationTimeUs,
    703  msg->setInt64("timeUs", presentationTimeUs);
    720  int64_t presentationTimeUs,
    737  msg->setInt64("timeUs", presentationTimeUs);
    766  int64_t *presentationTimeUs,
    781  CHECK(response->findInt64("timeUs", presentationTimeUs));
    [all...]
/external/webrtc/talk/app/webrtc/java/src/org/webrtc/ |
MediaCodecVideoEncoder.java
    546  isKeyFrame, info.presentationTimeUs);
    549  isKeyFrame, info.presentationTimeUs);
/cts/tests/tests/security/src/android/security/cts/ |
StagefrightTest.java
    408  if (info.presentationTimeUs > TIMEOUT_NS / 1000) {
/frameworks/base/media/jni/soundpool/ |
SoundPool.cpp
    568  int64_t presentationTimeUs = AMediaExtractor_getSampleTime(ex);
    571  0 /* offset */, sampleSize, presentationTimeUs,
    [all...]
/cts/tests/video/src/android/video/cts/ |
VideoEncoderDecoderTest.java
    [all...]