/external/chromium_org/media/cast/test/utility/ |
generate_timecode_audio.cc |
  18  fprintf(stderr, "Usage: %s <fps> <frames> >output.s16le\n", argv[0]);
  22  const uint32 frames = static_cast<uint32>(std::max(0, atoi(argv[2])));  [local]
  25  for (uint32 frame_id = 1; frame_id <= frames; frame_id++) {
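A minimal sketch of the argument handling and frame loop these lines imply, assuming a 48 kHz sample rate and a placeholder waveform; the real tool's per-frame timecode encoding is not shown in this listing.

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>
    #include <cstdlib>

    int main(int argc, char** argv) {
      if (argc < 3) {
        fprintf(stderr, "Usage: %s <fps> <frames> >output.s16le\n", argv[0]);
        return 1;
      }
      const uint32_t fps = static_cast<uint32_t>(std::max(1, atoi(argv[1])));
      const uint32_t frames = static_cast<uint32_t>(std::max(0, atoi(argv[2])));
      const uint32_t samples_per_frame = 48000 / fps;  // assumed sample rate
      for (uint32_t frame_id = 1; frame_id <= frames; frame_id++) {
        // Emit |samples_per_frame| host-endian 16-bit samples for this frame
        // (little-endian on the usual targets); the waveform is a placeholder.
        for (uint32_t i = 0; i < samples_per_frame; ++i) {
          int16_t sample = 0;
          fwrite(&sample, sizeof(sample), 1, stdout);
        }
      }
      return 0;
    }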
|
/external/chromium_org/third_party/webrtc/common_audio/resampler/ |
sinusoidal_linear_chirp_source.cc |
  32  void SinusoidalLinearChirpSource::Run(int frames, float* destination) {
  33    for (int i = 0; i < frames; ++i, ++current_index_) {
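For context, a linear chirp's frequency ramps linearly with time, so the instantaneous phase is 2π(f0·t + (k/2)·t²). A generic, self-contained generator in that shape, not the actual Chromium class (whose members other than current_index_ are not visible in this listing):

    #include <cmath>
    #include <cstddef>

    // Generic linear chirp: frequency sweeps from |f0| to |f1| Hz over
    // |duration_samples| samples at |sample_rate| Hz.
    class ChirpSource {
     public:
      ChirpSource(double sample_rate, double f0, double f1, size_t duration_samples)
          : sample_rate_(sample_rate),
            f0_(f0),
            k_((f1 - f0) / (duration_samples / sample_rate)),  // Hz per second
            current_index_(0) {}

      void Run(int frames, float* destination) {
        const double kTwoPi = 6.283185307179586;
        for (int i = 0; i < frames; ++i, ++current_index_) {
          const double t = current_index_ / sample_rate_;
          // Phase of a linear chirp: 2*pi*(f0*t + (k/2)*t^2).
          destination[i] =
              static_cast<float>(std::sin(kTwoPi * (f0_ * t + 0.5 * k_ * t * t)));
        }
      }

     private:
      double sample_rate_;
      double f0_;
      double k_;
      size_t current_index_;
    };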
|
/external/chromium_org/tools/android/heap_profiler/ |
heap_profiler.h |
  31  uintptr_t frames[HEAP_PROFILER_MAX_DEPTH];  // Absolute addrs of stack frames.  [member in struct:StacktraceEntry]
  32  uint32_t hash;  // H(frames), used to keep these entries in a hashtable.
  71  // frames array. Flags are optionals and don't affect the behavior of the
  75  uintptr_t* frames,
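The H(frames) at file line 32 is whatever hash the profiler uses to bucket identical stack traces; purely as an illustration (not the actual function), an FNV-1a over the frame addresses would do the job:

    #include <cstddef>
    #include <cstdint>

    // Illustrative hash of a stack trace (array of absolute frame addresses),
    // suitable for keeping StacktraceEntry-like records in a hashtable.
    uint32_t HashFrames(const uintptr_t* frames, size_t depth) {
      uint32_t hash = 2166136261u;  // FNV-1a offset basis
      for (size_t i = 0; i < depth; ++i) {
        const uint8_t* bytes = reinterpret_cast<const uint8_t*>(&frames[i]);
        for (size_t b = 0; b < sizeof(uintptr_t); ++b) {
          hash ^= bytes[b];
          hash *= 16777619u;  // FNV-1a prime
        }
      }
      return hash;
    }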
|
/frameworks/av/media/libstagefright/rtsp/ |
ARTPAssembler.h |
  53  const List<sp<ABuffer> > &frames);
  56  const List<sp<ABuffer> > &frames);
|
/external/chromium_org/net/websockets/ |
websocket_basic_stream.cc |
  45  // Returns the total serialized size of |frames|. This function assumes that
  46  // |frames| will be serialized with mask field. This function forces the
  47  // masked bit of the frames on.
  49  ScopedVector<WebSocketFrame>* frames) {
  53  for (WebSocketFrameIterator it = frames->begin(); it != frames->end(); ++it) {
  58  // to cache anywhere near 2GB of frames.
  89  int WebSocketBasicStream::ReadFrames(ScopedVector<WebSocketFrame>* frames,
  91  DCHECK(frames->empty());
  93  // it as WebSocket frames
  [all...] |
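The comment at file lines 45-47 describes summing the serialized size of every frame while forcing the masked bit on. A simplified sketch of that calculation with stand-in types; the Frame/FrameHeader structs and MaskedHeaderSize helper below are illustrative, not the Chromium WebSocketFrame API, and the sizes follow RFC 6455 framing:

    #include <cstdint>
    #include <vector>

    // Stand-ins for the real WebSocket frame types.
    struct FrameHeader {
      bool masked;
      uint64_t payload_length;
    };
    struct Frame {
      FrameHeader header;
    };

    // Serialized size of a masked header: 2 base bytes, an extended length
    // field for larger payloads, plus the 4-byte masking key (RFC 6455).
    uint64_t MaskedHeaderSize(uint64_t payload_length) {
      uint64_t size = 2 + 4;
      if (payload_length > 0xFFFF)
        size += 8;  // 64-bit extended payload length
      else if (payload_length > 125)
        size += 2;  // 16-bit extended payload length
      return size;
    }

    // Total serialized size of |frames|, forcing the masked bit on, mirroring
    // the behaviour the comment above describes.
    uint64_t CalculateSerializedSize(std::vector<Frame>* frames) {
      uint64_t total = 0;
      for (Frame& frame : *frames) {
        frame.header.masked = true;
        total += MaskedHeaderSize(frame.header.payload_length) +
                 frame.header.payload_length;
      }
      return total;
    }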
/external/chromium_org/media/audio/ |
audio_power_monitor.h |
  50  // Scan more |frames| of audio data from |buffer|. It is safe to call this
  52  void Scan(const AudioBus& buffer, int frames);
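The power such a monitor reports is essentially the mean square of the scanned samples, usually expressed in dBFS. A generic computation over |frames| samples of one channel, shown only to illustrate the math (the real AudioPowerMonitor internals are not part of this listing):

    #include <cmath>
    #include <limits>

    // Average power of |frames| float samples (full scale == 1.0), in dBFS.
    // Returns -infinity for pure silence.
    double AveragePowerInDbfs(const float* samples, int frames) {
      double sum_of_squares = 0.0;
      for (int i = 0; i < frames; ++i)
        sum_of_squares += static_cast<double>(samples[i]) * samples[i];
      const double mean_square = frames > 0 ? sum_of_squares / frames : 0.0;
      if (mean_square <= 0.0)
        return -std::numeric_limits<double>::infinity();
      return 10.0 * std::log10(mean_square);
    }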
|
/external/chromium_org/media/base/ |
audio_pull_fifo.h |
  23  // frames are available to satisfy the request. |frame_delay| is the number
  24  // of output frames already processed and can be used to estimate delay.
  30  // length |frames| audio frames.
  31  AudioPullFifo(int channels, int frames, const ReadCB& read_cb);
  34  // Consumes |frames_to_consume| audio frames from the FIFO and copies
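Per the constructor at file line 31 and the surrounding comments, the FIFO pulls more data through a read callback whenever a consume request cannot be satisfied, reporting how many output frames were already produced. A self-contained toy version of that pattern, single channel and std::function-based, rather than the Chromium class itself:

    #include <deque>
    #include <functional>
    #include <utility>
    #include <vector>

    // Toy pull FIFO: Consume() drains buffered samples and invokes |read_cb|
    // for more when the buffer runs dry. |frame_delay| is the number of output
    // frames already produced for the current Consume() call.
    class PullFifo {
     public:
      using ReadCB = std::function<void(int frame_delay, std::vector<float>* out)>;

      explicit PullFifo(ReadCB read_cb) : read_cb_(std::move(read_cb)) {}

      void Consume(float* destination, int frames_to_consume) {
        int produced = 0;
        while (produced < frames_to_consume) {
          if (buffer_.empty()) {
            std::vector<float> refill;
            read_cb_(produced, &refill);
            if (refill.empty())
              break;  // source exhausted
            buffer_.insert(buffer_.end(), refill.begin(), refill.end());
          }
          destination[produced++] = buffer_.front();
          buffer_.pop_front();
        }
      }

     private:
      ReadCB read_cb_;
      std::deque<float> buffer_;
    };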
|
audio_bus_perftest.cc |
  17  const int frame_size = bus->frames() * bus->channels();
  23  bus->ToInterleaved(bus->frames(), byte_size, interleaved.get());
  33  bus->FromInterleaved(interleaved.get(), bus->frames(), byte_size);
|
audio_pull_fifo_unittest.cc |
  44  // Consume data using different sizes, acquire audio frames from the FIFO
  56  EXPECT_LT(last_frame_delay_, audio_bus_->frames());
  67  EXPECT_EQ(audio_bus->frames(), kMaxFramesInFifo);
  68  for (int i = 0; i < audio_bus->frames(); ++i) {
|
/external/compiler-rt/lib/sanitizer_common/ |
sanitizer_symbolizer_libbacktrace.h | 35 uptr SymbolizeCode(uptr addr, AddressInfo *frames, uptr max_frames,
|
/external/glide/third_party/gif_decoder/src/main/java/com/bumptech/glide/gifdecoder/ |
GifHeader.java |
  16  public List<GifFrame> frames = new ArrayList<GifFrame>();  [field in class:GifHeader]
  27  //TODO: this is set both during reading the header and while decoding frames...
|
/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.6/sysroot/usr/include/alsa/ |
pcm_rate.h |
  96   snd_pcm_uframes_t (*input_frames)(void *obj, snd_pcm_uframes_t frames);
  100  snd_pcm_uframes_t (*output_frames)(void *obj, snd_pcm_uframes_t frames);
  144  snd_pcm_uframes_t (*input_frames)(void *obj, snd_pcm_uframes_t frames);
  145  snd_pcm_uframes_t (*output_frames)(void *obj, snd_pcm_uframes_t frames);
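These hooks let a rate-converter plugin translate frame counts across the resampling ratio. A minimal sketch of one plausible implementation, assuming a converter object that stores its input and output rates; the struct is illustrative, and the exact conversion direction expected for each hook should be taken from the header's own documentation comments:

    #include <alsa/asoundlib.h>

    // Illustrative converter state; a real plugin defines its own object type.
    struct example_rate_converter {
      unsigned int in_rate;
      unsigned int out_rate;
    };

    // Input frames needed to produce |frames| output frames (rounded up so the
    // converter never runs short of input).
    static snd_pcm_uframes_t example_input_frames(void *obj, snd_pcm_uframes_t frames) {
      struct example_rate_converter *rate = (struct example_rate_converter *)obj;
      return (frames * rate->in_rate + rate->out_rate - 1) / rate->out_rate;
    }

    // Output frames produced by |frames| input frames.
    static snd_pcm_uframes_t example_output_frames(void *obj, snd_pcm_uframes_t frames) {
      struct example_rate_converter *rate = (struct example_rate_converter *)obj;
      return frames * rate->out_rate / rate->in_rate;
    }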
|
/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.8/sysroot/usr/include/alsa/ |
pcm_rate.h |
  96   snd_pcm_uframes_t (*input_frames)(void *obj, snd_pcm_uframes_t frames);
  100  snd_pcm_uframes_t (*output_frames)(void *obj, snd_pcm_uframes_t frames);
  144  snd_pcm_uframes_t (*input_frames)(void *obj, snd_pcm_uframes_t frames);
  145  snd_pcm_uframes_t (*output_frames)(void *obj, snd_pcm_uframes_t frames);
|
/external/chromium_org/media/audio/cras/ |
cras_unified.h |
  55  unsigned int frames,
  67  uint32 DispatchCallback(size_t frames,
  74  uint32 WriteAudio(size_t frames, uint8* buffer, const timespec* sample_ts);
|
/external/chromium_org/third_party/WebKit/Source/devtools/front_end/extensions/ |
ExtensionView.js |
  68  var frames = /** @type {!Array.<!Window>} */ (window.frames);
  69  this._frameIndex = Array.prototype.indexOf.call(frames, this._iframe.contentWindow);
|
/external/chromium_org/third_party/libvpx/source/libvpx/test/ |
decode_perf_test.cc |
  91   const unsigned frames = video.frame_number();  [local]
  92   const double fps = double(frames) / elapsed_secs;
  100  printf("\t\"totalFrames\" : %u,\n", frames);
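The reported figure is just the frame count divided by wall-clock decode time. The same arithmetic in a small self-contained helper, with a generic timing source rather than the vpx test harness:

    #include <chrono>
    #include <cstdio>
    #include <functional>

    // Times |decode_all_frames| and prints throughput the way the test output
    // above suggests: total frames and frames per second.
    void ReportDecodePerf(unsigned frames,
                          const std::function<void()>& decode_all_frames) {
      const auto start = std::chrono::steady_clock::now();
      decode_all_frames();
      const auto stop = std::chrono::steady_clock::now();
      const double elapsed_secs =
          std::chrono::duration<double>(stop - start).count();
      const double fps = elapsed_secs > 0 ? double(frames) / elapsed_secs : 0.0;
      printf("\t\"totalFrames\" : %u,\n", frames);
      printf("\t\"framesPerSecond\" : %f\n", fps);
    }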
|
/external/chromium_org/ui/views/controls/ |
throbber.h |
  24  // |frame_time_ms| is the amount of time that should elapse between frames
  29  Throbber(int frame_time_ms, bool paint_while_stopped, gfx::ImageSkia* frames);
  36  // Set custom throbber frames. Otherwise IDR_THROBBER is loaded.
  37  void SetFrames(const gfx::ImageSkia* frames);
  51  int frame_count_;  // How many frames we have.
  53  const gfx::ImageSkia* frames_;  // Frames images.
  67  SmoothedThrobber(int frame_delay_ms, gfx::ImageSkia* frames);
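Given a strip of |frame_count_| images and |frame_time_ms| between frames, the frame to paint falls straight out of the elapsed time. A hedged sketch of that calculation; the parameter names echo the header above, but the function itself is illustrative rather than the actual views::Throbber paint logic:

    #include <cstdint>

    // Which animation frame to paint, given milliseconds since the throbber
    // started, the per-frame duration, and the number of frames in the strip.
    int CurrentThrobberFrame(int64_t elapsed_ms, int frame_time_ms, int frame_count) {
      if (frame_time_ms <= 0 || frame_count <= 0)
        return 0;
      return static_cast<int>((elapsed_ms / frame_time_ms) % frame_count);
    }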
|
/external/libvpx/libvpx/test/ |
decode_perf_test.cc |
  89  const unsigned frames = video.frame_number();  [local]
  90  const double fps = double(frames) / elapsed_secs;
  97  printf("\t\"totalFrames\" : %u,\n", frames);
|
/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/test/ |
decode_perf_test.cc |
  89  const unsigned frames = video.frame_number();  [local]
  90  const double fps = double(frames) / elapsed_secs;
  97  printf("\t\"totalFrames\" : %u,\n", frames);
|
/external/chromium_org/third_party/WebKit/Source/devtools/front_end/timeline/ |
TimelineFrameOverview.js |
  40   this.element.id = "timeline-overview-frames";
  69   this._overviewGrid.element.classList.add("timeline-overview-frames-mode");
  74   this._overviewGrid.element.classList.remove("timeline-overview-frames-mode");
  90   var frames = this._frameModel.frames();
  91   var framesPerBar = Math.max(1, frames.length * minBarWidth / this._canvas.width);
  92   var visibleFrames = this._aggregateFrames(frames, framesPerBar);
  104  * @param {!Array.<!WebInspector.TimelineFrame>} frames
  108  _aggregateFrames: function(frames, framesPerBar)
  111  for (var barNumber = 0, currentFrame = 0; currentFrame < frames.length; ++barNumber)
  [all...] |
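File line 91 is the key step: with bars at least minBarWidth pixels wide, only canvasWidth / minBarWidth bars fit, so each bar must absorb about frames.length * minBarWidth / canvasWidth frames (never fewer than one). The same bucketing arithmetic, sketched in C++ purely for illustration rather than the DevTools JavaScript:

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    // Group |frame_durations| into buckets so that, when each bucket becomes a
    // bar, every bar is at least |min_bar_width| pixels wide on the canvas.
    std::vector<std::vector<double>> AggregateFrames(
        const std::vector<double>& frame_durations,
        double canvas_width,
        double min_bar_width) {
      const double frames_per_bar = std::max(
          1.0, frame_durations.size() * min_bar_width / canvas_width);
      std::vector<std::vector<double>> bars;
      for (std::size_t current_frame = 0; current_frame < frame_durations.size();) {
        std::size_t end = std::min(
            frame_durations.size(),
            static_cast<std::size_t>((bars.size() + 1) * frames_per_bar));
        if (end <= current_frame)
          end = current_frame + 1;  // always make progress
        bars.emplace_back(frame_durations.begin() + current_frame,
                          frame_durations.begin() + end);
        current_frame = end;
      }
      return bars;
    }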
/external/chromium_org/net/tools/quic/ |
quic_server_session_test.cc |
  126  vector<QuicStreamFrame> frames;  [local]
  127  frames.push_back(data1);
  128  session_->OnStreamFrames(frames);
  139  visitor_->OnStreamFrames(frames);
  156  vector<QuicStreamFrame> frames;  [local]
  157  frames.push_back(data1);
  158  visitor_->OnStreamFrames(frames);
  166  vector<QuicStreamFrame> frames;  [local]
  168  frames.push_back(QuicStreamFrame(kClientDataStreamId1, false, 0,
  170  frames.push_back(QuicStreamFrame(kClientDataStreamId2, false, 0
  [all...] |
/frameworks/av/media/libstagefright/webm/ |
WebmFrameThread.cpp |
  94   // frames:
  95   // sequence of input audio/video frames received from the source.
  99   // frame since frames are ordered by timestamp.
  107  List<const sp<WebmFrame> >& frames,
  110  CHECK(!frames.empty() && children.empty());
  112  const sp<WebmFrame> f = *(frames.begin());
  129  // Write out (possibly multiple) webm cluster(s) from frames split on video key frames.
  133  void WebmFrameSinkThread::flushFrames(List<const sp<WebmFrame> >& frames, bool last) {
  134  if (frames.empty())
  [all...] |
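The comment at file line 129 states the clustering rule: buffered frames are written out as WebM clusters, with a new cluster started at every video key frame. A simplified sketch of that split using a toy frame type instead of the sp<WebmFrame> list above:

    #include <list>
    #include <vector>

    // Toy stand-in for a muxed frame; the real type carries timestamps and data.
    struct Frame {
      bool is_video;
      bool is_keyframe;
    };

    // Split |frames| (ordered by timestamp) into clusters, starting a new
    // cluster whenever a video key frame is encountered.
    std::vector<std::vector<Frame>> SplitIntoClusters(const std::list<Frame>& frames) {
      std::vector<std::vector<Frame>> clusters;
      for (const Frame& f : frames) {
        const bool starts_cluster = f.is_video && f.is_keyframe;
        if (clusters.empty() || starts_cluster)
          clusters.emplace_back();
        clusters.back().push_back(f);
      }
      return clusters;
    }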
/external/chromium_org/base/debug/ |
stack_trace_android.cc |
  23  StackCrawlState(uintptr_t* frames, size_t max_depth)
  24      : frames(frames),
  29  uintptr_t* frames;  [member in struct:__anon6854::StackCrawlState]
  45  state->frames[state->frame_count++] = ip;
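File lines 23-45 follow the usual _Unwind_Backtrace pattern on Android: a small state struct carries the output array plus a frame counter, and the trace callback records each instruction pointer until max_depth is hit. A hedged reconstruction of that shape; the real file's callback name and any extra bookkeeping are not visible in this listing:

    #include <unwind.h>
    #include <cstddef>
    #include <cstdint>

    namespace {

    struct StackCrawlState {
      StackCrawlState(uintptr_t* frames, size_t max_depth)
          : frames(frames), frame_count(0), max_depth(max_depth) {}

      uintptr_t* frames;
      size_t frame_count;
      size_t max_depth;
    };

    // Invoked once per stack frame by _Unwind_Backtrace.
    _Unwind_Reason_Code TraceStackFrame(_Unwind_Context* context, void* arg) {
      StackCrawlState* state = static_cast<StackCrawlState*>(arg);
      uintptr_t ip = _Unwind_GetIP(context);
      if (ip != 0 && state->frame_count < state->max_depth)
        state->frames[state->frame_count++] = ip;
      return state->frame_count == state->max_depth ? _URC_END_OF_STACK
                                                    : _URC_NO_REASON;
    }

    }  // namespace

    // Fills |frames| with up to |max_depth| return addresses of the calling stack.
    size_t CaptureStackTrace(uintptr_t* frames, size_t max_depth) {
      StackCrawlState state(frames, max_depth);
      _Unwind_Backtrace(&TraceStackFrame, &state);
      return state.frame_count;
    }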
|
/external/compiler-rt/lib/asan/scripts/ |
symbolize.py |
  82   frames = []
  93   frames.append((function_name, file_name))
  96   if not frames:
  97     frames.append(('', ''))
  104  for frame in frames:
|
/system/core/libbacktrace/ |
UnwindPtrace.cpp |
  77   std::vector<backtrace_frame_data_t>* frames = GetFrames();  [local]
  78   frames->reserve(MAX_BACKTRACE_FRAMES);
  95   frames->resize(num_frames+1);
  96   backtrace_frame_data_t* frame = &frames->at(num_frames);
  103  backtrace_frame_data_t* prev = &frames->at(num_frames-1);
|