/external/chromium_org/media/audio/cras/ |
cras_unified.h |
  55 unsigned int frames,
  67 uint32 DispatchCallback(size_t frames,
  74 uint32 ReadWriteAudio(size_t frames,
  81 uint32 WriteAudio(size_t frames, uint8* buffer, const timespec* sample_ts);
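The CRAS callbacks above all deal in frames rather than bytes: one frame is one sample per channel. A minimal sketch (a hypothetical helper, not part of cras_unified.h) of the frames-to-bytes conversion a WriteAudio-style callback has to perform when filling |buffer|:

```cpp
// One audio "frame" = one sample per channel. Hypothetical helper (not part
// of cras_unified.h) converting a frame budget into the byte count to fill.
#include <cstddef>

size_t FramesToBytes(size_t frames, int channels, size_t bytes_per_sample) {
  return frames * static_cast<size_t>(channels) * bytes_per_sample;
}

// Example: 480 frames of stereo S16 audio occupy 480 * 2 * 2 = 1920 bytes.
```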
|
/external/chromium_org/media/base/ |
audio_hash.h | 38 void Update(const AudioBus* audio_bus, int frames);
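AudioHash::Update() folds |frames| frames of an AudioBus into a running hash. A rough stand-in, assuming a planar float buffer in place of media::AudioBus and a much simpler mixing step than the real class uses:

```cpp
// Hypothetical stand-in for AudioHash::Update(): fold |frames| samples from
// each channel of a planar float buffer into a running hash value.
#include <cmath>
#include <vector>

class SimpleAudioHash {
 public:
  void Update(const std::vector<std::vector<float>>& channels, int frames) {
    for (size_t ch = 0; ch < channels.size(); ++ch) {
      for (int i = 0; i < frames; ++i) {
        // Weight by position so reordered samples produce a different hash.
        hash_ += channels[ch][i] * std::sin(static_cast<float>((i + 1) * (ch + 1)));
      }
    }
  }
  float value() const { return hash_; }

 private:
  float hash_ = 0.0f;
};
```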
|
/external/chromium_org/net/websockets/ |
websocket_deflate_predictor.h |
  35 // which begins with |frames[frame_index]| or not.
  36 // |frames[(frame_index + 1):]| consists of future frames if any.
  37 // |frames[frame_index]| must be the first frame of a data message,
  38 // but future frames may contain control message frames.
  39 // |frames[frame_index]| cannot be recorded yet and all preceding
  40 // data frames have to be already recorded when this method is called.
  41 virtual Result Predict(const ScopedVector<WebSocketFrame>& frames,
  45 // Only data frames should be recorded. Do not pass control frames' data [all...] |
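The comments above spell out the Predict/Record contract: Predict() is called with the first frame of a data message (later entries may include control frames), and only data frames are recorded afterwards. A stripped-down sketch of a predictor honoring that split, using stand-in types rather than net::WebSocketFrame and ScopedVector:

```cpp
// Simplified illustration of the predictor contract; types are stand-ins,
// not the net/websockets classes.
#include <cstddef>
#include <vector>

struct FakeFrame {
  bool is_data_frame;
  size_t payload_size;
};

enum class Result { kDeflate, kDoNotDeflate };

class NaivePredictor {
 public:
  // |frames[frame_index]| must be the first frame of a data message; later
  // entries are future frames and may include control frames.
  Result Predict(const std::vector<FakeFrame>& frames, size_t frame_index) {
    // Heuristic: only bother compressing payloads above a threshold.
    return frames[frame_index].payload_size >= 128 ? Result::kDeflate
                                                   : Result::kDoNotDeflate;
  }

  // Only data frames should be recorded, per the comment above.
  void RecordWrittenDataFrame(const FakeFrame& frame) {
    recorded_bytes_ += frame.payload_size;
  }

 private:
  size_t recorded_bytes_ = 0;
};
```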
websocket_deflate_predictor_impl.cc | 12 const ScopedVector<WebSocketFrame>& frames,
|
websocket_deflate_predictor_impl.h | 23 virtual Result Predict(const ScopedVector<WebSocketFrame>& frames,
|
websocket_basic_stream.h |
  46 virtual int ReadFrames(ScopedVector<WebSocketFrame>* frames,
  49 virtual int WriteFrames(ScopedVector<WebSocketFrame>* frames,
  79 // Attempts to parse the output of a read as WebSocket frames. On success,
  80 // returns OK and places the frame(s) in |frames|.
  81 int HandleReadResult(int result, ScopedVector<WebSocketFrame>* frames);
  83 // Converts the chunks in |frame_chunks| into frames and writes them to
  84 // |frames|. |frame_chunks| is destroyed in the process. Returns
  85 // ERR_WS_PROTOCOL_ERROR if an invalid chunk was found. If one or more frames
  86 // was added to |frames|, then returns OK, otherwise returns ERR_IO_PENDING.
  88 ScopedVector<WebSocketFrame>* frames); [all...] |
websocket_basic_stream.cc |
  45 // Returns the total serialized size of |frames|. This function assumes that
  46 // |frames| will be serialized with mask field. This function forces the
  47 // masked bit of the frames on.
  49 ScopedVector<WebSocketFrame>* frames) {
  53 for (WebSocketFrameIterator it = frames->begin(); it != frames->end(); ++it) {
  58 // to cache anywhere near 2GB of frames.
  89 int WebSocketBasicStream::ReadFrames(ScopedVector<WebSocketFrame>* frames,
  91 DCHECK(frames->empty());
  93 // it as WebSocket frames [all...] |
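The helper quoted above sums the serialized size of a batch of frames, assuming every frame will carry a mask field. A rough standalone version based on RFC 6455 framing rules (not Chromium's WebSocketFrameHeader helpers), where the header grows with the payload length and masked client frames add a 4-byte key:

```cpp
// Approximate serialized-size calculation per RFC 6455 framing rules.
#include <cstddef>
#include <cstdint>
#include <vector>

size_t SerializedHeaderSize(uint64_t payload_length, bool masked) {
  size_t size = 2;                       // FIN/opcode byte + length byte.
  if (payload_length > 0xFFFF)
    size += 8;                           // 64-bit extended payload length.
  else if (payload_length > 125)
    size += 2;                           // 16-bit extended payload length.
  if (masked)
    size += 4;                           // Client-to-server masking key.
  return size;
}

uint64_t TotalSerializedSize(const std::vector<uint64_t>& payload_lengths) {
  uint64_t total = 0;
  for (uint64_t len : payload_lengths)
    total += SerializedHeaderSize(len, /*masked=*/true) + len;
  return total;
}
```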
/external/chromium_org/third_party/webrtc/common_audio/resampler/ |
sinusoidal_linear_chirp_source.h | 34 virtual void Run(int frames, float* destination) OVERRIDE;
|
sinusoidal_linear_chirp_source.cc | 32 void SinusoidalLinearChirpSource::Run(int frames, float* destination) { 33 for (int i = 0; i < frames; ++i, ++current_index_) {
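Run(frames, destination) fills the destination with the next |frames| samples of a linear chirp while advancing current_index_. A self-contained sketch of that shape (the class and its parameters are illustrative, not the webrtc test source):

```cpp
// Minimal linear-chirp generator with a Run(frames, destination) interface.
#include <cmath>
#include <cstdint>

class LinearChirp {
 public:
  LinearChirp(double sample_rate, double f0, double f1, int64_t total_samples)
      : sample_rate_(sample_rate),
        f0_(f0),
        sweep_rate_((f1 - f0) / (total_samples / sample_rate)) {}

  // Same shape as the Run() above: write |frames| samples, advancing the
  // running sample index as we go.
  void Run(int frames, float* destination) {
    const double kPi = 3.14159265358979323846;
    for (int i = 0; i < frames; ++i, ++current_index_) {
      const double t = current_index_ / sample_rate_;
      // Instantaneous phase of a linear chirp: 2*pi*(f0*t + 0.5*rate*t^2).
      const double phase = 2.0 * kPi * (f0_ * t + 0.5 * sweep_rate_ * t * t);
      destination[i] = static_cast<float>(std::sin(phase));
    }
  }

 private:
  double sample_rate_;
  double f0_;
  double sweep_rate_;          // Frequency sweep in Hz per second.
  int64_t current_index_ = 0;
};
```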
|
/external/chromium_org/third_party/webrtc/tools/frame_analyzer/ |
video_quality_analysis_unittest.cc |
  48 result.frames.push_back(AnalysisResult(0, 35.0, 0.9));
  54 result.frames.push_back(AnalysisResult(0, 35.0, 0.9));
  55 result.frames.push_back(AnalysisResult(1, 34.0, 0.8));
  56 result.frames.push_back(AnalysisResult(2, 33.0, 0.7));
|
/development/perftests/panorama/feature_mos/src/mosaic/ |
Blend.h |
  37 // of the input image frames for them to be accepted for blending in the
  76 int runBlend(MosaicFrame **frames, MosaicFrame **rframes, int frames_size, ImageType &imageMosaicYVU,
  94 // Height and width of individual frames
  105 void AlignToMiddleFrame(MosaicFrame **frames, int frames_size);
  107 int DoMergeAndBlend(MosaicFrame **frames, int nsite, int width, int height, YUVinfo &imgMos, MosaicRect &rect, MosaicRect &cropping_rect, float &progress, bool &cancelComputation);
  114 void ComputeBlendParameters(MosaicFrame **frames, int frames_size, int is360);
  115 void SelectRelevantFrames(MosaicFrame **frames, int frames_size,
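AlignToMiddleFrame() and SelectRelevantFrames() suggest the mosaic pipeline registers everything against a middle reference frame and blends only a subset of the inputs. Purely illustrative helpers (not the Blend implementation) showing that idea:

```cpp
// Illustrative only: pick the middle frame as the alignment reference and
// keep every |stride|-th frame as "relevant" for blending.
#include <vector>

int MiddleFrameIndex(int frames_size) {
  return frames_size / 2;
}

std::vector<int> SelectRelevantIndices(int frames_size, int stride) {
  std::vector<int> selected;
  for (int i = 0; i < frames_size; i += stride)
    selected.push_back(i);
  return selected;
}
```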
|
/external/chromium_org/tools/memory_inspector/memory_inspector/core/ |
stacktrace.py |
  14 self.frames = []
  18 self.frames += [frame]
  22 return len(self.frames)
  25 return self.frames[index]
  28 return ', '.join([str(x) for x in self.frames])
|
/external/lldb/source/Utility/ |
SharingPtr.cpp |
  53 void *frames[1024]; local
  54 const int count = ::backtrace (frames, sizeof(frames)/sizeof(void*));
  56 m_frames.assign (frames + 2, frames + (count - 2));
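The LLDB snippet captures the current call stack with ::backtrace() and then drops the leading entries, which belong to the capture code itself rather than the caller being diagnosed. A standalone sketch of the same pattern using the POSIX/glibc <execinfo.h> API (availability varies by platform):

```cpp
// Capture and print the current call stack, skipping the capture helper's own
// frames, in the spirit of the SharingPtr.cpp code above.
#include <execinfo.h>

#include <cstdio>
#include <cstdlib>
#include <vector>

std::vector<void*> CaptureFrames() {
  void* frames[1024];
  const int count = ::backtrace(frames, sizeof(frames) / sizeof(void*));
  if (count <= 2)
    return {};
  // Skip the first two entries (this helper and its immediate caller),
  // mirroring the "frames + 2" start iterator above.
  return std::vector<void*>(frames + 2, frames + count);
}

void PrintFrames(const std::vector<void*>& captured) {
  char** symbols = ::backtrace_symbols(captured.data(),
                                       static_cast<int>(captured.size()));
  if (symbols == nullptr)
    return;
  for (size_t i = 0; i < captured.size(); ++i)
    std::printf("%s\n", symbols[i]);
  std::free(symbols);  // backtrace_symbols() allocates the array with malloc.
}
```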
|
/packages/apps/Camera/jni/feature_mos/src/mosaic/ |
Blend.h |
  37 // of the input image frames for them to be accepted for blending in the
  76 int runBlend(MosaicFrame **frames, MosaicFrame **rframes, int frames_size, ImageType &imageMosaicYVU,
  94 // Height and width of individual frames
  105 void AlignToMiddleFrame(MosaicFrame **frames, int frames_size);
  107 int DoMergeAndBlend(MosaicFrame **frames, int nsite, int width, int height, YUVinfo &imgMos, MosaicRect &rect, MosaicRect &cropping_rect, float &progress, bool &cancelComputation);
  114 void ComputeBlendParameters(MosaicFrame **frames, int frames_size, int is360);
  115 void SelectRelevantFrames(MosaicFrame **frames, int frames_size,
|
/packages/apps/LegacyCamera/jni/feature_mos/src/mosaic/ |
Blend.h |
  37 // of the input image frames for them to be accepted for blending in the
  76 int runBlend(MosaicFrame **frames, MosaicFrame **rframes, int frames_size, ImageType &imageMosaicYVU,
  94 // Height and width of individual frames
  105 void AlignToMiddleFrame(MosaicFrame **frames, int frames_size);
  107 int DoMergeAndBlend(MosaicFrame **frames, int nsite, int width, int height, YUVinfo &imgMos, MosaicRect &rect, MosaicRect &cropping_rect, float &progress, bool &cancelComputation);
  114 void ComputeBlendParameters(MosaicFrame **frames, int frames_size, int is360);
  115 void SelectRelevantFrames(MosaicFrame **frames, int frames_size,
|
/external/chromium_org/net/tools/quic/ |
quic_server_session_test.cc |
  86 vector<QuicStreamFrame> frames; local
  87 frames.push_back(data1);
  88 session_->OnStreamFrames(frames);
  99 visitor_->OnStreamFrames(frames);
  116 vector<QuicStreamFrame> frames; local
  117 frames.push_back(data1);
  118 visitor_->OnStreamFrames(frames);
  126 vector<QuicStreamFrame> frames; local
  128 frames.push_back(QuicStreamFrame(kClientDataStreamId1, false, 0,
  130 frames.push_back(QuicStreamFrame(kClientDataStreamId2, false, 0 [all...] |
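The QUIC test batches several QuicStreamFrame objects into a vector and delivers them to the session or visitor with a single OnStreamFrames() call. A minimal illustration of that batching pattern with stand-in types (not the real QuicStreamFrame or visitor API):

```cpp
// Stand-in types illustrating the "collect frames, deliver in one call" flow.
#include <cstdint>
#include <string>
#include <vector>

struct FakeStreamFrame {
  uint32_t stream_id;
  bool fin;
  uint64_t offset;
  std::string data;
};

class FakeVisitor {
 public:
  void OnStreamFrames(const std::vector<FakeStreamFrame>& frames) {
    for (const FakeStreamFrame& frame : frames)
      bytes_received_ += frame.data.size();
  }
  uint64_t bytes_received() const { return bytes_received_; }

 private:
  uint64_t bytes_received_ = 0;
};

// Usage mirroring the test:
//   std::vector<FakeStreamFrame> frames;
//   frames.push_back({5, false, 0, "hello"});
//   frames.push_back({7, false, 0, "world"});
//   visitor.OnStreamFrames(frames);
```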
/external/chromium_org/v8/test/mjsunit/ |
debug-backtrace.js |
  109 var frames = backtrace.frames;
  110 assertEquals(4, frames.length);
  111 for (var i = 0; i < frames.length; i++) {
  112 assertEquals('frame', frames[i].type);
  114 assertEquals(0, frames[0].index);
  115 assertEquals("f", response.lookup(frames[0].func.ref).name);
  116 assertEquals(1, frames[1].index);
  117 assertEquals("", response.lookup(frames[1].func.ref).name);
  118 assertEquals("m", response.lookup(frames[1].func.ref).inferredName) [all...]
/external/webrtc/test/testsupport/metrics/ |
video_metrics.cc |
  50 result->frames.push_back(frame_result);
  55 if (result == NULL || result->frames.size() == 0) {
  61 for (iter = result->frames.begin(); iter != result->frames.end(); ++iter) {
  64 result->average = metrics_values_sum / result->frames.size();
  67 iter = min_element(result->frames.begin(), result->frames.end(),
  71 iter = max_element(result->frames.begin(), result->frames.end(),
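video_metrics.cc accumulates per-frame results, derives the average from the running sum, and uses min_element/max_element to find the worst and best frames. A compact sketch of that aggregation, with FrameResult standing in for the struct the real file uses:

```cpp
// Aggregate per-frame metric values into average, min, and max.
#include <algorithm>
#include <vector>

struct FrameResult {
  int frame_number;
  double value;  // e.g. PSNR or SSIM for this frame.
};

struct Aggregate {
  double average;
  FrameResult min;
  FrameResult max;
};

bool AggregateFrames(const std::vector<FrameResult>& frames, Aggregate* out) {
  if (frames.empty())
    return false;  // Mirrors the early-out when result->frames is empty.

  double sum = 0.0;
  for (const FrameResult& f : frames)
    sum += f.value;
  out->average = sum / frames.size();

  auto less_by_value = [](const FrameResult& a, const FrameResult& b) {
    return a.value < b.value;
  };
  out->min = *std::min_element(frames.begin(), frames.end(), less_by_value);
  out->max = *std::max_element(frames.begin(), frames.end(), less_by_value);
  return true;
}
```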
|
/external/chromium_org/third_party/libvpx/source/libvpx/examples/ |
resize_util.c |
  24 printf("<output_yuv> [<frames>]\n");
  47 int f, frames; local
  82 frames = atoi(argv[5]);
  84 frames = INT_MAX;
  88 printf("Target size: %dx%d, Frames: ",
  90 if (frames == INT_MAX)
  93 printf("%d\n", frames);
  102 while (f < frames) {
  113 printf("%d frames processed\n", f);
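resize_util.c treats <frames> as an optional argument: when present it caps the processing loop, otherwise frames defaults to INT_MAX and the loop runs until the input is exhausted. The same pattern in isolation; read_frame() here is a stand-in for pulling the next frame from the input file:

```cpp
#include <climits>
#include <cstdio>
#include <cstdlib>

// Stand-in for "another frame could be read from the input file".
static int read_frame(void) {
  static int remaining = 25;
  return remaining-- > 0;
}

int main(int argc, char** argv) {
  int frames = INT_MAX;        // Default: process the whole input.
  if (argc > 1)
    frames = atoi(argv[1]);    // resize_util reads this from argv[5].

  int f = 0;
  while (f < frames && read_frame())
    ++f;

  printf("%d frames processed\n", f);
  return 0;
}
```

Run with a count (e.g. `./a.out 10`) it stops after that many frames; with no argument it drains the input.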
|
/external/libvpx/libvpx/examples/ |
resize_util.c |
  24 printf("<output_yuv> [<frames>]\n");
  47 int f, frames; local
  82 frames = atoi(argv[5]);
  84 frames = INT_MAX;
  88 printf("Target size: %dx%d, Frames: ",
  90 if (frames == INT_MAX)
  93 printf("%d\n", frames);
  102 while (f < frames) {
  113 printf("%d frames processed\n", f);
|
/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/examples/ |
resize_util.c |
  24 printf("<output_yuv> [<frames>]\n");
  47 int f, frames; local
  82 frames = atoi(argv[5]);
  84 frames = INT_MAX;
  88 printf("Target size: %dx%d, Frames: ",
  90 if (frames == INT_MAX)
  93 printf("%d\n", frames);
  102 while (f < frames) {
  113 printf("%d frames processed\n", f);
|
/external/chromium_org/native_client_sdk/src/gonacl_appengine/static/bullet/ |
NaClAM.js |
  7 this.frames = new Array();
  12 this.frames = new Array();
  58 if (typeof(header['frames']) != "number") {
  59 console.log('Header message frames is not a number.');
  62 this.framesLeft = header['frames'];
  67 this.message.frames.push(event.data);
  85 if (header['frames'] == undefined) {
  86 console.log('NaClAM: Message header does not contain frames.');
  89 if (typeof(header['frames']) != "number") {
  90 console.log('NaClAm: Message frames is not a number.') [all...]
/external/chromium_org/content/renderer/media/ |
webrtc_local_audio_source_provider.cc |
  102 DCHECK(input_bus_->frames() == number_of_frames);
  106 if (fifo_->frames() + number_of_frames <= fifo_->max_frames()) {
  111 DVLOG(3) << "Local source provicer FIFO is full" << fifo_->frames();
  142 if (fifo_->frames() >= audio_bus->frames()) {
  143 fifo_->Consume(audio_bus, 0, audio_bus->frames());
  146 DVLOG(1) << "WARNING: Underrun, FIFO has data " << fifo_->frames()
  147 << " samples but " << audio_bus->frames()
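The provider only pushes a new block into the FIFO when it fits below max_frames(), and only consumes when at least audio_bus->frames() frames are buffered, logging an overrun or underrun otherwise. A simplified mono frame FIFO (a stand-in for media::AudioFifo/AudioBus) showing those two checks:

```cpp
// Simplified frame FIFO with the overrun/underrun checks used above.
#include <cstddef>
#include <deque>

class FrameFifo {
 public:
  explicit FrameFifo(size_t max_frames) : max_frames_(max_frames) {}

  bool Push(const float* data, size_t number_of_frames) {
    if (frames() + number_of_frames > max_frames_)
      return false;  // Overrun: caller drops the block (and would log it).
    buffer_.insert(buffer_.end(), data, data + number_of_frames);
    return true;
  }

  bool Consume(float* destination, size_t number_of_frames) {
    if (frames() < number_of_frames)
      return false;  // Underrun: not enough buffered frames to satisfy the read.
    for (size_t i = 0; i < number_of_frames; ++i) {
      destination[i] = buffer_.front();
      buffer_.pop_front();
    }
    return true;
  }

  size_t frames() const { return buffer_.size(); }

 private:
  size_t max_frames_;
  std::deque<float> buffer_;
};
```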
|
/external/chromium_org/media/cast/test/utility/ |
generate_timecode_audio.cc |
  18 fprintf(stderr, "Usage: %s <fps> <frames> >output.s16le\n", argv[0]);
  22 const uint32 frames = static_cast<uint32>(std::max(0, atoi(argv[2]))); local
  25 for (uint32 frame_id = 1; frame_id <= frames; frame_id++) {
|
/frameworks/av/media/libstagefright/rtsp/ |
ARTPAssembler.h | 53 const List<sp<ABuffer> > &frames); 56 const List<sp<ABuffer> > &frames);
|