/external/chromium_org/net/websockets/

websocket_deflate_predictor_impl.cc
    12  const ScopedVector<WebSocketFrame>& frames,

websocket_basic_stream.h
    46  virtual int ReadFrames(ScopedVector<WebSocketFrame>* frames,
    49  virtual int WriteFrames(ScopedVector<WebSocketFrame>* frames,
    79  // Attempts to parse the output of a read as WebSocket frames. On success,
    80  // returns OK and places the frame(s) in |frames|.
    81  int HandleReadResult(int result, ScopedVector<WebSocketFrame>* frames);
    83  // Converts the chunks in |frame_chunks| into frames and writes them to
    84  // |frames|. |frame_chunks| is destroyed in the process. Returns
    85  // ERR_WS_PROTOCOL_ERROR if an invalid chunk was found. If one or more frames
    86  // was added to |frames|, then returns OK, otherwise returns ERR_IO_PENDING.
    88  ScopedVector<WebSocketFrame>* frames);
    [all...]

websocket_frame_parser_test.cc
    56   ScopedVector<WebSocketFrameChunk> frames;  local
    57   EXPECT_TRUE(parser.Decode(kHelloFrame, kHelloFrameLength, &frames));
    59   ASSERT_EQ(1u, frames.size());
    60   WebSocketFrameChunk* frame = frames[0];
    82   ScopedVector<WebSocketFrameChunk> frames;  local
    84   parser.Decode(kMaskedHelloFrame, kMaskedHelloFrameLength, &frames));
    86   ASSERT_EQ(1u, frames.size());
    87   WebSocketFrameChunk* frame = frames[0];
    132  // Concatenate all frames.
    141  ScopedVector<WebSocketFrameChunk> frames;  local
    321  ScopedVector<WebSocketFrameChunk> frames;  local
    373  ScopedVector<WebSocketFrameChunk> frames;  local
    443  ScopedVector<WebSocketFrameChunk> frames;  local
    492  ScopedVector<WebSocketFrameChunk> frames;  local
    548  ScopedVector<WebSocketFrameChunk> frames;  local
    [all...]

/external/qemu/distrib/sdl-1.2.15/docs/man3/

SDL_CDPlay.3
    11  Plays the given \fBcdrom\fR, starting a frame \fBstart\fR for \fBlength\fR frames\&.

SDL_CD.3
    40  A frame is the base data unit of a CD\&. \fBCD_FPS\fP frames is equal to 1 second of music\&. SDL provides two macros for converting between time and frames: \fBFRAMES_TO_MSF(f, M,S,F)\fP and \fBMSF_TO_FRAMES\fP\&.
    48  printf("Current Position: %d minutes, %d seconds, %d frames
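
The SDL_CD.3 entry above states that CD_FPS frames make up one second of audio and names the FRAMES_TO_MSF and MSF_TO_FRAMES conversion macros. A minimal sketch of the underlying arithmetic, assuming the conventional CD rate of 75 frames per second; the helper names below are illustrative, not SDL's macros:

    #include <cstdio>

    namespace {
    const int kCdFramesPerSecond = 75;  // assumed value of CD_FPS: 75 frames == 1 second

    // Split an absolute frame count into minutes/seconds/frames, the direction
    // FRAMES_TO_MSF(f, M, S, F) is documented to handle.
    void FramesToMsf(int frames, int* m, int* s, int* f) {
      *f = frames % kCdFramesPerSecond;
      frames /= kCdFramesPerSecond;
      *s = frames % 60;
      *m = frames / 60;
    }

    // The inverse direction, matching MSF_TO_FRAMES.
    int MsfToFrames(int m, int s, int f) {
      return (m * 60 + s) * kCdFramesPerSecond + f;
    }
    }  // namespace

    int main() {
      int m, s, f;
      FramesToMsf(123456, &m, &s, &f);
      std::printf("Current Position: %d minutes, %d seconds, %d frames\n", m, s, f);
      std::printf("Round trip: %d frames\n", MsfToFrames(m, s, f));  // 123456 again
      return 0;
    }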

/dalvik/hit/src/com/android/hit/

StackTrace.java
    25  * For subsets of the stack frame we'll reference the parent list of frames
    37  public StackTrace(int serial, int thread, StackFrame[] frames) {
    40  mFrames = frames;

/development/samples/HelloEffects/

_index.html
    19  effects to image frames represented as OpenGL ES 2.0 textures. Image frames can
    20  be images loaded from disk, frames from the device's camera, or other video

/external/chromium_org/media/base/

audio_buffer.h
    34  // number of frames in each buffer. |data| must not be null and |frame_count|
    45  // Create an AudioBuffer with |frame_count| frames. Buffer is allocated, but
    51  // Create an empty AudioBuffer with |frame_count| frames.
    63  // Copy frames into |dest|. |frames_to_copy| is the number of frames to copy.
    64  // |source_frame_offset| specifies how many frames in the buffer to skip
    65  // first. |dest_frame_offset| is the frame offset in |dest|. The frames are
    73  // Trim an AudioBuffer by removing |frames_to_trim| frames from the start.
    74  // Timestamp and duration are adjusted to reflect the fewer frames.
    79  // Trim an AudioBuffer by removing |frames_to_trim| frames from the end
    [all...]
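
The audio_buffer.h comments describe a copy driven by three frame counts: how many frames to copy, how many to skip in the source, and where to start writing in the destination. A small sketch of that offset arithmetic on a planar float buffer, using illustrative types rather than Chromium's AudioBuffer/AudioBus classes:

    #include <cassert>
    #include <vector>

    // Illustrative planar audio: one vector of samples per channel.
    using PlanarAudio = std::vector<std::vector<float>>;

    // Copy |frames_to_copy| frames from |source| into |dest|, skipping
    // |source_frame_offset| frames in the source and writing starting at
    // |dest_frame_offset| in the destination -- the semantics the comments
    // above describe.
    void CopyFrames(const PlanarAudio& source, int source_frame_offset,
                    int frames_to_copy, int dest_frame_offset, PlanarAudio* dest) {
      assert(source.size() == dest->size());  // same channel count
      for (size_t ch = 0; ch < source.size(); ++ch) {
        assert(source_frame_offset + frames_to_copy <=
               static_cast<int>(source[ch].size()));
        assert(dest_frame_offset + frames_to_copy <=
               static_cast<int>((*dest)[ch].size()));
        for (int i = 0; i < frames_to_copy; ++i)
          (*dest)[ch][dest_frame_offset + i] = source[ch][source_frame_offset + i];
      }
    }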

audio_timestamp_helper_unittest.cc
    19   // Adds frames to the helper and returns the current timestamp in
    21   int64 AddFrames(int frames) {
    22   helper_.AddFrames(frames);
    58   // Verify that adding frames one frame at a time matches the timestamp
    59   // returned if the same number of frames are added all at once.
    99   // Verify that the same number of frames is returned up
    109  // Add frames to the helper so negative frame counts can be tested.
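
The test above checks that accumulating frames one at a time produces the same timestamp as adding them all at once. The underlying relationship is simply elapsed time = accumulated frames / sample rate; a sketch with an illustrative accumulator, not Chromium's AudioTimestampHelper:

    #include <cstdint>
    #include <cstdio>

    // Converts an accumulated frame count into a timestamp in microseconds.
    class FrameTimestampAccumulator {
     public:
      explicit FrameTimestampAccumulator(int samples_per_second)
          : samples_per_second_(samples_per_second), frame_count_(0) {}

      void AddFrames(int64_t frames) { frame_count_ += frames; }

      // frames / sample_rate seconds, expressed in microseconds. Because the
      // division happens on the running total, adding frames one at a time or
      // all at once yields the same result -- the property the test verifies.
      int64_t GetTimestampInMicroseconds() const {
        return frame_count_ * 1000000 / samples_per_second_;
      }

     private:
      int samples_per_second_;
      int64_t frame_count_;
    };

    int main() {
      FrameTimestampAccumulator helper(44100);
      helper.AddFrames(44100);  // one second of audio
      std::printf("%lld us\n",
                  static_cast<long long>(helper.GetTimestampInMicroseconds()));
      return 0;
    }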

audio_bus_unittest.cc
    33   EXPECT_EQ(kFrameCount, bus->frames());
    46   ASSERT_EQ(expected->frames(), result->frames());
    48   for (int i = 0; i < result->frames(); ++i) {
    72   std::fill(bus->channel(i), bus->channel(i) + bus->frames(), i);
    76   VerifyValue(bus->channel(i), bus->frames(), i);
    80   VerifyValue(bus->channel(i), bus->frames(), 0);
    87   std::fill(bus1->channel(i), bus1->channel(i) + bus1->frames(), i);
    152  VerifyValue(bus->channel(i), bus->frames(), kTestValue);
    158  EXPECT_LT(bus->channel(bus->channels() - 1) + bus->frames(),
    [all...]

/external/chromium_org/tools/perf/

unit-info.json
    26  "frames": {
    28  "why": "Dropped frames"
    30  "frames-per-second": {

/packages/apps/Camera/jni/feature_mos/src/mosaic/

Mosaic.h
    34   The class Mosaic provides a simple interface to the panoramic mosaicing algorithm. The class allows passing in individual image frames to be stitched together, computes the alignment transformation between them, and then stitches and blends them together into a single panoramic output which can then be accessed as a single image. \
    47   while (<image frames are available>)
    56   // Add to list of frames
    93   * \param nframes Number of frames to pre-allocate; default value -1 will allocate each frame as it comes
    95   * \param thresh_still Minimum number of pixels of translation detected between the new frame and the last frame before this frame is added to be mosaiced. For the low-res processing at 320x180 resolution input, we set this to 5 pixels. To reject no frames, set this to 0.0 (default value).
    115  * After adding all frames, call this function to perform the final blending.
    155  * Size of image frames making up mosaic
    172  * Collection of frames that will make up mosaic.
    174  MosaicFrame **frames;  member in class:Mosaic
    177  * Subset of frames that are considered as relevant
    [all...]
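
The Mosaic.h comments sketch the intended call pattern: add frames in a loop while input is available, then run the final blend. A hedged outline of that driver loop; the stand-in class, the ImageType alias, and the addFrame()/createMosaic() method names are assumptions drawn from the comments, not the actual header:

    #include <vector>

    using ImageType = unsigned char*;  // assumed: pointer to one YVU frame buffer

    // Minimal stand-in mirroring what the Mosaic.h comments describe.
    struct MosaicLike {
      std::vector<ImageType> frames;  // "Collection of frames that will make up mosaic."
      void addFrame(ImageType f) { frames.push_back(f); }                 // assumed name
      void createMosaic() { /* align and blend all collected frames */ }  // assumed name
    };

    // Driver loop following the documented pattern:
    //   while (<image frames are available>) { /* add to list of frames */ }
    //   after adding all frames, perform the final blending.
    void BuildPanorama(MosaicLike& mosaic, const std::vector<ImageType>& input) {
      for (ImageType frame : input)
        mosaic.addFrame(frame);
      mosaic.createMosaic();
    }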

Blend.h
    37   // of the input image frames for them to be accepted for blending in the
    76   int runBlend(MosaicFrame **frames, MosaicFrame **rframes, int frames_size, ImageType &imageMosaicYVU,
    94   // Height and width of individual frames
    105  void AlignToMiddleFrame(MosaicFrame **frames, int frames_size);
    107  int DoMergeAndBlend(MosaicFrame **frames, int nsite, int width, int height, YUVinfo &imgMos, MosaicRect &rect, MosaicRect &cropping_rect, float &progress, bool &cancelComputation);
    114  void ComputeBlendParameters(MosaicFrame **frames, int frames_size, int is360);
    115  void SelectRelevantFrames(MosaicFrame **frames, int frames_size,

/packages/apps/Camera2/jni/feature_mos/src/mosaic/

Mosaic.h
    34   The class Mosaic provides a simple interface to the panoramic mosaicing algorithm. The class allows passing in individual image frames to be stitched together, computes the alignment transformation between them, and then stitches and blends them together into a single panoramic output which can then be accessed as a single image. \
    47   while (<image frames are available>)
    56   // Add to list of frames
    93   * \param nframes Number of frames to pre-allocate; default value -1 will allocate each frame as it comes
    95   * \param thresh_still Minimum number of pixels of translation detected between the new frame and the last frame before this frame is added to be mosaiced. For the low-res processing at 320x180 resolution input, we set this to 5 pixels. To reject no frames, set this to 0.0 (default value).
    115  * After adding all frames, call this function to perform the final blending.
    155  * Size of image frames making up mosaic
    172  * Collection of frames that will make up mosaic.
    174  MosaicFrame **frames;  member in class:Mosaic
    177  * Subset of frames that are considered as relevant
    [all...]

Blend.h
    37   // of the input image frames for them to be accepted for blending in the
    76   int runBlend(MosaicFrame **frames, MosaicFrame **rframes, int frames_size, ImageType &imageMosaicYVU,
    94   // Height and width of individual frames
    105  void AlignToMiddleFrame(MosaicFrame **frames, int frames_size);
    107  int DoMergeAndBlend(MosaicFrame **frames, int nsite, int width, int height, YUVinfo &imgMos, MosaicRect &rect, MosaicRect &cropping_rect, float &progress, bool &cancelComputation);
    114  void ComputeBlendParameters(MosaicFrame **frames, int frames_size, int is360);
    115  void SelectRelevantFrames(MosaicFrame **frames, int frames_size,

/external/tinyalsa/

pcm.c
    234  unsigned int pcm_frames_to_bytes(struct pcm *pcm, unsigned int frames)
    236  return frames * pcm->config.channels *
    312  unsigned int frames)
    314  int size_bytes = pcm_frames_to_bytes(pcm, frames);
    335  unsigned int pcm_offset, frames, count = 0;  local
    338  frames = size;
    339  pcm_mmap_begin(pcm, &pcm_areas, &pcm_offset, &frames);
    340  pcm_areas_copy(pcm, pcm_offset, buf, offset, frames);
    341  commit = pcm_mmap_commit(pcm, pcm_offset, frames);
    343  oops(pcm, commit, "failed to commit %d frames\n", frames)
    357  int frames;  local
    942  int err = 0, frames, avail;  local
    [all...]
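
pcm_frames_to_bytes() above multiplies the frame count by the channel count (the rest of the expression is cut off by the excerpt). For interleaved PCM the usual relationship is bytes = frames * channels * bytes-per-sample; a standalone sketch of that arithmetic, not tinyalsa's implementation:

    #include <cstdio>

    // Minimal stand-in for the config fields the snippet reads.
    struct PcmConfig {
      unsigned int channels;
      unsigned int bits_per_sample;  // e.g. 16 for S16_LE
    };

    // One frame holds one sample for every channel.
    unsigned int FramesToBytes(const PcmConfig& config, unsigned int frames) {
      return frames * config.channels * (config.bits_per_sample / 8);
    }

    unsigned int BytesToFrames(const PcmConfig& config, unsigned int bytes) {
      return bytes / (config.channels * (config.bits_per_sample / 8));
    }

    int main() {
      const PcmConfig cfg{2, 16};  // stereo, 16-bit samples
      std::printf("1024 frames = %u bytes\n", FramesToBytes(cfg, 1024));  // 4096
      std::printf("4096 bytes = %u frames\n", BytesToFrames(cfg, 4096));  // 1024
      return 0;
    }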

/external/chromium-trace/trace-viewer/src/tracing/tracks/

object_instance_track_test.js
    35  var frames = objects.getAllInstancesByTypeName()['Frame'];
    36  frames[0].snapshots[1].selected = true;
    50  track.objectInstances = frames;
    56  var frames = objects.getAllInstancesByTypeName()['Frame'];
    59  track.objectInstances = frames;

/frameworks/ex/variablespeed/jni/

sola_time_scaler.h
    56   // @param num_frames number of input frames (that is to say, number of
    77   // Injects a SolaAnalyzer instance for analyzing signal frames.
    107  // @param num_frames number of frames (num_samples / num_channels)
    108  // @returns number of frames actually accepted
    113  // @param num_frames maximum desired number of frames
    114  // @returns number of frames actually returned
    117  // Returns the number of frames that the input buffer can accept.
    118  // @returns number of frames for the next Process() call
    121  // Returns the number of available output frames.
    122  // @returns number of frames that can be retrieve
    [all...]

ring_buffer.h
    41  // @param size: size of the buffer in frames.
    58  // @param num_frames number of frames to read.
    64  // @param num_frames number of frames to write.
    70  // Returns the number of frames we can still write.
    73  // Returns the number of frames we can read for a given reader.
    83  // @param num_frames number of frames to read.
    93  // @param num_frames number of frames to copy back to the ring buffer.
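
ring_buffer.h is documented purely in frame terms: how many frames can still be written, and how many a reader can still consume. A compact sketch of that accounting for a single-reader ring buffer; the class is illustrative, not the variablespeed implementation:

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    // Single-channel, single-reader ring buffer sized in frames.
    class FrameRingBuffer {
     public:
      explicit FrameRingBuffer(size_t size_in_frames)
          : buffer_(size_in_frames), written_(0), read_(0) {}

      // Number of frames we can still write without clobbering unread data.
      size_t WriteCapacity() const { return buffer_.size() - (written_ - read_); }

      // Number of frames available to the reader.
      size_t ReadCapacity() const { return written_ - read_; }

      size_t Write(const float* frames, size_t num_frames) {
        num_frames = std::min(num_frames, WriteCapacity());
        for (size_t i = 0; i < num_frames; ++i)
          buffer_[(written_ + i) % buffer_.size()] = frames[i];
        written_ += num_frames;
        return num_frames;  // frames actually accepted
      }

      size_t Read(float* frames, size_t num_frames) {
        num_frames = std::min(num_frames, ReadCapacity());
        for (size_t i = 0; i < num_frames; ++i)
          frames[i] = buffer_[(read_ + i) % buffer_.size()];
        read_ += num_frames;
        return num_frames;  // frames actually returned
      }

     private:
      std::vector<float> buffer_;
      size_t written_;  // total frames ever written
      size_t read_;     // total frames ever read
    };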

/packages/apps/LegacyCamera/jni/feature_mos/src/mosaic/

Mosaic.h
    34   The class Mosaic provides a simple interface to the panoramic mosaicing algorithm. The class allows passing in individual image frames to be stitched together, computes the alignment transformation between them, and then stitches and blends them together into a single panoramic output which can then be accessed as a single image. \
    47   while (<image frames are available>)
    56   // Add to list of frames
    93   * \param nframes Number of frames to pre-allocate; default value -1 will allocate each frame as it comes
    95   * \param thresh_still Minimum number of pixels of translation detected between the new frame and the last frame before this frame is added to be mosaiced. For the low-res processing at 320x180 resolution input, we set this to 5 pixels. To reject no frames, set this to 0.0 (default value).
    115  * After adding all frames, call this function to perform the final blending.
    155  * Size of image frames making up mosaic
    172  * Collection of frames that will make up mosaic.
    174  MosaicFrame **frames;  member in class:Mosaic
    177  * Subset of frames that are considered as relevant
    [all...]

Blend.h
    37   // of the input image frames for them to be accepted for blending in the
    76   int runBlend(MosaicFrame **frames, MosaicFrame **rframes, int frames_size, ImageType &imageMosaicYVU,
    94   // Height and width of individual frames
    105  void AlignToMiddleFrame(MosaicFrame **frames, int frames_size);
    107  int DoMergeAndBlend(MosaicFrame **frames, int nsite, int width, int height, YUVinfo &imgMos, MosaicRect &rect, MosaicRect &cropping_rect, float &progress, bool &cancelComputation);
    114  void ComputeBlendParameters(MosaicFrame **frames, int frames_size, int is360);
    115  void SelectRelevantFrames(MosaicFrame **frames, int frames_size,

/external/chromium_org/media/audio/sounds/

wav_audio_handler.cc
    98   const int frames = std::min(bus->frames(), remaining_frames);  local
    99   bus->FromInterleaved(data_.data() + cursor, frames, bytes_per_sample_);
    100  *bytes_written = frames * bytes_per_frame_;
    101  bus->ZeroFramesPartial(frames, bus->frames() - frames);
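
The wav_audio_handler.cc lines show a common end-of-stream pattern: copy min(destination frames, remaining frames), report how much was consumed, and zero the unfilled tail. A sketch of the same pattern on a plain mono float buffer, with illustrative types rather than media::AudioBus:

    #include <algorithm>
    #include <cstring>

    // Fill |dest| (|dest_frames| mono float frames) from |source|, which has
    // |remaining_frames| frames left. Returns the number of frames copied;
    // any unfilled tail of |dest| is zeroed.
    int FillAndZeroTail(const float* source, int remaining_frames,
                        float* dest, int dest_frames) {
      const int frames = std::min(dest_frames, remaining_frames);
      std::memcpy(dest, source, frames * sizeof(float));
      std::memset(dest + frames, 0, (dest_frames - frames) * sizeof(float));
      return frames;
    }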

/external/chromium_org/ui/views/controls/

throbber.h
    24  // |frame_time_ms| is the amount of time that should elapse between frames
    29  Throbber(int frame_time_ms, bool paint_while_stopped, gfx::ImageSkia* frames);
    36  // Set custom throbber frames. Otherwise IDR_THROBBER is loaded.
    37  void SetFrames(const gfx::ImageSkia* frames);
    51  int frame_count_;  // How many frames we have.
    53  const gfx::ImageSkia* frames_;  // Frames images.
    67  SmoothedThrobber(int frame_delay_ms, gfx::ImageSkia* frames);
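
throbber.h describes an animation driven by frame_time_ms (the delay between frames) over a strip of frame_count_ images. Picking the frame to paint is just elapsed time divided by the per-frame delay, modulo the frame count; a tiny sketch with illustrative names, not the actual Throbber painting code:

    #include <cstdint>

    // Index of the animation frame to paint, given milliseconds since the
    // throbber started, the delay between frames, and the number of frames
    // in the image strip.
    int CurrentThrobberFrame(int64_t elapsed_ms, int frame_time_ms, int frame_count) {
      if (frame_time_ms <= 0 || frame_count <= 0)
        return 0;
      return static_cast<int>((elapsed_ms / frame_time_ms) % frame_count);
    }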

/packages/inputmethods/LatinIME/java/src/com/android/inputmethod/latin/utils/

DebugLogUtils.java
    49  * @param limit the maximum number of stack frames to be returned.
    57  final StackTraceElement[] frames = e.getStackTrace();  local
    59  for (int j = 1; j < frames.length && j < limit + 1; ++j) {
    60  sb.append(frames[j].toString() + "\n");
    73  final StackTraceElement[] frames = t.getStackTrace();  local
    74  for (int j = 0; j < frames.length; ++j) {
    75  sb.append(frames[j].toString() + "\n");

/external/chromium_org/third_party/WebKit/Source/devtools/front_end/

TimelineFrameOverview.js
    39   this.element.id = "timeline-overview-frames";
    122  var frames;
    124  frames = this._backgroundFrames;
    126  frames = this._mainThreadFrames;
    129  frames.push(frame);
    151  * @param {!Array.<!WebInspector.TimelineFrame>} frames
    155  _aggregateFrames: function(frames, framesPerBar)
    158  for (var barNumber = 0, currentFrame = 0; currentFrame < frames.length; ++barNumber) {
    159  var barStartTime = frames[currentFrame].startTime;
    163  for (var lastFrame = Math.min(Math.floor((barNumber + 1) * framesPerBar), frames.length)
    [all...]
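
The _aggregateFrames loop buckets the frame array into bars of framesPerBar frames, taking each bar's start time from its first frame. A sketch of the same grouping (the source here is JavaScript; this port uses illustrative C++ types):

    #include <algorithm>
    #include <cmath>
    #include <vector>

    struct TimelineFrameLike {  // stand-in for WebInspector.TimelineFrame
      double startTime;
      double duration;
    };

    struct Bar {
      double startTime;
      int frameCount;
    };

    // Bar N covers frames [floor(N * framesPerBar), floor((N + 1) * framesPerBar)),
    // clamped to the end of the array, mirroring the loop shown above.
    std::vector<Bar> AggregateFrames(const std::vector<TimelineFrameLike>& frames,
                                     double framesPerBar) {
      std::vector<Bar> bars;
      if (framesPerBar <= 0)
        return bars;
      size_t currentFrame = 0;
      for (int barNumber = 0; currentFrame < frames.size(); ++barNumber) {
        const size_t lastFrame = std::min(
            static_cast<size_t>(std::floor((barNumber + 1) * framesPerBar)),
            frames.size());
        if (lastFrame == currentFrame)
          continue;  // bar too narrow to contain a whole frame yet
        bars.push_back({frames[currentFrame].startTime,
                        static_cast<int>(lastFrame - currentFrame)});
        currentFrame = lastFrame;
      }
      return bars;
    }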