/external/libvpx/libvpx/test/
  superframe_test.cc
     51  const int frames = (marker & 0x7) + 1;    (local)
     53  const unsigned int index_sz = 2 + mag * frames;
     72  // Make sure we do a few frames after the last SF

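The two arithmetic hits are the VP9 superframe index check: the marker byte encodes the frame count and the width of each frame-size entry. A minimal sketch of the same computation, assuming the marker is the packet's last byte (the function name and bounds handling are illustrative, not the test's own):

#include <cstddef>
#include <cstdint>

// Size in bytes of the VP9 superframe index appended to a packet, or 0 if
// the last byte is not a superframe marker. Sketch only: real code also
// checks that the leading and trailing marker bytes of the index match and
// that index_sz fits inside the packet.
size_t SuperframeIndexSize(const uint8_t *data, size_t data_sz) {
  if (data_sz == 0)
    return 0;
  const uint8_t marker = data[data_sz - 1];
  if ((marker & 0xe0) != 0xc0)
    return 0;                                 // not a superframe marker
  const int frames = (marker & 0x7) + 1;      // frames packed in the superframe
  const int mag = ((marker >> 3) & 0x3) + 1;  // bytes per frame-size entry
  return 2 + static_cast<size_t>(mag) * frames;  // marker + size table + marker
}
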
/external/lldb/scripts/Python/interface/
  SBThread.i
    213  '''A helper object that will lazily hand out frames for a thread when supplied an index.'''
    232  '''An accessor function that returns a list() that contains all frames in a lldb.SBThread object.'''
    233  frames = []
    235  frames.append(frame)
    236  return frames
    251  if _newclass: num_frames = property(GetNumFrames, None, doc='''A read only property that returns the number of stack frames in this thread as an integer.''')
    253  __swig_getmethods__["frames"] = get_thread_frames
    254  if _newclass: frames = property(get_thread_frames, None, doc='''A read only property that returns a list() of lldb.SBFrame objects for all frames in this thread.''')
    257  if _newclass: frame = property(get_frames_access_object, None, doc='''A read only property that returns an object that can be used to access frames as an array ("frame_12 = lldb.thread.frame[12]").'''
    [all...]

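SBThread.i generates the Python frames and frame properties on top of GetNumFrames()/GetFrameAtIndex(). The same loop through the C++ SB API looks roughly like this (a sketch; frame validity checks omitted):

#include <cstdint>
#include <vector>

#include "lldb/API/SBFrame.h"
#include "lldb/API/SBThread.h"

// Collect every stack frame of a thread, mirroring what the generated
// Python property lldb.SBThread.frames does through the same two calls.
std::vector<lldb::SBFrame> GetThreadFrames(lldb::SBThread &thread) {
  std::vector<lldb::SBFrame> frames;
  const uint32_t num_frames = thread.GetNumFrames();
  for (uint32_t i = 0; i < num_frames; ++i)
    frames.push_back(thread.GetFrameAtIndex(i));
  return frames;
}
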
/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/test/
  superframe_test.cc
     51  const int frames = (marker & 0x7) + 1;    (local)
     53  const unsigned int index_sz = 2 + mag * frames;
     72  // Make sure we do a few frames after the last SF

/system/media/audio_utils/
  primitives.c
    244  size_t nonZeroStereo32(const int32_t *frames, size_t count)
    248  if (frames[0] != 0 || frames[1] != 0) {
    251  frames += 2;
    256  size_t nonZeroStereo16(const int16_t *frames, size_t count)
    260  if (frames[0] != 0 || frames[1] != 0) {
    263  frames += 2;

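primitives.c walks interleaved stereo data two samples at a time, one frame per step. A sketch of that scan, under the assumption that the function returns the number of non-silent frames (the return-value semantics are not visible in the hits):

#include <cstddef>
#include <cstdint>

// Count interleaved stereo frames in which at least one channel is
// non-zero; the 32-bit variant would differ only in the sample type.
size_t NonZeroStereo16(const int16_t *frames, size_t count) {
  size_t non_zero = 0;
  for (size_t i = 0; i < count; ++i) {
    if (frames[0] != 0 || frames[1] != 0)
      ++non_zero;
    frames += 2;  // advance one frame = one left + one right sample
  }
  return non_zero;
}
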
/external/apache-harmony/jdwp/src/test/java/org/apache/harmony/jpda/tests/jdwp/StackFrame/
  JDWPStackFrameTestCase.java
    114  int frames = reply.getNextValueAsInt();    (local)
    115  FrameInfo[] frameInfos = new FrameInfo[frames];
    116  for (int i = 0; i < frames; i++) {
    158  int frames = reply.getNextValueAsInt();    (local)
    160  long[] frameIDs = new long[frames];
    161  for (int i = 0; i < frames; i++) {

/external/chromium_org/content/renderer/media/
  webaudiosourceprovider_impl_unittest.cc
     72  EXPECT_EQ(bus1->frames(), bus2->frames());
     75  sizeof(*bus1->channel(ch)) * bus1->frames()) != 0) {
    222  bus2->ZeroFramesPartial(bus2->frames() / 2,
    223  bus2->frames() - bus2->frames() / 2);
  webrtc_local_audio_renderer.cc
     43  if (loopback_fifo_->frames() >= audio_bus->frames()) {
     44  loopback_fifo_->Consume(audio_bus, 0, audio_bus->frames());
     48  // frames. It should not happen in a steady-state mode.
     52  return audio_bus->frames();
     71  if (loopback_fifo_->frames() + number_of_frames <=
     76  audio_source->frames(),
    126  // audio frames but I have selected a max size of ten buffers just

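webrtc_local_audio_renderer.cc shows the two sides of a loopback FIFO: the render callback consumes a full bus of frames only when enough data is buffered (otherwise it outputs silence), and the capture side appends only when the new block fits. A generic sketch of that policy with a hypothetical single-channel FrameFifo type, not the Chromium media::AudioFifo API:

#include <algorithm>
#include <cstddef>
#include <deque>

// Hypothetical FIFO of sample frames, used only to illustrate the
// consume-or-silence / drop-on-overflow policy visible in the hits.
struct FrameFifo {
  std::deque<float> samples;
  size_t max_frames = 10 * 480;  // e.g. ten 10 ms buffers at 48 kHz

  size_t frames() const { return samples.size(); }

  // Capture side: append only if the whole block fits, else drop it.
  bool Push(const float *data, size_t n) {
    if (frames() + n > max_frames)
      return false;  // would overflow: drop the block
    samples.insert(samples.end(), data, data + n);
    return true;
  }

  // Render side: fill |out| from the FIFO, or with silence on underrun.
  void PullOrSilence(float *out, size_t n) {
    if (frames() >= n) {
      std::copy(samples.begin(), samples.begin() + n, out);
      samples.erase(samples.begin(), samples.begin() + n);
    } else {
      std::fill(out, out + n, 0.0f);  // not enough data: output silence
    }
  }
};
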
/external/chromium_org/media/cast/receiver/
  audio_decoder_unittest.cc
     87  const int num_elements = audio_bus->channels() * audio_bus->frames();
     90  audio_bus->frames(), sizeof(int16), &interleaved.front());
    105  audio_bus->frames(),
    150  // Did the decoder detect whether frames were dropped?
    164  TestAudioBusFactory::kMiddleANoteFreq * 2 * audio_bus->frames() /
    166  CountZeroCrossings(audio_bus->channel(ch), audio_bus->frames()),
    174  audio_bus->frames() / GetParam().sampling_rate;

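The decoder test checks tone integrity by counting zero crossings: a sine at frequency f crosses zero about 2*f times per second, so N frames at sample rate fs should contain roughly 2*f*N/fs crossings. A sketch of that arithmetic (the Cast test has its own CountZeroCrossings helper; this one is illustrative):

#include <cstddef>

// Count sign changes in a buffer of samples.
size_t CountZeroCrossings(const float *samples, size_t frames) {
  size_t crossings = 0;
  for (size_t i = 1; i < frames; ++i) {
    if ((samples[i - 1] < 0.0f) != (samples[i] < 0.0f))
      ++crossings;
  }
  return crossings;
}

// A pure tone at |freq| Hz crosses zero about 2 * freq times per second,
// so |frames| samples at |sample_rate| should contain roughly this many.
double ExpectedZeroCrossings(double freq, size_t frames, double sample_rate) {
  return 2.0 * freq * static_cast<double>(frames) / sample_rate;
}
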
/external/chromium_org/third_party/webrtc/modules/audio_coding/codecs/ilbc/
  complexityMeasures.m
     19  frames = length(index)-indexnonzero(1)+1;
     22  new=zeros(frames,functionOrder);
     24  for i = 1:frames,

/external/chromium_org/tools/perf/metrics/
  speedindex_unittest.py
     41  def __init__(self, frames):
     42  self._frames = frames
    114  frames = [
    124  tab = FakeTab(frames)
    142  frames = [
    148  tab = FakeTab(frames)

/external/chromium_org/third_party/WebKit/Source/core/animation/
  CompositorAnimationsTest.cpp
    117  AnimatableValueKeyframeVector frames;    (local)
    120  frames.append(frame);
    121  frames.append(toAnimatableValueKeyframe(second.get()));
    122  return isCandidateForAnimationOnCompositor(m_timing, *AnimatableValueKeyframeEffectModel::create(frames).get());
    176  OwnPtrWillBeRawPtr<AnimatableValueKeyframeVector> frames = adoptPtrWillBeNoop(new AnimatableValueKeyframeVector);    (local)
    180  frames->append(createReplaceOpKeyframe(CSSPropertyOpacity, value.get(), offset).get());
    182  return frames.release();
    193  AnimatableValueKeyframeVector frames;    (local)
    194  frames.append(from);
    196  frames.append(to)
    733  AnimatableValueKeyframeVector frames;    (local)
    794  AnimatableValueKeyframeVector frames;    (local)
    [all...]

/dalvik/tools/dmtracedump/
  CreateTestTrace.c
     80  dataRecord **frames;    (member in struct: stack)
    166  callStack[ii].frames = NULL;
    199  if (callStack[threadId].frames == NULL) {
    202  callStack[threadId].frames = stk;
    261  callStack[threadId].frames[indentLevel] = &records[nextRecord];
    264  if (callStack[threadId].frames[indentLevel - 1] == NULL) {
    267  callStack[threadId].frames[indentLevel - 1] = &records[nextRecord];
    276  char *name = callStack[threadId].frames[indentLevel - 1]->fullName;
    284  callStack[threadId].frames[indentLevel - 1]->fullName);
    303  // frames than we entered
    [all...]

/external/chromium_org/media/base/
  audio_renderer_mixer_unittest.cc
     99  bool ValidateAudioData(int index, int frames, float scale, double epsilon) {
    101  for (int j = index; j < frames; j++) {
    115  bool ValidateAudioData(int index, int frames, float scale) {
    116  return ValidateAudioData(index, frames, scale, epsilon_);
    130  int frames = mixer_callback_->Render(audio_bus_.get(), 0);    (local)
    131  if (frames != audio_bus_->frames())
    141  0, frames, 0, std::numeric_limits<double>::max());
    143  return ValidateAudioData(0, frames, scale);
    151  audio_bus_->channel(i) + audio_bus_->frames(), value)
    [all...]

/external/chromium_org/net/quic/
  quic_packet_creator.cc
    106  // pending frames when FEC protection is turned on. If current packet can be
    111  LOG(DFATAL) << "Cannot start FEC protection with pending frames.";
    143  // Don't change creator state if there are frames queued.
    272  const QuicFrames& frames,
    286  SerializedPacket serialized_packet = SerializeAllFrames(frames);
    295  const QuicFrames& frames) {
    297  // frames from SendStreamData()[send_stream_should_flush_ == false &&
    300  LOG_IF(DFATAL, frames.empty())
    302  for (size_t i = 0; i < frames.size(); ++i) {
    303  bool success = AddFrame(frames[i], false)
    415  QuicFrames frames;    (local)
    [all...]
  quic_sent_packet_manager.cc
    222  // Discard any retransmittable frames associated with revived packets.
    239  const RetransmittableFrames* frames = it->second.retransmittable_frames;    (local)
    243  if (frames != NULL && (retransmission_type == ALL_PACKETS ||
    244  frames->encryption_level() == ENCRYPTION_INITIAL)) {
    254  const RetransmittableFrames* frames = it->second.retransmittable_frames;    (local)
    257  if (frames != NULL && frames->encryption_level() == ENCRYPTION_NONE) {
    265  // number can have frames.
    514  const RetransmittableFrames* frames = it->second.retransmittable_frames;    (local)
    515  // Only retransmit frames which are in flight, and therefore have been sent
    533  const RetransmittableFrames* frames = it->second.retransmittable_frames;    (local)
    557  const RetransmittableFrames* frames = it->second.retransmittable_frames;    (local)
    [all...]

/external/chromium_org/third_party/libvpx/source/libvpx/vp8/encoder/
  temporal_filter.c
    242  YV12_BUFFER_CONFIG *f = cpi->frames[alt_ref_index];
    254  /* Source frames are extended to 16 pixels. This is different than
    255   * L/A/G reference frames that have a border of 32 (VP8BORDERINPIXELS)
    286  if (cpi->frames[frame] == NULL)
    305  cpi->frames[alt_ref_index],
    306  cpi->frames[frame],
    323  cpi->frames[frame]->y_buffer + mb_y_offset,
    324  cpi->frames[frame]->u_buffer + mb_uv_offset,
    325  cpi->frames[frame]->v_buffer + mb_uv_offset,
    326  cpi->frames[frame]->y_stride
    [all...]

/external/libvpx/libvpx/vp8/encoder/
  temporal_filter.c
    241  YV12_BUFFER_CONFIG *f = cpi->frames[alt_ref_index];
    253  /* Source frames are extended to 16 pixels. This is different than
    254   * L/A/G reference frames that have a border of 32 (VP8BORDERINPIXELS)
    285  if (cpi->frames[frame] == NULL)
    304  cpi->frames[alt_ref_index],
    305  cpi->frames[frame],
    322  cpi->frames[frame]->y_buffer + mb_y_offset,
    323  cpi->frames[frame]->u_buffer + mb_uv_offset,
    324  cpi->frames[frame]->v_buffer + mb_uv_offset,
    325  cpi->frames[frame]->y_stride
    [all...]

/hardware/intel/common/omx-components/videocodec/libvpx_internal/libvpx/vp8/encoder/
  temporal_filter.c
    241  YV12_BUFFER_CONFIG *f = cpi->frames[alt_ref_index];
    253  /* Source frames are extended to 16 pixels. This is different than
    254   * L/A/G reference frames that have a border of 32 (VP8BORDERINPIXELS)
    285  if (cpi->frames[frame] == NULL)
    304  cpi->frames[alt_ref_index],
    305  cpi->frames[frame],
    322  cpi->frames[frame]->y_buffer + mb_y_offset,
    323  cpi->frames[frame]->u_buffer + mb_uv_offset,
    324  cpi->frames[frame]->v_buffer + mb_uv_offset,
    325  cpi->frames[frame]->y_stride
    [all...]

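temporal_filter.c (in all three copies of the VP8 encoder above) builds an alt-ref frame by blending a window of source frames around the alt-ref index. The shape of that blend is weighted per-pixel accumulation followed by normalization; the sketch below is a generic version of that idea, not the motion-compensated, per-macroblock libvpx implementation:

#include <cstddef>
#include <cstdint>
#include <vector>

// Generic temporal averaging across a window of frames: each co-located
// pixel is accumulated with a per-frame weight, then normalized.
std::vector<uint8_t> AverageFrames(const std::vector<const uint8_t *> &frames,
                                   const std::vector<int> &weights,
                                   size_t num_pixels) {
  std::vector<uint32_t> accumulator(num_pixels, 0);
  std::vector<uint32_t> count(num_pixels, 0);
  for (size_t f = 0; f < frames.size(); ++f) {
    for (size_t i = 0; i < num_pixels; ++i) {
      accumulator[i] += static_cast<uint32_t>(weights[f]) * frames[f][i];
      count[i] += static_cast<uint32_t>(weights[f]);
    }
  }
  std::vector<uint8_t> out(num_pixels, 0);
  for (size_t i = 0; i < num_pixels; ++i)
    out[i] = count[i] ? static_cast<uint8_t>(accumulator[i] / count[i]) : 0;
  return out;
}
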
/external/chromium_org/media/filters/
  decrypting_audio_decoder.cc
     95  // Return empty (end-of-stream) frames if decoding has finished.
    250  const Decryptor::AudioBuffers& frames) {
    270  DCHECK_EQ(status == Decryptor::kSuccess, !frames.empty());
    304  DCHECK(!frames.empty());
    305  ProcessDecodedFrames(frames);
    342  const Decryptor::AudioBuffers& frames) {
    343  for (Decryptor::AudioBuffers::const_iterator iter = frames.begin();
    344  iter != frames.end();

/external/chromium_org/third_party/opus/src/src/
  repacketizer.c
     85  ret=opus_packet_parse_impl(data, len, self_delimited, &tmp_toc, &rp->frames[rp->nb_frames], &rp->len[rp->nb_frames], NULL, NULL);
    108  const unsigned char **frames;    (local)
    119  frames = rp->frames+begin;
    216  celt_assert(frames[i] + len[i] <= data || ptr <= frames[i]);
    217  OPUS_MOVE(ptr, frames[i], len[i]);
    319  /* Unpad all frames */

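repacketizer.c keeps per-frame pointers and lengths so that frames from several packets can be merged or split without copying payloads until output time. From the caller's side this is driven through the public repacketizer API; a minimal usage sketch (error handling reduced to the success checks, and the 1500-byte output capacity is an assumption):

#include <opus.h>
#include <vector>

// Merge two Opus packets that share the same TOC configuration into one
// multi-frame packet using the public repacketizer API.
std::vector<unsigned char> MergePackets(const unsigned char *p1, opus_int32 len1,
                                        const unsigned char *p2, opus_int32 len2) {
  OpusRepacketizer *rp = opus_repacketizer_create();
  std::vector<unsigned char> out(1500);  // assumed output capacity
  opus_int32 out_len = 0;
  if (opus_repacketizer_cat(rp, p1, len1) == OPUS_OK &&
      opus_repacketizer_cat(rp, p2, len2) == OPUS_OK) {
    out_len = opus_repacketizer_out(rp, out.data(),
                                    static_cast<opus_int32>(out.size()));
  }
  opus_repacketizer_destroy(rp);
  out.resize(out_len > 0 ? static_cast<size_t>(out_len) : 0);
  return out;
}
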
/external/chromium_org/third_party/webrtc/tools/frame_analyzer/
  video_quality_analysis.cc
    229  // Allocate buffers for test and reference frames.
    273  results->frames.push_back(result);
    309  // Calculate how many frames a cluster of repeated frames contains.
    319  // Calculate how much frames have been skipped.
    344  static_cast<unsigned int>(results->frames.size()));
    346  if (results->frames.size() > 0u) {
    348  for (iter = results->frames.begin(); iter != results->frames.end() - 1;
    355  for (iter = results->frames.begin(); iter != results->frames.end() - 1
    [all...]

/external/libopus/src/
  repacketizer.c
     85  ret=opus_packet_parse_impl(data, len, self_delimited, &tmp_toc, &rp->frames[rp->nb_frames], &rp->len[rp->nb_frames], NULL, NULL);
    108  const unsigned char **frames;    (local)
    119  frames = rp->frames+begin;
    216  celt_assert(frames[i] + len[i] <= data || ptr <= frames[i]);
    217  OPUS_MOVE(ptr, frames[i], len[i]);
    319  /* Unpad all frames */

/external/qemu/distrib/sdl-1.2.15/test/
  testpalette.c
    141  int boatcols, frames, i, red;    (local)
    230  frames = 0;
    290  cmap[boatcols + ((i + frames) & 63)] = wavemap[i];
    325  redphase = frames % 64;
    332  frames++;
    335  printf("%d frames, %.2f fps\n",
    336  frames, 1000.0 * frames / (SDL_GetTicks() - start));

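testpalette.c measures frame rate the simple way: count frames in the render loop and divide by elapsed milliseconds at the end. The same arithmetic with std::chrono standing in for SDL_GetTicks() (the loop body is a placeholder for real rendering work):

#include <chrono>
#include <cstdio>
#include <thread>

int main() {
  using clock = std::chrono::steady_clock;
  const auto start = clock::now();
  int frames = 0;
  for (int i = 0; i < 100; ++i) {  // stand-in for the render loop
    std::this_thread::sleep_for(std::chrono::milliseconds(1));
    ++frames;
  }
  const double elapsed_ms =
      std::chrono::duration<double, std::milli>(clock::now() - start).count();
  // Same formula as the testpalette hit: frames per second over the run.
  std::printf("%d frames, %.2f fps\n", frames, 1000.0 * frames / elapsed_ms);
  return 0;
}
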
/external/lldb/examples/interposing/darwin/fd_interposing/
  FDInterposing.cpp
    168  typedef std::vector<void *> Frames;
    170  typedef std::vector<void *> Frames;
    186  FDEvent (int fd, int err, const StringSP &string_sp, bool is_create, const Frames& frames) :
    188  m_frames (frames.begin(), frames.end()),
    208  Frames &
    214  const Frames &
    245  // The frames for the stack backtrace for this event
    246  Frames m_frames
    376  void *frames[2048];    (local)
    562  Frames frames;    (local)
    588  Frames frames;    (local)
    598  Frames frames;    (local)
    [all...]

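FDInterposing.cpp records a Frames vector (a std::vector<void*> of return addresses) for every file-descriptor create/close event. A sketch of capturing such a vector with the libc backtrace() call, the usual source of this data on Darwin and Linux (the interposing library's own capture path is not shown in the hits):

#include <execinfo.h>
#include <vector>

typedef std::vector<void *> Frames;

// Capture up to |max_frames| return addresses of the current call stack,
// the kind of data FDInterposing.cpp keeps per FDEvent.
Frames CaptureFrames(int max_frames = 2048) {
  Frames buffer(static_cast<size_t>(max_frames));
  const int count = ::backtrace(buffer.data(), max_frames);
  buffer.resize(count > 0 ? static_cast<size_t>(count) : 0);
  return buffer;
}
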
/hardware/qcom/audio/legacy/libalsa-intf/
  arec.c
    185  long frames;    (local)
    269  frames = bufsize / 2;
    271  frames = bufsize / 8;
    273  frames = bufsize / 12;
    275  frames = bufsize / 4;
    277  x.frames = frames;
    300  fprintf(stderr, "Arec:avail 1 = %d frames = %ld\n",avail, frames);
    307  if (x.frames > avail
    [all...]

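The different divisors in arec.c (bufsize / 2, / 4, / 8, / 12) are just bytes-per-frame for different channel counts and sample widths: a PCM frame is one sample for each channel. The general conversion, with example mappings that are illustrative rather than arec.c's exact format table:

#include <cstddef>

// Convert a buffer size in bytes to a count of PCM frames.
// One frame = one sample for every channel.
long BytesToFrames(size_t bufsize_bytes, int channels, int bytes_per_sample) {
  const int bytes_per_frame = channels * bytes_per_sample;
  return static_cast<long>(bufsize_bytes / bytes_per_frame);
}

// Examples: 16-bit mono   -> bufsize / 2
//           16-bit stereo -> bufsize / 4
//           32-bit stereo -> bufsize / 8
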