    Searched refs: Q8 (Results 1 - 25 of 48)

  /external/libhevc/decoder/arm/
ihevcd_fmt_conv_420sp_to_rgba8888.s 219 VADDW.U8 Q8,Q5,D30 @//Q8 - HAS Y + R
228 VQMOVUN.S16 D16,Q8
233 VZIP.16 Q7,Q8
246 VZIP.32 Q8,Q11
260 VADDW.U8 Q8,Q5,D28 @//Q8 - HAS Y + R
279 VQMOVUN.S16 D16,Q8
284 VZIP.16 Q7,Q8
297 VZIP.32 Q8,Q11
350 VADDW.U8 Q8,Q5,D30 @//Q8 - HAS Y +
    [all...]
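
The ihevcd_fmt_conv_420sp_to_rgba8888.s hits use Q8 as the 16-bit "Y + R" accumulator: a widening add of the Y bytes, an unsigned-saturating narrow back to pixels, then VZIP interleaving into RGBA order. A minimal intrinsics sketch of the first two steps, assuming the R contribution is already in a 16-bit register (names are illustrative):

#include <arm_neon.h>

/* Hypothetical helper mirroring the "Y + R" step: VADDW.U8 widens eight Y
 * bytes and adds them to the 16-bit R contribution, VQMOVUN.S16 clamps the
 * result back to unsigned 8-bit pixels. */
static inline uint8x8_t add_y_and_clamp(int16x8_t r_contrib, uint8x8_t y)
{
    uint16x8_t sum = vaddw_u8(vreinterpretq_u16_s16(r_contrib), y); /* VADDW.U8 Q8,Q5,D30 */
    return vqmovun_s16(vreinterpretq_s16_u16(sum));                 /* VQMOVUN.S16 D16,Q8 */
}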
  /frameworks/av/media/libstagefright/codecs/amrwbenc/src/asm/ARMV7/
Filt_6k_7k_neon.s 67 VLD1.S16 {Q8, Q9}, [r7]! @ signal[64] ~ signal[79]
77 VSHR.S16 Q2, Q8, #2
97 VLD1.S16 {Q8}, [r4]!
121 VEXT.8 Q7,Q7,Q8,#2
125 VEXT.8 Q8,Q8,Q15,#2
143 VEXT.8 Q7,Q7,Q8,#2
147 VEXT.8 Q8,Q8,Q15,#2
165 VEXT.8 Q7,Q7,Q8,#
    [all...]
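
In Filt_6k_7k_neon.s, Q8 holds a block of 16-bit signal samples and the VEXT.8 ...,#2 lines advance the filter window by one sample (two bytes) per tap. A rough sketch of that sliding-window step, with illustrative names:

#include <arm_neon.h>

/* Illustrative sliding-window step: with 16-bit samples, VEXT.8 ...,#2
 * drops the oldest sample and pulls in the next one. */
static inline int16x8_t advance_window(int16x8_t window, int16x8_t next)
{
    return vextq_s16(window, next, 1);   /* VEXT.8 Q7,Q7,Q8,#2 */
}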
scale_sig_neon.s 64 VST1.S16 {Q8, Q9}, [r0]! @store 16 Word16 x[]
72 VSHLL.S16 Q8, D0, #16
76 VSHL.S32 Q8, Q8, Q14
80 VADDHN.S32 D16, Q8, Q15
84 VST1.S16 {Q8, Q9}, [r0]! @store 16 Word16 x[]
99 VST1.S16 {Q8, Q9}, [r0]! @store 16 Word16 x[]
113 VST1.S16 {Q8, Q9}, [r0]! @store 16 Word16 x[]
127 VST1.S16 {Q8, Q9}, [r0]! @store 16 Word16 x[]
residu_asm_neon.s 38 VMOV.S32 Q8, #0x8000
113 VQADD.S32 Q10, Q10, Q8
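
scale_sig_neon.s uses Q8 for the scaling pipeline: widen each 16-bit sample into the top half of a 32-bit lane (VSHLL.S16 #16), shift by a variable exponent (VSHL.S32), and round back to 16 bits with an add-high-narrow against a 0x8000 constant (the same constant residu_asm_neon.s keeps in Q8). A hedged intrinsics sketch, with exp and the function name as placeholders:

#include <arm_neon.h>

/* Hedged sketch of the per-sample scaling step; exp may be negative,
 * in which case the variable shift acts as a right shift. */
static inline int16x4_t scale_round_4(int16x4_t x, int32_t exp)
{
    int32x4_t wide    = vshlq_n_s32(vmovl_s16(x), 16);      /* VSHLL.S16 Q8, D0, #16 */
    int32x4_t shifted = vshlq_s32(wide, vdupq_n_s32(exp));  /* VSHL.S32  Q8, Q8, Q14 */
    int32x4_t round   = vdupq_n_s32(0x8000);                /* rounding constant     */
    return vaddhn_s32(shifted, round);                      /* VADDHN.S32 D16,Q8,Q15 */
}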
  /frameworks/av/media/libstagefright/codecs/aacenc/src/asm/ARMV7/
PrePostMDCT_v7.s 47 VREV64.32 Q8, Q7
52 VQDMULH.S32 Q11, Q1, Q8 @ MULHIGH(sina, ti1)
53 VQDMULH.S32 Q12, Q0, Q8 @ MULHIGH(cosa, ti1)
109 VREV64.32 Q8, Q6
120 VQDMULH.S32 Q10, Q2, Q8 @ MULHIGH(cosb, tr2)
123 VQDMULH.S32 Q13, Q3, Q8 @ MULHIGH(sinb, tr2)
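
PrePostMDCT_v7.s pairs VREV64.32 with VQDMULH.S32 to implement what its comments call MULHIGH: swap the two 32-bit lanes of each 64-bit half, then perform a saturating doubling high multiply (the usual Q31 fractional product). A sketch under those assumptions, with illustrative names:

#include <arm_neon.h>

/* Sketch: vrev64q_s32 swaps the two 32-bit lanes inside each 64-bit half
 * (VREV64.32); vqdmulhq_s32 is the saturating doubling high multiply that
 * the comments label MULHIGH. */
static inline int32x4_t twiddle_mulhigh(int32x4_t coeff, int32x4_t samples)
{
    int32x4_t swapped = vrev64q_s32(samples);  /* VREV64.32 Q8, Q7        */
    return vqdmulhq_s32(coeff, swapped);       /* VQDMULH.S32 Q11, Q1, Q8 */
}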
Radix4FFT_v7.s 90 VADD.S32 Q8, Q10, Q11 @ MULHIGH(cosx, t0) + MULHIGH(sinx, t1)
104 VADD.S32 Q4, Q8, Q6 @ r4 = t0 + r6@
106 VSUB.S32 Q6, Q8, Q6 @ r6 = t0 - r6@
109 VADD.S32 Q8, Q0, Q5 @ xptr[0] = r0 + r5@
118 VSUB.S32 Q8, Q0, Q5 @ xptr[0] = r0 - r5@
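
Radix4FFT_v7.s uses Q8 as the t0 term of a butterfly: the same pair of vectors is combined once by addition and once by subtraction. A minimal restatement (names are illustrative):

#include <arm_neon.h>

/* Illustrative butterfly: one add and one subtract of the same operands. */
static inline void butterfly(int32x4_t t0, int32x4_t r6,
                             int32x4_t *sum, int32x4_t *diff)
{
    *sum  = vaddq_s32(t0, r6);  /* VADD.S32 Q4, Q8, Q6 : r4 = t0 + r6 */
    *diff = vsubq_s32(t0, r6);  /* VSUB.S32 Q6, Q8, Q6 : r6 = t0 - r6 */
}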
  /external/libhevc/common/arm/
ihevc_sao_edge_offset_class1.s 137 VSUB.U8 Q8,Q7,Q6 @sign_up = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
158 VADD.I8 Q6,Q0,Q8 @edge_idx = vaddq_s8(const_2, sign_up)
164 VNEG.S8 Q8,Q10 @sign_up = vnegq_s8(sign_down)
170 VADD.I8 Q11,Q0,Q8 @II edge_idx = vaddq_s8(const_2, sign_up)
173 VNEG.S8 Q8,Q4 @II sign_up = vnegq_s8(sign_down)
231 VADD.I8 Q11,Q0,Q8 @edge_idx = vaddq_s8(const_2, sign_up)
279 VSUB.U8 Q8,Q7,Q6 @sign_up = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
300 VADD.I8 Q6,Q0,Q8 @edge_idx = vaddq_s8(const_2, sign_up)
306 VNEG.S8 Q8,Q10 @sign_up = vnegq_s8(sign_down)
310 VADD.I8 Q11,Q0,Q8 @II edge_idx = vaddq_s8(const_2, sign_up
    [all...]
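
The ihevc_sao_edge_offset_* files below all reuse one idiom around Q8: build sign(cur - neighbour) from a greater-than and a less-than compare plus a subtract, then form the edge index as const_2 + sign_up (+ sign_down), negating the down sign for the next row. A sketch of that idiom in intrinsics (constants and names are illustrative):

#include <arm_neon.h>

/* sign(cur - neighbour): 0x00 - 0xFF wraps to +1 and 0xFF - 0x00 to -1 once
 * the result is reinterpreted as signed bytes. */
static inline int8x16_t sao_sign(uint8x16_t cur, uint8x16_t neighbour)
{
    uint8x16_t cmp_gt = vcgtq_u8(cur, neighbour);          /* VCGT.U8           */
    uint8x16_t cmp_lt = vcltq_u8(cur, neighbour);          /* VCLT.U8           */
    return vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt));  /* VSUB.U8 Q8,Q7,Q6  */
}

/* edge_idx = const_2 + sign_up + sign_down; the assembly adds sign_down in a
 * later step, shown here together for brevity. */
static inline int8x16_t sao_edge_idx(int8x16_t sign_up, int8x16_t sign_down)
{
    return vaddq_s8(vaddq_s8(vdupq_n_s8(2), sign_up), sign_down);  /* VADD.I8 */
}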
ihevc_sao_edge_offset_class1_chroma.s 142 VSUB.U8 Q8,Q7,Q6 @sign_up = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
163 VADD.I8 Q6,Q0,Q8 @edge_idx = vaddq_s8(const_2, sign_up)
169 VNEG.S8 Q8,Q10 @sign_up = vnegq_s8(sign_down)
175 VADD.I8 Q11,Q0,Q8 @II edge_idx = vaddq_s8(const_2, sign_up)
179 VNEG.S8 Q8,Q14 @II sign_up = vnegq_s8(sign_down)
244 VADD.I8 Q11,Q0,Q8 @edge_idx = vaddq_s8(const_2, sign_up)
297 VSUB.U8 Q8,Q7,Q6 @sign_up = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
318 VADD.I8 Q6,Q0,Q8 @edge_idx = vaddq_s8(const_2, sign_up)
324 VNEG.S8 Q8,Q10 @sign_up = vnegq_s8(sign_down)
330 VADD.I8 Q11,Q0,Q8 @II edge_idx = vaddq_s8(const_2, sign_up
    [all...]
ihevc_sao_edge_offset_class0.s 157 VCGT.U8 Q8,Q6,Q7 @vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
165 VSUB.I8 Q10,Q9,Q8 @sign_left = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
185 VCGT.U8 Q8,Q6,Q7 @vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
192 VSUB.I8 Q11,Q9,Q8 @sign_right = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
297 VCGT.U8 Q8,Q6,Q7 @vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
299 VSUB.I8 Q10,Q9,Q8 @sign_left = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
305 VCGT.U8 Q8,Q6,Q7 @vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
307 VSUB.I8 Q11,Q9,Q8 @sign_right = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
ihevc_sao_edge_offset_class0_chroma.s 163 VCGT.U8 Q8,Q6,Q7 @vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
170 VSUB.U8 Q10,Q9,Q8 @sign_left = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
191 VCGT.U8 Q8,Q6,Q7 @vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
197 VSUB.U8 Q11,Q9,Q8 @sign_right = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
326 VCGT.U8 Q8,Q6,Q7 @vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
333 VSUB.U8 Q10,Q9,Q8 @sign_left = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
351 VCGT.U8 Q8,Q6,Q7 @vcgtq_u8(pu1_cur_row, pu1_cur_row_tmp)
358 VSUB.U8 Q11,Q9,Q8 @sign_right = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
ihevc_sao_edge_offset_class2.s 251 VCLT.U8 Q8,Q6,Q5 @vcltq_u8(pu1_cur_row, pu1_top_row)
255 VSUB.U8 Q7,Q8,Q7 @sign_up = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
276 VEXT.8 Q9,Q8,Q9,#1 @I pu1_next_row_tmp = vextq_u8(pu1_next_row, pu1_next_row_tmp, 1)
316 VMOV Q6,Q8 @I pu1_cur_row = pu1_next_row
348 VEXT.8 Q11,Q8,Q14,#1 @II pu1_next_row_tmp = vextq_u8(pu1_next_row, pu1_next_row_tmp, 1)
374 VCGT.U8 Q5,Q8,Q9 @III vcgtq_u8(pu1_cur_row, pu1_next_row_tmp)
380 VCLT.U8 Q9,Q8,Q9 @III vcltq_u8(pu1_cur_row, pu1_next_row_tmp)
457 VEXT.8 Q9,Q8,Q9,#1 @pu1_next_row_tmp = vextq_u8(pu1_next_row, pu1_next_row_tmp, 1)
563 VCLT.U8 Q8,Q6,Q5 @vcltq_u8(pu1_cur_row, pu1_top_row)
567 VSUB.U8 Q7,Q8,Q7 @sign_up = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt)
    [all...]
ihevc_sao_edge_offset_class2_chroma.s 343 VCLT.U8 Q8,Q6,Q5 @vcltq_u8(pu1_cur_row, pu1_top_row)
355 VSUB.U8 Q7,Q8,Q7 @sign_up = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
371 VEXT.8 Q9,Q8,Q9,#2 @I pu1_next_row_tmp = vextq_u8(pu1_next_row, pu1_next_row_tmp, 2)
425 VMOV Q6,Q8 @I pu1_cur_row = pu1_next_row
459 VEXT.8 Q14,Q8,Q14,#2 @II pu1_next_row_tmp = vextq_u8(pu1_next_row, pu1_next_row_tmp, 2)
526 VCGT.U8 Q10,Q8,Q9 @III vcgtq_u8(pu1_cur_row, pu1_next_row_tmp)
528 VCLT.U8 Q11,Q8,Q9 @III vcltq_u8(pu1_cur_row, pu1_next_row_tmp)
605 VEXT.8 Q9,Q8,Q9,#2 @pu1_next_row_tmp = vextq_u8(pu1_next_row, pu1_next_row_tmp, 2)
    [all...]
ihevc_sao_edge_offset_class3.s 272 VCLT.U8 Q8,Q6,Q5 @vcltq_u8(pu1_cur_row, pu1_top_row)
275 VSUB.U8 Q7,Q8,Q7 @sign_up = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
291 VEXT.8 Q9,Q9,Q8,#15 @I pu1_next_row_tmp = vextq_u8(pu1_next_row_tmp, pu1_next_row, 15)
331 VMOV Q6,Q8
369 VEXT.8 Q9,Q9,Q8,#15 @II pu1_next_row_tmp = vextq_u8(pu1_next_row_tmp, pu1_next_row, 15)
409 VCGT.U8 Q5,Q8,Q9 @III vcgtq_u8(pu1_cur_row, pu1_next_row_tmp)
413 VCLT.U8 Q9,Q8,Q9 @III vcltq_u8(pu1_cur_row, pu1_next_row_tmp)
494 VEXT.8 Q9,Q9,Q8,#15 @pu1_next_row_tmp = vextq_u8(pu1_next_row_tmp, pu1_next_row, 15)
601 VCLT.U8 Q8,Q6,Q5 @vcltq_u8(pu1_cur_row, pu1_top_row)
602 VSUB.U8 Q7,Q8,Q7 @sign_up = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt)
    [all...]
ihevc_sao_edge_offset_class3_chroma.s 333 VCLT.U8 Q8,Q6,Q5 @vcltq_u8(pu1_cur_row, pu1_top_row)
337 VSUB.U8 Q7,Q8,Q7 @sign_up = vreinterpretq_s8_u8(vsubq_u8(cmp_lt, cmp_gt))
364 VEXT.8 Q9,Q9,Q8,#14 @I pu1_next_row_tmp = vextq_u8(pu1_next_row_tmp, pu1_next_row, 14)
420 VMOV Q6,Q8 @I pu1_cur_row = pu1_next_row
459 VEXT.8 Q14,Q14,Q8,#14 @II pu1_next_row_tmp = vextq_u8(pu1_next_row_tmp, pu1_next_row, 14)
516 VCGT.U8 Q11,Q8,Q9 @III vcgtq_u8(pu1_cur_row, pu1_next_row_tmp)
524 VCLT.U8 Q10,Q8,Q9 @III vcltq_u8(pu1_cur_row, pu1_next_row_tmp)
614 VEXT.8 Q9,Q9,Q8,#14 @pu1_next_row_tmp = vextq_u8(pu1_next_row_tmp, pu1_next_row, 14)
    [all...]
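
The class2/class3 variants above additionally splice the current and next rows with VEXT so each lane sees its diagonal neighbour: offset 1 for luma, 2 for interleaved chroma, and 15/14 for the opposite diagonal. A small sketch of the luma case, with illustrative names:

#include <arm_neon.h>

/* Diagonal neighbour for luma: take the last 15 bytes of the 16-byte row and
 * one byte of the data that follows it. */
static inline uint8x16_t diag_neighbour(uint8x16_t next_row, uint8x16_t next_row_tail)
{
    return vextq_u8(next_row, next_row_tail, 1);  /* VEXT.8 Q9,Q8,Q9,#1 */
}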
  /external/chromium_org/third_party/openmax_dl/dl/sp/src/arm/neon/
armSP_FFT_CToC_FC32_Radix8_fs_unsafe_s.S 113 #define qU0 Q8.F32
140 #define qV1 Q8.F32
164 #define qY0 Q8.F32
armSP_FFT_CToC_SC16_Radix4_ls_unsafe_s.S 132 #define qY1 Q8.S16
144 #define qT3 Q8.S32
armSP_FFT_CToC_SC16_Radix4_unsafe_s.S 106 #define qT0 Q8.S32
121 #define qY2 Q8.S16
armSP_FFT_CToC_SC16_Radix8_fs_unsafe_s.S 124 #define qU0 Q8.S16
154 #define qV1 Q8.S16
181 #define qY0 Q8.S16
armSP_FFT_CToC_SC32_Radix8_fs_unsafe_s.S 123 #define qU0 Q8.S32
151 #define qV1 Q8.S32
177 #define qY0 Q8.S32
armSP_FFT_CToC_SC32_Radix4_ls_unsafe_s.S 112 #define qT1 Q8.S64
128 #define qY0 Q8.S32
armSP_FFT_CToC_SC32_Radix4_unsafe_s.S 108 #define qT0 Q8.S64
124 #define qY2 Q8.S32
armSP_FFTInv_CCSToR_S32_preTwiddleRadix2_unsafe_s.S 114 #define qT2 Q8.S64
armSP_FFT_CToC_FC32_Radix4_fs_unsafe_s.S 103 #define qZ0 Q8.F32
armSP_FFT_CToC_SC16_Radix4_fs_unsafe_s.S 112 #define qZ0 Q8.S16
  /bionic/libm/upstream-freebsd/lib/msun/bsdsrc/
b_tgamma.c 110 #define Q8 6.13275507472443958924745652239e-06
254 q = Q0 +z*(Q1+z*(Q2+z*(Q3+z*(Q4+z*(Q5+z*(Q6+z*(Q7+z*Q8)))))));
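
The b_tgamma.c hit is the only non-assembly result: Q8 is simply the leading coefficient of a degree-8 polynomial evaluated by Horner's rule. Restated generically (the q[] coefficients are placeholders, not the FreeBSD values):

/* Generic Horner evaluation; q[0..8] stand in for Q0..Q8. */
static double poly9(const double q[9], double z)
{
    double acc = q[8];            /* leading coefficient Q8            */
    for (int i = 7; i >= 0; --i)
        acc = q[i] + z * acc;     /* q = Q0 + z*(Q1 + ... + z*Q8)      */
    return acc;
}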
