/external/libvpx/libvpx/vp8/common/arm/neon/
bilinearpredict_neon.c
     82  d0 = vmull_u8(vreinterpret_u8_u32(c0.val[0]), filter0);
     83  d1 = vmull_u8(vreinterpret_u8_u32(c1.val[0]), filter0);
     84  d2 = vmull_u8(a4, filter0);
    103  uint16x8_t b0 = vmull_u8(e0, filter0);
    104  uint16x8_t b1 = vmull_u8(e1, filter0);
    153  q6u16 = vmull_u8(vget_low_u8(q1u8), d0u8);
    154  q7u16 = vmull_u8(vget_low_u8(q2u8), d0u8);
    155  q8u16 = vmull_u8(vget_low_u8(q3u8), d0u8);
    156  q9u16 = vmull_u8(vget_low_u8(q4u8), d0u8);
    157  q10u16 = vmull_u8(vget_low_u8(q5u8), d0u8)
    [all...]
sixtappredict_neon.c
    124  c0 = vmull_u8(b0, filter0);
    125  c1 = vmull_u8(b2, filter0);
    126  c2 = vmull_u8(b5, filter5);
    127  c3 = vmull_u8(b7, filter5);
    225  d0123 = vmull_u8(vreinterpret_u8_u32(s01_f5_q.val[0]), filter5);
    226  d4567 = vmull_u8(vreinterpret_u8_u32(s23_f5_q.val[0]), filter5);
    259  d0123_a = vmull_u8(vreinterpret_u8_u32(s01_f3_q.val[0]), filter3);
    260  d4567_a = vmull_u8(vreinterpret_u8_u32(s23_f3_q.val[0]), filter3);
    301  d0123 = vmull_u8(vreinterpret_u8_u32(s01_f5_q.val[0]), filter5);
    302  d4567 = vmull_u8(vreinterpret_u8_u32(s23_f5_q.val[0]), filter5)
    [all...]
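The two vp8 files above share one shape: vmull_u8 widens 8-bit pixels against 8-bit filter taps into a 16-bit accumulator that later vmlal_u8 calls extend. A minimal intrinsics sketch of that shape, assuming VP8's 7-bit bilinear weights (which sum to 128); the names here are illustrative, not libvpx's:

    #include <arm_neon.h>

    /* Sketch only: one bilinear tap pair.  Assumes the two weights sum to
     * 128 (VP8's bilinear filters), so a rounding narrow by 7 restores the
     * 8-bit range. */
    static uint8x8_t bilinear_tap(uint8x8_t s0, uint8x8_t s1,
                                  uint8x8_t filter0, uint8x8_t filter1) {
        uint16x8_t acc = vmull_u8(s0, filter0);   /* widen: u8 * u8 -> u16 */
        acc = vmlal_u8(acc, s1, filter1);         /* accumulate second tap */
        return vrshrn_n_u16(acc, 7);              /* (acc + 64) >> 7, narrow */
    }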
/external/libhevc/common/arm/
ihevc_intra_pred_chroma_mode_27_to_33.s
    175  vmull.u8 q5,d8,d30    @(i row)vmull_u8(ref_main_idx, dup_const_32_fract)
    178  vmlal.u8 q5,d9,d31    @(i row)vmull_u8(ref_main_idx_1, dup_const_fract)
    186  vmull.u8 q7,d12,d28   @(ii)vmull_u8(ref_main_idx, dup_const_32_fract)
    190  vmlal.u8 q7,d13,d29   @(ii)vmull_u8(ref_main_idx_1, dup_const_fract)
    201  vmull.u8 q9,d16,d26   @(iii)vmull_u8(ref_main_idx, dup_const_32_fract)
    204  vmlal.u8 q9,d17,d27   @(iii)vmull_u8(ref_main_idx_1, dup_const_fract)
    218  vmull.u8 q11,d20,d24  @(iv)vmull_u8(ref_main_idx, dup_const_32_fract)
    222  vmlal.u8 q11,d21,d25  @(iv)vmull_u8(ref_main_idx_1, dup_const_fract)
    236  vmull.u8 q5,d8,d30    @(v)vmull_u8(ref_main_idx, dup_const_32_fract)
    239  vmlal.u8 q5,d9,d31    @(v)vmull_u8(ref_main_idx_1, dup_const_fract
    [all...]
ihevc_intra_pred_filters_luma_mode_19_to_25.s
    287  vmull.u8 q5,d8,d30    @(i row)vmull_u8(ref_main_idx, dup_const_32_fract)
    290  vmlal.u8 q5,d9,d31    @(i row)vmull_u8(ref_main_idx_1, dup_const_fract)
    297  vmull.u8 q7,d12,d28   @(ii)vmull_u8(ref_main_idx, dup_const_32_fract)
    301  vmlal.u8 q7,d13,d29   @(ii)vmull_u8(ref_main_idx_1, dup_const_fract)
    312  vmull.u8 q9,d16,d26   @(iii)vmull_u8(ref_main_idx, dup_const_32_fract)
    315  vmlal.u8 q9,d17,d27   @(iii)vmull_u8(ref_main_idx_1, dup_const_fract)
    327  vmull.u8 q11,d20,d24  @(iv)vmull_u8(ref_main_idx, dup_const_32_fract)
    331  vmlal.u8 q11,d21,d25  @(iv)vmull_u8(ref_main_idx_1, dup_const_fract)
    344  vmull.u8 q5,d8,d30    @(v)vmull_u8(ref_main_idx, dup_const_32_fract)
    347  vmlal.u8 q5,d9,d31    @(v)vmull_u8(ref_main_idx_1, dup_const_fract
    [all...]
ihevc_intra_pred_luma_mode_27_to_33.s
    178  vmull.u8 q5,d8,d30    @(i row)vmull_u8(ref_main_idx, dup_const_32_fract)
    181  vmlal.u8 q5,d9,d31    @(i row)vmull_u8(ref_main_idx_1, dup_const_fract)
    189  vmull.u8 q7,d12,d28   @(ii)vmull_u8(ref_main_idx, dup_const_32_fract)
    193  vmlal.u8 q7,d13,d29   @(ii)vmull_u8(ref_main_idx_1, dup_const_fract)
    204  vmull.u8 q9,d16,d26   @(iii)vmull_u8(ref_main_idx, dup_const_32_fract)
    207  vmlal.u8 q9,d17,d27   @(iii)vmull_u8(ref_main_idx_1, dup_const_fract)
    220  vmull.u8 q11,d20,d24  @(iv)vmull_u8(ref_main_idx, dup_const_32_fract)
    224  vmlal.u8 q11,d21,d25  @(iv)vmull_u8(ref_main_idx_1, dup_const_fract)
    238  vmull.u8 q5,d8,d30    @(v)vmull_u8(ref_main_idx, dup_const_32_fract)
    241  vmlal.u8 q5,d9,d31    @(v)vmull_u8(ref_main_idx_1, dup_const_fract
    [all...]
ihevc_intra_pred_filters_chroma_mode_19_to_25.s
    284  vmull.u8 q5,d8,d30    @(i row)vmull_u8(ref_main_idx, dup_const_32_fract)
    287  vmlal.u8 q5,d9,d31    @(i row)vmull_u8(ref_main_idx_1, dup_const_fract)
    294  vmull.u8 q7,d12,d28   @(ii)vmull_u8(ref_main_idx, dup_const_32_fract)
    298  vmlal.u8 q7,d13,d29   @(ii)vmull_u8(ref_main_idx_1, dup_const_fract)
    309  vmull.u8 q9,d16,d26   @(iii)vmull_u8(ref_main_idx, dup_const_32_fract)
    312  vmlal.u8 q9,d17,d27   @(iii)vmull_u8(ref_main_idx_1, dup_const_fract)
    325  vmull.u8 q11,d20,d24  @(iv)vmull_u8(ref_main_idx, dup_const_32_fract)
    329  vmlal.u8 q11,d21,d25  @(iv)vmull_u8(ref_main_idx_1, dup_const_fract)
    342  vmull.u8 q5,d8,d30    @(v)vmull_u8(ref_main_idx, dup_const_32_fract)
    345  vmlal.u8 q5,d9,d31    @(v)vmull_u8(ref_main_idx_1, dup_const_fract
    [all...]
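The four intra-prediction listings above all implement the same two-instruction step their comments spell out: pred = (ref_main_idx*(32 - fract) + ref_main_idx_1*fract + 16) >> 5, HEVC's angular interpolation with 5-bit fractional weights. A hedged intrinsics sketch that mirrors the comments (not the shipped kernel):

    #include <arm_neon.h>

    /* HEVC angular interpolation between two reference samples; fract is
     * the 0..31 fractional position along the prediction angle. */
    static uint8x8_t angular_interp(uint8x8_t ref_main_idx,
                                    uint8x8_t ref_main_idx_1,
                                    uint8_t fract) {
        uint8x8_t dup_const_fract    = vdup_n_u8(fract);
        uint8x8_t dup_const_32_fract = vdup_n_u8(32 - fract);
        uint16x8_t sum = vmull_u8(ref_main_idx, dup_const_32_fract);
        sum = vmlal_u8(sum, ref_main_idx_1, dup_const_fract);
        return vrshrn_n_u16(sum, 5);  /* the (+16) >> 5 via rounding narrow */
    }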
ihevc_inter_pred_chroma_horz.s
    170  vmull.u8 q15,d2,d25   @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    207  vmull.u8 q11,d10,d25  @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    233  vmull.u8 q10,d11,d25  @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    245  vmull.u8 q15,d2,d25   @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    292  vmull.u8 q11,d10,d25  @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    306  vmull.u8 q10,d11,d25  @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    314  vmull.u8 q15,d2,d25   @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    344  vmull.u8 q11,d10,d25  @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    350  vmull.u8 q10,d11,d25  @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    400  vmull.u8 q4,d1,d25    @mul_res = vmull_u8(src[0_3], coeffabs_3)
    [all...]
ihevc_inter_pred_chroma_horz_w16out.s
    190  vmull.u8 q15,d2,d25   @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    221  vmull.u8 q11,d10,d25  @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    249  vmull.u8 q10,d11,d25  @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    262  vmull.u8 q15,d2,d25   @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    302  vmull.u8 q11,d10,d25  @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    316  vmull.u8 q10,d11,d25  @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    324  vmull.u8 q15,d2,d25   @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    347  vmull.u8 q11,d10,d25  @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    353  vmull.u8 q10,d11,d25  @mul_res = vmull_u8(src[0_3], coeffabs_3)@
    398  vmull.u8 q4,d1,d25    @mul_res = vmull_u8(src[0_3], coeffabs_3)
    [all...]
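Both chroma horizontal listings open their 4-tap sum with mul_res = vmull_u8(src, coeffabs_*) and fold the remaining taps in with vmlal_u8/vmlsl_u8 on the coefficient magnitudes, letting 16-bit wraparound stand in for signed taps. A sketch under assumed weights {-2, 58, 10, -2} (one HEVC chroma filter phase; the real kernels load theirs from the coefficient table):

    #include <arm_neon.h>

    /* Sketch of the coeffabs pattern: multiply by magnitudes, subtract the
     * negative taps.  The true value fits in int16, so reinterpreting the
     * wrapped unsigned accumulator as signed is exact. */
    static int16x8_t chroma_horz_4tap(uint8x8_t s0, uint8x8_t s1,
                                      uint8x8_t s2, uint8x8_t s3) {
        const uint8x8_t coeffabs_0 = vdup_n_u8(2);   /* tap is -2 */
        const uint8x8_t coeffabs_1 = vdup_n_u8(58);
        const uint8x8_t coeffabs_2 = vdup_n_u8(10);
        const uint8x8_t coeffabs_3 = vdup_n_u8(2);   /* tap is -2 */
        uint16x8_t mul_res = vmull_u8(s1, coeffabs_1);
        mul_res = vmlsl_u8(mul_res, s0, coeffabs_0);
        mul_res = vmlal_u8(mul_res, s2, coeffabs_2);
        mul_res = vmlsl_u8(mul_res, s3, coeffabs_3);
        return vreinterpretq_s16_u16(mul_res);
    }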
ihevc_inter_pred_filters_luma_vert_w16inp.s
    148  vmull.s16 q4,d1,d23   @mul_res1 = vmull_u8(src_tmp2, coeffabs_1)@
    164  vmull.s16 q5,d2,d23   @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    220  vmull.s16 q4,d1,d23   @mul_res1 = vmull_u8(src_tmp2, coeffabs_1)@
    236  vmull.s16 q5,d2,d23   @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    305  vmull.s16 q4,d1,d23   @mul_res1 = vmull_u8(src_tmp2, coeffabs_1)@
    319  vmull.s16 q5,d2,d23   @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
ihevc_inter_pred_luma_vert_w16inp_w16out.s
    158  vmull.s16 q4,d1,d23   @mul_res1 = vmull_u8(src_tmp2, coeffabs_1)@
    174  vmull.s16 q5,d2,d23   @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    233  vmull.s16 q4,d1,d23   @mul_res1 = vmull_u8(src_tmp2, coeffabs_1)@
    250  vmull.s16 q5,d2,d23   @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    322  vmull.s16 q4,d1,d23   @mul_res1 = vmull_u8(src_tmp2, coeffabs_1)@
    337  vmull.s16 q5,d2,d23   @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
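Note that in the two *_w16inp listings above the instruction is vmull.s16, not vmull_u8: their input is already 16-bit intermediate data from a horizontal pass, and the inherited comments were simply never updated. The intrinsic equivalent of that step is:

    #include <arm_neon.h>

    /* The s16 variant behind "vmull.s16 q4,d1,d23": widen 16-bit
     * intermediates against 16-bit coefficients into 32-bit products. */
    static int32x4_t vert_tap_w16inp(int16x4_t src_tmp2, int16x4_t coeffabs_1) {
        return vmull_s16(src_tmp2, coeffabs_1);
    }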
ihevc_inter_pred_filters_luma_vert.s
    158  vmull.u8 q4,d1,d23    @mul_res1 = vmull_u8(src_tmp2, coeffabs_1)@
    176  vmull.u8 q5,d2,d23    @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    241  vmull.u8 q4,d1,d23    @mul_res1 = vmull_u8(src_tmp2, coeffabs_1)@
    267  vmull.u8 q5,d2,d23    @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    347  vmull.u8 q4,d1,d23    @mul_res1 = vmull_u8(src_tmp2, coeffabs_1)@
    360  vmull.u8 q5,d2,d23    @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    433  vmull.u8 q0,d5,d23    @mul_res1 = vmull_u8(vreinterpret_u8_u32(src_tmp2), coeffabs_1)@
    446  vmull.u8 q1,d7,d25    @mul_res2 = vmull_u8(vreinterpret_u8_u32(src_tmp4), coeffabs_3)@
    608  vmull.u8 q4,d1,d23    @mul_res1 = vmull_u8(src_tmp2, coeffabs_1)@
    626  vmull.u8 q5,d2,d23    @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)
    [all...]
ihevc_intra_pred_filters_neon_intr.c
    750  prod_t1 = vmull_u8(const_nt_1_col_t, pu1_ref_two_nt_1_row_dup);
    754  prod_t2 = vmull_u8(const_col_1_t, pu1_ref_three_nt_1_dup);
    830  prod_t1 = vmull_u8(const_nt_1_col_t, pu1_ref_two_nt_1_row_dup);
    837  prod_t2 = vmull_u8(const_col_1_t, pu1_ref_three_nt_1_dup);
    [all...]
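ihevc_intra_pred_filters_neon_intr.c is the one intrinsics (non-assembly) user in this tree; its hits are the column-weighted products of planar-style prediction, with weights (nt - 1 - col) and (col + 1) applied to duplicated reference samples. A hedged sketch of just those two products (which reference sample each dup holds is inferred from the variable names only, and the final normalization is omitted):

    #include <arm_neon.h>

    /* Sketch: the two vmull_u8 terms from the hits.  const_nt_1_col_t holds
     * per-lane (nt-1-col) weights, const_col_1_t holds (col+1). */
    static uint16x8_t planar_col_terms(uint8x8_t const_nt_1_col_t,
                                       uint8_t ref_two_nt_1_row,
                                       uint8x8_t const_col_1_t,
                                       uint8_t ref_three_nt_1) {
        uint16x8_t prod_t1 = vmull_u8(const_nt_1_col_t,
                                      vdup_n_u8(ref_two_nt_1_row));
        uint16x8_t prod_t2 = vmull_u8(const_col_1_t,
                                      vdup_n_u8(ref_three_nt_1));
        return vaddq_u16(prod_t1, prod_t2);
    }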
/external/libhevc/common/arm64/
ihevc_intra_pred_chroma_mode_27_to_33.s
    169  umull v10.8h, v23.8b, v30.8b  //(i row)vmull_u8(ref_main_idx, dup_const_32_fract)
    172  umlal v10.8h, v9.8b, v31.8b   //(i row)vmull_u8(ref_main_idx_1, dup_const_fract)
    180  umull v14.8h, v12.8b, v28.8b  //(ii)vmull_u8(ref_main_idx, dup_const_32_fract)
    184  umlal v14.8h, v13.8b, v29.8b  //(ii)vmull_u8(ref_main_idx_1, dup_const_fract)
    195  umull v18.8h, v16.8b, v26.8b  //(iii)vmull_u8(ref_main_idx, dup_const_32_fract)
    198  umlal v18.8h, v17.8b, v27.8b  //(iii)vmull_u8(ref_main_idx_1, dup_const_fract)
    212  umull v22.8h, v20.8b, v24.8b  //(iv)vmull_u8(ref_main_idx, dup_const_32_fract)
    216  umlal v22.8h, v21.8b, v25.8b  //(iv)vmull_u8(ref_main_idx_1, dup_const_fract)
    230  umull v10.8h, v23.8b, v30.8b  //(v)vmull_u8(ref_main_idx, dup_const_32_fract)
    233  umlal v10.8h, v9.8b, v31.8b   //(v)vmull_u8(ref_main_idx_1, dup_const_fract
    [all...]
ihevc_intra_pred_filters_luma_mode_19_to_25.s
    284  umull v10.8h, v23.8b, v30.8b  //(i row)vmull_u8(ref_main_idx, dup_const_32_fract)
    287  umlal v10.8h, v9.8b, v31.8b   //(i row)vmull_u8(ref_main_idx_1, dup_const_fract)
    294  umull v14.8h, v12.8b, v28.8b  //(ii)vmull_u8(ref_main_idx, dup_const_32_fract)
    298  umlal v14.8h, v13.8b, v29.8b  //(ii)vmull_u8(ref_main_idx_1, dup_const_fract)
    309  umull v18.8h, v16.8b, v26.8b  //(iii)vmull_u8(ref_main_idx, dup_const_32_fract)
    313  umlal v18.8h, v17.8b, v27.8b  //(iii)vmull_u8(ref_main_idx_1, dup_const_fract)
    325  umull v22.8h, v20.8b, v24.8b  //(iv)vmull_u8(ref_main_idx, dup_const_32_fract)
    329  umlal v22.8h, v21.8b, v25.8b  //(iv)vmull_u8(ref_main_idx_1, dup_const_fract)
    342  umull v10.8h, v23.8b, v30.8b  //(v)vmull_u8(ref_main_idx, dup_const_32_fract)
    345  umlal v10.8h, v9.8b, v31.8b   //(v)vmull_u8(ref_main_idx_1, dup_const_fract
    [all...]
ihevc_intra_pred_luma_mode_27_to_33.s
    174  umull v10.8h, v23.8b, v30.8b  //(i row)vmull_u8(ref_main_idx, dup_const_32_fract)
    177  umlal v10.8h, v9.8b, v31.8b   //(i row)vmull_u8(ref_main_idx_1, dup_const_fract)
    185  umull v14.8h, v12.8b, v28.8b  //(ii)vmull_u8(ref_main_idx, dup_const_32_fract)
    189  umlal v14.8h, v13.8b, v29.8b  //(ii)vmull_u8(ref_main_idx_1, dup_const_fract)
    200  umull v18.8h, v16.8b, v26.8b  //(iii)vmull_u8(ref_main_idx, dup_const_32_fract)
    204  umlal v18.8h, v17.8b, v27.8b  //(iii)vmull_u8(ref_main_idx_1, dup_const_fract)
    217  umull v22.8h, v20.8b, v24.8b  //(iv)vmull_u8(ref_main_idx, dup_const_32_fract)
    221  umlal v22.8h, v21.8b, v25.8b  //(iv)vmull_u8(ref_main_idx_1, dup_const_fract)
    235  umull v10.8h, v23.8b, v30.8b  //(v)vmull_u8(ref_main_idx, dup_const_32_fract)
    238  umlal v10.8h, v9.8b, v31.8b   //(v)vmull_u8(ref_main_idx_1, dup_const_fract
    [all...]
ihevc_intra_pred_filters_chroma_mode_19_to_25.s
    281  umull v23.8h, v7.8b, v30.8b   //(i row)vmull_u8(ref_main_idx, dup_const_32_fract)
    284  umlal v23.8h, v19.8b, v31.8b  //(i row)vmull_u8(ref_main_idx_1, dup_const_fract)
    291  umull v14.8h, v12.8b, v28.8b  //(ii)vmull_u8(ref_main_idx, dup_const_32_fract)
    295  umlal v14.8h, v13.8b, v29.8b  //(ii)vmull_u8(ref_main_idx_1, dup_const_fract)
    306  umull v18.8h, v16.8b, v26.8b  //(iii)vmull_u8(ref_main_idx, dup_const_32_fract)
    309  umlal v18.8h, v17.8b, v27.8b  //(iii)vmull_u8(ref_main_idx_1, dup_const_fract)
    322  umull v22.8h, v20.8b, v24.8b  //(iv)vmull_u8(ref_main_idx, dup_const_32_fract)
    326  umlal v22.8h, v21.8b, v25.8b  //(iv)vmull_u8(ref_main_idx_1, dup_const_fract)
    339  umull v23.8h, v7.8b, v30.8b   //(v)vmull_u8(ref_main_idx, dup_const_32_fract)
    342  umlal v23.8h, v19.8b, v31.8b  //(v)vmull_u8(ref_main_idx_1, dup_const_fract
    [all...]
ihevc_inter_pred_chroma_horz_w16out.s
    206  umull v30.8h, v2.8b, v25.8b   //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    252  umull v22.8h, v10.8b, v25.8b  //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    287  umull v20.8h, v11.8b, v25.8b  //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    304  umull v30.8h, v2.8b, v25.8b   //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    347  umull v22.8h, v10.8b, v25.8b  //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    365  umull v20.8h, v11.8b, v25.8b  //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    384  umull v30.8h, v2.8b, v25.8b   //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    415  umull v22.8h, v10.8b, v25.8b  //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    421  umull v20.8h, v11.8b, v25.8b  //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    469  umull v29.8h, v1.8b, v25.8b   //mul_res = vmull_u8(src[0_3], coeffabs_3)/
    [all...]
ihevc_inter_pred_chroma_horz.s
    191  umull v30.8h, v2.8b, v25.8b   //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    240  umull v22.8h, v10.8b, v25.8b  //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    277  umull v20.8h, v11.8b, v25.8b  //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    297  umull v30.8h, v2.8b, v25.8b   //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    354  umull v22.8h, v10.8b, v25.8b  //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    370  umull v20.8h, v11.8b, v25.8b  //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    387  umull v30.8h, v2.8b, v25.8b   //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    425  umull v22.8h, v10.8b, v25.8b  //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    431  umull v20.8h, v11.8b, v25.8b  //mul_res = vmull_u8(src[0_3], coeffabs_3)//
    486  umull v29.8h, v1.8b, v25.8b   //mul_res = vmull_u8(src[0_3], coeffabs_3)/
    [all...]
ihevc_inter_pred_filters_luma_vert.s
    164  umull v19.8h, v1.8b, v23.8b  //mul_res1 = vmull_u8(src_tmp2, coeffabs_1)//
    182  umull v20.8h, v2.8b, v23.8b  //mul_res2 = vmull_u8(src_tmp3, coeffabs_1)//
    253  umull v19.8h, v1.8b, v23.8b  //mul_res1 = vmull_u8(src_tmp2, coeffabs_1)//
    281  umull v20.8h, v2.8b, v23.8b  //mul_res2 = vmull_u8(src_tmp3, coeffabs_1)//
    365  umull v19.8h, v1.8b, v23.8b  //mul_res1 = vmull_u8(src_tmp2, coeffabs_1)//
    378  umull v20.8h, v2.8b, v23.8b  //mul_res2 = vmull_u8(src_tmp3, coeffabs_1)//
    457  umull v0.8h, v5.8b, v23.8b   //mul_res1 = vmull_u8(vreinterpret_u8_u32(src_tmp2), coeffabs_1)//
    470  umull v2.8h, v7.8b, v25.8b   //mul_res2 = vmull_u8(vreinterpret_u8_u32(src_tmp4), coeffabs_3)//
ihevc_inter_pred_filters_luma_vert_w16out.s
    121  umull v19.8h, v1.8b, v23.8b  //mul_res1 = vmull_u8(src_tmp2, coeffabs_1)//
    140  umull v20.8h, v2.8b, v23.8b  //mul_res2 = vmull_u8(src_tmp3, coeffabs_1)//
    207  umull v19.8h, v1.8b, v23.8b  //mul_res1 = vmull_u8(src_tmp2, coeffabs_1)//
    234  umull v20.8h, v2.8b, v23.8b  //mul_res2 = vmull_u8(src_tmp3, coeffabs_1)//
    315  umull v19.8h, v1.8b, v23.8b  //mul_res1 = vmull_u8(src_tmp2, coeffabs_1)//
    328  umull v20.8h, v2.8b, v23.8b  //mul_res2 = vmull_u8(src_tmp3, coeffabs_1)//
    410  umull v0.8h, v5.8b, v23.8b   //mul_res1 = vmull_u8(vreinterpret_u8_u32(src_tmp2), coeffabs_1)//
    423  umull v2.8h, v7.8b, v25.8b   //mul_res2 = vmull_u8(vreinterpret_u8_u32(src_tmp4), coeffabs_3)//
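The arm64 listings above are the same kernels in A64 syntax: vmull_u8 typically compiles to umull (.8b sources, .8h destination) and vmlal_u8 to umlal, which is why the // comments still name the intrinsics. The (i)-row pair from the intra kernels, written as the intrinsics the comments reference:

    #include <arm_neon.h>

    /* One umull/umlal pair: widen-multiply then widen-accumulate. */
    static uint16x8_t umull_umlal_pair(uint8x8_t ref_main_idx,
                                       uint8x8_t dup_const_32_fract,
                                       uint8x8_t ref_main_idx_1,
                                       uint8x8_t dup_const_fract) {
        uint16x8_t acc = vmull_u8(ref_main_idx, dup_const_32_fract); /* umull */
        return vmlal_u8(acc, ref_main_idx_1, dup_const_fract);       /* umlal */
    }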
/external/skia/src/opts/
SkBitmapProcState_filter_neon.h
     41  tmp1 = vmull_u8(vreinterpret_u8_u32(va0), v16_y);  // tmp1 = [a01|a00] * (16-y)
     42  tmp2 = vmull_u8(vreinterpret_u8_u32(va1), vy);     // tmp2 = [a11|a10] * y
     76  tmp1 = vmull_u8(vreinterpret_u8_u32(va0), v16_y);  // tmp1 = [a01|a00] * (16-y)
     77  tmp2 = vmull_u8(vreinterpret_u8_u32(va1), vy);     // tmp2 = [a11|a10] * y
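Skia's bitmap filter works in 4-bit fixed point: y runs 0..16, and va0/va1 each pack two 32-bit pixels ([a01|a00] and [a11|a10]) so one vmull_u8 weights both at once. A sketch of just the vertical step from these hits (Skia then weights by x and narrows with a shift):

    #include <arm_neon.h>
    #include <stdint.h>

    /* Vertical blend of two packed pixel pairs by y and (16 - y). */
    static uint16x8_t blend_rows(uint32x2_t va0, uint32x2_t va1, unsigned y) {
        uint8x8_t vy    = vdup_n_u8((uint8_t)y);         /* y in 0..16 */
        uint8x8_t v16_y = vdup_n_u8((uint8_t)(16 - y));
        uint16x8_t tmp1 = vmull_u8(vreinterpret_u8_u32(va0), v16_y);
        uint16x8_t tmp2 = vmull_u8(vreinterpret_u8_u32(va1), vy);
        return vaddq_u16(tmp1, tmp2);
    }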
Sk4px_NEON.h
     50  return Sk16h(vmull_u8(vget_low_u8 (this->fVec), vget_low_u8 (other.fVec)),
     51               vmull_u8(vget_high_u8(this->fVec), vget_high_u8(other.fVec)));
SkBlitRow_opts_arm_neon.cpp
     55  vdst_wide = vmull_u8(vdst, vdup_n_u8(dst_scale));
     80  vdst_wide = vmull_u8(vdst, vdup_n_u8(dst_scale));
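The blit-row hits use the same widening trick for alpha blending: scale all eight destination channel bytes by one 8-bit factor and keep 16 bits of product until narrowing. A reduced sketch (the surrounding source/destination blend arithmetic that Skia performs before narrowing is omitted):

    #include <arm_neon.h>
    #include <stdint.h>

    /* Scale eight destination channel bytes by dst_scale, then narrow. */
    static uint8x8_t scale_dst(uint8x8_t vdst, uint8_t dst_scale) {
        uint16x8_t vdst_wide = vmull_u8(vdst, vdup_n_u8(dst_scale));
        return vshrn_n_u16(vdst_wide, 8);  /* back to 8 bits */
    }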
/external/libvpx/libvpx/vpx_dsp/arm/
subpel_variance_neon.c
     38  const uint16x8_t a = vmull_u8(src_0, f0);
     63  const uint16x8_t a = vmull_u8(vget_low_u8(src_0), f0);
     66  const uint16x8_t c = vmull_u8(vget_high_u8(src_0), f0);
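subpel_variance_neon.c shows the width-handling idiom (as does Sk4px_NEON.h above): an 8-wide row feeds vmull_u8 directly, while a 16-wide row is split with vget_low_u8/vget_high_u8 because the widening multiply consumes only 8 lanes. A minimal sketch of the split:

    #include <arm_neon.h>

    /* Two widening multiplies cover one 16-byte row. */
    static void mull_16wide(uint8x16_t src_0, uint8x8_t f0,
                            uint16x8_t *lo, uint16x8_t *hi) {
        *lo = vmull_u8(vget_low_u8(src_0), f0);
        *hi = vmull_u8(vget_high_u8(src_0), f0);
    }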
/external/webp/src/dsp/
alpha_processing_neon.c
     27  const uint16x8_t r1 = vmull_u8((V).val[1], alpha); \
     28  const uint16x8_t g1 = vmull_u8((V).val[2], alpha); \
     29  const uint16x8_t b1 = vmull_u8((V).val[(OTHER)], alpha); \
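The WebP macro body premultiplies each deinterleaved channel by alpha; (V).val[1], (V).val[2], and (V).val[(OTHER)] pick the channels in memory order so the same macro serves both RGBA and BGRA layouts. A hedged sketch with plain channel names (WebP's subsequent /255-style rounding is left out):

    #include <arm_neon.h>

    /* Widen each channel against alpha; narrowing/rounding happens later. */
    static void premul_channels(uint8x8_t r, uint8x8_t g, uint8x8_t b,
                                uint8x8_t alpha,
                                uint16x8_t *r1, uint16x8_t *g1,
                                uint16x8_t *b1) {
        *r1 = vmull_u8(r, alpha);
        *g1 = vmull_u8(g, alpha);
        *b1 = vmull_u8(b, alpha);
    }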