
Lines Matching refs:vM

4278 010 01110 11 1 m 100001 n d ADD Vd.2d, Vn.2d, Vm.2d
4279 010 01110 10 1 m 100001 n d ADD Vd.4s, Vn.4s, Vm.4s
4280 010 01110 01 1 m 100001 n d ADD Vd.8h, Vn.8h, Vm.8h
4281 010 01110 00 1 m 100001 n d ADD Vd.16b, Vn.16b, Vm.16b
4283 011 01110 11 1 m 100001 n d SUB Vd.2d, Vn.2d, Vm.2d
4284 011 01110 10 1 m 100001 n d SUB Vd.4s, Vn.4s, Vm.4s
4285 011 01110 01 1 m 100001 n d SUB Vd.8h, Vn.8h, Vm.8h
4286 011 01110 00 1 m 100001 n d SUB Vd.16b, Vn.16b, Vm.16b
4288 010 01110 10 1 m 100111 n d MUL Vd.4s, Vn.4s, Vm.4s
4289 010 01110 01 1 m 100111 n d MUL Vd.8h, Vn.8h, Vm.8h
4290 010 01110 00 1 m 100111 n d MUL Vd.16b, Vn.16b, Vm.16b
4292 010 01110 01 1 m 110101 n d FADD Vd.2d, Vn.2d, Vm.2d
4293 010 01110 00 1 m 110101 n d FADD Vd.4s, Vn.4s, Vm.4s
4294 010 01110 11 1 m 110101 n d FSUB Vd.2d, Vn.2d, Vm.2d
4295 010 01110 10 1 m 110101 n d FSUB Vd.4s, Vn.4s, Vm.4s
4297 011 01110 01 1 m 110111 n d FMUL Vd.2d, Vn.2d, Vm.2d
4298 011 01110 00 1 m 110111 n d FMUL Vd.4s, Vn.4s, Vm.4s
4299 011 01110 01 1 m 111111 n d FDIV Vd.2d, Vn.2d, Vm.2d
4300 011 01110 00 1 m 111111 n d FDIV Vd.4s, Vn.4s, Vm.4s
4302 010 01110 01 1 m 111101 n d FMAX Vd.2d, Vn.2d, Vm.2d
4303 010 01110 00 1 m 111101 n d FMAX Vd.4s, Vn.4s, Vm.4s
4304 010 01110 11 1 m 111101 n d FMIN Vd.2d, Vn.2d, Vm.2d
4305 010 01110 10 1 m 111101 n d FMIN Vd.4s, Vn.4s, Vm.4s
4307 011 01110 10 1 m 011001 n d UMAX Vd.4s, Vn.4s, Vm.4s
4308 011 01110 01 1 m 011001 n d UMAX Vd.8h, Vn.8h, Vm.8h
4309 011 01110 00 1 m 011001 n d UMAX Vd.16b, Vn.16b, Vm.16b
4311 011 01110 10 1 m 011011 n d UMIN Vd.4s, Vn.4s, Vm.4s
4312 011 01110 01 1 m 011011 n d UMIN Vd.8h, Vn.8h, Vm.8h
4313 011 01110 00 1 m 011011 n d UMIN Vd.16b, Vn.16b, Vm.16b
4315 010 01110 10 1 m 011001 n d SMAX Vd.4s, Vn.4s, Vm.4s
4316 010 01110 01 1 m 011001 n d SMAX Vd.8h, Vn.8h, Vm.8h
4317 010 01110 00 1 m 011001 n d SMAX Vd.16b, Vn.16b, Vm.16b
4319 010 01110 10 1 m 011011 n d SMIN Vd.4s, Vn.4s, Vm.4s
4320 010 01110 01 1 m 011011 n d SMIN Vd.8h, Vn.8h, Vm.8h
4321 010 01110 00 1 m 011011 n d SMIN Vd.16b, Vn.16b, Vm.16b
4323 010 01110 00 1 m 000111 n d AND Vd, Vn, Vm
4324 010 01110 10 1 m 000111 n d ORR Vd, Vn, Vm
4325 011 01110 00 1 m 000111 n d EOR Vd, Vn, Vm
4327 011 01110 11 1 m 100011 n d CMEQ Vd.2d, Vn.2d, Vm.2d
4328 011 01110 10 1 m 100011 n d CMEQ Vd.4s, Vn.4s, Vm.4s
4329 011 01110 01 1 m 100011 n d CMEQ Vd.8h, Vn.8h, Vm.8h
4330 011 01110 00 1 m 100011 n d CMEQ Vd.16b, Vn.16b, Vm.16b
4332 011 01110 11 1 m 001101 n d CMHI Vd.2d, Vn.2d, Vm.2d
4333 011 01110 10 1 m 001101 n d CMHI Vd.4s, Vn.4s, Vm.4s
4334 011 01110 01 1 m 001101 n d CMHI Vd.8h, Vn.8h, Vm.8h
4335 011 01110 00 1 m 001101 n d CMHI Vd.16b, Vn.16b, Vm.16b
4337 010 01110 11 1 m 001101 n d CMGT Vd.2d, Vn.2d, Vm.2d
4338 010 01110 10 1 m 001101 n d CMGT Vd.4s, Vn.4s, Vm.4s
4339 010 01110 01 1 m 001101 n d CMGT Vd.8h, Vn.8h, Vm.8h
4340 010 01110 00 1 m 001101 n d CMGT Vd.16b, Vn.16b, Vm.16b
4342 010 01110 01 1 m 111001 n d FCMEQ Vd.2d, Vn.2d, Vm.2d
4343 010 01110 00 1 m 111001 n d FCMEQ Vd.4s, Vn.4s, Vm.4s
4345 011 01110 01 1 m 111001 n d FCMGE Vd.2d, Vn.2d, Vm.2d
4346 011 01110 00 1 m 111001 n d FCMGE Vd.4s, Vn.4s, Vm.4s
4348 011 01110 11 1 m 111001 n d FCMGT Vd.2d, Vn.2d, Vm.2d
4349 011 01110 10 1 m 111001 n d FCMGT Vd.4s, Vn.4s, Vm.4s
4351 010 01110 00 0 m 000000 n d TBL Vd.16b, {Vn.16b}, Vm.16b
4353 010 01110 11 0 m 000110 n d UZP1 Vd.2d, Vn.2d, Vm.2d
4354 010 01110 10 0 m 000110 n d UZP1 Vd.4s, Vn.4s, Vm.4s
4355 010 01110 01 0 m 000110 n d UZP1 Vd.8h, Vn.8h, Vm.8h
4356 010 01110 00 0 m 000110 n d UZP1 Vd.16b, Vn.16b, Vm.16b
4358 010 01110 11 0 m 010110 n d UZP2 Vd.2d, Vn.2d, Vm.2d
4359 010 01110 10 0 m 010110 n d UZP2 Vd.4s, Vn.4s, Vm.4s
4360 010 01110 01 0 m 010110 n d UZP2 Vd.8h, Vn.8h, Vm.8h
4361 010 01110 00 0 m 010110 n d UZP2 Vd.16b, Vn.16b, Vm.16b
4363 010 01110 10 0 m 001110 n d ZIP1 Vd.4s, Vn.4s, Vm.4s
4364 010 01110 01 0 m 001110 n d ZIP1 Vd.8h, Vn.8h, Vm.8h
4365 010 01110 00 0 m 001110 n d ZIP1 Vd.16b, Vn.16b, Vm.16b
4367 010 01110 10 0 m 011110 n d ZIP2 Vd.4s, Vn.4s, Vm.4s
4368 010 01110 01 0 m 011110 n d ZIP2 Vd.8h, Vn.8h, Vm.8h
4369 010 01110 00 0 m 011110 n d ZIP2 Vd.16b, Vn.16b, Vm.16b
4371 011 01110 00 1 m 100111 n d PMUL Vd.16b, Vn.16b, Vm.16b
4373 000 01110 00 1 m 111000 n d PMULL Vd.8h, Vn.8b, Vm.8b
4375 001 01110 10 1 m 110000 n d UMULL Vd.2d, Vn.2s, Vm.2s
4376 001 01110 01 1 m 110000 n d UMULL Vd.4s, Vn.4h, Vm.4h
4377 001 01110 00 1 m 110000 n d UMULL Vd.8h, Vn.8b, Vm.8b
4379 000 01110 10 1 m 110000 n d SMULL Vd.2d, Vn.2s, Vm.2s
4380 000 01110 01 1 m 110000 n d SMULL Vd.4s, Vn.4h, Vm.4h
4381 000 01110 00 1 m 110000 n d SMULL Vd.8h, Vn.8b, Vm.8b
4383 010 01110 11 1 m 000011 n d SQADD Vd.2d, Vn.2d, Vm.2d
4384 010 01110 10 1 m 000011 n d SQADD Vd.4s, Vn.4s, Vm.4s
4385 010 01110 01 1 m 000011 n d SQADD Vd.8h, Vn.8h, Vm.8h
4386 010 01110 00 1 m 000011 n d SQADD Vd.16b, Vn.16b, Vm.16b
4388 011 01110 11 1 m 000011 n d UQADD Vd.2d, Vn.2d, Vm.2d
4389 011 01110 10 1 m 000011 n d UQADD Vd.4s, Vn.4s, Vm.4s
4390 011 01110 01 1 m 000011 n d UQADD Vd.8h, Vn.8h, Vm.8h
4391 011 01110 00 1 m 000011 n d UQADD Vd.16b, Vn.16b, Vm.16b
4393 010 01110 11 1 m 001011 n d SQSUB Vd.2d, Vn.2d, Vm.2d
4394 010 01110 10 1 m 001011 n d SQSUB Vd.4s, Vn.4s, Vm.4s
4395 010 01110 01 1 m 001011 n d SQSUB Vd.8h, Vn.8h, Vm.8h
4396 010 01110 00 1 m 001011 n d SQSUB Vd.16b, Vn.16b, Vm.16b
4398 011 01110 11 1 m 001011 n d UQSUB Vd.2d, Vn.2d, Vm.2d
4399 011 01110 10 1 m 001011 n d UQSUB Vd.4s, Vn.4s, Vm.4s
4400 011 01110 01 1 m 001011 n d UQSUB Vd.8h, Vn.8h, Vm.8h
4401 011 01110 00 1 m 001011 n d UQSUB Vd.16b, Vn.16b, Vm.16b
4403 000 01110 10 1 m 110100 n d SQDMULL Vd.2d, Vn.2s, Vm.2s
4404 000 01110 01 1 m 110100 n d SQDMULL Vd.4s, Vn.4h, Vm.4h
4406 010 01110 10 1 m 101101 n d SQDMULH Vd.4s, Vn.4s, Vm.4s
4407 010 01110 01 1 m 101101 n d SQDMULH Vd.8h, Vn.8h, Vm.8h
4408 011 01110 10 1 m 101101 n d SQRDMULH Vd.4s, Vn.4s, Vm.4s
4409 011 01110 01 1 m 101101 n d SQRDMULH Vd.8h, Vn.8h, Vm.8h
4411 010 01110 sz 1 m 010011 n d SQSHL@sz Vd, Vn, Vm
4412 010 01110 sz 1 m 010111 n d SQRSHL@sz Vd, Vn, Vm
4413 011 01110 sz 1 m 010011 n d UQSHL@sz Vd, Vn, Vm
4414 011 01110 sz 1 m 010111 n d UQRSHL@sz Vd, Vn, Vm
4416 010 01110 sz 1 m 010001 n d SSHL@sz Vd, Vn, Vm
4417 010 01110 sz 1 m 010101 n d SRSHL@sz Vd, Vn, Vm
4418 011 01110 sz 1 m 010001 n d USHL@sz Vd, Vn, Vm
4419 011 01110 sz 1 m 010101 n d URSHL@sz Vd, Vn, Vm
4421 010 01110 01 1 m 111111 n d FRECPS Vd.2d, Vn.2d, Vm.2d
4422 010 01110 00 1 m 111111 n d FRECPS Vd.4s, Vn.4s, Vm.4s
4423 010 01110 11 1 m 111111 n d FRSQRTS Vd.2d, Vn.2d, Vm.2d
4424 010 01110 10 1 m 111111 n d FRSQRTS Vd.4s, Vn.4s, Vm.4s
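The 3+8+5+6+5+5 split in the rows above mirrors the X_3_8_5_6_5_5 helper used by the emitter below: six fields of 3, 8, 5, 6, 5 and 5 bits, OR'd together MSB-first into one 32-bit instruction word, which is why each emit line can be read directly against its comment row. What follows is a minimal, self-contained sketch of a packer with that shape; the name pack_3_8_5_6_5_5, the asserts and the demo operands are illustrative assumptions, not code from this file.

/* Illustrative sketch: pack six MSB-first fields of 3, 8, 5, 6, 5, 5 bits
   into a 32-bit A64 instruction word.  Not the file's X_3_8_5_6_5_5. */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t pack_3_8_5_6_5_5(uint32_t f1, uint32_t f2, uint32_t f3,
                                 uint32_t f4, uint32_t f5, uint32_t f6)
{
   assert(f1 < (1u << 3) && f2 < (1u << 8) && f3 < (1u << 5));
   assert(f4 < (1u << 6) && f5 < (1u << 5) && f6 < (1u << 5));
   return (f1 << 29) | (f2 << 21) | (f3 << 16)
        | (f4 << 10) | (f5 << 5)  | f6;
}

int main(void)
{
   /* ADD Vd.4s, Vn.4s, Vm.4s is "010 01110 10 1 m 100001 n d" above,
      i.e. fields 0b010, 0b01110101, m, 0b100001, n, d.  With d=0, n=1,
      m=2 this gives 0x4EA28420, i.e. add v0.4s, v1.4s, v2.4s. */
   uint32_t insn = pack_3_8_5_6_5_5(0x2, 0x75, 2, 0x21, 1, 0);
   printf("ADD V0.4S, V1.4S, V2.4S -> 0x%08X\n", (unsigned)insn);
   return 0;
}

Naming the packer after its field widths keeps each emit call readable against the comment table: the 3-bit and 8-bit arguments carry the fixed opcode bits, and the three 5-bit arguments carry Vm, Vn and Vd.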
4428 UInt vM = qregEnc(i->ARM64in.VBinV.argR);
4431 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X100001, vN, vD);
4434 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X100001, vN, vD);
4437 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X100001, vN, vD);
4440 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X100001, vN, vD);
4443 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X100001, vN, vD);
4446 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X100001, vN, vD);
4449 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X100001, vN, vD);
4452 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X100001, vN, vD);
4455 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X100111, vN, vD);
4458 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X100111, vN, vD);
4461 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X100111, vN, vD);
4464 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X110101, vN, vD);
4467 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X110101, vN, vD);
4470 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X110101, vN, vD);
4473 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X110101, vN, vD);
4476 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X110111, vN, vD);
4479 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X110111, vN, vD);
4482 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X111111, vN, vD);
4485 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X111111, vN, vD);
4489 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X111101, vN, vD);
4492 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X111101, vN, vD);
4495 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X111101, vN, vD);
4498 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X111101, vN, vD);
4502 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X011001, vN, vD);
4505 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X011001, vN, vD);
4508 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X011001, vN, vD);
4512 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X011011, vN, vD);
4515 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X011011, vN, vD);
4518 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X011011, vN, vD);
4522 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X011001, vN, vD);
4525 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X011001, vN, vD);
4528 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X011001, vN, vD);
4532 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X011011, vN, vD);
4535 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X011011, vN, vD);
4538 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X011011, vN, vD);
4542 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X000111, vN, vD);
4545 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X000111, vN, vD);
4548 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X000111, vN, vD);
4552 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X100011, vN, vD);
4555 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X100011, vN, vD);
4558 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X100011, vN, vD);
4561 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X100011, vN, vD);
4565 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X001101, vN, vD);
4568 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X001101, vN, vD);
4571 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X001101, vN, vD);
4574 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X001101, vN, vD);
4578 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X001101, vN, vD);
4581 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X001101, vN, vD);
4584 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X001101, vN, vD);
4587 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X001101, vN, vD);
4591 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X111001, vN, vD);
4594 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X111001, vN, vD);
4598 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X111001, vN, vD);
4601 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X111001, vN, vD);
4605 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X111001, vN, vD);
4608 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X111001, vN, vD);
4612 *p++ = X_3_8_5_6_5_5(X010, X01110000, vM, X000000, vN, vD);
4616 *p++ = X_3_8_5_6_5_5(X010, X01110110, vM, X000110, vN, vD);
4619 *p++ = X_3_8_5_6_5_5(X010, X01110100, vM, X000110, vN, vD);
4622 *p++ = X_3_8_5_6_5_5(X010, X01110010, vM, X000110, vN, vD);
4625 *p++ = X_3_8_5_6_5_5(X010, X01110000, vM, X000110, vN, vD);
4629 *p++ = X_3_8_5_6_5_5(X010, X01110110, vM, X010110, vN, vD);
4632 *p++ = X_3_8_5_6_5_5(X010, X01110100, vM, X010110, vN, vD);
4635 *p++ = X_3_8_5_6_5_5(X010, X01110010, vM, X010110, vN, vD);
4638 *p++ = X_3_8_5_6_5_5(X010, X01110000, vM, X010110, vN, vD);
4642 *p++ = X_3_8_5_6_5_5(X010, X01110100, vM, X001110, vN, vD);
4645 *p++ = X_3_8_5_6_5_5(X010, X01110010, vM, X001110, vN, vD);
4648 *p++ = X_3_8_5_6_5_5(X010, X01110000, vM, X001110, vN, vD);
4652 *p++ = X_3_8_5_6_5_5(X010, X01110100, vM, X011110, vN, vD);
4655 *p++ = X_3_8_5_6_5_5(X010, X01110010, vM, X011110, vN, vD);
4658 *p++ = X_3_8_5_6_5_5(X010, X01110000, vM, X011110, vN, vD);
4662 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X100111, vN, vD);
4666 *p++ = X_3_8_5_6_5_5(X000, X01110001, vM, X111000, vN, vD);
4670 *p++ = X_3_8_5_6_5_5(X001, X01110101, vM, X110000, vN, vD);
4673 *p++ = X_3_8_5_6_5_5(X001, X01110011, vM, X110000, vN, vD);
4676 *p++ = X_3_8_5_6_5_5(X001, X01110001, vM, X110000, vN, vD);
4680 *p++ = X_3_8_5_6_5_5(X000, X01110101, vM, X110000, vN, vD);
4683 *p++ = X_3_8_5_6_5_5(X000, X01110011, vM, X110000, vN, vD);
4686 *p++ = X_3_8_5_6_5_5(X000, X01110001, vM, X110000, vN, vD);
4690 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X000011, vN, vD);
4693 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X000011, vN, vD);
4696 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X000011, vN, vD);
4699 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X000011, vN, vD);
4703 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X000011, vN, vD);
4706 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X000011, vN, vD);
4709 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X000011, vN, vD);
4712 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X000011, vN, vD);
4716 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X001011, vN, vD);
4719 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X001011, vN, vD);
4722 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X001011, vN, vD);
4725 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X001011, vN, vD);
4729 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X001011, vN, vD);
4732 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X001011, vN, vD);
4735 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X001011, vN, vD);
4738 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X001011, vN, vD);
4742 *p++ = X_3_8_5_6_5_5(X000, X01110101, vM, X110100, vN, vD);
4745 *p++ = X_3_8_5_6_5_5(X000, X01110011, vM, X110100, vN, vD);
4749 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X101101, vN, vD);
4752 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X101101, vN, vD);
4755 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X101101, vN, vD);
4758 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X101101, vN, vD);
4762 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X010011, vN, vD);
4765 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X010011, vN, vD);
4768 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X010011, vN, vD);
4771 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X010011, vN, vD);
4775 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X010111, vN, vD);
4778 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X010111, vN, vD);
4781 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X010111, vN, vD);
4784 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X010111, vN, vD);
4788 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X010011, vN, vD);
4791 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X010011, vN, vD);
4794 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X010011, vN, vD);
4797 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X010011, vN, vD);
4801 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X010111, vN, vD);
4804 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X010111, vN, vD);
4807 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X010111, vN, vD);
4810 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X010111, vN, vD);
4814 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X010001, vN, vD);
4817 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X010001, vN, vD);
4820 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X010001, vN, vD);
4823 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X010001, vN, vD);
4827 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X010101, vN, vD);
4830 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X010101, vN, vD);
4833 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X010101, vN, vD);
4836 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X010101, vN, vD);
4840 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X010001, vN, vD);
4843 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X010001, vN, vD);
4846 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X010001, vN, vD);
4849 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X010001, vN, vD);
4853 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X010101, vN, vD);
4856 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X010101, vN, vD);
4859 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X010101, vN, vD);
4862 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X010101, vN, vD);
4866 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X111111, vN, vD);
4869 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X111111, vN, vD);
4872 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X111111, vN, vD);
4875 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X111111, vN, vD);
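Within each emit group above, the calls differ only in the 8-bit field: its middle two bits are the element-size bits (instruction bits 23:22), with 00 = .16b, 01 = .8h, 10 = .4s and 11 = .2d. Below is a hedged sketch (illustrative helper and operands, not the file's code) that rebuilds the USHL group, 011 01110 sz 1 m 010001 n d, and reproduces the X01110001 / X01110011 / X01110101 / X01110111 progression.

/* Illustrative sketch: derive the USHL encoding from a size code. */
#include <stdint.h>
#include <stdio.h>

static uint32_t ushl_word(uint32_t sz, uint32_t m, uint32_t n, uint32_t d)
{
   uint32_t f2 = 0x71u | (sz << 1);               /* 01110 sz 1 */
   return (0x3u << 29) | (f2 << 21) | (m << 16)   /* 011, f2, Vm */
        | (0x11u << 10) | (n << 5) | d;           /* 010001, Vn, Vd */
}

int main(void)
{
   for (uint32_t sz = 0; sz <= 3; sz++)           /* 16b, 8h, 4s, 2d */
      printf("USHL sz=%u -> 0x%08X\n",
             (unsigned)sz, (unsigned)ushl_word(sz, 2, 1, 0));
   return 0;
}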
5276 011 01110 000 m 0 imm4 0 n d EXT Vd.16b, Vn.16b, Vm.16b, #imm4
5278 Vn is low operand, Vm is high operand
5282 UInt vM = qregEnc(i->ARM64in.VExtV.srcHi);
5285 *p++ = X_3_8_5_6_5_5(X011, X01110000, vM,
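The VExtV call above is cut off at the match boundary. Going by the EXT comment at 5276, the 6-bit field is 0:imm4:0, so the byte-shift amount imm4 lands in instruction bits 14:11 and must be shifted left by one before being placed in that field. A hedged sketch of just that field arithmetic (illustrative helper, not the file's code):

/* Illustrative sketch: EXT Vd.16b, Vn.16b, Vm.16b, #imm4 is
   "011 01110 000 m 0 imm4 0 n d", so the 6-bit field is imm4 << 1. */
#include <stdint.h>
#include <stdio.h>

static uint32_t ext16b_word(uint32_t imm4, uint32_t m, uint32_t n, uint32_t d)
{
   return (0x3u << 29) | (0x70u << 21) | (m << 16)   /* 011 01110000 Vm */
        | ((imm4 << 1) << 10) | (n << 5) | d;        /* 0 imm4 0, Vn, Vd */
}

int main(void)
{
   /* EXT V0.16B, V1.16B, V2.16B, #3 */
   printf("0x%08X\n", (unsigned)ext16b_word(3, 2, 1, 0));
   return 0;
}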