
Lines Matching defs:vD

4192             010 01110 11 1 m  100001 n d   ADD Vd.2d,  Vn.2d,  Vm.2d
4193 010 01110 10 1 m 100001 n d ADD Vd.4s, Vn.4s, Vm.4s
4194 010 01110 01 1 m 100001 n d ADD Vd.8h, Vn.8h, Vm.8h
4195 010 01110 00 1 m 100001 n d ADD Vd.16b, Vn.16b, Vm.16b
4197 011 01110 11 1 m 100001 n d SUB Vd.2d, Vn.2d, Vm.2d
4198 011 01110 10 1 m 100001 n d SUB Vd.4s, Vn.4s, Vm.4s
4199 011 01110 01 1 m 100001 n d SUB Vd.8h, Vn.8h, Vm.8h
4200 011 01110 00 1 m 100001 n d SUB Vd.16b, Vn.16b, Vm.16b
4202 010 01110 10 1 m 100111 n d MUL Vd.4s, Vn.4s, Vm.4s
4203 010 01110 01 1 m 100111 n d MUL Vd.8h, Vn.8h, Vm.8h
4204 010 01110 00 1 m 100111 n d MUL Vd.16b, Vn.16b, Vm.16b
4206 010 01110 01 1 m 110101 n d FADD Vd.2d, Vn.2d, Vm.2d
4207 010 01110 00 1 m 110101 n d FADD Vd.4s, Vn.4s, Vm.4s
4208 010 01110 11 1 m 110101 n d FSUB Vd.2d, Vn.2d, Vm.2d
4209 010 01110 10 1 m 110101 n d FSUB Vd.4s, Vn.4s, Vm.4s
4211 011 01110 01 1 m 110111 n d FMUL Vd.2d, Vn.2d, Vm.2d
4212 011 01110 00 1 m 110111 n d FMUL Vd.4s, Vn.4s, Vm.4s
4213 011 01110 01 1 m 111111 n d FDIV Vd.2d, Vn.2d, Vm.2d
4214 011 01110 00 1 m 111111 n d FDIV Vd.4s, Vn.4s, Vm.4s
4216 010 01110 01 1 m 111101 n d FMAX Vd.2d, Vn.2d, Vm.2d
4217 010 01110 00 1 m 111101 n d FMAX Vd.4s, Vn.4s, Vm.4s
4218 010 01110 11 1 m 111101 n d FMIN Vd.2d, Vn.2d, Vm.2d
4219 010 01110 10 1 m 111101 n d FMIN Vd.4s, Vn.4s, Vm.4s
4221 011 01110 10 1 m 011001 n d UMAX Vd.4s, Vn.4s, Vm.4s
4222 011 01110 01 1 m 011001 n d UMAX Vd.8h, Vn.8h, Vm.8h
4223 011 01110 00 1 m 011001 n d UMAX Vd.16b, Vn.16b, Vm.16b
4225 011 01110 10 1 m 011011 n d UMIN Vd.4s, Vn.4s, Vm.4s
4226 011 01110 01 1 m 011011 n d UMIN Vd.8h, Vn.8h, Vm.8h
4227 011 01110 00 1 m 011011 n d UMIN Vd.16b, Vn.16b, Vm.16b
4229 010 01110 10 1 m 011001 n d SMAX Vd.4s, Vn.4s, Vm.4s
4230 010 01110 01 1 m 011001 n d SMAX Vd.8h, Vn.8h, Vm.8h
4231 010 01110 00 1 m 011001 n d SMAX Vd.16b, Vn.16b, Vm.16b
4233 010 01110 10 1 m 011011 n d SMIN Vd.4s, Vn.4s, Vm.4s
4234 010 01110 01 1 m 011011 n d SMIN Vd.8h, Vn.8h, Vm.8h
4235 010 01110 00 1 m 011011 n d SMIN Vd.16b, Vn.16b, Vm.16b
4237 010 01110 00 1 m 000111 n d AND Vd, Vn, Vm
4238 010 01110 10 1 m 000111 n d ORR Vd, Vn, Vm
4239 011 01110 00 1 m 000111 n d EOR Vd, Vn, Vm
4241 011 01110 11 1 m 100011 n d CMEQ Vd.2d, Vn.2d, Vm.2d
4242 011 01110 10 1 m 100011 n d CMEQ Vd.4s, Vn.4s, Vm.4s
4243 011 01110 01 1 m 100011 n d CMEQ Vd.8h, Vn.8h, Vm.8h
4244 011 01110 00 1 m 100011 n d CMEQ Vd.16b, Vn.16b, Vm.16b
4246 011 01110 11 1 m 001101 n d CMHI Vd.2d, Vn.2d, Vm.2d
4247 011 01110 10 1 m 001101 n d CMHI Vd.4s, Vn.4s, Vm.4s
4248 011 01110 01 1 m 001101 n d CMHI Vd.8h, Vn.8h, Vm.8h
4249 011 01110 00 1 m 001101 n d CMHI Vd.16b, Vn.16b, Vm.16b
4251 010 01110 11 1 m 001101 n d CMGT Vd.2d, Vn.2d, Vm.2d
4252 010 01110 10 1 m 001101 n d CMGT Vd.4s, Vn.4s, Vm.4s
4253 010 01110 01 1 m 001101 n d CMGT Vd.8h, Vn.8h, Vm.8h
4254 010 01110 00 1 m 001101 n d CMGT Vd.16b, Vn.16b, Vm.16b
4256 010 01110 01 1 m 111001 n d FCMEQ Vd.2d, Vn.2d, Vm.2d
4257 010 01110 00 1 m 111001 n d FCMEQ Vd.4s, Vn.4s, Vm.4s
4259 011 01110 01 1 m 111001 n d FCMGE Vd.2d, Vn.2d, Vm.2d
4260 011 01110 00 1 m 111001 n d FCMGE Vd.4s, Vn.4s, Vm.4s
4262 011 01110 11 1 m 111001 n d FCMGT Vd.2d, Vn.2d, Vm.2d
4263 011 01110 10 1 m 111001 n d FCMGT Vd.4s, Vn.4s, Vm.4s
4265 010 01110 00 0 m 000000 n d TBL Vd.16b, {Vn.16b}, Vm.16b
4267 010 01110 11 0 m 000110 n d UZP1 Vd.2d, Vn.2d, Vm.2d
4268 010 01110 10 0 m 000110 n d UZP1 Vd.4s, Vn.4s, Vm.4s
4269 010 01110 01 0 m 000110 n d UZP1 Vd.8h, Vn.8h, Vm.8h
4270 010 01110 00 0 m 000110 n d UZP1 Vd.16b, Vn.16b, Vm.16b
4272 010 01110 11 0 m 010110 n d UZP2 Vd.2d, Vn.2d, Vm.2d
4273 010 01110 10 0 m 010110 n d UZP2 Vd.4s, Vn.4s, Vm.4s
4274 010 01110 01 0 m 010110 n d UZP2 Vd.8h, Vn.8h, Vm.8h
4275 010 01110 00 0 m 010110 n d UZP2 Vd.16b, Vn.16b, Vm.16b
4277 010 01110 10 0 m 001110 n d ZIP1 Vd.4s, Vn.4s, Vm.4s
4278 010 01110 01 0 m 001110 n d ZIP1 Vd.8h, Vn.8h, Vm.8h
4279 010 01110 00 0 m 001110 n d ZIP1 Vd.16b, Vn.16b, Vm.16b
4281 010 01110 10 0 m 011110 n d ZIP2 Vd.4s, Vn.4s, Vm.4s
4282 010 01110 01 0 m 011110 n d ZIP2 Vd.8h, Vn.8h, Vm.8h
4283 010 01110 00 0 m 011110 n d ZIP2 Vd.16b, Vn.16b, Vm.16b
4285 011 01110 00 1 m 100111 n d PMUL Vd.16b, Vn.16b, Vm.16b
4287 000 01110 00 1 m 111000 n d PMULL Vd.8h, Vn.8b, Vm.8b
4289 001 01110 10 1 m 110000 n d UMULL Vd.2d, Vn.2s, Vm.2s
4290 001 01110 01 1 m 110000 n d UMULL Vd.4s, Vn.4h, Vm.4h
4291 001 01110 00 1 m 110000 n d UMULL Vd.8h, Vn.8b, Vm.8b
4293 000 01110 10 1 m 110000 n d SMULL Vd.2d, Vn.2s, Vm.2s
4294 000 01110 01 1 m 110000 n d SMULL Vd.4s, Vn.4h, Vm.4h
4295 000 01110 00 1 m 110000 n d SMULL Vd.8h, Vn.8b, Vm.8b
4297 010 01110 11 1 m 000011 n d SQADD Vd.2d, Vn.2d, Vm.2d
4298 010 01110 10 1 m 000011 n d SQADD Vd.4s, Vn.4s, Vm.4s
4299 010 01110 01 1 m 000011 n d SQADD Vd.8h, Vn.8h, Vm.8h
4300 010 01110 00 1 m 000011 n d SQADD Vd.16b, Vn.16b, Vm.16b
4302 011 01110 11 1 m 000011 n d UQADD Vd.2d, Vn.2d, Vm.2d
4303 011 01110 10 1 m 000011 n d UQADD Vd.4s, Vn.4s, Vm.4s
4304 011 01110 01 1 m 000011 n d UQADD Vd.8h, Vn.8h, Vm.8h
4305 011 01110 00 1 m 000011 n d UQADD Vd.16b, Vn.16b, Vm.16b
4307 010 01110 11 1 m 001011 n d SQSUB Vd.2d, Vn.2d, Vm.2d
4308 010 01110 10 1 m 001011 n d SQSUB Vd.4s, Vn.4s, Vm.4s
4309 010 01110 01 1 m 001011 n d SQSUB Vd.8h, Vn.8h, Vm.8h
4310 010 01110 00 1 m 001011 n d SQSUB Vd.16b, Vn.16b, Vm.16b
4312 011 01110 11 1 m 001011 n d UQSUB Vd.2d, Vn.2d, Vm.2d
4313 011 01110 10 1 m 001011 n d UQSUB Vd.4s, Vn.4s, Vm.4s
4314 011 01110 01 1 m 001011 n d UQSUB Vd.8h, Vn.8h, Vm.8h
4315 011 01110 00 1 m 001011 n d UQSUB Vd.16b, Vn.16b, Vm.16b
4317 000 01110 10 1 m 110100 n d SQDMULL Vd.2d, Vn.2s, Vm.2s
4318 000 01110 01 1 m 110100 n d SQDMULL Vd.4s, Vn.4h, Vm.4h
4320 010 01110 10 1 m 101101 n d SQDMULH Vd.4s, Vn.4s, Vm.4s
4321 010 01110 01 1 m 101101 n d SQDMULH Vd.8h, Vn.8h, Vm.8h
4322 011 01110 10 1 m 101101 n d SQRDMULH Vd.4s, Vn.4s, Vm.4s
4323 011 01110 01 1 m 101101 n d SQRDMULH Vd.8h, Vn.8h, Vm.8h
4325 010 01110 sz 1 m 010011 n d SQSHL@sz Vd, Vn, Vm
4326 010 01110 sz 1 m 010111 n d SQRSHL@sz Vd, Vn, Vm
4327 011 01110 sz 1 m 010011 n d UQSHL@sz Vd, Vn, Vm
4328 011 01110 sz 1 m 010111 n d UQRSHL@sz Vd, Vn, Vm
4330 010 01110 sz 1 m 010001 n d SSHL@sz Vd, Vn, Vm
4331 010 01110 sz 1 m 010101 n d SRSHL@sz Vd, Vn, Vm
4332 011 01110 sz 1 m 010001 n d USHL@sz Vd, Vn, Vm
4333 011 01110 sz 1 m 010101 n d URSHL@sz Vd, Vn, Vm
4335 010 01110 01 1 m 111111 n d FRECPS Vd.2d, Vn.2d, Vm.2d
4336 010 01110 00 1 m 111111 n d FRECPS Vd.4s, Vn.4s, Vm.4s
4337 010 01110 11 1 m 111111 n d FRSQRTS Vd.2d, Vn.2d, Vm.2d
4338 010 01110 10 1 m 111111 n d FRSQRTS Vd.4s, Vn.4s, Vm.4s
4340 UInt vD = qregEnc(i->ARM64in.VBinV.dst);
4345 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X100001, vN, vD);
4348 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X100001, vN, vD);
4351 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X100001, vN, vD);
4354 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X100001, vN, vD);
4357 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X100001, vN, vD);
4360 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X100001, vN, vD);
4363 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X100001, vN, vD);
4366 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X100001, vN, vD);
4369 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X100111, vN, vD);
4372 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X100111, vN, vD);
4375 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X100111, vN, vD);
4378 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X110101, vN, vD);
4381 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X110101, vN, vD);
4384 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X110101, vN, vD);
4387 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X110101, vN, vD);
4390 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X110111, vN, vD);
4393 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X110111, vN, vD);
4396 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X111111, vN, vD);
4399 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X111111, vN, vD);
4403 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X111101, vN, vD);
4406 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X111101, vN, vD);
4409 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X111101, vN, vD);
4412 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X111101, vN, vD);
4416 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X011001, vN, vD);
4419 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X011001, vN, vD);
4422 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X011001, vN, vD);
4426 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X011011, vN, vD);
4429 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X011011, vN, vD);
4432 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X011011, vN, vD);
4436 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X011001, vN, vD);
4439 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X011001, vN, vD);
4442 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X011001, vN, vD);
4446 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X011011, vN, vD);
4449 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X011011, vN, vD);
4452 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X011011, vN, vD);
4456 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X000111, vN, vD);
4459 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X000111, vN, vD);
4462 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X000111, vN, vD);
4466 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X100011, vN, vD);
4469 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X100011, vN, vD);
4472 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X100011, vN, vD);
4475 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X100011, vN, vD);
4479 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X001101, vN, vD);
4482 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X001101, vN, vD);
4485 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X001101, vN, vD);
4488 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X001101, vN, vD);
4492 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X001101, vN, vD);
4495 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X001101, vN, vD);
4498 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X001101, vN, vD);
4501 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X001101, vN, vD);
4505 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X111001, vN, vD);
4508 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X111001, vN, vD);
4512 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X111001, vN, vD);
4515 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X111001, vN, vD);
4519 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X111001, vN, vD);
4522 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X111001, vN, vD);
4526 *p++ = X_3_8_5_6_5_5(X010, X01110000, vM, X000000, vN, vD);
4530 *p++ = X_3_8_5_6_5_5(X010, X01110110, vM, X000110, vN, vD);
4533 *p++ = X_3_8_5_6_5_5(X010, X01110100, vM, X000110, vN, vD);
4536 *p++ = X_3_8_5_6_5_5(X010, X01110010, vM, X000110, vN, vD);
4539 *p++ = X_3_8_5_6_5_5(X010, X01110000, vM, X000110, vN, vD);
4543 *p++ = X_3_8_5_6_5_5(X010, X01110110, vM, X010110, vN, vD);
4546 *p++ = X_3_8_5_6_5_5(X010, X01110100, vM, X010110, vN, vD);
4549 *p++ = X_3_8_5_6_5_5(X010, X01110010, vM, X010110, vN, vD);
4552 *p++ = X_3_8_5_6_5_5(X010, X01110000, vM, X010110, vN, vD);
4556 *p++ = X_3_8_5_6_5_5(X010, X01110100, vM, X001110, vN, vD);
4559 *p++ = X_3_8_5_6_5_5(X010, X01110010, vM, X001110, vN, vD);
4562 *p++ = X_3_8_5_6_5_5(X010, X01110000, vM, X001110, vN, vD);
4566 *p++ = X_3_8_5_6_5_5(X010, X01110100, vM, X011110, vN, vD);
4569 *p++ = X_3_8_5_6_5_5(X010, X01110010, vM, X011110, vN, vD);
4572 *p++ = X_3_8_5_6_5_5(X010, X01110000, vM, X011110, vN, vD);
4576 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X100111, vN, vD);
4580 *p++ = X_3_8_5_6_5_5(X000, X01110001, vM, X111000, vN, vD);
4584 *p++ = X_3_8_5_6_5_5(X001, X01110101, vM, X110000, vN, vD);
4587 *p++ = X_3_8_5_6_5_5(X001, X01110011, vM, X110000, vN, vD);
4590 *p++ = X_3_8_5_6_5_5(X001, X01110001, vM, X110000, vN, vD);
4594 *p++ = X_3_8_5_6_5_5(X000, X01110101, vM, X110000, vN, vD);
4597 *p++ = X_3_8_5_6_5_5(X000, X01110011, vM, X110000, vN, vD);
4600 *p++ = X_3_8_5_6_5_5(X000, X01110001, vM, X110000, vN, vD);
4604 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X000011, vN, vD);
4607 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X000011, vN, vD);
4610 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X000011, vN, vD);
4613 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X000011, vN, vD);
4617 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X000011, vN, vD);
4620 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X000011, vN, vD);
4623 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X000011, vN, vD);
4626 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X000011, vN, vD);
4630 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X001011, vN, vD);
4633 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X001011, vN, vD);
4636 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X001011, vN, vD);
4639 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X001011, vN, vD);
4643 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X001011, vN, vD);
4646 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X001011, vN, vD);
4649 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X001011, vN, vD);
4652 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X001011, vN, vD);
4656 *p++ = X_3_8_5_6_5_5(X000, X01110101, vM, X110100, vN, vD);
4659 *p++ = X_3_8_5_6_5_5(X000, X01110011, vM, X110100, vN, vD);
4663 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X101101, vN, vD);
4666 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X101101, vN, vD);
4669 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X101101, vN, vD);
4672 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X101101, vN, vD);
4676 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X010011, vN, vD);
4679 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X010011, vN, vD);
4682 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X010011, vN, vD);
4685 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X010011, vN, vD);
4689 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X010111, vN, vD);
4692 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X010111, vN, vD);
4695 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X010111, vN, vD);
4698 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X010111, vN, vD);
4702 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X010011, vN, vD);
4705 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X010011, vN, vD);
4708 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X010011, vN, vD);
4711 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X010011, vN, vD);
4715 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X010111, vN, vD);
4718 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X010111, vN, vD);
4721 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X010111, vN, vD);
4724 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X010111, vN, vD);
4728 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X010001, vN, vD);
4731 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X010001, vN, vD);
4734 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X010001, vN, vD);
4737 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X010001, vN, vD);
4741 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X010101, vN, vD);
4744 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X010101, vN, vD);
4747 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X010101, vN, vD);
4750 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X010101, vN, vD);
4754 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X010001, vN, vD);
4757 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X010001, vN, vD);
4760 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X010001, vN, vD);
4763 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X010001, vN, vD);
4767 *p++ = X_3_8_5_6_5_5(X011, X01110111, vM, X010101, vN, vD);
4770 *p++ = X_3_8_5_6_5_5(X011, X01110101, vM, X010101, vN, vD);
4773 *p++ = X_3_8_5_6_5_5(X011, X01110011, vM, X010101, vN, vD);
4776 *p++ = X_3_8_5_6_5_5(X011, X01110001, vM, X010101, vN, vD);
4780 *p++ = X_3_8_5_6_5_5(X010, X01110011, vM, X111111, vN, vD);
4783 *p++ = X_3_8_5_6_5_5(X010, X01110001, vM, X111111, vN, vD);
4786 *p++ = X_3_8_5_6_5_5(X010, X01110111, vM, X111111, vN, vD);
4789 *p++ = X_3_8_5_6_5_5(X010, X01110101, vM, X111111, vN, vD);
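
Taken together, the encoding rows above and the X_3_8_5_6_5_5 calls below them use one fixed field layout. The following is a minimal standalone sketch, not the VEX code itself: it assumes X_3_8_5_6_5_5 packs six fields of widths 3+8+5+6+5+5 = 32 bits starting at bit 31, and pack_3_8_5_6_5_5 is a hypothetical stand-in used to cross-check the "010 01110 10 1 m 100001 n d" row (ADD Vd.4s, Vn.4s, Vm.4s).

   #include <assert.h>
   #include <stdint.h>
   #include <stdio.h>

   /* Pack fields of widths 3/8/5/6/5/5 from bit 31 downwards. */
   static uint32_t pack_3_8_5_6_5_5 ( uint32_t f1, uint32_t f2, uint32_t f3,
                                      uint32_t f4, uint32_t f5, uint32_t f6 )
   {
      assert(f1 < (1u << 3) && f2 < (1u << 8) && f3 < (1u << 5));
      assert(f4 < (1u << 6) && f5 < (1u << 5) && f6 < (1u << 5));
      return (f1 << 29) | (f2 << 21) | (f3 << 16)
           | (f4 << 10) | (f5 << 5)  | f6;
   }

   int main ( void )
   {
      /* ADD v3.4s, v2.4s, v1.4s: 010 / 01110101 / Rm=1 / 100001 / Rn=2 / Rd=3 */
      uint32_t w = pack_3_8_5_6_5_5(0x2, 0x75, 1, 0x21, 2, 3);
      printf("%08x\n", w);   /* 4ea18443, matching the ARM ARM encoding */
      return 0;
   }
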
4799 010 01110 sz 1 00000 001110 n d SUQADD@sz Vd, Vn
4800 011 01110 sz 1 00000 001110 n d USQADD@sz Vd, Vn
4802 UInt vD = qregEnc(i->ARM64in.VModifyV.mod);
4806 *p++ = X_3_8_5_6_5_5(X010, X01110111, X00000, X001110, vN, vD);
4809 *p++ = X_3_8_5_6_5_5(X010, X01110101, X00000, X001110, vN, vD);
4812 *p++ = X_3_8_5_6_5_5(X010, X01110011, X00000, X001110, vN, vD);
4815 *p++ = X_3_8_5_6_5_5(X010, X01110001, X00000, X001110, vN, vD);
4818 *p++ = X_3_8_5_6_5_5(X011, X01110111, X00000, X001110, vN, vD);
4821 *p++ = X_3_8_5_6_5_5(X011, X01110101, X00000, X001110, vN, vD);
4824 *p++ = X_3_8_5_6_5_5(X011, X01110011, X00000, X001110, vN, vD);
4827 *p++ = X_3_8_5_6_5_5(X011, X01110001, X00000, X001110, vN, vD);
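
In the SUQADD@sz/USQADD@sz rows above, @sz stands for the two size bits of the lane type, which is why the calls cycle through the four constants X01110001, X01110011, X01110101 and X01110111. A minimal sketch of that relationship (size_field and szBlg2, meaning log2 of the lane size in bytes, are names chosen here for illustration, not VEX identifiers):

   #include <assert.h>
   #include <stdint.h>
   #include <stdio.h>

   static uint32_t size_field ( uint32_t szBlg2 )   /* 0=B, 1=H, 2=S, 3=D */
   {
      assert(szBlg2 <= 3);
      return 0x71u | (szBlg2 << 1);   /* 0b01110_sz_1 */
   }

   int main ( void )
   {
      for (uint32_t szBlg2 = 0; szBlg2 <= 3; szBlg2++)
         printf("szBlg2=%u -> 0x%02x\n", szBlg2, size_field(szBlg2));
      /* prints 0x71, 0x73, 0x75, 0x77 */
      return 0;
   }
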
4836 010 01110 11 1 00000 111110 n d FABS Vd.2d, Vn.2d
4837 010 01110 10 1 00000 111110 n d FABS Vd.4s, Vn.4s
4838 011 01110 11 1 00000 111110 n d FNEG Vd.2d, Vn.2d
4839 011 01110 10 1 00000 111110 n d FNEG Vd.4s, Vn.4s
4840 011 01110 00 1 00000 010110 n d NOT Vd.16b, Vn.16b
4842 010 01110 11 1 00000 101110 n d ABS Vd.2d, Vn.2d
4843 010 01110 10 1 00000 101110 n d ABS Vd.4s, Vn.4s
4844 010 01110 01 1 00000 101110 n d ABS Vd.8h, Vn.8h
4845 010 01110 00 1 00000 101110 n d ABS Vd.16b, Vn.16b
4847 010 01110 10 1 00000 010010 n d CLS Vd.4s, Vn.4s
4848 010 01110 01 1 00000 010010 n d CLS Vd.8h, Vn.8h
4849 010 01110 00 1 00000 010010 n d CLS Vd.16b, Vn.16b
4851 011 01110 10 1 00000 010010 n d CLZ Vd.4s, Vn.4s
4852 011 01110 01 1 00000 010010 n d CLZ Vd.8h, Vn.8h
4853 011 01110 00 1 00000 010010 n d CLZ Vd.16b, Vn.16b
4855 010 01110 00 1 00000 010110 n d CNT Vd.16b, Vn.16b
4857 011 01110 01 1 00000 010110 n d RBIT Vd.16b, Vn.16b
4858 010 01110 00 1 00000 000110 n d REV16 Vd.16b, Vn.16b
4859 011 01110 00 1 00000 000010 n d REV32 Vd.16b, Vn.16b
4860 011 01110 01 1 00000 000010 n d REV32 Vd.8h, Vn.8h
4862 010 01110 00 1 00000 000010 n d REV64 Vd.16b, Vn.16b
4863 010 01110 01 1 00000 000010 n d REV64 Vd.8h, Vn.8h
4864 010 01110 10 1 00000 000010 n d REV64 Vd.4s, Vn.4s
4866 010 01110 10 1 00001 110010 n d URECPE Vd.4s, Vn.4s
4867 011 01110 10 1 00001 110010 n d URSQRTE Vd.4s, Vn.4s
4869 010 01110 11 1 00001 110110 n d FRECPE Vd.2d, Vn.2d
4870 010 01110 10 1 00001 110110 n d FRECPE Vd.4s, Vn.4s
4872 011 01110 11 1 00001 110110 n d FRSQRTE Vd.2d, Vn.2d
4873 011 01110 10 1 00001 110110 n d FRSQRTE Vd.4s, Vn.4s
4875 011 01110 11 1 00001 111110 n d FSQRT Vd.2d, Vn.2d
4876 011 01110 10 1 00001 111110 n d FSQRT Vd.4s, Vn.4s
4878 UInt vD = qregEnc(i->ARM64in.VUnaryV.dst);
4882 *p++ = X_3_8_5_6_5_5(X010, X01110111, X00000, X111110, vN, vD);
4885 *p++ = X_3_8_5_6_5_5(X010, X01110101, X00000, X111110, vN, vD);
4888 *p++ = X_3_8_5_6_5_5(X011, X01110111, X00000, X111110, vN, vD);
4891 *p++ = X_3_8_5_6_5_5(X011, X01110101, X00000, X111110, vN, vD);
4894 *p++ = X_3_8_5_6_5_5(X011, X01110001, X00000, X010110, vN, vD);
4897 *p++ = X_3_8_5_6_5_5(X010, X01110111, X00000, X101110, vN, vD);
4900 *p++ = X_3_8_5_6_5_5(X010, X01110101, X00000, X101110, vN, vD);
4903 *p++ = X_3_8_5_6_5_5(X010, X01110011, X00000, X101110, vN, vD);
4906 *p++ = X_3_8_5_6_5_5(X010, X01110001, X00000, X101110, vN, vD);
4909 *p++ = X_3_8_5_6_5_5(X010, X01110101, X00000, X010010, vN, vD);
4912 *p++ = X_3_8_5_6_5_5(X010, X01110011, X00000, X010010, vN, vD);
4915 *p++ = X_3_8_5_6_5_5(X010, X01110001, X00000, X010010, vN, vD);
4918 *p++ = X_3_8_5_6_5_5(X011, X01110101, X00000, X010010, vN, vD);
4921 *p++ = X_3_8_5_6_5_5(X011, X01110011, X00000, X010010, vN, vD);
4924 *p++ = X_3_8_5_6_5_5(X011, X01110001, X00000, X010010, vN, vD);
4927 *p++ = X_3_8_5_6_5_5(X010, X01110001, X00000, X010110, vN, vD);
4930 *p++ = X_3_8_5_6_5_5(X011, X01110011, X00000, X010110, vN, vD);
4933 *p++ = X_3_8_5_6_5_5(X010, X01110001, X00000, X000110, vN, vD);
4936 *p++ = X_3_8_5_6_5_5(X011, X01110001, X00000, X000010, vN, vD);
4939 *p++ = X_3_8_5_6_5_5(X011, X01110011, X00000, X000010, vN, vD);
4942 *p++ = X_3_8_5_6_5_5(X010, X01110001, X00000, X000010, vN, vD);
4945 *p++ = X_3_8_5_6_5_5(X010, X01110011, X00000, X000010, vN, vD);
4948 *p++ = X_3_8_5_6_5_5(X010, X01110101, X00000, X000010, vN, vD);
4951 *p++ = X_3_8_5_6_5_5(X010, X01110101, X00001, X110010, vN, vD);
4954 *p++ = X_3_8_5_6_5_5(X011, X01110101, X00001, X110010, vN, vD);
4957 *p++ = X_3_8_5_6_5_5(X010, X01110111, X00001, X110110, vN, vD);
4960 *p++ = X_3_8_5_6_5_5(X010, X01110101, X00001, X110110, vN, vD);
4963 *p++ = X_3_8_5_6_5_5(X011, X01110111, X00001, X110110, vN, vD);
4966 *p++ = X_3_8_5_6_5_5(X011, X01110101, X00001, X110110, vN, vD);
4969 *p++ = X_3_8_5_6_5_5(X011, X01110111, X00001, X111110, vN, vD);
4972 *p++ = X_3_8_5_6_5_5(X011, X01110101, X00001, X111110, vN, vD);
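
The two-operand rows above keep the three-operand field layout, with the Rm slot held at a fixed 00000 or 00001. A standalone worked check (not the VEX code) for the "010 01110 00 1 00000 010110 n d" row, i.e. CNT v0.16b, v1.16b:

   #include <stdint.h>
   #include <stdio.h>

   int main ( void )
   {
      uint32_t w = (0x2u  << 29)   /* 010                 */
                 | (0x71u << 21)   /* 01110 00 1          */
                 | (0x00u << 16)   /* fixed 00000 Rm slot */
                 | (0x16u << 10)   /* 010110              */
                 | (1u    << 5)    /* Vn = v1             */
                 |  0u;            /* Vd = v0             */
      printf("%08x\n", w);         /* 4e205820 = CNT v0.16b, v1.16b */
      return 0;
   }
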
4981 000 01110 00 1,00001 001010 n d XTN Vd.8b, Vn.8h
4982 000 01110 01 1,00001 001010 n d XTN Vd.4h, Vn.4s
4983 000 01110 10 1,00001 001010 n d XTN Vd.2s, Vn.2d
4985 001 01110 00 1,00001 001010 n d SQXTUN Vd.8b, Vn.8h
4986 001 01110 01 1,00001 001010 n d SQXTUN Vd.4h, Vn.4s
4987 001 01110 10 1,00001 001010 n d SQXTUN Vd.2s, Vn.2d
4989 000 01110 00 1,00001 010010 n d SQXTN Vd.8b, Vn.8h
4990 000 01110 01 1,00001 010010 n d SQXTN Vd.4h, Vn.4s
4991 000 01110 10 1,00001 010010 n d SQXTN Vd.2s, Vn.2d
4993 001 01110 00 1,00001 010010 n d UQXTN Vd.8b, Vn.8h
4994 001 01110 01 1,00001 010010 n d UQXTN Vd.4h, Vn.4s
4995 001 01110 10 1,00001 010010 n d UQXTN Vd.2s, Vn.2d
4997 UInt vD = qregEnc(i->ARM64in.VNarrowV.dst);
5004 X00001, X001010, vN, vD);
5008 X00001, X001010, vN, vD);
5012 X00001, X010010, vN, vD);
5016 X00001, X010010, vN, vD);
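
For the narrowing rows above, the two size bits follow the destination lane size: 00 for .8b, 01 for .4h, 10 for .2s. A minimal sketch (dszBlg2, the log2 of the destination lane size in bytes, is a name used here only for illustration) that rebuilds the "000 01110 00 1,00001 001010 n d" row for XTN v0.8b, v1.8h:

   #include <stdint.h>
   #include <stdio.h>

   int main ( void )
   {
      uint32_t dszBlg2 = 0;                          /* .8b destination  */
      uint32_t w = (0x0u << 29)                      /* 000              */
                 | ((0x71u | (dszBlg2 << 1)) << 21)  /* 01110 sz 1       */
                 | (0x01u << 16)                     /* fixed 00001 slot */
                 | (0x0Au << 10)                     /* 001010           */
                 | (1u << 5)                         /* Vn = v1          */
                 |  0u;                              /* Vd = v0          */
      printf("%08x\n", w);                           /* 0e212820         */
      return 0;
   }
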
5025 011 011110 immh immb 000001 n d USHR Vd.T, Vn.T, #sh
5026 010 011110 immh immb 000001 n d SSHR Vd.T, Vn.T, #sh
5043 010 011110 immh immb 010101 n d SHL Vd.T, Vn.T, #sh
5045 011 011110 immh immb 011101 n d UQSHL Vd.T, Vn.T, #sh
5046 010 011110 immh immb 011101 n d SQSHL Vd.T, Vn.T, #sh
5047 011 011110 immh immb 011001 n d SQSHLU Vd.T, Vn.T, #sh
5056 UInt vD = qregEnc(i->ARM64in.VShiftImmV.dst);
5062 = X_3_6_7_6_5_5(X011, X011110, 0, X000001, vN, vD);
5064 = X_3_6_7_6_5_5(X010, X011110, 0, X000001, vN, vD);
5067 = X_3_6_7_6_5_5(X001, X011110, 0, X100101, vN, vD);
5069 = X_3_6_7_6_5_5(X000, X011110, 0, X100101, vN, vD);
5071 = X_3_6_7_6_5_5(X001, X011110, 0, X100001, vN, vD);
5074 = X_3_6_7_6_5_5(X001, X011110, 0, X100111, vN, vD);
5076 = X_3_6_7_6_5_5(X000, X011110, 0, X100111, vN, vD);
5078 = X_3_6_7_6_5_5(X001, X011110, 0, X100011, vN, vD);
5081 = X_3_6_7_6_5_5(X010, X011110, 0, X010101, vN, vD);
5084 = X_3_6_7_6_5_5(X011, X011110, 0, X011101, vN, vD);
5086 = X_3_6_7_6_5_5(X010, X011110, 0, X011101, vN, vD);
5088 = X_3_6_7_6_5_5(X011, X011110, 0, X011001, vN, vD);
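
The third argument slot of X_3_6_7_6_5_5 (left at 0 in the fragments above) holds the 7-bit immh:immb field that carries the shift amount. A minimal sketch of the standard AArch64 rule, assuming esize is the lane width in bits (immhb_right and immhb_left are illustrative names only): right shifts encode 2*esize - sh, left shifts encode esize + sh.

   #include <assert.h>
   #include <stdint.h>
   #include <stdio.h>

   static uint32_t immhb_right ( uint32_t esize, uint32_t sh )
   {
      assert(sh >= 1 && sh <= esize);
      return 2 * esize - sh;             /* USHR, SSHR, ...           */
   }

   static uint32_t immhb_left ( uint32_t esize, uint32_t sh )
   {
      assert(sh < esize);
      return esize + sh;                 /* SHL, UQSHL, SQSHL, SQSHLU */
   }

   int main ( void )
   {
      /* USHR Vd.4s, Vn.4s, #3: immh:immb = 64 - 3 = 0b0111101 */
      printf("USHR.4s #3 -> 0x%02x\n", immhb_right(32, 3));
      /* SHL  Vd.4s, Vn.4s, #3: immh:immb = 32 + 3 = 0b0100011 */
      printf("SHL.4s  #3 -> 0x%02x\n", immhb_left(32, 3));
      return 0;
   }
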
5190 011 01110 000 m 0 imm4 0 n d EXT Vd.16b, Vn.16b, Vm.16b, #imm4
5194 UInt vD = qregEnc(i->ARM64in.VExtV.dst);
5200 X000000 | (imm4 << 1), vN, vD);
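
As the "X000000 | (imm4 << 1)" fragment suggests, the #imm4 value sits in bits 14:11, i.e. shifted left by one inside the 6-bit field. A standalone worked check (not the VEX code) for EXT v0.16b, v1.16b, v2.16b, #3:

   #include <stdint.h>
   #include <stdio.h>

   int main ( void )
   {
      uint32_t imm4 = 3, m = 2, n = 1, d = 0;
      uint32_t w = (0x3u  << 29)        /* 011       */
                 | (0x70u << 21)        /* 01110 000 */
                 | (m     << 16)
                 | ((imm4 << 1) << 10)  /* 0 imm4 0  */
                 | (n     << 5)
                 |  d;
      printf("%08x\n", w);              /* 6e021820  */
      return 0;
   }
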
5243 /* INS Vd.D[0], rX
5244 0100 1110 0000 1000 0001 11 nn dd INS Vd.D[0], Xn
5257 1001 1110 0110 0111 0000 00 nn dd FMOV Vd.D[0], Xn
5269 INS Vd.D[0], Xlo; INS Vd.D[1], Xhi
5270 0100 1110 0000 1000 0001 11 nn dd INS Vd.D[0], Xn
5271 0100 1110 0001 1000 0001 11 nn dd INS Vd.D[1], Xn
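
A standalone check of the two INS encodings above, as used to move a 128-bit value from a GPR pair into a Q register. The base constants 0x4E081C00 and 0x4E181C00 are read off the bit patterns above, and the register choices (x2 -> v5.d[0], x3 -> v5.d[1]) are just an example:

   #include <stdint.h>
   #include <stdio.h>

   int main ( void )
   {
      uint32_t d = 5, xlo = 2, xhi = 3;
      uint32_t ins_d0 = 0x4E081C00u | (xlo << 5) | d;   /* INS v5.d[0], x2 */
      uint32_t ins_d1 = 0x4E181C00u | (xhi << 5) | d;   /* INS v5.d[1], x3 */
      printf("%08x %08x\n", ins_d0, ins_d1);            /* 4e081c45 4e181c65 */
      return 0;
   }
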
5313 010 01110 10 1 n 000111 n d MOV Vd.16b, Vn.16b