// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_X64

#include "src/macro-assembler.h"
#include "src/serialize.h"

namespace v8 {
namespace internal {

// -----------------------------------------------------------------------------
// Implementation of CpuFeatures

void CpuFeatures::ProbeImpl(bool cross_compile) {
  CPU cpu;
  CHECK(cpu.has_sse2());  // SSE2 support is mandatory.
  CHECK(cpu.has_cmov());  // CMOV support is mandatory.

  // Only use statically determined features for cross compile (snapshot).
  if (cross_compile) return;

  if (cpu.has_sse41() && FLAG_enable_sse4_1) supported_ |= 1u << SSE4_1;
  if (cpu.has_sse3() && FLAG_enable_sse3) supported_ |= 1u << SSE3;
  // SAHF is not generally available in long mode.
  if (cpu.has_sahf() && FLAG_enable_sahf) supported_ |= 1u << SAHF;
}

void CpuFeatures::PrintTarget() { }
void CpuFeatures::PrintFeatures() { }

// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// Patch the code at the current PC with a call to the target address.
// Additional guard int3 instructions can be added if required.
void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
  int code_size = Assembler::kCallSequenceLength + guard_bytes;

  // Create a code patcher.
  CodePatcher patcher(pc_, code_size);

  // Add a label for checking the size of the code used for returning.
#ifdef DEBUG
  Label check_codesize;
  patcher.masm()->bind(&check_codesize);
#endif

  // Patch the code.
  patcher.masm()->movp(kScratchRegister, reinterpret_cast<void*>(target),
                       Assembler::RelocInfoNone());
  patcher.masm()->call(kScratchRegister);

  // Check that the size of the code generated is as expected.
  ASSERT_EQ(Assembler::kCallSequenceLength,
            patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));

  // Add the requested number of int3 instructions after the call.
  for (int i = 0; i < guard_bytes; i++) {
    patcher.masm()->int3();
  }
}

void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
  // Patch the code at the current address with the supplied instructions.
  for (int i = 0; i < instruction_count; i++) {
    *(pc_ + i) = *(instructions + i);
  }

  // Indicate that code has changed.
  CPU::FlushICache(pc_, instruction_count);
}

// -----------------------------------------------------------------------------
// Register constants.

const int
    Register::kRegisterCodeByAllocationIndex[kMaxNumAllocatableRegisters] = {
  // rax, rbx, rdx, rcx, rsi, rdi, r8, r9, r11, r14, r15
  0, 3, 2, 1, 6, 7, 8, 9, 11, 14, 15
};

const int Register::kAllocationIndexByRegisterCode[kNumRegisters] = {
  0, 3, 2, 1, -1, -1, 4, 5, 6, 7, -1, 8, -1, -1, 9, 10
};

// -----------------------------------------------------------------------------
// Implementation of Operand

Operand::Operand(Register base, int32_t disp) : rex_(0) {
  len_ = 1;
  if (base.is(rsp) || base.is(r12)) {
    // SIB byte is needed to encode (rsp + offset) or (r12 + offset).
    set_sib(times_1, rsp, base);
  }

  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
    set_modrm(0, base);
  } else if (is_int8(disp)) {
    set_modrm(1, base);
    set_disp8(disp);
  } else {
    set_modrm(2, base);
    set_disp32(disp);
  }
}

Operand::Operand(Register base,
                 Register index,
                 ScaleFactor scale,
                 int32_t disp) : rex_(0) {
  ASSERT(!index.is(rsp));
  len_ = 1;
  set_sib(scale, index, base);
  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
    // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits
    // possibly set by set_sib.
    set_modrm(0, rsp);
  } else if (is_int8(disp)) {
    set_modrm(1, rsp);
    set_disp8(disp);
  } else {
    set_modrm(2, rsp);
    set_disp32(disp);
  }
}

Operand::Operand(Register index,
                 ScaleFactor scale,
                 int32_t disp) : rex_(0) {
  ASSERT(!index.is(rsp));
  len_ = 1;
  set_modrm(0, rsp);
  set_sib(scale, index, rbp);
  set_disp32(disp);
}

Operand::Operand(const Operand& operand, int32_t offset) {
  ASSERT(operand.len_ >= 1);
  // Operand encodes REX ModR/M [SIB] [Disp].
  byte modrm = operand.buf_[0];
  ASSERT(modrm < 0xC0);  // Disallow mode 3 (register target).
  bool has_sib = ((modrm & 0x07) == 0x04);
  byte mode = modrm & 0xC0;
  int disp_offset = has_sib ? 2 : 1;
  int base_reg = (has_sib ? operand.buf_[1] : modrm) & 0x07;
  // Mode 0 with rbp/r13 as ModR/M or SIB base register always has a 32-bit
  // displacement.
  bool is_baseless = (mode == 0) && (base_reg == 0x05);  // No base or RIP base.
  int32_t disp_value = 0;
  if (mode == 0x80 || is_baseless) {
    // Mode 2 or mode 0 with rbp/r13 as base: Word displacement.
    disp_value = *BitCast<const int32_t*>(&operand.buf_[disp_offset]);
  } else if (mode == 0x40) {
    // Mode 1: Byte displacement.
    disp_value = static_cast<signed char>(operand.buf_[disp_offset]);
  }

  // Write new operand with same registers, but with modified displacement.
  ASSERT(offset >= 0 ? disp_value + offset > disp_value
                     : disp_value + offset < disp_value);  // No overflow.
  disp_value += offset;
  rex_ = operand.rex_;
  if (!is_int8(disp_value) || is_baseless) {
    // Need 32 bits of displacement, mode 2 or mode 1 with register rbp/r13.
    buf_[0] = (modrm & 0x3f) | (is_baseless ? 0x00 : 0x80);
    len_ = disp_offset + 4;
    Memory::int32_at(&buf_[disp_offset]) = disp_value;
  } else if (disp_value != 0 || (base_reg == 0x05)) {
    // Need 8 bits of displacement.
    buf_[0] = (modrm & 0x3f) | 0x40;  // Mode 1.
    len_ = disp_offset + 1;
    buf_[disp_offset] = static_cast<byte>(disp_value);
  } else {
    // Need no displacement.
    buf_[0] = (modrm & 0x3f);  // Mode 0.
    len_ = disp_offset;
  }
  if (has_sib) {
    buf_[1] = operand.buf_[1];
  }
}

bool Operand::AddressUsesRegister(Register reg) const {
  int code = reg.code();
  ASSERT((buf_[0] & 0xC0) != 0xC0);  // Always a memory operand.
  // Start with only low three bits of base register. Initial decoding doesn't
  // distinguish on the REX.B bit.
  int base_code = buf_[0] & 0x07;
  if (base_code == rsp.code()) {
    // SIB byte present in buf_[1].
    // Check the index register from the SIB byte + REX.X prefix.
    int index_code = ((buf_[1] >> 3) & 0x07) | ((rex_ & 0x02) << 2);
    // Index code (including REX.X) of 0x04 (rsp) means no index register.
    if (index_code != rsp.code() && index_code == code) return true;
    // Add REX.B to get the full base register code.
    base_code = (buf_[1] & 0x07) | ((rex_ & 0x01) << 3);
    // A base register of 0x05 (rbp) with mod = 0 means no base register.
    if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
    return code == base_code;
  } else {
    // A base register with low bits of 0x05 (rbp or r13) and mod = 0 means
    // no base register.
    if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
    base_code |= ((rex_ & 0x01) << 3);
    return code == base_code;
  }
}

// -----------------------------------------------------------------------------
// Implementation of Assembler.

#ifdef GENERATED_CODE_COVERAGE
static void InitCoverageLog();
#endif

Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
    : AssemblerBase(isolate, buffer, buffer_size),
      code_targets_(100),
      positions_recorder_(this) {
  // Clear the buffer in debug mode unless it was provided by the
  // caller in which case we can't be sure it's okay to overwrite
  // existing code in it.
#ifdef DEBUG
  if (own_buffer_) {
    memset(buffer_, 0xCC, buffer_size_);  // int3
  }
#endif

  reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);

#ifdef GENERATED_CODE_COVERAGE
  InitCoverageLog();
#endif
}

void Assembler::GetCode(CodeDesc* desc) {
  // Finalize code (at this point overflow() may be true, but the gap ensures
  // that we are still not overlapping instructions and relocation info).
  ASSERT(pc_ <= reloc_info_writer.pos());  // No overlap.
  // Set up code descriptor.
  desc->buffer = buffer_;
  desc->buffer_size = buffer_size_;
  desc->instr_size = pc_offset();
  ASSERT(desc->instr_size > 0);  // Zero-size code objects upset the system.
  desc->reloc_size =
      static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
  desc->origin = this;
}

void Assembler::Align(int m) {
  ASSERT(IsPowerOf2(m));
  int delta = (m - (pc_offset() & (m - 1))) & (m - 1);
  Nop(delta);
}

void Assembler::CodeTargetAlign() {
  Align(16);  // Preferred alignment of jump targets on x64.
}

bool Assembler::IsNop(Address addr) {
  Address a = addr;
  while (*a == 0x66) a++;
  if (*a == 0x90) return true;
  if (a[0] == 0xf && a[1] == 0x1f) return true;
  return false;
}

void Assembler::bind_to(Label* L, int pos) {
  ASSERT(!L->is_bound());  // Label may only be bound once.
  ASSERT(0 <= pos && pos <= pc_offset());  // Position must be valid.
  if (L->is_linked()) {
    int current = L->pos();
    int next = long_at(current);
    while (next != current) {
      // Relative address, relative to point after address.
      int imm32 = pos - (current + sizeof(int32_t));
      long_at_put(current, imm32);
      current = next;
      next = long_at(next);
    }
    // Fix up last fixup on linked list.
    int last_imm32 = pos - (current + sizeof(int32_t));
    long_at_put(current, last_imm32);
  }
  while (L->is_near_linked()) {
    int fixup_pos = L->near_link_pos();
    int offset_to_next =
        static_cast<int>(*reinterpret_cast<int8_t*>(addr_at(fixup_pos)));
    ASSERT(offset_to_next <= 0);
    int disp = pos - (fixup_pos + sizeof(int8_t));
    CHECK(is_int8(disp));
    set_byte_at(fixup_pos, disp);
    if (offset_to_next < 0) {
      L->link_to(fixup_pos + offset_to_next, Label::kNear);
    } else {
      L->UnuseNear();
    }
  }
  L->bind_to(pos);
}

void Assembler::bind(Label* L) {
  bind_to(L, pc_offset());
}

void Assembler::GrowBuffer() {
  ASSERT(buffer_overflow());
  if (!own_buffer_) FATAL("external code buffer is too small");

  // Compute new buffer size.
  CodeDesc desc;  // the new buffer
  if (buffer_size_ < 4*KB) {
    desc.buffer_size = 4*KB;
  } else {
    desc.buffer_size = 2*buffer_size_;
  }
  // Some internal data structures overflow for very large buffers,
  // they must ensure that kMaximalBufferSize is not too large.
  if ((desc.buffer_size > kMaximalBufferSize) ||
      (desc.buffer_size > isolate()->heap()->MaxOldGenerationSize())) {
    V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
  }

  // Set up new buffer.
  desc.buffer = NewArray<byte>(desc.buffer_size);
  desc.instr_size = pc_offset();
  desc.reloc_size =
      static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));

  // Clear the buffer in debug mode. Use 'int3' instructions to make
  // sure to get into problems if we ever run uninitialized code.
#ifdef DEBUG
  memset(desc.buffer, 0xCC, desc.buffer_size);
#endif

  // Copy the data.
  intptr_t pc_delta = desc.buffer - buffer_;
  intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
                      (buffer_ + buffer_size_);
  MemMove(desc.buffer, buffer_, desc.instr_size);
  MemMove(rc_delta + reloc_info_writer.pos(), reloc_info_writer.pos(),
          desc.reloc_size);

  // Switch buffers.
  if (isolate() != NULL &&
      isolate()->assembler_spare_buffer() == NULL &&
      buffer_size_ == kMinimalBufferSize) {
    isolate()->set_assembler_spare_buffer(buffer_);
  } else {
    DeleteArray(buffer_);
  }
  buffer_ = desc.buffer;
  buffer_size_ = desc.buffer_size;
  pc_ += pc_delta;
  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
                               reloc_info_writer.last_pc() + pc_delta);

  // Relocate runtime entries.
  for (RelocIterator it(desc); !it.done(); it.next()) {
    RelocInfo::Mode rmode = it.rinfo()->rmode();
    if (rmode == RelocInfo::INTERNAL_REFERENCE) {
      intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc());
      if (*p != 0) {  // 0 means uninitialized.
        *p += pc_delta;
      }
    }
  }

  ASSERT(!buffer_overflow());
}

void Assembler::emit_operand(int code, const Operand& adr) {
  ASSERT(is_uint3(code));
  const unsigned length = adr.len_;
  ASSERT(length > 0);

  // Emit updated ModR/M byte containing the given register.
  ASSERT((adr.buf_[0] & 0x38) == 0);
  pc_[0] = adr.buf_[0] | code << 3;

  // Emit the rest of the encoded operand.
  for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
  pc_ += length;
}

// Assembler Instruction implementations.
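// Illustrative sketch (not part of the original source): the instruction
// implementations below all funnel register and memory operands through
// emit_modrm()/emit_operand(), which rely on the standard x86-64 ModR/M
// layout -- mode in bits 7..6, the /digit or register field in bits 5..3,
// and the r/m (base) field in bits 2..0. The helper name is hypothetical
// and only documents that packing.
static inline byte IllustrativeModRM(byte mode, byte reg_or_digit, byte rm) {
  return static_cast<byte>((mode << 6) | ((reg_or_digit & 0x7) << 3) |
                           (rm & 0x7));
}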
void Assembler::arithmetic_op(byte opcode,
                              Register reg,
                              const Operand& op,
                              int size) {
  EnsureSpace ensure_space(this);
  emit_rex(reg, op, size);
  emit(opcode);
  emit_operand(reg, op);
}

void Assembler::arithmetic_op(byte opcode,
                              Register reg,
                              Register rm_reg,
                              int size) {
  EnsureSpace ensure_space(this);
  ASSERT((opcode & 0xC6) == 2);
  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
    // Swap reg and rm_reg and change opcode operand order.
    emit_rex(rm_reg, reg, size);
    emit(opcode ^ 0x02);
    emit_modrm(rm_reg, reg);
  } else {
    emit_rex(reg, rm_reg, size);
    emit(opcode);
    emit_modrm(reg, rm_reg);
  }
}

void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
  EnsureSpace ensure_space(this);
  ASSERT((opcode & 0xC6) == 2);
  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
    // Swap reg and rm_reg and change opcode operand order.
    emit(0x66);
    emit_optional_rex_32(rm_reg, reg);
    emit(opcode ^ 0x02);
    emit_modrm(rm_reg, reg);
  } else {
    emit(0x66);
    emit_optional_rex_32(reg, rm_reg);
    emit(opcode);
    emit_modrm(reg, rm_reg);
  }
}

void Assembler::arithmetic_op_16(byte opcode,
                                 Register reg,
                                 const Operand& rm_reg) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(reg, rm_reg);
  emit(opcode);
  emit_operand(reg, rm_reg);
}

void Assembler::arithmetic_op_8(byte opcode, Register reg, const Operand& op) {
  EnsureSpace ensure_space(this);
  if (!reg.is_byte_register()) {
    // Register is not one of al, bl, cl, dl. Its encoding needs REX.
    emit_rex_32(reg);
  }
  emit(opcode);
  emit_operand(reg, op);
}

void Assembler::arithmetic_op_8(byte opcode, Register reg, Register rm_reg) {
  EnsureSpace ensure_space(this);
  ASSERT((opcode & 0xC6) == 2);
  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
    // Swap reg and rm_reg and change opcode operand order.
    if (!rm_reg.is_byte_register() || !reg.is_byte_register()) {
      // Register is not one of al, bl, cl, dl. Its encoding needs REX.
      emit_rex_32(rm_reg, reg);
    }
    emit(opcode ^ 0x02);
    emit_modrm(rm_reg, reg);
  } else {
    if (!reg.is_byte_register() || !rm_reg.is_byte_register()) {
      // Register is not one of al, bl, cl, dl. Its encoding needs REX.
      emit_rex_32(reg, rm_reg);
    }
    emit(opcode);
    emit_modrm(reg, rm_reg);
  }
}

void Assembler::immediate_arithmetic_op(byte subcode,
                                        Register dst,
                                        Immediate src,
                                        int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, size);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_modrm(subcode, dst);
    emit(src.value_);
  } else if (dst.is(rax)) {
    emit(0x05 | (subcode << 3));
    emitl(src.value_);
  } else {
    emit(0x81);
    emit_modrm(subcode, dst);
    emitl(src.value_);
  }
}

void Assembler::immediate_arithmetic_op(byte subcode,
                                        const Operand& dst,
                                        Immediate src,
                                        int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, size);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_operand(subcode, dst);
    emit(src.value_);
  } else {
    emit(0x81);
    emit_operand(subcode, dst);
    emitl(src.value_);
  }
}

void Assembler::immediate_arithmetic_op_16(byte subcode,
                                           Register dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  emit(0x66);  // Operand size override prefix.
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_modrm(subcode, dst);
    emit(src.value_);
  } else if (dst.is(rax)) {
    emit(0x05 | (subcode << 3));
    emitw(src.value_);
  } else {
    emit(0x81);
    emit_modrm(subcode, dst);
    emitw(src.value_);
  }
}

void Assembler::immediate_arithmetic_op_16(byte subcode,
                                           const Operand& dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  emit(0x66);  // Operand size override prefix.
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_operand(subcode, dst);
    emit(src.value_);
  } else {
    emit(0x81);
    emit_operand(subcode, dst);
    emitw(src.value_);
  }
}

void Assembler::immediate_arithmetic_op_8(byte subcode,
                                          const Operand& dst,
                                          Immediate src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
  emit(0x80);
  emit_operand(subcode, dst);
  emit(src.value_);
}

void Assembler::immediate_arithmetic_op_8(byte subcode,
                                          Register dst,
                                          Immediate src) {
  EnsureSpace ensure_space(this);
  if (!dst.is_byte_register()) {
    // Register is not one of al, bl, cl, dl. Its encoding needs REX.
    emit_rex_32(dst);
  }
  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
  emit(0x80);
  emit_modrm(subcode, dst);
  emit(src.value_);
}

void Assembler::shift(Register dst,
                      Immediate shift_amount,
                      int subcode,
                      int size) {
  EnsureSpace ensure_space(this);
  ASSERT(size == kInt64Size ? is_uint6(shift_amount.value_)
                            : is_uint5(shift_amount.value_));
  if (shift_amount.value_ == 1) {
    emit_rex(dst, size);
    emit(0xD1);
    emit_modrm(subcode, dst);
  } else {
    emit_rex(dst, size);
    emit(0xC1);
    emit_modrm(subcode, dst);
    emit(shift_amount.value_);
  }
}

void Assembler::shift(Register dst, int subcode, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, size);
  emit(0xD3);
  emit_modrm(subcode, dst);
}

void Assembler::bt(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xA3);
  emit_operand(src, dst);
}

void Assembler::bts(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xAB);
  emit_operand(src, dst);
}

void Assembler::bsrl(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xBD);
  emit_modrm(dst, src);
}

void Assembler::call(Label* L) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  if (L->is_bound()) {
    int offset = L->pos() - pc_offset() - sizeof(int32_t);
    ASSERT(offset <= 0);
    emitl(offset);
  } else if (L->is_linked()) {
    emitl(L->pos());
    L->link_to(pc_offset() - sizeof(int32_t));
  } else {
    ASSERT(L->is_unused());
    int32_t current = pc_offset();
    emitl(current);
    L->link_to(current);
  }
}

void Assembler::call(Address entry, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  emit_runtime_entry(entry, rmode);
}

void Assembler::call(Handle<Code> target,
                     RelocInfo::Mode rmode,
                     TypeFeedbackId ast_id) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  emit_code_target(target, rmode, ast_id);
}

void Assembler::call(Register adr) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // Opcode: FF /2 r64.
  emit_optional_rex_32(adr);
  emit(0xFF);
  emit_modrm(0x2, adr);
}

void Assembler::call(const Operand& op) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // Opcode: FF /2 m64.
  emit_optional_rex_32(op);
  emit(0xFF);
  emit_operand(0x2, op);
}

// Calls directly to the given address using a relative offset.
// Should only ever be used in Code objects for calls within the
// same Code object. Should not be used when generating new code (use labels),
// but only when patching existing code.
void Assembler::call(Address target) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  Address source = pc_ + 4;
  intptr_t displacement = target - source;
  ASSERT(is_int32(displacement));
  emitl(static_cast<int32_t>(displacement));
}

void Assembler::clc() {
  EnsureSpace ensure_space(this);
  emit(0xF8);
}

void Assembler::cld() {
  EnsureSpace ensure_space(this);
  emit(0xFC);
}

void Assembler::cdq() {
  EnsureSpace ensure_space(this);
  emit(0x99);
}

void Assembler::cmovq(Condition cc, Register dst, Register src) {
  if (cc == always) {
    movq(dst, src);
  } else if (cc == never) {
    return;
  }
  // No need to check CpuInfo for CMOV support, it's a required part of the
  // 64-bit architecture.
  ASSERT(cc >= 0);  // Use mov for unconditional moves.
  EnsureSpace ensure_space(this);
  // Opcode: REX.W 0f 40 + cc /r.
  emit_rex_64(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_modrm(dst, src);
}

void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
  if (cc == always) {
    movq(dst, src);
  } else if (cc == never) {
    return;
  }
  ASSERT(cc >= 0);
  EnsureSpace ensure_space(this);
  // Opcode: REX.W 0f 40 + cc /r.
  emit_rex_64(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_operand(dst, src);
}

void Assembler::cmovl(Condition cc, Register dst, Register src) {
  if (cc == always) {
    movl(dst, src);
  } else if (cc == never) {
    return;
  }
  ASSERT(cc >= 0);
  EnsureSpace ensure_space(this);
  // Opcode: 0f 40 + cc /r.
  emit_optional_rex_32(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_modrm(dst, src);
}

void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
  if (cc == always) {
    movl(dst, src);
  } else if (cc == never) {
    return;
  }
  ASSERT(cc >= 0);
  EnsureSpace ensure_space(this);
  // Opcode: 0f 40 + cc /r.
  emit_optional_rex_32(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_operand(dst, src);
}

void Assembler::cmpb_al(Immediate imm8) {
  ASSERT(is_int8(imm8.value_) || is_uint8(imm8.value_));
  EnsureSpace ensure_space(this);
  emit(0x3c);
  emit(imm8.value_);
}

void Assembler::cpuid() {
  EnsureSpace ensure_space(this);
  emit(0x0F);
  emit(0xA2);
}

void Assembler::cqo() {
  EnsureSpace ensure_space(this);
  emit_rex_64();
  emit(0x99);
}

void Assembler::emit_dec(Register dst, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, size);
  emit(0xFF);
  emit_modrm(0x1, dst);
}

void Assembler::emit_dec(const Operand& dst, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, size);
  emit(0xFF);
  emit_operand(1, dst);
}

void Assembler::decb(Register dst) {
  EnsureSpace ensure_space(this);
  if (!dst.is_byte_register()) {
    // Register is not one of al, bl, cl, dl. Its encoding needs REX.
    emit_rex_32(dst);
  }
  emit(0xFE);
  emit_modrm(0x1, dst);
}

void Assembler::decb(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xFE);
  emit_operand(1, dst);
}

void Assembler::enter(Immediate size) {
  EnsureSpace ensure_space(this);
  emit(0xC8);
  emitw(size.value_);  // 16 bit operand, always.
  emit(0);
}

void Assembler::hlt() {
  EnsureSpace ensure_space(this);
  emit(0xF4);
}

void Assembler::emit_idiv(Register src, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(src, size);
  emit(0xF7);
  emit_modrm(0x7, src);
}

void Assembler::emit_imul(Register src, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(src, size);
  emit(0xF7);
  emit_modrm(0x5, src);
}

void Assembler::emit_imul(Register dst, Register src, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, src, size);
  emit(0x0F);
  emit(0xAF);
  emit_modrm(dst, src);
}

void Assembler::emit_imul(Register dst, const Operand& src, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, src, size);
  emit(0x0F);
  emit(0xAF);
  emit_operand(dst, src);
}

void Assembler::emit_imul(Register dst, Register src, Immediate imm, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, src, size);
  if (is_int8(imm.value_)) {
    emit(0x6B);
    emit_modrm(dst, src);
    emit(imm.value_);
  } else {
    emit(0x69);
    emit_modrm(dst, src);
    emitl(imm.value_);
  }
}

void Assembler::emit_inc(Register dst, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, size);
  emit(0xFF);
  emit_modrm(0x0, dst);
}

void Assembler::emit_inc(const Operand& dst, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, size);
  emit(0xFF);
  emit_operand(0, dst);
}

void Assembler::int3() {
  EnsureSpace ensure_space(this);
  emit(0xCC);
}

void Assembler::j(Condition cc, Label* L, Label::Distance distance) {
  if (cc == always) {
    jmp(L);
    return;
  } else if (cc == never) {
    return;
  }
  EnsureSpace ensure_space(this);
  ASSERT(is_uint4(cc));
  if (L->is_bound()) {
    const int short_size = 2;
    const int long_size = 6;
    int offs = L->pos() - pc_offset();
    ASSERT(offs <= 0);
    // Determine whether we can use 1-byte offsets for backwards branches,
    // which have a max range of 128 bytes.

    // We also need to check predictable_code_size() flag here, because on x64,
    // when the full code generator recompiles code for debugging, some places
    // need to be padded out to a certain size. The debugger is keeping track of
    // how often it did this so that it can adjust return addresses on the
    // stack, but if the size of jump instructions can also change, that's not
    // enough and the calculated offsets would be incorrect.
    if (is_int8(offs - short_size) && !predictable_code_size()) {
      // 0111 tttn #8-bit disp.
      emit(0x70 | cc);
      emit((offs - short_size) & 0xFF);
    } else {
      // 0000 1111 1000 tttn #32-bit disp.
      emit(0x0F);
      emit(0x80 | cc);
      emitl(offs - long_size);
    }
  } else if (distance == Label::kNear) {
    // 0111 tttn #8-bit disp
    emit(0x70 | cc);
    byte disp = 0x00;
    if (L->is_near_linked()) {
      int offset = L->near_link_pos() - pc_offset();
      ASSERT(is_int8(offset));
      disp = static_cast<byte>(offset & 0xFF);
    }
    L->link_to(pc_offset(), Label::kNear);
    emit(disp);
  } else if (L->is_linked()) {
    // 0000 1111 1000 tttn #32-bit disp.
    emit(0x0F);
    emit(0x80 | cc);
    emitl(L->pos());
    L->link_to(pc_offset() - sizeof(int32_t));
  } else {
    ASSERT(L->is_unused());
    emit(0x0F);
    emit(0x80 | cc);
    int32_t current = pc_offset();
    emitl(current);
    L->link_to(current);
  }
}

void Assembler::j(Condition cc, Address entry, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  EnsureSpace ensure_space(this);
  ASSERT(is_uint4(cc));
  emit(0x0F);
  emit(0x80 | cc);
  emit_runtime_entry(entry, rmode);
}

void Assembler::j(Condition cc,
                  Handle<Code> target,
                  RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint4(cc));
  // 0000 1111 1000 tttn #32-bit disp.
  emit(0x0F);
  emit(0x80 | cc);
  emit_code_target(target, rmode);
}

void Assembler::jmp(Label* L, Label::Distance distance) {
  EnsureSpace ensure_space(this);
  const int short_size = sizeof(int8_t);
  const int long_size = sizeof(int32_t);
  if (L->is_bound()) {
    int offs = L->pos() - pc_offset() - 1;
    ASSERT(offs <= 0);
    if (is_int8(offs - short_size) && !predictable_code_size()) {
      // 1110 1011 #8-bit disp.
      emit(0xEB);
      emit((offs - short_size) & 0xFF);
    } else {
      // 1110 1001 #32-bit disp.
      emit(0xE9);
      emitl(offs - long_size);
    }
  } else if (distance == Label::kNear) {
    emit(0xEB);
    byte disp = 0x00;
    if (L->is_near_linked()) {
      int offset = L->near_link_pos() - pc_offset();
      ASSERT(is_int8(offset));
      disp = static_cast<byte>(offset & 0xFF);
    }
    L->link_to(pc_offset(), Label::kNear);
    emit(disp);
  } else if (L->is_linked()) {
    // 1110 1001 #32-bit disp.
    emit(0xE9);
    emitl(L->pos());
    L->link_to(pc_offset() - long_size);
  } else {
    // 1110 1001 #32-bit disp.
    ASSERT(L->is_unused());
    emit(0xE9);
    int32_t current = pc_offset();
    emitl(current);
    L->link_to(current);
  }
}

void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  // 1110 1001 #32-bit disp.
  emit(0xE9);
  emit_code_target(target, rmode);
}

void Assembler::jmp(Address entry, RelocInfo::Mode rmode) {
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  EnsureSpace ensure_space(this);
  ASSERT(RelocInfo::IsRuntimeEntry(rmode));
  emit(0xE9);
  emit_runtime_entry(entry, rmode);
}

void Assembler::jmp(Register target) {
  EnsureSpace ensure_space(this);
  // Opcode FF/4 r64.
  emit_optional_rex_32(target);
  emit(0xFF);
  emit_modrm(0x4, target);
}

void Assembler::jmp(const Operand& src) {
  EnsureSpace ensure_space(this);
  // Opcode FF/4 m64.
  emit_optional_rex_32(src);
  emit(0xFF);
  emit_operand(0x4, src);
}

void Assembler::emit_lea(Register dst, const Operand& src, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, src, size);
  emit(0x8D);
  emit_operand(dst, src);
}

void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
  EnsureSpace ensure_space(this);
  if (kPointerSize == kInt64Size) {
    emit(0x48);  // REX.W
    emit(0xA1);
    emitp(value, mode);
  } else {
    ASSERT(kPointerSize == kInt32Size);
    emit(0xA1);
    emitp(value, mode);
    // In 64-bit mode, need to zero extend the operand to 8 bytes.
    // See 2.2.1.4 in Intel64 and IA32 Architectures Software
    // Developer's Manual Volume 2.
    emitl(0);
  }
}

void Assembler::load_rax(ExternalReference ref) {
  load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
}

void Assembler::leave() {
  EnsureSpace ensure_space(this);
  emit(0xC9);
}

void Assembler::movb(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  if (!dst.is_byte_register()) {
    // Register is not one of al, bl, cl, dl. Its encoding needs REX.
    emit_rex_32(dst, src);
  } else {
    emit_optional_rex_32(dst, src);
  }
  emit(0x8A);
  emit_operand(dst, src);
}

void Assembler::movb(Register dst, Immediate imm) {
  EnsureSpace ensure_space(this);
  if (!dst.is_byte_register()) {
    emit_rex_32(dst);
  }
  emit(0xB0 + dst.low_bits());
  emit(imm.value_);
}

void Assembler::movb(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  if (!src.is_byte_register()) {
    emit_rex_32(src, dst);
  } else {
    emit_optional_rex_32(src, dst);
  }
  emit(0x88);
  emit_operand(src, dst);
}

void Assembler::movb(const Operand& dst, Immediate imm) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xC6);
  emit_operand(0x0, dst);
  emit(static_cast<byte>(imm.value_));
}

void Assembler::movw(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x8B);
  emit_operand(dst, src);
}

void Assembler::movw(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(src, dst);
  emit(0x89);
  emit_operand(src, dst);
}

void Assembler::movw(const Operand& dst, Immediate imm) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst);
  emit(0xC7);
  emit_operand(0x0, dst);
  emit(static_cast<byte>(imm.value_ & 0xff));
  emit(static_cast<byte>(imm.value_ >> 8));
}

void Assembler::emit_mov(Register dst, const Operand& src, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, src, size);
  emit(0x8B);
  emit_operand(dst, src);
}

void Assembler::emit_mov(Register dst, Register src, int size) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    emit_rex(src, dst, size);
    emit(0x89);
    emit_modrm(src, dst);
  } else {
    emit_rex(dst, src, size);
    emit(0x8B);
    emit_modrm(dst, src);
  }
}

void Assembler::emit_mov(const Operand& dst, Register src, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(src, dst, size);
  emit(0x89);
  emit_operand(src, dst);
}

void Assembler::emit_mov(Register dst, Immediate value, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, size);
  if (size == kInt64Size) {
    emit(0xC7);
    emit_modrm(0x0, dst);
  } else {
    ASSERT(size == kInt32Size);
    emit(0xB8 + dst.low_bits());
  }
  emit(value);
}

void Assembler::emit_mov(const Operand& dst, Immediate value, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, size);
  emit(0xC7);
  emit_operand(0x0, dst);
  emit(value);
}
void Assembler::movp(Register dst, void* value, RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, kPointerSize);
  emit(0xB8 | dst.low_bits());
  emitp(value, rmode);
}

void Assembler::movq(Register dst, int64_t value) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xB8 | dst.low_bits());
  emitq(value);
}

void Assembler::movq(Register dst, uint64_t value) {
  movq(dst, static_cast<int64_t>(value));
}

// Loads the ip-relative location of the src label into the target location
// (as a 32-bit offset sign extended to 64-bit).
void Assembler::movl(const Operand& dst, Label* src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xC7);
  emit_operand(0, dst);
  if (src->is_bound()) {
    int offset = src->pos() - pc_offset() - sizeof(int32_t);
    ASSERT(offset <= 0);
    emitl(offset);
  } else if (src->is_linked()) {
    emitl(src->pos());
    src->link_to(pc_offset() - sizeof(int32_t));
  } else {
    ASSERT(src->is_unused());
    int32_t current = pc_offset();
    emitl(current);
    src->link_to(current);
  }
}

void Assembler::movsxbl(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xBE);
  emit_operand(dst, src);
}

void Assembler::movsxbq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xBE);
  emit_operand(dst, src);
}

void Assembler::movsxwl(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xBF);
  emit_operand(dst, src);
}

void Assembler::movsxwq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xBF);
  emit_operand(dst, src);
}

void Assembler::movsxlq(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x63);
  emit_modrm(dst, src);
}

void Assembler::movsxlq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x63);
  emit_operand(dst, src);
}

void Assembler::emit_movzxb(Register dst, const Operand& src, int size) {
  EnsureSpace ensure_space(this);
  // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
  // there is no need to make this a 64 bit operation.
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB6);
  emit_operand(dst, src);
}

void Assembler::emit_movzxw(Register dst, const Operand& src, int size) {
  EnsureSpace ensure_space(this);
  // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
  // there is no need to make this a 64 bit operation.
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB7);
  emit_operand(dst, src);
}

void Assembler::emit_movzxw(Register dst, Register src, int size) {
  EnsureSpace ensure_space(this);
  // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
  // there is no need to make this a 64 bit operation.
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB7);
  emit_modrm(dst, src);
}

void Assembler::repmovsb() {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit(0xA4);
}

void Assembler::repmovsw() {
  EnsureSpace ensure_space(this);
  emit(0x66);  // Operand size override.
  emit(0xF3);
  emit(0xA4);
}

void Assembler::emit_repmovs(int size) {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit_rex(size);
  emit(0xA5);
}

void Assembler::mul(Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src);
  emit(0xF7);
  emit_modrm(0x4, src);
}

void Assembler::emit_neg(Register dst, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, size);
  emit(0xF7);
  emit_modrm(0x3, dst);
}

void Assembler::emit_neg(const Operand& dst, int size) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xF7);
  emit_operand(3, dst);
}

void Assembler::nop() {
  EnsureSpace ensure_space(this);
  emit(0x90);
}

void Assembler::emit_not(Register dst, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, size);
  emit(0xF7);
  emit_modrm(0x2, dst);
}

void Assembler::emit_not(const Operand& dst, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(dst, size);
  emit(0xF7);
  emit_operand(2, dst);
}

void Assembler::Nop(int n) {
  // The recommended multi-byte sequences of NOP instructions from the Intel 64
  // and IA-32 Architectures Software Developer's Manual.
  //
  // Length  Assembly                                Byte Sequence
  // 2 bytes 66 NOP                                  66 90H
  // 3 bytes NOP DWORD ptr [EAX]                     0F 1F 00H
  // 4 bytes NOP DWORD ptr [EAX + 00H]               0F 1F 40 00H
  // 5 bytes NOP DWORD ptr [EAX + EAX*1 + 00H]       0F 1F 44 00 00H
  // 6 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 00H]    66 0F 1F 44 00 00H
  // 7 bytes NOP DWORD ptr [EAX + 00000000H]         0F 1F 80 00 00 00 00H
  // 8 bytes NOP DWORD ptr [EAX + EAX*1 + 00000000H] 0F 1F 84 00 00 00 00 00H
  // 9 bytes 66 NOP DWORD ptr [EAX + EAX*1 +         66 0F 1F 84 00 00 00 00
  //         00000000H]                              00H

  EnsureSpace ensure_space(this);
  while (n > 0) {
    switch (n) {
      case 2:
        emit(0x66);
      case 1:
        emit(0x90);
        return;
      case 3:
        emit(0x0f);
        emit(0x1f);
        emit(0x00);
        return;
      case 4:
        emit(0x0f);
        emit(0x1f);
        emit(0x40);
        emit(0x00);
        return;
      case 6:
        emit(0x66);
      case 5:
        emit(0x0f);
        emit(0x1f);
        emit(0x44);
        emit(0x00);
        emit(0x00);
        return;
      case 7:
        emit(0x0f);
        emit(0x1f);
        emit(0x80);
        emit(0x00);
        emit(0x00);
        emit(0x00);
        emit(0x00);
        return;
      default:
      case 11:
        emit(0x66);
        n--;
      case 10:
        emit(0x66);
        n--;
      case 9:
        emit(0x66);
        n--;
      case 8:
        emit(0x0f);
        emit(0x1f);
        emit(0x84);
        emit(0x00);
        emit(0x00);
        emit(0x00);
        emit(0x00);
        emit(0x00);
        n -= 8;
    }
  }
}

void Assembler::popq(Register dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0x58 | dst.low_bits());
}

void Assembler::popq(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0x8F);
  emit_operand(0, dst);
}

void Assembler::popfq() {
  EnsureSpace ensure_space(this);
  emit(0x9D);
}

void Assembler::pushq(Register src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(src);
  emit(0x50 | src.low_bits());
}

void Assembler::pushq(const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(src);
  emit(0xFF);
  emit_operand(6, src);
}

void Assembler::pushq(Immediate value) {
  EnsureSpace ensure_space(this);
  if (is_int8(value.value_)) {
    emit(0x6A);
    emit(value.value_);  // Emit low byte of value.
  } else {
    emit(0x68);
    emitl(value.value_);
  }
}

void Assembler::pushq_imm32(int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit(0x68);
  emitl(imm32);
}

void Assembler::pushfq() {
  EnsureSpace ensure_space(this);
  emit(0x9C);
}

void Assembler::ret(int imm16) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint16(imm16));
  if (imm16 == 0) {
    emit(0xC3);
  } else {
    emit(0xC2);
    emit(imm16 & 0xFF);
    emit((imm16 >> 8) & 0xFF);
  }
}

void Assembler::setcc(Condition cc, Register reg) {
  if (cc > last_condition) {
    movb(reg, Immediate(cc == always ? 1 : 0));
    return;
  }
  EnsureSpace ensure_space(this);
  ASSERT(is_uint4(cc));
  if (!reg.is_byte_register()) {  // Use x64 byte registers, where different.
    emit_rex_32(reg);
  }
  emit(0x0F);
  emit(0x90 | cc);
  emit_modrm(0x0, reg);
}

void Assembler::shld(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xA5);
  emit_modrm(src, dst);
}

void Assembler::shrd(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xAD);
  emit_modrm(src, dst);
}

void Assembler::emit_xchg(Register dst, Register src, int size) {
  EnsureSpace ensure_space(this);
  if (src.is(rax) || dst.is(rax)) {  // Single-byte encoding.
    Register other = src.is(rax) ? dst : src;
    emit_rex(other, size);
    emit(0x90 | other.low_bits());
  } else if (dst.low_bits() == 4) {
    emit_rex(dst, src, size);
    emit(0x87);
    emit_modrm(dst, src);
  } else {
    emit_rex(src, dst, size);
    emit(0x87);
    emit_modrm(src, dst);
  }
}

void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
  EnsureSpace ensure_space(this);
  if (kPointerSize == kInt64Size) {
    emit(0x48);  // REX.W
    emit(0xA3);
    emitp(dst, mode);
  } else {
    ASSERT(kPointerSize == kInt32Size);
    emit(0xA3);
    emitp(dst, mode);
    // In 64-bit mode, need to zero extend the operand to 8 bytes.
    // See 2.2.1.4 in Intel64 and IA32 Architectures Software
    // Developer's Manual Volume 2.
    emitl(0);
  }
}

void Assembler::store_rax(ExternalReference ref) {
  store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
}

void Assembler::testb(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    emit_rex_32(src, dst);
    emit(0x84);
    emit_modrm(src, dst);
  } else {
    if (!dst.is_byte_register() || !src.is_byte_register()) {
      // Register is not one of al, bl, cl, dl. Its encoding needs REX.
      emit_rex_32(dst, src);
    }
    emit(0x84);
    emit_modrm(dst, src);
  }
}

void Assembler::testb(Register reg, Immediate mask) {
  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
  EnsureSpace ensure_space(this);
  if (reg.is(rax)) {
    emit(0xA8);
    emit(mask.value_);  // Low byte emitted.
  } else {
    if (!reg.is_byte_register()) {
      // Register is not one of al, bl, cl, dl. Its encoding needs REX.
      emit_rex_32(reg);
    }
    emit(0xF6);
    emit_modrm(0x0, reg);
    emit(mask.value_);  // Low byte emitted.
  }
}

void Assembler::testb(const Operand& op, Immediate mask) {
  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(rax, op);
  emit(0xF6);
  emit_operand(rax, op);  // Operation code 0.
  emit(mask.value_);  // Low byte emitted.
}

void Assembler::testb(const Operand& op, Register reg) {
  EnsureSpace ensure_space(this);
  if (!reg.is_byte_register()) {
    // Register is not one of al, bl, cl, dl. Its encoding needs REX.
    emit_rex_32(reg, op);
  } else {
    emit_optional_rex_32(reg, op);
  }
  emit(0x84);
  emit_operand(reg, op);
}

void Assembler::emit_test(Register dst, Register src, int size) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    emit_rex(src, dst, size);
    emit(0x85);
    emit_modrm(src, dst);
  } else {
    emit_rex(dst, src, size);
    emit(0x85);
    emit_modrm(dst, src);
  }
}

void Assembler::emit_test(Register reg, Immediate mask, int size) {
  // testl with a mask that fits in the low byte is exactly testb.
  if (is_uint8(mask.value_)) {
    testb(reg, mask);
    return;
  }
  EnsureSpace ensure_space(this);
  if (reg.is(rax)) {
    emit_rex(rax, size);
    emit(0xA9);
    emit(mask);
  } else {
    emit_rex(reg, size);
    emit(0xF7);
    emit_modrm(0x0, reg);
    emit(mask);
  }
}

void Assembler::emit_test(const Operand& op, Immediate mask, int size) {
  // testl with a mask that fits in the low byte is exactly testb.
  if (is_uint8(mask.value_)) {
    testb(op, mask);
    return;
  }
  EnsureSpace ensure_space(this);
  emit_rex(rax, op, size);
  emit(0xF7);
  emit_operand(rax, op);  // Operation code 0.
  emit(mask);
}

void Assembler::emit_test(const Operand& op, Register reg, int size) {
  EnsureSpace ensure_space(this);
  emit_rex(reg, op, size);
  emit(0x85);
  emit_operand(reg, op);
}

// FPU instructions.
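// Illustrative sketch (assumption, for exposition only): the register forms
// of the x87 instructions below are all encoded by emit_farith() as an
// opcode byte followed by a base byte plus the stack index, e.g. fld(i) is
// D9 C0+i, so fld(2) emits D9 C2. The helper name is hypothetical and merely
// restates that pattern.
static inline void IllustrativeX87Pair(byte* out, byte b1, byte b2, int i) {
  out[0] = b1;                         // Opcode byte, e.g. 0xD9 for fld.
  out[1] = static_cast<byte>(b2 + i);  // Base byte plus ST(i) index.
}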
void Assembler::fld(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD9, 0xC0, i);
}

void Assembler::fld1() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xE8);
}

void Assembler::fldz() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xEE);
}

void Assembler::fldpi() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xEB);
}

void Assembler::fldln2() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xED);
}

void Assembler::fld_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xD9);
  emit_operand(0, adr);
}

void Assembler::fld_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDD);
  emit_operand(0, adr);
}

void Assembler::fstp_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xD9);
  emit_operand(3, adr);
}

void Assembler::fstp_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDD);
  emit_operand(3, adr);
}

void Assembler::fstp(int index) {
  ASSERT(is_uint3(index));
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xD8, index);
}

void Assembler::fild_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(0, adr);
}

void Assembler::fild_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDF);
  emit_operand(5, adr);
}

void Assembler::fistp_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(3, adr);
}

void Assembler::fisttp_s(const Operand& adr) {
  ASSERT(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(1, adr);
}

void Assembler::fisttp_d(const Operand& adr) {
  ASSERT(IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDD);
  emit_operand(1, adr);
}

void Assembler::fist_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(2, adr);
}

void Assembler::fistp_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDF);
  emit_operand(7, adr);
}

void Assembler::fabs() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xE1);
}

void Assembler::fchs() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xE0);
}

void Assembler::fcos() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xFF);
}

void Assembler::fsin() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xFE);
}

void Assembler::fptan() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF2);
}

void Assembler::fyl2x() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF1);
}

void Assembler::f2xm1() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF0);
}

void Assembler::fscale() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xFD);
}

void Assembler::fninit() {
  EnsureSpace ensure_space(this);
  emit(0xDB);
  emit(0xE3);
}

void Assembler::fadd(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xC0, i);
}

void Assembler::fsub(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xE8, i);
}

void Assembler::fisub_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDA);
  emit_operand(4, adr);
}

void Assembler::fmul(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xC8, i);
}

void Assembler::fdiv(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xF8, i);
}

void Assembler::faddp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xC0, i);
}

void Assembler::fsubp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xE8, i);
}

void Assembler::fsubrp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xE0, i);
}

void Assembler::fmulp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xC8, i);
}

void Assembler::fdivp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xF8, i);
}

void Assembler::fprem() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF8);
}

void Assembler::fprem1() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF5);
}

void Assembler::fxch(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD9, 0xC8, i);
}

void Assembler::fincstp() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF7);
}

void Assembler::ffree(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xC0, i);
}

void Assembler::ftst() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xE4);
}

void Assembler::fucomp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xE8, i);
}

void Assembler::fucompp() {
  EnsureSpace ensure_space(this);
  emit(0xDA);
  emit(0xE9);
}

void Assembler::fucomi(int i) {
  EnsureSpace ensure_space(this);
  emit(0xDB);
  emit(0xE8 + i);
}

void Assembler::fucomip() {
  EnsureSpace ensure_space(this);
  emit(0xDF);
  emit(0xE9);
}

void Assembler::fcompp() {
  EnsureSpace ensure_space(this);
  emit(0xDE);
  emit(0xD9);
}

void Assembler::fnstsw_ax() {
  EnsureSpace ensure_space(this);
  emit(0xDF);
  emit(0xE0);
}

void Assembler::fwait() {
  EnsureSpace ensure_space(this);
  emit(0x9B);
}

void Assembler::frndint() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xFC);
}

void Assembler::fnclex() {
  EnsureSpace ensure_space(this);
  emit(0xDB);
  emit(0xE2);
}

void Assembler::sahf() {
  // TODO(X64): Test for presence. Not all 64-bit Intel CPUs have sahf
  // in 64-bit mode. Test CpuID.
  ASSERT(IsEnabled(SAHF));
  EnsureSpace ensure_space(this);
  emit(0x9E);
}

void Assembler::emit_farith(int b1, int b2, int i) {
  ASSERT(is_uint8(b1) && is_uint8(b2));  // wrong opcode
  ASSERT(is_uint3(i));  // illegal stack offset
  emit(b1);
  emit(b2 + i);
}

// SSE operations.

void Assembler::andps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x54);
  emit_sse_operand(dst, src);
}

void Assembler::andps(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x54);
  emit_sse_operand(dst, src);
}

void Assembler::orps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x56);
  emit_sse_operand(dst, src);
}

void Assembler::orps(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x56);
  emit_sse_operand(dst, src);
}

void Assembler::xorps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x57);
  emit_sse_operand(dst, src);
}

void Assembler::xorps(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x57);
  emit_sse_operand(dst, src);
}

void Assembler::addps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x58);
  emit_sse_operand(dst, src);
}

void Assembler::addps(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x58);
  emit_sse_operand(dst, src);
}

void Assembler::subps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x5C);
  emit_sse_operand(dst, src);
}

void Assembler::subps(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x5C);
  emit_sse_operand(dst, src);
}

void Assembler::mulps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x59);
  emit_sse_operand(dst, src);
}

void Assembler::mulps(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x59);
  emit_sse_operand(dst, src);
}

void Assembler::divps(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x5E);
  emit_sse_operand(dst, src);
}

void Assembler::divps(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x5E);
  emit_sse_operand(dst, src);
}

// SSE 2 operations.
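// Illustrative sketch (assumption, not part of the original source): the
// SSE2 forms below follow the pattern "mandatory prefix (66, F2 or F3),
// optional REX, 0F, opcode, ModR/M". For example, movsd xmm1, xmm2 encodes
// as F2 0F 10 CA when no REX prefix is required; the array name is
// hypothetical and only records that byte sequence.
static const byte kIllustrativeMovsdBytes[] = { 0xF2, 0x0F, 0x10, 0xCA };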
2334 2335 void Assembler::movd(XMMRegister dst, Register src) { 2336 EnsureSpace ensure_space(this); 2337 emit(0x66); 2338 emit_optional_rex_32(dst, src); 2339 emit(0x0F); 2340 emit(0x6E); 2341 emit_sse_operand(dst, src); 2342 } 2343 2344 2345 void Assembler::movd(Register dst, XMMRegister src) { 2346 EnsureSpace ensure_space(this); 2347 emit(0x66); 2348 emit_optional_rex_32(src, dst); 2349 emit(0x0F); 2350 emit(0x7E); 2351 emit_sse_operand(src, dst); 2352 } 2353 2354 2355 void Assembler::movq(XMMRegister dst, Register src) { 2356 EnsureSpace ensure_space(this); 2357 emit(0x66); 2358 emit_rex_64(dst, src); 2359 emit(0x0F); 2360 emit(0x6E); 2361 emit_sse_operand(dst, src); 2362 } 2363 2364 2365 void Assembler::movq(Register dst, XMMRegister src) { 2366 EnsureSpace ensure_space(this); 2367 emit(0x66); 2368 emit_rex_64(src, dst); 2369 emit(0x0F); 2370 emit(0x7E); 2371 emit_sse_operand(src, dst); 2372 } 2373 2374 2375 void Assembler::movq(XMMRegister dst, XMMRegister src) { 2376 EnsureSpace ensure_space(this); 2377 if (dst.low_bits() == 4) { 2378 // Avoid unnecessary SIB byte. 2379 emit(0xf3); 2380 emit_optional_rex_32(dst, src); 2381 emit(0x0F); 2382 emit(0x7e); 2383 emit_sse_operand(dst, src); 2384 } else { 2385 emit(0x66); 2386 emit_optional_rex_32(src, dst); 2387 emit(0x0F); 2388 emit(0xD6); 2389 emit_sse_operand(src, dst); 2390 } 2391 } 2392 2393 2394 void Assembler::movdqa(const Operand& dst, XMMRegister src) { 2395 EnsureSpace ensure_space(this); 2396 emit(0x66); 2397 emit_rex_64(src, dst); 2398 emit(0x0F); 2399 emit(0x7F); 2400 emit_sse_operand(src, dst); 2401 } 2402 2403 2404 void Assembler::movdqa(XMMRegister dst, const Operand& src) { 2405 EnsureSpace ensure_space(this); 2406 emit(0x66); 2407 emit_rex_64(dst, src); 2408 emit(0x0F); 2409 emit(0x6F); 2410 emit_sse_operand(dst, src); 2411 } 2412 2413 2414 void Assembler::movdqu(const Operand& dst, XMMRegister src) { 2415 EnsureSpace ensure_space(this); 2416 emit(0xF3); 2417 emit_rex_64(src, dst); 2418 emit(0x0F); 2419 emit(0x7F); 2420 emit_sse_operand(src, dst); 2421 } 2422 2423 2424 void Assembler::movdqu(XMMRegister dst, const Operand& src) { 2425 EnsureSpace ensure_space(this); 2426 emit(0xF3); 2427 emit_rex_64(dst, src); 2428 emit(0x0F); 2429 emit(0x6F); 2430 emit_sse_operand(dst, src); 2431 } 2432 2433 2434 void Assembler::extractps(Register dst, XMMRegister src, byte imm8) { 2435 ASSERT(IsEnabled(SSE4_1)); 2436 ASSERT(is_uint8(imm8)); 2437 EnsureSpace ensure_space(this); 2438 emit(0x66); 2439 emit_optional_rex_32(src, dst); 2440 emit(0x0F); 2441 emit(0x3A); 2442 emit(0x17); 2443 emit_sse_operand(src, dst); 2444 emit(imm8); 2445 } 2446 2447 2448 void Assembler::movsd(const Operand& dst, XMMRegister src) { 2449 EnsureSpace ensure_space(this); 2450 emit(0xF2); // double 2451 emit_optional_rex_32(src, dst); 2452 emit(0x0F); 2453 emit(0x11); // store 2454 emit_sse_operand(src, dst); 2455 } 2456 2457 2458 void Assembler::movsd(XMMRegister dst, XMMRegister src) { 2459 EnsureSpace ensure_space(this); 2460 emit(0xF2); // double 2461 emit_optional_rex_32(dst, src); 2462 emit(0x0F); 2463 emit(0x10); // load 2464 emit_sse_operand(dst, src); 2465 } 2466 2467 2468 void Assembler::movsd(XMMRegister dst, const Operand& src) { 2469 EnsureSpace ensure_space(this); 2470 emit(0xF2); // double 2471 emit_optional_rex_32(dst, src); 2472 emit(0x0F); 2473 emit(0x10); // load 2474 emit_sse_operand(dst, src); 2475 } 2476 2477 2478 void Assembler::movaps(XMMRegister dst, XMMRegister src) { 2479 EnsureSpace ensure_space(this); 2480 if (src.low_bits() == 4) { 
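// Note: 0F 28 (movaps xmm, xmm/m128) and 0F 29 (movaps xmm/m128, xmm) encode
// the same register-to-register copy with the ModRM operand roles swapped,
// so both branches produce an equivalent move; only the chosen form differs.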
2481 // Try to avoid an unnecessary SIB byte. 2482 emit_optional_rex_32(src, dst); 2483 emit(0x0F); 2484 emit(0x29); 2485 emit_sse_operand(src, dst); 2486 } else { 2487 emit_optional_rex_32(dst, src); 2488 emit(0x0F); 2489 emit(0x28); 2490 emit_sse_operand(dst, src); 2491 } 2492 } 2493 2494 2495 void Assembler::shufps(XMMRegister dst, XMMRegister src, byte imm8) { 2496 ASSERT(is_uint8(imm8)); 2497 EnsureSpace ensure_space(this); 2498 emit_optional_rex_32(src, dst); 2499 emit(0x0F); 2500 emit(0xC6); 2501 emit_sse_operand(dst, src); 2502 emit(imm8); 2503 } 2504 2505 2506 void Assembler::movapd(XMMRegister dst, XMMRegister src) { 2507 EnsureSpace ensure_space(this); 2508 if (src.low_bits() == 4) { 2509 // Try to avoid an unnecessary SIB byte. 2510 emit(0x66); 2511 emit_optional_rex_32(src, dst); 2512 emit(0x0F); 2513 emit(0x29); 2514 emit_sse_operand(src, dst); 2515 } else { 2516 emit(0x66); 2517 emit_optional_rex_32(dst, src); 2518 emit(0x0F); 2519 emit(0x28); 2520 emit_sse_operand(dst, src); 2521 } 2522 } 2523 2524 2525 void Assembler::movss(XMMRegister dst, const Operand& src) { 2526 EnsureSpace ensure_space(this); 2527 emit(0xF3); // single 2528 emit_optional_rex_32(dst, src); 2529 emit(0x0F); 2530 emit(0x10); // load 2531 emit_sse_operand(dst, src); 2532 } 2533 2534 2535 void Assembler::movss(const Operand& src, XMMRegister dst) { 2536 EnsureSpace ensure_space(this); 2537 emit(0xF3); // single 2538 emit_optional_rex_32(dst, src); 2539 emit(0x0F); 2540 emit(0x11); // store 2541 emit_sse_operand(dst, src); 2542 } 2543 2544 2545 void Assembler::psllq(XMMRegister reg, byte imm8) { 2546 EnsureSpace ensure_space(this); 2547 emit(0x66); 2548 emit(0x0F); 2549 emit(0x73); 2550 emit_sse_operand(rsi, reg); // rsi == 6 2551 emit(imm8); 2552 } 2553 2554 2555 void Assembler::cvttss2si(Register dst, const Operand& src) { 2556 EnsureSpace ensure_space(this); 2557 emit(0xF3); 2558 emit_optional_rex_32(dst, src); 2559 emit(0x0F); 2560 emit(0x2C); 2561 emit_operand(dst, src); 2562 } 2563 2564 2565 void Assembler::cvttss2si(Register dst, XMMRegister src) { 2566 EnsureSpace ensure_space(this); 2567 emit(0xF3); 2568 emit_optional_rex_32(dst, src); 2569 emit(0x0F); 2570 emit(0x2C); 2571 emit_sse_operand(dst, src); 2572 } 2573 2574 2575 void Assembler::cvttsd2si(Register dst, const Operand& src) { 2576 EnsureSpace ensure_space(this); 2577 emit(0xF2); 2578 emit_optional_rex_32(dst, src); 2579 emit(0x0F); 2580 emit(0x2C); 2581 emit_operand(dst, src); 2582 } 2583 2584 2585 void Assembler::cvttsd2si(Register dst, XMMRegister src) { 2586 EnsureSpace ensure_space(this); 2587 emit(0xF2); 2588 emit_optional_rex_32(dst, src); 2589 emit(0x0F); 2590 emit(0x2C); 2591 emit_sse_operand(dst, src); 2592 } 2593 2594 2595 void Assembler::cvttsd2siq(Register dst, XMMRegister src) { 2596 EnsureSpace ensure_space(this); 2597 emit(0xF2); 2598 emit_rex_64(dst, src); 2599 emit(0x0F); 2600 emit(0x2C); 2601 emit_sse_operand(dst, src); 2602 } 2603 2604 2605 void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) { 2606 EnsureSpace ensure_space(this); 2607 emit(0xF2); 2608 emit_optional_rex_32(dst, src); 2609 emit(0x0F); 2610 emit(0x2A); 2611 emit_sse_operand(dst, src); 2612 } 2613 2614 2615 void Assembler::cvtlsi2sd(XMMRegister dst, Register src) { 2616 EnsureSpace ensure_space(this); 2617 emit(0xF2); 2618 emit_optional_rex_32(dst, src); 2619 emit(0x0F); 2620 emit(0x2A); 2621 emit_sse_operand(dst, src); 2622 } 2623 2624 2625 void Assembler::cvtlsi2ss(XMMRegister dst, Register src) { 2626 EnsureSpace ensure_space(this); 2627 
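// F3 0F 2A /r is cvtsi2ss; with no REX.W prefix the integer source is taken
// as 32 bits, which is what the "l" in this mnemonic denotes. For example,
// cvtlsi2ss(xmm1, rax) emits F3 0F 2A C8.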
emit(0xF3); 2628 emit_optional_rex_32(dst, src); 2629 emit(0x0F); 2630 emit(0x2A); 2631 emit_sse_operand(dst, src); 2632 } 2633 2634 2635 void Assembler::cvtqsi2sd(XMMRegister dst, Register src) { 2636 EnsureSpace ensure_space(this); 2637 emit(0xF2); 2638 emit_rex_64(dst, src); 2639 emit(0x0F); 2640 emit(0x2A); 2641 emit_sse_operand(dst, src); 2642 } 2643 2644 2645 void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) { 2646 EnsureSpace ensure_space(this); 2647 emit(0xF3); 2648 emit_optional_rex_32(dst, src); 2649 emit(0x0F); 2650 emit(0x5A); 2651 emit_sse_operand(dst, src); 2652 } 2653 2654 2655 void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) { 2656 EnsureSpace ensure_space(this); 2657 emit(0xF3); 2658 emit_optional_rex_32(dst, src); 2659 emit(0x0F); 2660 emit(0x5A); 2661 emit_sse_operand(dst, src); 2662 } 2663 2664 2665 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) { 2666 EnsureSpace ensure_space(this); 2667 emit(0xF2); 2668 emit_optional_rex_32(dst, src); 2669 emit(0x0F); 2670 emit(0x5A); 2671 emit_sse_operand(dst, src); 2672 } 2673 2674 2675 void Assembler::cvtsd2si(Register dst, XMMRegister src) { 2676 EnsureSpace ensure_space(this); 2677 emit(0xF2); 2678 emit_optional_rex_32(dst, src); 2679 emit(0x0F); 2680 emit(0x2D); 2681 emit_sse_operand(dst, src); 2682 } 2683 2684 2685 void Assembler::cvtsd2siq(Register dst, XMMRegister src) { 2686 EnsureSpace ensure_space(this); 2687 emit(0xF2); 2688 emit_rex_64(dst, src); 2689 emit(0x0F); 2690 emit(0x2D); 2691 emit_sse_operand(dst, src); 2692 } 2693 2694 2695 void Assembler::addsd(XMMRegister dst, XMMRegister src) { 2696 EnsureSpace ensure_space(this); 2697 emit(0xF2); 2698 emit_optional_rex_32(dst, src); 2699 emit(0x0F); 2700 emit(0x58); 2701 emit_sse_operand(dst, src); 2702 } 2703 2704 2705 void Assembler::addsd(XMMRegister dst, const Operand& src) { 2706 EnsureSpace ensure_space(this); 2707 emit(0xF2); 2708 emit_optional_rex_32(dst, src); 2709 emit(0x0F); 2710 emit(0x58); 2711 emit_sse_operand(dst, src); 2712 } 2713 2714 2715 void Assembler::mulsd(XMMRegister dst, XMMRegister src) { 2716 EnsureSpace ensure_space(this); 2717 emit(0xF2); 2718 emit_optional_rex_32(dst, src); 2719 emit(0x0F); 2720 emit(0x59); 2721 emit_sse_operand(dst, src); 2722 } 2723 2724 2725 void Assembler::mulsd(XMMRegister dst, const Operand& src) { 2726 EnsureSpace ensure_space(this); 2727 emit(0xF2); 2728 emit_optional_rex_32(dst, src); 2729 emit(0x0F); 2730 emit(0x59); 2731 emit_sse_operand(dst, src); 2732 } 2733 2734 2735 void Assembler::subsd(XMMRegister dst, XMMRegister src) { 2736 EnsureSpace ensure_space(this); 2737 emit(0xF2); 2738 emit_optional_rex_32(dst, src); 2739 emit(0x0F); 2740 emit(0x5C); 2741 emit_sse_operand(dst, src); 2742 } 2743 2744 2745 void Assembler::divsd(XMMRegister dst, XMMRegister src) { 2746 EnsureSpace ensure_space(this); 2747 emit(0xF2); 2748 emit_optional_rex_32(dst, src); 2749 emit(0x0F); 2750 emit(0x5E); 2751 emit_sse_operand(dst, src); 2752 } 2753 2754 2755 void Assembler::andpd(XMMRegister dst, XMMRegister src) { 2756 EnsureSpace ensure_space(this); 2757 emit(0x66); 2758 emit_optional_rex_32(dst, src); 2759 emit(0x0F); 2760 emit(0x54); 2761 emit_sse_operand(dst, src); 2762 } 2763 2764 2765 void Assembler::orpd(XMMRegister dst, XMMRegister src) { 2766 EnsureSpace ensure_space(this); 2767 emit(0x66); 2768 emit_optional_rex_32(dst, src); 2769 emit(0x0F); 2770 emit(0x56); 2771 emit_sse_operand(dst, src); 2772 } 2773 2774 2775 void Assembler::xorpd(XMMRegister dst, XMMRegister src) { 2776 EnsureSpace 
ensure_space(this); 2777 emit(0x66); 2778 emit_optional_rex_32(dst, src); 2779 emit(0x0F); 2780 emit(0x57); 2781 emit_sse_operand(dst, src); 2782 } 2783 2784 2785 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) { 2786 EnsureSpace ensure_space(this); 2787 emit(0xF2); 2788 emit_optional_rex_32(dst, src); 2789 emit(0x0F); 2790 emit(0x51); 2791 emit_sse_operand(dst, src); 2792 } 2793 2794 2795 void Assembler::sqrtsd(XMMRegister dst, const Operand& src) { 2796 EnsureSpace ensure_space(this); 2797 emit(0xF2); 2798 emit_optional_rex_32(dst, src); 2799 emit(0x0F); 2800 emit(0x51); 2801 emit_sse_operand(dst, src); 2802 } 2803 2804 2805 void Assembler::ucomisd(XMMRegister dst, XMMRegister src) { 2806 EnsureSpace ensure_space(this); 2807 emit(0x66); 2808 emit_optional_rex_32(dst, src); 2809 emit(0x0f); 2810 emit(0x2e); 2811 emit_sse_operand(dst, src); 2812 } 2813 2814 2815 void Assembler::ucomisd(XMMRegister dst, const Operand& src) { 2816 EnsureSpace ensure_space(this); 2817 emit(0x66); 2818 emit_optional_rex_32(dst, src); 2819 emit(0x0f); 2820 emit(0x2e); 2821 emit_sse_operand(dst, src); 2822 } 2823 2824 2825 void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) { 2826 EnsureSpace ensure_space(this); 2827 emit(0xF2); 2828 emit_optional_rex_32(dst, src); 2829 emit(0x0F); 2830 emit(0xC2); 2831 emit_sse_operand(dst, src); 2832 emit(0x01); // LT == 1 2833 } 2834 2835 2836 void Assembler::roundsd(XMMRegister dst, XMMRegister src, 2837 Assembler::RoundingMode mode) { 2838 ASSERT(IsEnabled(SSE4_1)); 2839 EnsureSpace ensure_space(this); 2840 emit(0x66); 2841 emit_optional_rex_32(dst, src); 2842 emit(0x0f); 2843 emit(0x3a); 2844 emit(0x0b); 2845 emit_sse_operand(dst, src); 2846 // Mask precision exception. 2847 emit(static_cast<byte>(mode) | 0x8); 2848 } 2849 2850 2851 void Assembler::movmskpd(Register dst, XMMRegister src) { 2852 EnsureSpace ensure_space(this); 2853 emit(0x66); 2854 emit_optional_rex_32(dst, src); 2855 emit(0x0f); 2856 emit(0x50); 2857 emit_sse_operand(dst, src); 2858 } 2859 2860 2861 void Assembler::movmskps(Register dst, XMMRegister src) { 2862 EnsureSpace ensure_space(this); 2863 emit_optional_rex_32(dst, src); 2864 emit(0x0f); 2865 emit(0x50); 2866 emit_sse_operand(dst, src); 2867 } 2868 2869 2870 void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) { 2871 Register ireg = { reg.code() }; 2872 emit_operand(ireg, adr); 2873 } 2874 2875 2876 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) { 2877 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits()); 2878 } 2879 2880 2881 void Assembler::emit_sse_operand(XMMRegister dst, Register src) { 2882 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits()); 2883 } 2884 2885 2886 void Assembler::emit_sse_operand(Register dst, XMMRegister src) { 2887 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits()); 2888 } 2889 2890 2891 void Assembler::db(uint8_t data) { 2892 EnsureSpace ensure_space(this); 2893 emit(data); 2894 } 2895 2896 2897 void Assembler::dd(uint32_t data) { 2898 EnsureSpace ensure_space(this); 2899 emitl(data); 2900 } 2901 2902 2903 // Relocation information implementations. 2904 2905 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { 2906 ASSERT(!RelocInfo::IsNone(rmode)); 2907 // Don't record external references unless the heap will be serialized.
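// (For example, RecordComment() below reaches this point with rmode COMMENT
// and the comment text pointer passed as |data|.)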
2908 if (rmode == RelocInfo::EXTERNAL_REFERENCE && 2909 !serializer_enabled() && !emit_debug_code()) { 2910 return; 2911 } else if (rmode == RelocInfo::CODE_AGE_SEQUENCE) { 2912 // Don't record pseudo relocation info for code age sequence mode. 2913 return; 2914 } 2915 RelocInfo rinfo(pc_, rmode, data, NULL); 2916 reloc_info_writer.Write(&rinfo); 2917 } 2918 2919 2920 void Assembler::RecordJSReturn() { 2921 positions_recorder()->WriteRecordedPositions(); 2922 EnsureSpace ensure_space(this); 2923 RecordRelocInfo(RelocInfo::JS_RETURN); 2924 } 2925 2926 2927 void Assembler::RecordDebugBreakSlot() { 2928 positions_recorder()->WriteRecordedPositions(); 2929 EnsureSpace ensure_space(this); 2930 RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT); 2931 } 2932 2933 2934 void Assembler::RecordComment(const char* msg, bool force) { 2935 if (FLAG_code_comments || force) { 2936 EnsureSpace ensure_space(this); 2937 RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg)); 2938 } 2939 } 2940 2941 2942 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { 2943 // No out-of-line constant pool support. 2944 ASSERT(!FLAG_enable_ool_constant_pool); 2945 return isolate->factory()->empty_constant_pool_array(); 2946 } 2947 2948 2949 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { 2950 // No out-of-line constant pool support. 2951 ASSERT(!FLAG_enable_ool_constant_pool); 2952 return; 2953 } 2954 2955 2956 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask | 2957 1 << RelocInfo::RUNTIME_ENTRY | 2958 1 << RelocInfo::INTERNAL_REFERENCE | 2959 1 << RelocInfo::CODE_AGE_SEQUENCE; 2960 2961 2962 bool RelocInfo::IsCodedSpecially() { 2963 // The deserializer needs to know whether a pointer is specially coded. Being 2964 // specially coded on x64 means that it is a relative 32-bit address, as used 2965 // by branch instructions. 2966 return (1 << rmode_) & kApplyMask; 2967 } 2968 2969 2970 bool RelocInfo::IsInConstantPool() { 2971 return false; 2972 } 2973 2974 2975 } } // namespace v8::internal 2976 2977 #endif // V8_TARGET_ARCH_X64 2978