1 // Copyright 2011 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "macro-assembler.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -----------------------------------------------------------------------------
// Implementation of CpuFeatures


#ifdef DEBUG
bool CpuFeatures::initialized_ = false;
#endif
uint64_t CpuFeatures::supported_ = CpuFeatures::kDefaultCpuFeatures;
uint64_t CpuFeatures::found_by_runtime_probing_ = 0;


// Detect which CPU features are available at runtime by assembling a small
// probe function into a freshly committed executable page and calling it.
// The probe flips EFLAGS bit 21 to test for CPUID support, then uses CPUID
// to collect the feature bits, returning them packed into a uint64_t.
// If serialization is enabled, only platform-implied features are used so
// that a snapshot never depends on the build machine's CPU.
void CpuFeatures::Probe() {
  ASSERT(!initialized_);
#ifdef DEBUG
  initialized_ = true;
#endif
  supported_ = kDefaultCpuFeatures;
  if (Serializer::enabled()) {
    supported_ |= OS::CpuFeaturesImpliedByPlatform();
    return;  // No features if we might serialize.
  }

  // Reserve and commit a small executable buffer for the probe stub.
  const int kBufferSize = 4 * KB;
  VirtualMemory* memory = new VirtualMemory(kBufferSize);
  if (!memory->IsReserved()) {
    delete memory;
    return;
  }
  ASSERT(memory->size() >= static_cast<size_t>(kBufferSize));
  if (!memory->Commit(memory->address(), kBufferSize, true/*executable*/)) {
    delete memory;
    return;
  }

  Assembler assm(NULL, memory->address(), kBufferSize);
  Label cpuid, done;
#define __ assm.
  // Save old rsp, since we are going to modify the stack.
  __ push(rbp);
  __ pushfq();
  __ push(rcx);
  __ push(rbx);
  __ movq(rbp, rsp);

  // If we can modify bit 21 of the EFLAGS register, then CPUID is supported.
  __ pushfq();
  __ pop(rax);
  __ movq(rdx, rax);
  __ xor_(rax, Immediate(0x200000));  // Flip bit 21.
  __ push(rax);
  __ popfq();
  __ pushfq();
  __ pop(rax);
  __ xor_(rax, rdx);  // Different if CPUID is supported.
  __ j(not_zero, &cpuid);

  // CPUID not supported. Clear the supported features in rax.
  __ xor_(rax, rax);
  __ jmp(&done);

  // Invoke CPUID with 1 in eax to get feature information in
  // ecx:edx. Temporarily enable CPUID support because we know it's
  // safe here.
  __ bind(&cpuid);
  __ movl(rax, Immediate(1));
  supported_ = kDefaultCpuFeatures | (1 << CPUID);
  { Scope fscope(CPUID);
    __ cpuid();
    // Move the result from ecx:edx to rdi.
    __ movl(rdi, rdx);  // Zero-extended to 64 bits.
    __ shl(rcx, Immediate(32));
    __ or_(rdi, rcx);

    // Get the sahf supported flag, from CPUID(0x80000001)
    __ movq(rax, 0x80000001, RelocInfo::NONE);
    __ cpuid();
  }
  supported_ = kDefaultCpuFeatures;

  // Put the CPU flags in rax.
  // rax = (rcx & 1) | (rdi & ~1) | (1 << CPUID).
  __ movl(rax, Immediate(1));
  __ and_(rcx, rax);  // Bit 0 is set if SAHF instruction supported.
  __ not_(rax);
  __ and_(rax, rdi);
  __ or_(rax, rcx);
  __ or_(rax, Immediate(1 << CPUID));

  // Done.
  __ bind(&done);
  __ movq(rsp, rbp);
  __ pop(rbx);
  __ pop(rcx);
  __ popfq();
  __ pop(rbp);
  __ ret(0);
#undef __

  // Call the generated stub and record the feature bits it returns.
  typedef uint64_t (*F0)();
  F0 probe = FUNCTION_CAST<F0>(reinterpret_cast<Address>(memory->address()));
  supported_ = probe();
  found_by_runtime_probing_ = supported_;
  found_by_runtime_probing_ &= ~kDefaultCpuFeatures;
  uint64_t os_guarantees = OS::CpuFeaturesImpliedByPlatform();
  supported_ |= os_guarantees;
  found_by_runtime_probing_ &= ~os_guarantees;
  // SSE2 and CMOV must be available on an X64 CPU.
  ASSERT(IsSupported(CPUID));
  ASSERT(IsSupported(SSE2));
  ASSERT(IsSupported(CMOV));

  delete memory;
}


// -----------------------------------------------------------------------------
// Implementation of RelocInfo

// Patch the code at the current PC with a call to the target address.
// Additional guard int3 instructions can be added if required.
void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
  // Load register with immediate 64 and call through a register instructions
  // takes up 13 bytes and int3 takes up one byte.
  static const int kCallCodeSize = 13;
  int code_size = kCallCodeSize + guard_bytes;

  // Create a code patcher.
  CodePatcher patcher(pc_, code_size);

  // Add a label for checking the size of the code used for returning.
#ifdef DEBUG
  Label check_codesize;
  patcher.masm()->bind(&check_codesize);
#endif

  // Patch the code: load the 64-bit target into r10 and call through it.
  // NOTE(review): r10 is clobbered; presumably callers treat it as scratch
  // at patch sites — confirm against the calling conventions used here.
  patcher.masm()->movq(r10, target, RelocInfo::NONE);
  patcher.masm()->call(r10);

  // Check that the size of the code generated is as expected.
  ASSERT_EQ(kCallCodeSize,
            patcher.masm()->SizeOfCodeGeneratedSince(&check_codesize));

  // Add the requested number of int3 instructions after the call.
  for (int i = 0; i < guard_bytes; i++) {
    patcher.masm()->int3();
  }
}


// Overwrite instruction_count bytes at pc_ with the supplied raw
// instruction bytes, then flush the instruction cache for that range.
void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
  // Patch the code at the current address with the supplied instructions.
  for (int i = 0; i < instruction_count; i++) {
    *(pc_ + i) = *(instructions + i);
  }

  // Indicate that code has changed.
  CPU::FlushICache(pc_, instruction_count);
}


// -----------------------------------------------------------------------------
// Register constants.

// Maps the register allocator's allocation index to the hardware register
// code (and vice versa below; -1 marks registers that are not allocatable).
const int Register::kRegisterCodeByAllocationIndex[kNumAllocatableRegisters] = {
  // rax, rbx, rdx, rcx, rdi, r8, r9, r11, r14, r15
  0, 3, 2, 1, 7, 8, 9, 11, 14, 15
};

const int Register::kAllocationIndexByRegisterCode[kNumRegisters] = {
  0, 3, 2, 1, -1, -1, -1, 4, 5, 6, -1, 7, -1, -1, 8, 9
};


// -----------------------------------------------------------------------------
// Implementation of Operand

// [base + disp] operand. Emits ModR/M (and SIB when base is rsp/r12, whose
// low-bits encoding 0x04 is reserved to signal a SIB byte), choosing the
// shortest displacement encoding: none, 8-bit, or 32-bit. rbp/r13 (low bits
// 0x05) cannot use mode 0, so they always carry an explicit displacement.
Operand::Operand(Register base, int32_t disp) : rex_(0) {
  len_ = 1;
  if (base.is(rsp) || base.is(r12)) {
    // SIB byte is needed to encode (rsp + offset) or (r12 + offset).
    set_sib(times_1, rsp, base);
  }

  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
    set_modrm(0, base);
  } else if (is_int8(disp)) {
    set_modrm(1, base);
    set_disp8(disp);
  } else {
    set_modrm(2, base);
    set_disp32(disp);
  }
}


// [base + index*scale + disp] operand. Always uses a SIB byte; rsp cannot
// be an index register (its index encoding means "no index").
Operand::Operand(Register base,
                 Register index,
                 ScaleFactor scale,
                 int32_t disp) : rex_(0) {
  ASSERT(!index.is(rsp));
  len_ = 1;
  set_sib(scale, index, base);
  if (disp == 0 && !base.is(rbp) && !base.is(r13)) {
    // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits
    // possibly set by set_sib.
    set_modrm(0, rsp);
  } else if (is_int8(disp)) {
    set_modrm(1, rsp);
    set_disp8(disp);
  } else {
    set_modrm(2, rsp);
    set_disp32(disp);
  }
}


// [index*scale + disp] operand with no base register: encoded as mode 0
// with SIB base rbp, which means "no base, 32-bit displacement follows".
Operand::Operand(Register index,
                 ScaleFactor scale,
                 int32_t disp) : rex_(0) {
  ASSERT(!index.is(rsp));
  len_ = 1;
  set_modrm(0, rsp);
  set_sib(scale, index, rbp);
  set_disp32(disp);
}


// Copy an existing memory operand, adding 'offset' to its displacement.
// Decodes the original ModR/M/SIB to find the current displacement, then
// re-encodes with the shortest displacement form that holds the new value.
Operand::Operand(const Operand& operand, int32_t offset) {
  ASSERT(operand.len_ >= 1);
  // Operand encodes REX ModR/M [SIB] [Disp].
  byte modrm = operand.buf_[0];
  ASSERT(modrm < 0xC0);  // Disallow mode 3 (register target).
  bool has_sib = ((modrm & 0x07) == 0x04);
  byte mode = modrm & 0xC0;
  int disp_offset = has_sib ? 2 : 1;
  int base_reg = (has_sib ? operand.buf_[1] : modrm) & 0x07;
  // Mode 0 with rbp/r13 as ModR/M or SIB base register always has a 32-bit
  // displacement.
  bool is_baseless = (mode == 0) && (base_reg == 0x05);  // No base or RIP base.
  int32_t disp_value = 0;
  if (mode == 0x80 || is_baseless) {
    // Mode 2 or mode 0 with rbp/r13 as base: Word displacement.
    disp_value = *BitCast<const int32_t*>(&operand.buf_[disp_offset]);
  } else if (mode == 0x40) {
    // Mode 1: Byte displacement.
    disp_value = static_cast<signed char>(operand.buf_[disp_offset]);
  }

  // Write new operand with same registers, but with modified displacement.
  ASSERT(offset >= 0 ? disp_value + offset > disp_value
                     : disp_value + offset < disp_value);  // No overflow.
  disp_value += offset;
  rex_ = operand.rex_;
  if (!is_int8(disp_value) || is_baseless) {
    // Need 32 bits of displacement, mode 2 or mode 1 with register rbp/r13.
    buf_[0] = (modrm & 0x3f) | (is_baseless ? 0x00 : 0x80);
    len_ = disp_offset + 4;
    Memory::int32_at(&buf_[disp_offset]) = disp_value;
  } else if (disp_value != 0 || (base_reg == 0x05)) {
    // Need 8 bits of displacement.
    buf_[0] = (modrm & 0x3f) | 0x40;  // Mode 1.
    len_ = disp_offset + 1;
    buf_[disp_offset] = static_cast<byte>(disp_value);
  } else {
    // Need no displacement.
    buf_[0] = (modrm & 0x3f);  // Mode 0.
    len_ = disp_offset;
  }
  if (has_sib) {
    buf_[1] = operand.buf_[1];
  }
}


// Returns true if this memory operand's address computation reads 'reg'
// (as base or index). Decodes ModR/M, the optional SIB byte, and the REX
// prefix bits to recover full 4-bit register codes before comparing.
bool Operand::AddressUsesRegister(Register reg) const {
  int code = reg.code();
  ASSERT((buf_[0] & 0xC0) != 0xC0);  // Always a memory operand.
  // Start with only low three bits of base register. Initial decoding doesn't
  // distinguish on the REX.B bit.
  int base_code = buf_[0] & 0x07;
  if (base_code == rsp.code()) {
    // SIB byte present in buf_[1].
    // Check the index register from the SIB byte + REX.X prefix.
    int index_code = ((buf_[1] >> 3) & 0x07) | ((rex_ & 0x02) << 2);
    // Index code (including REX.X) of 0x04 (rsp) means no index register.
    if (index_code != rsp.code() && index_code == code) return true;
    // Add REX.B to get the full base register code.
    base_code = (buf_[1] & 0x07) | ((rex_ & 0x01) << 3);
    // A base register of 0x05 (rbp) with mod = 0 means no base register.
    if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
    return code == base_code;
  } else {
    // A base register with low bits of 0x05 (rbp or r13) and mod = 0 means
    // no base register.
    if (base_code == rbp.code() && ((buf_[0] & 0xC0) == 0)) return false;
    base_code |= ((rex_ & 0x01) << 3);
    return code == base_code;
  }
}


// -----------------------------------------------------------------------------
// Implementation of Assembler.

#ifdef GENERATED_CODE_COVERAGE
static void InitCoverageLog();
#endif

// Constructs an assembler emitting into 'buffer'. If buffer is NULL the
// assembler allocates (and later owns) its own buffer, reusing the
// isolate's spare buffer when one of minimal size is requested.
Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
    : AssemblerBase(arg_isolate),
      code_targets_(100),
      positions_recorder_(this),
      emit_debug_code_(FLAG_debug_code) {
  if (buffer == NULL) {
    // Do our own buffer management.
    if (buffer_size <= kMinimalBufferSize) {
      buffer_size = kMinimalBufferSize;

      if (isolate() != NULL && isolate()->assembler_spare_buffer() != NULL) {
        buffer = isolate()->assembler_spare_buffer();
        isolate()->set_assembler_spare_buffer(NULL);
      }
    }
    if (buffer == NULL) {
      buffer_ = NewArray<byte>(buffer_size);
    } else {
      buffer_ = static_cast<byte*>(buffer);
    }
    buffer_size_ = buffer_size;
    own_buffer_ = true;
  } else {
    // Use externally provided buffer instead.
    ASSERT(buffer_size > 0);
    buffer_ = static_cast<byte*>(buffer);
    buffer_size_ = buffer_size;
    own_buffer_ = false;
  }

  // Clear the buffer in debug mode unless it was provided by the
  // caller in which case we can't be sure it's okay to overwrite
  // existing code in it.
#ifdef DEBUG
  if (own_buffer_) {
    memset(buffer_, 0xCC, buffer_size);  // int3
  }
#endif

  // Setup buffer pointers. Instructions grow forward from the start of the
  // buffer; relocation info grows backward from the end.
  ASSERT(buffer_ != NULL);
  pc_ = buffer_;
  reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);


#ifdef GENERATED_CODE_COVERAGE
  InitCoverageLog();
#endif
}


// Releases an owned buffer, stashing a minimal-size one as the isolate's
// spare buffer for reuse by the next Assembler.
Assembler::~Assembler() {
  if (own_buffer_) {
    if (isolate() != NULL &&
        isolate()->assembler_spare_buffer() == NULL &&
        buffer_size_ == kMinimalBufferSize) {
      isolate()->set_assembler_spare_buffer(buffer_);
    } else {
      DeleteArray(buffer_);
    }
  }
}


// Fills in 'desc' with the buffer, the size of the emitted instructions,
// and the size of the relocation info written so far.
void Assembler::GetCode(CodeDesc* desc) {
  // Finalize code (at this point overflow() may be true, but the gap ensures
  // that we are still not overlapping instructions and relocation info).
  ASSERT(pc_ <= reloc_info_writer.pos());  // No overlap.
  // Setup code descriptor.
  desc->buffer = buffer_;
  desc->buffer_size = buffer_size_;
  desc->instr_size = pc_offset();
  ASSERT(desc->instr_size > 0);  // Zero-size code objects upset the system.
  desc->reloc_size =
      static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
  desc->origin = this;
}


// Pads with multi-byte nops until pc_offset() is a multiple of m
// (m must be a power of two). nop(n) emits at most 9 bytes at a time.
void Assembler::Align(int m) {
  ASSERT(IsPowerOf2(m));
  int delta = (m - (pc_offset() & (m - 1))) & (m - 1);
  while (delta >= 9) {
    nop(9);
    delta -= 9;
  }
  if (delta > 0) {
    nop(delta);
  }
}


void Assembler::CodeTargetAlign() {
  Align(16);  // Preferred alignment of jump targets on x64.
}


// Binds label L to position pos, resolving every pending use. Unresolved
// uses form a linked list threaded through the 32-bit displacement slots
// themselves: each slot holds the offset of the next use, and the list is
// terminated by a slot that points at itself.
void Assembler::bind_to(Label* L, int pos) {
  ASSERT(!L->is_bound());  // Label may only be bound once.
  ASSERT(0 <= pos && pos <= pc_offset());  // Position must be valid.
  if (L->is_linked()) {
    int current = L->pos();
    int next = long_at(current);
    while (next != current) {
      // Relative address, relative to point after address.
      int imm32 = pos - (current + sizeof(int32_t));
      long_at_put(current, imm32);
      current = next;
      next = long_at(next);
    }
    // Fix up last fixup on linked list.
    int last_imm32 = pos - (current + sizeof(int32_t));
    long_at_put(current, last_imm32);
  }
  L->bind_to(pos);
}


void Assembler::bind(Label* L) {
  bind_to(L, pc_offset());
}


// Binds a near (8-bit displacement) label, back-patching the one-byte
// displacement of every branch recorded in unresolved_positions_.
void Assembler::bind(NearLabel* L) {
  ASSERT(!L->is_bound());
  while (L->unresolved_branches_ > 0) {
    int branch_pos = L->unresolved_positions_[L->unresolved_branches_ - 1];
    int disp = pc_offset() - branch_pos;
    ASSERT(is_int8(disp));
    set_byte_at(branch_pos - sizeof(int8_t), disp);
    L->unresolved_branches_--;
  }
  L->bind_to(pc_offset());
}


// Grows the owned code buffer (at least doubling it), copies instructions
// and relocation info into the new buffer, repositions pc_ and the reloc
// writer, and relocates internal references by the buffer-move delta.
void Assembler::GrowBuffer() {
  ASSERT(buffer_overflow());
  if (!own_buffer_) FATAL("external code buffer is too small");

  // Compute new buffer size.
  CodeDesc desc;  // the new buffer
  if (buffer_size_ < 4*KB) {
    desc.buffer_size = 4*KB;
  } else {
    desc.buffer_size = 2*buffer_size_;
  }
  // Some internal data structures overflow for very large buffers,
  // they must ensure that kMaximalBufferSize is not too large.
  if ((desc.buffer_size > kMaximalBufferSize) ||
      (desc.buffer_size > HEAP->MaxOldGenerationSize())) {
    V8::FatalProcessOutOfMemory("Assembler::GrowBuffer");
  }

  // Setup new buffer.
  desc.buffer = NewArray<byte>(desc.buffer_size);
  desc.instr_size = pc_offset();
  desc.reloc_size =
      static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));

  // Clear the buffer in debug mode. Use 'int3' instructions to make
  // sure to get into problems if we ever run uninitialized code.
#ifdef DEBUG
  memset(desc.buffer, 0xCC, desc.buffer_size);
#endif

  // Copy the data. Instructions are copied to the front; relocation info
  // stays anchored to the end of the (larger) buffer.
  intptr_t pc_delta = desc.buffer - buffer_;
  intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
      (buffer_ + buffer_size_);
  memmove(desc.buffer, buffer_, desc.instr_size);
  memmove(rc_delta + reloc_info_writer.pos(),
          reloc_info_writer.pos(), desc.reloc_size);

  // Switch buffers.
  if (isolate() != NULL &&
      isolate()->assembler_spare_buffer() == NULL &&
      buffer_size_ == kMinimalBufferSize) {
    isolate()->set_assembler_spare_buffer(buffer_);
  } else {
    DeleteArray(buffer_);
  }
  buffer_ = desc.buffer;
  buffer_size_ = desc.buffer_size;
  pc_ += pc_delta;
  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
                               reloc_info_writer.last_pc() + pc_delta);

  // Relocate runtime entries.
  for (RelocIterator it(desc); !it.done(); it.next()) {
    RelocInfo::Mode rmode = it.rinfo()->rmode();
    if (rmode == RelocInfo::INTERNAL_REFERENCE) {
      intptr_t* p = reinterpret_cast<intptr_t*>(it.rinfo()->pc());
      if (*p != 0) {  // 0 means uninitialized.
        *p += pc_delta;
      }
    }
  }

  ASSERT(!buffer_overflow());
}


// Emits the encoded memory operand 'adr' with the 3-bit register/extension
// field 'code' merged into the reg field of its ModR/M byte. The operand's
// own reg field must be zero so the OR cannot corrupt other bits.
void Assembler::emit_operand(int code, const Operand& adr) {
  ASSERT(is_uint3(code));
  const unsigned length = adr.len_;
  ASSERT(length > 0);

  // Emit updated ModR/M byte containing the given register.
  ASSERT((adr.buf_[0] & 0x38) == 0);
  pc_[0] = adr.buf_[0] | code << 3;

  // Emit the rest of the encoded operand.
  for (unsigned i = 1; i < length; i++) pc_[i] = adr.buf_[i];
  pc_ += length;
}


// Assembler Instruction implementations.
// Generic 64-bit ALU op, register <- memory form: REX.W + opcode /r.
void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) {
  EnsureSpace ensure_space(this);
  emit_rex_64(reg, op);
  emit(opcode);
  emit_operand(reg, op);
}


// Generic 64-bit ALU op, register-register form. If rm_reg has low bits
// 0x04 (rsp/r12) the operands are swapped and the opcode's direction bit
// is flipped (opcode ^ 0x02) to produce an equivalent encoding that avoids
// a SIB byte.
void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) {
  EnsureSpace ensure_space(this);
  ASSERT((opcode & 0xC6) == 2);
  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
    // Swap reg and rm_reg and change opcode operand order.
    emit_rex_64(rm_reg, reg);
    emit(opcode ^ 0x02);
    emit_modrm(rm_reg, reg);
  } else {
    emit_rex_64(reg, rm_reg);
    emit(opcode);
    emit_modrm(reg, rm_reg);
  }
}


// 16-bit ALU op, register-register form: 0x66 operand-size prefix,
// optional REX, opcode /r. Same operand-swap trick as the 64-bit form.
void Assembler::arithmetic_op_16(byte opcode, Register reg, Register rm_reg) {
  EnsureSpace ensure_space(this);
  ASSERT((opcode & 0xC6) == 2);
  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
    // Swap reg and rm_reg and change opcode operand order.
    emit(0x66);
    emit_optional_rex_32(rm_reg, reg);
    emit(opcode ^ 0x02);
    emit_modrm(rm_reg, reg);
  } else {
    emit(0x66);
    emit_optional_rex_32(reg, rm_reg);
    emit(opcode);
    emit_modrm(reg, rm_reg);
  }
}


// 16-bit ALU op, register <- memory form.
void Assembler::arithmetic_op_16(byte opcode,
                                 Register reg,
                                 const Operand& rm_reg) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(reg, rm_reg);
  emit(opcode);
  emit_operand(reg, rm_reg);
}


// 32-bit ALU op, register-register form. Same operand-swap trick as the
// 64-bit form when rm_reg would force a SIB byte.
void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) {
  EnsureSpace ensure_space(this);
  ASSERT((opcode & 0xC6) == 2);
  if (rm_reg.low_bits() == 4) {  // Forces SIB byte.
    // Swap reg and rm_reg and change opcode operand order.
    emit_optional_rex_32(rm_reg, reg);
    emit(opcode ^ 0x02);  // E.g. 0x03 -> 0x01 for ADD.
    emit_modrm(rm_reg, reg);
  } else {
    emit_optional_rex_32(reg, rm_reg);
    emit(opcode);
    emit_modrm(reg, rm_reg);
  }
}


// 32-bit ALU op, register <- memory form.
void Assembler::arithmetic_op_32(byte opcode,
                                 Register reg,
                                 const Operand& rm_reg) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(reg, rm_reg);
  emit(opcode);
  emit_operand(reg, rm_reg);
}


// 64-bit ALU op with immediate: 0x83 /subcode ib for 8-bit immediates,
// the short 0x05-family form for rax, otherwise 0x81 /subcode id.
void Assembler::immediate_arithmetic_op(byte subcode,
                                        Register dst,
                                        Immediate src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_modrm(subcode, dst);
    emit(src.value_);
  } else if (dst.is(rax)) {
    emit(0x05 | (subcode << 3));
    emitl(src.value_);
  } else {
    emit(0x81);
    emit_modrm(subcode, dst);
    emitl(src.value_);
  }
}

// 64-bit ALU op with immediate, memory destination.
void Assembler::immediate_arithmetic_op(byte subcode,
                                        const Operand& dst,
                                        Immediate src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_operand(subcode, dst);
    emit(src.value_);
  } else {
    emit(0x81);
    emit_operand(subcode, dst);
    emitl(src.value_);
  }
}


// 16-bit ALU op with immediate, register destination.
void Assembler::immediate_arithmetic_op_16(byte subcode,
                                           Register dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  emit(0x66);  // Operand size override prefix.
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_modrm(subcode, dst);
    emit(src.value_);
  } else if (dst.is(rax)) {
    emit(0x05 | (subcode << 3));
    emitw(src.value_);
  } else {
    emit(0x81);
    emit_modrm(subcode, dst);
    emitw(src.value_);
  }
}


// 16-bit ALU op with immediate, memory destination.
void Assembler::immediate_arithmetic_op_16(byte subcode,
                                           const Operand& dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  emit(0x66);  // Operand size override prefix.
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_operand(subcode, dst);
    emit(src.value_);
  } else {
    emit(0x81);
    emit_operand(subcode, dst);
    emitw(src.value_);
  }
}


// 32-bit ALU op with immediate, register destination.
void Assembler::immediate_arithmetic_op_32(byte subcode,
                                           Register dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_modrm(subcode, dst);
    emit(src.value_);
  } else if (dst.is(rax)) {
    emit(0x05 | (subcode << 3));
    emitl(src.value_);
  } else {
    emit(0x81);
    emit_modrm(subcode, dst);
    emitl(src.value_);
  }
}


// 32-bit ALU op with immediate, memory destination.
void Assembler::immediate_arithmetic_op_32(byte subcode,
                                           const Operand& dst,
                                           Immediate src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  if (is_int8(src.value_)) {
    emit(0x83);
    emit_operand(subcode, dst);
    emit(src.value_);
  } else {
    emit(0x81);
    emit_operand(subcode, dst);
    emitl(src.value_);
  }
}


// 8-bit ALU op with immediate, memory destination: 0x80 /subcode ib.
void Assembler::immediate_arithmetic_op_8(byte subcode,
                                          const Operand& dst,
                                          Immediate src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
  emit(0x80);
  emit_operand(subcode, dst);
  emit(src.value_);
}


// 8-bit ALU op with immediate, register destination. Registers other than
// al/bl/cl/dl need a REX prefix to address their low byte (instead of the
// legacy ah/bh/ch/dh encodings).
void Assembler::immediate_arithmetic_op_8(byte subcode,
                                          Register dst,
                                          Immediate src) {
  EnsureSpace ensure_space(this);
  if (dst.code() > 3) {
    // Use 64-bit mode byte registers.
    emit_rex_64(dst);
  }
  ASSERT(is_int8(src.value_) || is_uint8(src.value_));
  emit(0x80);
  emit_modrm(subcode, dst);
  emit(src.value_);
}


// 64-bit shift/rotate by immediate: 0xD1 /subcode for a count of 1,
// otherwise 0xC1 /subcode ib.
void Assembler::shift(Register dst, Immediate shift_amount, int subcode) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint6(shift_amount.value_));  // illegal shift count
  if (shift_amount.value_ == 1) {
    emit_rex_64(dst);
    emit(0xD1);
    emit_modrm(subcode, dst);
  } else {
    emit_rex_64(dst);
    emit(0xC1);
    emit_modrm(subcode, dst);
    emit(shift_amount.value_);
  }
}


// 64-bit shift/rotate by cl: 0xD3 /subcode.
void Assembler::shift(Register dst, int subcode) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xD3);
  emit_modrm(subcode, dst);
}


// 32-bit shift/rotate by cl.
void Assembler::shift_32(Register dst, int subcode) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xD3);
  emit_modrm(subcode, dst);
}


// 32-bit shift/rotate by immediate.
void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint5(shift_amount.value_));  // illegal shift count
  if (shift_amount.value_ == 1) {
    emit_optional_rex_32(dst);
    emit(0xD1);
    emit_modrm(subcode, dst);
  } else {
    emit_optional_rex_32(dst);
    emit(0xC1);
    emit_modrm(subcode, dst);
    emit(shift_amount.value_);
  }
}


// Bit test: 0F A3 /r.
void Assembler::bt(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xA3);
  emit_operand(src, dst);
}


// Bit test and set: 0F AB /r.
void Assembler::bts(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xAB);
  emit_operand(src, dst);
}


// Relative call to a label. An unbound label's pending uses are threaded
// through the emitted 32-bit displacement slots (see bind_to).
void Assembler::call(Label* L) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  if (L->is_bound()) {
    int offset = L->pos() - pc_offset() - sizeof(int32_t);
    ASSERT(offset <= 0);
    emitl(offset);
  } else if (L->is_linked()) {
    emitl(L->pos());
    L->link_to(pc_offset() - sizeof(int32_t));
  } else {
    ASSERT(L->is_unused());
    int32_t current = pc_offset();
    emitl(current);
    L->link_to(current);
  }
}


// Relative call to a code object, recorded via relocation info.
void Assembler::call(Handle<Code> target, RelocInfo::Mode rmode) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  emit_code_target(target, rmode);
}


// Indirect call through a register.
void Assembler::call(Register adr) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // Opcode: FF /2 r64.
  emit_optional_rex_32(adr);
  emit(0xFF);
  emit_modrm(0x2, adr);
}


// Indirect call through a memory operand.
void Assembler::call(const Operand& op) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // Opcode: FF /2 m64.
  emit_optional_rex_32(op);
  emit(0xFF);
  emit_operand(0x2, op);
}


// Calls directly to the given address using a relative offset.
// Should only ever be used in Code objects for calls within the
// same Code object. Should not be used when generating new code (use labels),
// but only when patching existing code.
void Assembler::call(Address target) {
  positions_recorder()->WriteRecordedPositions();
  EnsureSpace ensure_space(this);
  // 1110 1000 #32-bit disp.
  emit(0xE8);
  Address source = pc_ + 4;
  intptr_t displacement = target - source;
  ASSERT(is_int32(displacement));
  emitl(static_cast<int32_t>(displacement));
}


// Clear carry flag.
void Assembler::clc() {
  EnsureSpace ensure_space(this);
  emit(0xF8);
}

// Clear direction flag.
void Assembler::cld() {
  EnsureSpace ensure_space(this);
  emit(0xFC);
}

// Sign-extend eax into edx:eax.
void Assembler::cdq() {
  EnsureSpace ensure_space(this);
  emit(0x99);
}


// 64-bit conditional move. 'always' degenerates to a plain move and
// 'never' to a no-op; note both of those paths fall through and emit the
// cmov encoding as well after handling the special case.
// NOTE(review): the missing 'return' after movq(dst, src) looks
// intentional-or-buggy — confirm whether 'always' should also emit the
// cmov bytes (cmov with cc = always/never is never requested by callers
// if the ASSERT(cc >= 0) convention holds).
void Assembler::cmovq(Condition cc, Register dst, Register src) {
  if (cc == always) {
    movq(dst, src);
  } else if (cc == never) {
    return;
  }
  // No need to check CpuInfo for CMOV support, it's a required part of the
  // 64-bit architecture.
  ASSERT(cc >= 0);  // Use mov for unconditional moves.
  EnsureSpace ensure_space(this);
  // Opcode: REX.W 0f 40 + cc /r.
  emit_rex_64(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_modrm(dst, src);
}


// 64-bit conditional move from memory.
void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
  if (cc == always) {
    movq(dst, src);
  } else if (cc == never) {
    return;
  }
  ASSERT(cc >= 0);
  EnsureSpace ensure_space(this);
  // Opcode: REX.W 0f 40 + cc /r.
  emit_rex_64(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_operand(dst, src);
}


// 32-bit conditional move.
void Assembler::cmovl(Condition cc, Register dst, Register src) {
  if (cc == always) {
    movl(dst, src);
  } else if (cc == never) {
    return;
  }
  ASSERT(cc >= 0);
  EnsureSpace ensure_space(this);
  // Opcode: 0f 40 + cc /r.
  emit_optional_rex_32(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_modrm(dst, src);
}


// 32-bit conditional move from memory.
void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
  if (cc == always) {
    movl(dst, src);
  } else if (cc == never) {
    return;
  }
  ASSERT(cc >= 0);
  EnsureSpace ensure_space(this);
  // Opcode: 0f 40 + cc /r.
  emit_optional_rex_32(dst, src);
  emit(0x0f);
  emit(0x40 + cc);
  emit_operand(dst, src);
}


// Compare al with an 8-bit immediate: short form 3C ib.
void Assembler::cmpb_al(Immediate imm8) {
  ASSERT(is_int8(imm8.value_) || is_uint8(imm8.value_));
  EnsureSpace ensure_space(this);
  emit(0x3c);
  emit(imm8.value_);
}


// CPUID instruction: 0F A2. Requires the CPUID feature scope to be enabled.
void Assembler::cpuid() {
  ASSERT(CpuFeatures::IsEnabled(CPUID));
  EnsureSpace ensure_space(this);
  emit(0x0F);
  emit(0xA2);
}


// Sign-extend rax into rdx:rax (REX.W 99).
void Assembler::cqo() {
  EnsureSpace ensure_space(this);
  emit_rex_64();
  emit(0x99);
}


// 64-bit decrement, register: FF /1.
void Assembler::decq(Register dst) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xFF);
  emit_modrm(0x1, dst);
}


// 64-bit decrement, memory.
void Assembler::decq(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xFF);
  emit_operand(1, dst);
}


// 32-bit decrement, register.
void Assembler::decl(Register dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xFF);
  emit_modrm(0x1, dst);
}


// 32-bit decrement, memory.
void Assembler::decl(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xFF);
  emit_operand(1, dst);
}


// 8-bit decrement, register: FE /1.
void Assembler::decb(Register dst) {
  EnsureSpace ensure_space(this);
  if (dst.code() > 3) {
    // Register is not one of al, bl, cl, dl. Its encoding needs REX.
    emit_rex_32(dst);
  }
  emit(0xFE);
  emit_modrm(0x1, dst);
}


// 8-bit decrement, memory.
void Assembler::decb(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xFE);
  emit_operand(1, dst);
}


// ENTER size, 0: C8 iw ib with a nesting level of zero.
void Assembler::enter(Immediate size) {
  EnsureSpace ensure_space(this);
  emit(0xC8);
  emitw(size.value_);  // 16 bit operand, always.
  emit(0);
}


// Halt: F4.
void Assembler::hlt() {
  EnsureSpace ensure_space(this);
  emit(0xF4);
}


// 64-bit signed divide rdx:rax by src: REX.W F7 /7.
void Assembler::idivq(Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src);
  emit(0xF7);
  emit_modrm(0x7, src);
}


// 32-bit signed divide edx:eax by src: F7 /7.
void Assembler::idivl(Register src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(src);
  emit(0xF7);
  emit_modrm(0x7, src);
}


// 64-bit one-operand signed multiply: rdx:rax = rax * src (REX.W F7 /5).
void Assembler::imul(Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src);
  emit(0xF7);
  emit_modrm(0x5, src);
}


// 64-bit two-operand signed multiply: dst *= src (REX.W 0F AF /r).
void Assembler::imul(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xAF);
  emit_modrm(dst, src);
}


// 64-bit two-operand signed multiply from memory.
void Assembler::imul(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xAF);
  emit_operand(dst, src);
}


// 64-bit three-operand signed multiply by immediate: 6B /r ib (8-bit
// immediate) or 69 /r id (32-bit immediate).
void Assembler::imul(Register dst, Register src, Immediate imm) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  if (is_int8(imm.value_)) {
    emit(0x6B);
    emit_modrm(dst, src);
    emit(imm.value_);
  } else {
    emit(0x69);
    emit_modrm(dst, src);
    emitl(imm.value_);
  }
}


// 32-bit two-operand signed multiply.
void Assembler::imull(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xAF);
  emit_modrm(dst, src);
}


// 32-bit two-operand signed multiply from memory.
void Assembler::imull(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xAF);
  emit_operand(dst, src);
}


// 32-bit three-operand signed multiply by immediate.
void Assembler::imull(Register dst, Register src, Immediate imm) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  if (is_int8(imm.value_)) {
    emit(0x6B);
    emit_modrm(dst, src);
    emit(imm.value_);
  } else {
    emit(0x69);
    emit_modrm(dst, src);
    emitl(imm.value_);
  }
}


// 64-bit increment, register: FF /0.
void Assembler::incq(Register dst) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xFF);
  emit_modrm(0x0, dst);
}


// 64-bit increment, memory.
void Assembler::incq(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xFF);
  emit_operand(0, dst);
}


// 32-bit increment, memory.
void Assembler::incl(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xFF);
  emit_operand(0, dst);
}


// 32-bit increment, register.
void Assembler::incl(Register dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xFF);
  emit_modrm(0, dst);
}


// Breakpoint: CC.
void Assembler::int3() {
  EnsureSpace ensure_space(this);
  emit(0xCC);
}


// Conditional jump to a label. Uses the short 8-bit form for bound labels
// in range, the 6-byte 0F 8x form otherwise. Unbound labels thread their
// pending uses through the emitted 32-bit displacement slots (see bind_to).
void Assembler::j(Condition cc, Label* L) {
  if (cc == always) {
    jmp(L);
    return;
  } else if (cc == never) {
    return;
  }
  EnsureSpace ensure_space(this);
  ASSERT(is_uint4(cc));
  if (L->is_bound()) {
    const int short_size = 2;
    const int long_size = 6;
    int offs = L->pos() - pc_offset();
    ASSERT(offs <= 0);
    if (is_int8(offs - short_size)) {
      // 0111 tttn #8-bit disp.
      emit(0x70 | cc);
      emit((offs - short_size) & 0xFF);
    } else {
      // 0000 1111 1000 tttn #32-bit disp.
      emit(0x0F);
      emit(0x80 | cc);
      emitl(offs - long_size);
    }
  } else if (L->is_linked()) {
    // 0000 1111 1000 tttn #32-bit disp.
    emit(0x0F);
    emit(0x80 | cc);
    emitl(L->pos());
    L->link_to(pc_offset() - sizeof(int32_t));
  } else {
    ASSERT(L->is_unused());
    emit(0x0F);
    emit(0x80 | cc);
    int32_t current = pc_offset();
    emitl(current);
    L->link_to(current);
  }
}


// Conditional jump to a code object, recorded via relocation info.
void Assembler::j(Condition cc,
                  Handle<Code> target,
                  RelocInfo::Mode rmode) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint4(cc));
  // 0000 1111 1000 tttn #32-bit disp.
  emit(0x0F);
  emit(0x80 | cc);
  emit_code_target(target, rmode);
}


// Conditional jump to a near label (always the 2-byte short form). An
// unbound label records the branch position; its one-byte displacement is
// back-patched by bind(NearLabel*).
void Assembler::j(Condition cc, NearLabel* L, Hint hint) {
  EnsureSpace ensure_space(this);
  ASSERT(0 <= cc && cc < 16);
  if (FLAG_emit_branch_hints && hint != no_hint) emit(hint);
  if (L->is_bound()) {
    const int short_size = 2;
    int offs = L->pos() - pc_offset();
    ASSERT(offs <= 0);
    ASSERT(is_int8(offs - short_size));
    // 0111 tttn #8-bit disp
    emit(0x70 | cc);
    emit((offs - short_size) & 0xFF);
  } else {
    emit(0x70 | cc);
    emit(0x00);  // The displacement will be resolved later.
    L->link_to(pc_offset());
  }
}


// Unconditional jump to a label. Short EB form when the bound target is
// in 8-bit range, E9 with a 32-bit displacement otherwise; unbound labels
// use the same displacement-slot linked list as call/j above.
void Assembler::jmp(Label* L) {
  EnsureSpace ensure_space(this);
  const int short_size = sizeof(int8_t);
  const int long_size = sizeof(int32_t);
  if (L->is_bound()) {
    int offs = L->pos() - pc_offset() - 1;
    ASSERT(offs <= 0);
    if (is_int8(offs - short_size)) {
      // 1110 1011 #8-bit disp.
      emit(0xEB);
      emit((offs - short_size) & 0xFF);
    } else {
      // 1110 1001 #32-bit disp.
      emit(0xE9);
      emitl(offs - long_size);
    }
  } else if (L->is_linked()) {
    // 1110 1001 #32-bit disp.
    emit(0xE9);
    emitl(L->pos());
    L->link_to(pc_offset() - long_size);
  } else {
    // 1110 1001 #32-bit disp.
1311 ASSERT(L->is_unused()); 1312 emit(0xE9); 1313 int32_t current = pc_offset(); 1314 emitl(current); 1315 L->link_to(current); 1316 } 1317 } 1318 1319 1320 void Assembler::jmp(Handle<Code> target, RelocInfo::Mode rmode) { 1321 EnsureSpace ensure_space(this); 1322 // 1110 1001 #32-bit disp. 1323 emit(0xE9); 1324 emit_code_target(target, rmode); 1325 } 1326 1327 1328 void Assembler::jmp(NearLabel* L) { 1329 EnsureSpace ensure_space(this); 1330 if (L->is_bound()) { 1331 const int short_size = 2; 1332 int offs = L->pos() - pc_offset(); 1333 ASSERT(offs <= 0); 1334 ASSERT(is_int8(offs - short_size)); 1335 // 1110 1011 #8-bit disp. 1336 emit(0xEB); 1337 emit((offs - short_size) & 0xFF); 1338 } else { 1339 emit(0xEB); 1340 emit(0x00); // The displacement will be resolved later. 1341 L->link_to(pc_offset()); 1342 } 1343 } 1344 1345 1346 void Assembler::jmp(Register target) { 1347 EnsureSpace ensure_space(this); 1348 // Opcode FF/4 r64. 1349 emit_optional_rex_32(target); 1350 emit(0xFF); 1351 emit_modrm(0x4, target); 1352 } 1353 1354 1355 void Assembler::jmp(const Operand& src) { 1356 EnsureSpace ensure_space(this); 1357 // Opcode FF/4 m64. 
1358 emit_optional_rex_32(src); 1359 emit(0xFF); 1360 emit_operand(0x4, src); 1361 } 1362 1363 1364 void Assembler::lea(Register dst, const Operand& src) { 1365 EnsureSpace ensure_space(this); 1366 emit_rex_64(dst, src); 1367 emit(0x8D); 1368 emit_operand(dst, src); 1369 } 1370 1371 1372 void Assembler::leal(Register dst, const Operand& src) { 1373 EnsureSpace ensure_space(this); 1374 emit_optional_rex_32(dst, src); 1375 emit(0x8D); 1376 emit_operand(dst, src); 1377 } 1378 1379 1380 void Assembler::load_rax(void* value, RelocInfo::Mode mode) { 1381 EnsureSpace ensure_space(this); 1382 emit(0x48); // REX.W 1383 emit(0xA1); 1384 emitq(reinterpret_cast<uintptr_t>(value), mode); 1385 } 1386 1387 1388 void Assembler::load_rax(ExternalReference ref) { 1389 load_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE); 1390 } 1391 1392 1393 void Assembler::leave() { 1394 EnsureSpace ensure_space(this); 1395 emit(0xC9); 1396 } 1397 1398 1399 void Assembler::movb(Register dst, const Operand& src) { 1400 EnsureSpace ensure_space(this); 1401 if (dst.code() > 3) { 1402 // Register is not one of al, bl, cl, dl. Its encoding needs REX. 
1403 emit_rex_32(dst, src); 1404 } else { 1405 emit_optional_rex_32(dst, src); 1406 } 1407 emit(0x8A); 1408 emit_operand(dst, src); 1409 } 1410 1411 1412 void Assembler::movb(Register dst, Immediate imm) { 1413 EnsureSpace ensure_space(this); 1414 if (dst.code() > 3) { 1415 emit_rex_32(dst); 1416 } 1417 emit(0xB0 + dst.low_bits()); 1418 emit(imm.value_); 1419 } 1420 1421 1422 void Assembler::movb(const Operand& dst, Register src) { 1423 EnsureSpace ensure_space(this); 1424 if (src.code() > 3) { 1425 emit_rex_32(src, dst); 1426 } else { 1427 emit_optional_rex_32(src, dst); 1428 } 1429 emit(0x88); 1430 emit_operand(src, dst); 1431 } 1432 1433 1434 void Assembler::movw(const Operand& dst, Register src) { 1435 EnsureSpace ensure_space(this); 1436 emit(0x66); 1437 emit_optional_rex_32(src, dst); 1438 emit(0x89); 1439 emit_operand(src, dst); 1440 } 1441 1442 1443 void Assembler::movl(Register dst, const Operand& src) { 1444 EnsureSpace ensure_space(this); 1445 emit_optional_rex_32(dst, src); 1446 emit(0x8B); 1447 emit_operand(dst, src); 1448 } 1449 1450 1451 void Assembler::movl(Register dst, Register src) { 1452 EnsureSpace ensure_space(this); 1453 if (src.low_bits() == 4) { 1454 emit_optional_rex_32(src, dst); 1455 emit(0x89); 1456 emit_modrm(src, dst); 1457 } else { 1458 emit_optional_rex_32(dst, src); 1459 emit(0x8B); 1460 emit_modrm(dst, src); 1461 } 1462 } 1463 1464 1465 void Assembler::movl(const Operand& dst, Register src) { 1466 EnsureSpace ensure_space(this); 1467 emit_optional_rex_32(src, dst); 1468 emit(0x89); 1469 emit_operand(src, dst); 1470 } 1471 1472 1473 void Assembler::movl(const Operand& dst, Immediate value) { 1474 EnsureSpace ensure_space(this); 1475 emit_optional_rex_32(dst); 1476 emit(0xC7); 1477 emit_operand(0x0, dst); 1478 emit(value); 1479 } 1480 1481 1482 void Assembler::movl(Register dst, Immediate value) { 1483 EnsureSpace ensure_space(this); 1484 emit_optional_rex_32(dst); 1485 emit(0xB8 + dst.low_bits()); 1486 emit(value); 1487 } 1488 1489 
// MOV 64-bit register from memory: REX.W 8B /r.
void Assembler::movq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x8B);
  emit_operand(dst, src);
}


void Assembler::movq(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    emit_rex_64(src, dst);
    emit(0x89);
    emit_modrm(src, dst);
  } else {
    emit_rex_64(dst, src);
    emit(0x8B);
    emit_modrm(dst, src);
  }
}


// MOV r64, imm32 sign-extended: REX.W C7 /0 id.
void Assembler::movq(Register dst, Immediate value) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xC7);
  emit_modrm(0x0, dst);
  emit(value);  // Only 32-bit immediates are possible, not 8-bit immediates.
}


// MOV 64-bit register to memory: REX.W 89 /r.
void Assembler::movq(const Operand& dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src, dst);
  emit(0x89);
  emit_operand(src, dst);
}


// MOV r64, imm64 (REX.W B8+rd io), recording relocation info for the address.
void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) {
  // This method must not be used with heap object references. The stored
  // address is not GC safe. Use the handle version instead.
  ASSERT(rmode > RelocInfo::LAST_GCED_ENUM);
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xB8 | dst.low_bits());
  emitq(reinterpret_cast<uintptr_t>(value), rmode);
}


// Load a 64-bit constant, using a shorter 32-bit encoding when the value
// allows and no relocation is needed.
void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) {
  // Non-relocatable values might not need a 64-bit representation.
  if (rmode == RelocInfo::NONE) {
    // Sadly, there is no zero or sign extending move for 8-bit immediates.
    if (is_int32(value)) {
      movq(dst, Immediate(static_cast<int32_t>(value)));
      return;
    } else if (is_uint32(value)) {
      // movl implicitly zero-extends to 64 bits.
      movl(dst, Immediate(static_cast<int32_t>(value)));
      return;
    }
    // Value cannot be represented by 32 bits, so do a full 64 bit immediate
    // value.
  }
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xB8 | dst.low_bits());
  emitq(value, rmode);
}


void Assembler::movq(Register dst, ExternalReference ref) {
  int64_t value = reinterpret_cast<int64_t>(ref.address());
  movq(dst, value, RelocInfo::EXTERNAL_REFERENCE);
}


// MOV m64, imm32 sign-extended: REX.W C7 /0 id.
void Assembler::movq(const Operand& dst, Immediate value) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst);
  emit(0xC7);
  emit_operand(0, dst);
  emit(value);
}


// Loads the ip-relative location of the src label into the target location
// (as a 32-bit offset sign extended to 64-bit).
void Assembler::movl(const Operand& dst, Label* src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0xC7);
  emit_operand(0, dst);
  if (src->is_bound()) {
    int offset = src->pos() - pc_offset() - sizeof(int32_t);
    ASSERT(offset <= 0);
    emitl(offset);
  } else if (src->is_linked()) {
    // Chain through the displacement field, patched when the label is bound.
    emitl(src->pos());
    src->link_to(pc_offset() - sizeof(int32_t));
  } else {
    ASSERT(src->is_unused());
    int32_t current = pc_offset();
    emitl(current);
    src->link_to(current);
  }
}


void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
  // If there is no relocation info, emit the value of the handle efficiently
  // (possibly using less that 8 bytes for the value).
  if (mode == RelocInfo::NONE) {
    // There is no possible reason to store a heap pointer without relocation
    // info, so it must be a smi.
    ASSERT(value->IsSmi());
    movq(dst, reinterpret_cast<int64_t>(*value), RelocInfo::NONE);
  } else {
    EnsureSpace ensure_space(this);
    ASSERT(value->IsHeapObject());
    ASSERT(!HEAP->InNewSpace(*value));
    emit_rex_64(dst);
    emit(0xB8 | dst.low_bits());
    // The handle cell address (not the object) is embedded; the GC can
    // update it through the relocation info.
    emitq(reinterpret_cast<uintptr_t>(value.location()), mode);
  }
}


// Sign-extending loads.  MOVSX r64, r/m8: REX.W 0F BE /r.
void Assembler::movsxbq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xBE);
  emit_operand(dst, src);
}


// MOVSX r64, r/m16: REX.W 0F BF /r.
void Assembler::movsxwq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0xBF);
  emit_operand(dst, src);
}


// MOVSXD r64, r/m32: REX.W 63 /r.
void Assembler::movsxlq(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x63);
  emit_modrm(dst, src);
}


void Assembler::movsxlq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(dst, src);
  emit(0x63);
  emit_operand(dst, src);
}


// Zero-extending loads.  32-bit destination writes implicitly clear the
// upper 32 bits, so the 32-bit encodings also serve the 64-bit variants.
void Assembler::movzxbq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB6);
  emit_operand(dst, src);
}


void Assembler::movzxbl(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB6);
  emit_operand(dst, src);
}


void Assembler::movzxwq(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB7);
  emit_operand(dst, src);
}


void Assembler::movzxwl(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0xB7);
1684 emit_operand(dst, src); 1685 } 1686 1687 1688 void Assembler::repmovsb() { 1689 EnsureSpace ensure_space(this); 1690 emit(0xF3); 1691 emit(0xA4); 1692 } 1693 1694 1695 void Assembler::repmovsw() { 1696 EnsureSpace ensure_space(this); 1697 emit(0x66); // Operand size override. 1698 emit(0xF3); 1699 emit(0xA4); 1700 } 1701 1702 1703 void Assembler::repmovsl() { 1704 EnsureSpace ensure_space(this); 1705 emit(0xF3); 1706 emit(0xA5); 1707 } 1708 1709 1710 void Assembler::repmovsq() { 1711 EnsureSpace ensure_space(this); 1712 emit(0xF3); 1713 emit_rex_64(); 1714 emit(0xA5); 1715 } 1716 1717 1718 void Assembler::mul(Register src) { 1719 EnsureSpace ensure_space(this); 1720 emit_rex_64(src); 1721 emit(0xF7); 1722 emit_modrm(0x4, src); 1723 } 1724 1725 1726 void Assembler::neg(Register dst) { 1727 EnsureSpace ensure_space(this); 1728 emit_rex_64(dst); 1729 emit(0xF7); 1730 emit_modrm(0x3, dst); 1731 } 1732 1733 1734 void Assembler::negl(Register dst) { 1735 EnsureSpace ensure_space(this); 1736 emit_optional_rex_32(dst); 1737 emit(0xF7); 1738 emit_modrm(0x3, dst); 1739 } 1740 1741 1742 void Assembler::neg(const Operand& dst) { 1743 EnsureSpace ensure_space(this); 1744 emit_rex_64(dst); 1745 emit(0xF7); 1746 emit_operand(3, dst); 1747 } 1748 1749 1750 void Assembler::nop() { 1751 EnsureSpace ensure_space(this); 1752 emit(0x90); 1753 } 1754 1755 1756 void Assembler::not_(Register dst) { 1757 EnsureSpace ensure_space(this); 1758 emit_rex_64(dst); 1759 emit(0xF7); 1760 emit_modrm(0x2, dst); 1761 } 1762 1763 1764 void Assembler::not_(const Operand& dst) { 1765 EnsureSpace ensure_space(this); 1766 emit_rex_64(dst); 1767 emit(0xF7); 1768 emit_operand(2, dst); 1769 } 1770 1771 1772 void Assembler::notl(Register dst) { 1773 EnsureSpace ensure_space(this); 1774 emit_optional_rex_32(dst); 1775 emit(0xF7); 1776 emit_modrm(0x2, dst); 1777 } 1778 1779 1780 void Assembler::nop(int n) { 1781 // The recommended muti-byte sequences of NOP instructions from the Intel 64 1782 // and IA-32 
  // Architectures Software Developer's Manual.
  //
  // Length  Assembly                                Byte Sequence
  // 2 bytes 66 NOP                                  66 90H
  // 3 bytes NOP DWORD ptr [EAX]                     0F 1F 00H
  // 4 bytes NOP DWORD ptr [EAX + 00H]               0F 1F 40 00H
  // 5 bytes NOP DWORD ptr [EAX + EAX*1 + 00H]       0F 1F 44 00 00H
  // 6 bytes 66 NOP DWORD ptr [EAX + EAX*1 + 00H]    66 0F 1F 44 00 00H
  // 7 bytes NOP DWORD ptr [EAX + 00000000H]         0F 1F 80 00 00 00 00H
  // 8 bytes NOP DWORD ptr [EAX + EAX*1 + 00000000H] 0F 1F 84 00 00 00 00 00H
  // 9 bytes 66 NOP DWORD ptr [EAX + EAX*1 +         66 0F 1F 84 00 00 00 00
  //         00000000H]                              00H

  ASSERT(1 <= n);
  ASSERT(n <= 9);
  EnsureSpace ensure_space(this);
  switch (n) {
    case 1:
      emit(0x90);
      return;
    case 2:
      emit(0x66);
      emit(0x90);
      return;
    case 3:
      emit(0x0f);
      emit(0x1f);
      emit(0x00);
      return;
    case 4:
      emit(0x0f);
      emit(0x1f);
      emit(0x40);
      emit(0x00);
      return;
    case 5:
      emit(0x0f);
      emit(0x1f);
      emit(0x44);
      emit(0x00);
      emit(0x00);
      return;
    case 6:
      emit(0x66);
      emit(0x0f);
      emit(0x1f);
      emit(0x44);
      emit(0x00);
      emit(0x00);
      return;
    case 7:
      emit(0x0f);
      emit(0x1f);
      emit(0x80);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      return;
    case 8:
      emit(0x0f);
      emit(0x1f);
      emit(0x84);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      return;
    case 9:
      emit(0x66);
      emit(0x0f);
      emit(0x1f);
      emit(0x84);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      emit(0x00);
      return;
  }
}


// POP r64: 58+rd.
void Assembler::pop(Register dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0x58 | dst.low_bits());
}


// POP m64: 8F /0.
void Assembler::pop(const Operand& dst) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(dst);
  emit(0x8F);
  emit_operand(0, dst);
}


// POPFQ (0x9D).
void Assembler::popfq() {
  EnsureSpace ensure_space(this);
  emit(0x9D);
}


// PUSH r64: 50+rd.
void Assembler::push(Register src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(src);
  emit(0x50 | src.low_bits());
}


// PUSH m64: FF /6.
void Assembler::push(const Operand& src) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(src);
  emit(0xFF);
  emit_operand(6, src);
}


// PUSH imm8 (6A ib) or imm32 (68 id); the value is sign-extended to 64 bits.
void Assembler::push(Immediate value) {
  EnsureSpace ensure_space(this);
  if (is_int8(value.value_)) {
    emit(0x6A);
    emit(value.value_);  // Emit low byte of value.
  } else {
    emit(0x68);
    emitl(value.value_);
  }
}


// PUSH imm32, always the 5-byte 68 id form (fixed instruction length).
void Assembler::push_imm32(int32_t imm32) {
  EnsureSpace ensure_space(this);
  emit(0x68);
  emitl(imm32);
}


// PUSHFQ (0x9C).
void Assembler::pushfq() {
  EnsureSpace ensure_space(this);
  emit(0x9C);
}


// RDTSC (0F 31): read time-stamp counter into edx:eax.
void Assembler::rdtsc() {
  EnsureSpace ensure_space(this);
  emit(0x0F);
  emit(0x31);
}


// RET (C3), or RET imm16 (C2 iw) which also pops imm16 bytes of arguments.
void Assembler::ret(int imm16) {
  EnsureSpace ensure_space(this);
  ASSERT(is_uint16(imm16));
  if (imm16 == 0) {
    emit(0xC3);
  } else {
    emit(0xC2);
    emit(imm16 & 0xFF);
    emit((imm16 >> 8) & 0xFF);
  }
}


// SETcc r8: 0F 90+cc /0.  'always'/'never' degenerate to a byte move.
void Assembler::setcc(Condition cc, Register reg) {
  if (cc > last_condition) {
    movb(reg, Immediate(cc == always ? 1 : 0));
    return;
  }
  EnsureSpace ensure_space(this);
  ASSERT(is_uint4(cc));
  if (reg.code() > 3) {  // Use x64 byte registers, where different.
    emit_rex_32(reg);
  }
  emit(0x0F);
  emit(0x90 | cc);
  emit_modrm(0x0, reg);
}


// SHLD dst, src, cl: REX.W 0F A5 /r.
void Assembler::shld(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xA5);
  emit_modrm(src, dst);
}


// SHRD dst, src, cl: REX.W 0F AD /r.
void Assembler::shrd(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0xAD);
  emit_modrm(src, dst);
}


// XCHG, 64-bit: 90+rd when one side is rax, otherwise 87 /r.
void Assembler::xchg(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.is(rax) || dst.is(rax)) {  // Single-byte encoding
    Register other = src.is(rax) ? dst : src;
    emit_rex_64(other);
    emit(0x90 | other.low_bits());
  } else if (dst.low_bits() == 4) {
    emit_rex_64(dst, src);
    emit(0x87);
    emit_modrm(dst, src);
  } else {
    emit_rex_64(src, dst);
    emit(0x87);
    emit_modrm(src, dst);
  }
}


// MOV moffs64, rax (REX.W A3) — store rax to a fixed 64-bit address.
void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
  EnsureSpace ensure_space(this);
  emit(0x48);  // REX.W
  emit(0xA3);
  emitq(reinterpret_cast<uintptr_t>(dst), mode);
}


void Assembler::store_rax(ExternalReference ref) {
  store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
}


// TEST of byte registers: 84 /r.
void Assembler::testb(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    emit_rex_32(src, dst);
    emit(0x84);
    emit_modrm(src, dst);
  } else {
    if (dst.code() > 3 || src.code() > 3) {
      // Register is not one of al, bl, cl, dl. Its encoding needs REX.
      emit_rex_32(dst, src);
    }
    emit(0x84);
    emit_modrm(dst, src);
  }
}


// TEST r8, imm8: A8 ib for al, otherwise F6 /0 ib.
void Assembler::testb(Register reg, Immediate mask) {
  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
  EnsureSpace ensure_space(this);
  if (reg.is(rax)) {
    emit(0xA8);
    emit(mask.value_);  // Low byte emitted.
  } else {
    if (reg.code() > 3) {
      // Register is not one of al, bl, cl, dl. Its encoding needs REX.
      emit_rex_32(reg);
    }
    emit(0xF6);
    emit_modrm(0x0, reg);
    emit(mask.value_);  // Low byte emitted.
  }
}


// TEST m8, imm8: F6 /0 ib.
void Assembler::testb(const Operand& op, Immediate mask) {
  ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(rax, op);
  emit(0xF6);
  emit_operand(rax, op);  // Operation code 0
  emit(mask.value_);  // Low byte emitted.
}


// TEST m8, r8: 84 /r.
void Assembler::testb(const Operand& op, Register reg) {
  EnsureSpace ensure_space(this);
  if (reg.code() > 3) {
    // Register is not one of al, bl, cl, dl. Its encoding needs REX.
    emit_rex_32(reg, op);
  } else {
    emit_optional_rex_32(reg, op);
  }
  emit(0x84);
  emit_operand(reg, op);
}


// TEST, 32-bit: 85 /r.
void Assembler::testl(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    emit_optional_rex_32(src, dst);
    emit(0x85);
    emit_modrm(src, dst);
  } else {
    emit_optional_rex_32(dst, src);
    emit(0x85);
    emit_modrm(dst, src);
  }
}


// TEST r32, imm32: A9 id for eax, otherwise F7 /0 id.
void Assembler::testl(Register reg, Immediate mask) {
  // testl with a mask that fits in the low byte is exactly testb.
  if (is_uint8(mask.value_)) {
    testb(reg, mask);
    return;
  }
  EnsureSpace ensure_space(this);
  if (reg.is(rax)) {
    emit(0xA9);
    emit(mask);
  } else {
    emit_optional_rex_32(rax, reg);
    emit(0xF7);
    emit_modrm(0x0, reg);
    emit(mask);
  }
}


void Assembler::testl(const Operand& op, Immediate mask) {
  // testl with a mask that fits in the low byte is exactly testb.
  if (is_uint8(mask.value_)) {
    testb(op, mask);
    return;
  }
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(rax, op);
  emit(0xF7);
  emit_operand(rax, op);  // Operation code 0
  emit(mask);
}


// TEST, 64-bit: REX.W 85 /r.
void Assembler::testq(const Operand& op, Register reg) {
  EnsureSpace ensure_space(this);
  emit_rex_64(reg, op);
  emit(0x85);
  emit_operand(reg, op);
}


void Assembler::testq(Register dst, Register src) {
  EnsureSpace ensure_space(this);
  if (src.low_bits() == 4) {
    emit_rex_64(src, dst);
    emit(0x85);
    emit_modrm(src, dst);
  } else {
    emit_rex_64(dst, src);
    emit(0x85);
    emit_modrm(dst, src);
  }
}


// TEST r64, imm32 sign-extended: REX.W A9 id for rax, else REX.W F7 /0 id.
void Assembler::testq(Register dst, Immediate mask) {
  EnsureSpace ensure_space(this);
  if (dst.is(rax)) {
    emit_rex_64();
    emit(0xA9);
    emit(mask);
  } else {
    emit_rex_64(dst);
    emit(0xF7);
    emit_modrm(0, dst);
    emit(mask);
  }
}


// FPU instructions.
// FLD st(i): D9 C0+i.
void Assembler::fld(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD9, 0xC0, i);
}


// Load x87 constants: FLD1 (D9 E8), FLDZ (D9 EE), FLDPI (D9 EB),
// FLDLN2 (D9 ED).
void Assembler::fld1() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xE8);
}


void Assembler::fldz() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xEE);
}


void Assembler::fldpi() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xEB);
}


void Assembler::fldln2() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xED);
}


// FLD m32fp: D9 /0.
void Assembler::fld_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xD9);
  emit_operand(0, adr);
}


// FLD m64fp: DD /0.
void Assembler::fld_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDD);
  emit_operand(0, adr);
}


// FSTP m32fp: D9 /3.
void Assembler::fstp_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xD9);
  emit_operand(3, adr);
}


// FSTP m64fp: DD /3.
void Assembler::fstp_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDD);
  emit_operand(3, adr);
}


// FSTP st(i): DD D8+i.
void Assembler::fstp(int index) {
  ASSERT(is_uint3(index));
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xD8, index);
}


// FILD m32int: DB /0.
void Assembler::fild_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(0, adr);
}


// FILD m64int: DF /5.
void Assembler::fild_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDF);
  emit_operand(5, adr);
}


// FISTP m32int: DB /3.
void Assembler::fistp_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(3, adr);
}


// FISTTP m32int (truncating store, SSE3): DB /1.
void Assembler::fisttp_s(const Operand& adr) {
  ASSERT(CpuFeatures::IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(1, adr);
}


// FISTTP m64int (truncating store, SSE3): DD /1.
void Assembler::fisttp_d(const Operand& adr) {
  ASSERT(CpuFeatures::IsEnabled(SSE3));
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDD);
  emit_operand(1, adr);
}


// FIST m32int: DB /2.
void Assembler::fist_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDB);
  emit_operand(2, adr);
}


// FISTP m64int: DF /7.
void Assembler::fistp_d(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDF);
  emit_operand(7, adr);
}


// x87 unary/transcendental operations on st(0).
void Assembler::fabs() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xE1);
}


void Assembler::fchs() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xE0);
}


void Assembler::fcos() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xFF);
}


void Assembler::fsin() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xFE);
}


// FYL2X: st(1) = st(1) * log2(st(0)), then pop.
void Assembler::fyl2x() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF1);
}


// x87 arithmetic with st(i): DC group (no pop), DE group (pop).
void Assembler::fadd(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xC0, i);
}


void Assembler::fsub(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xE8, i);
}


// FISUB m32int: DA /4.
void Assembler::fisub_s(const Operand& adr) {
  EnsureSpace ensure_space(this);
  emit_optional_rex_32(adr);
  emit(0xDA);
  emit_operand(4, adr);
}


void Assembler::fmul(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xC8, i);
}


void Assembler::fdiv(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDC, 0xF8, i);
}


void Assembler::faddp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xC0, i);
}


void Assembler::fsubp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xE8, i);
}


void Assembler::fsubrp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xE0, i);
}


void Assembler::fmulp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xC8, i);
}


void Assembler::fdivp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDE, 0xF8, i);
}


// FPREM (D9 F8) / FPREM1 (D9 F5): partial remainder of st(0)/st(1).
void Assembler::fprem() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF8);
}


void Assembler::fprem1() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF5);
}


// FXCH st(i): D9 C8+i.
void Assembler::fxch(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xD9, 0xC8, i);
}


// FINCSTP (D9 F7): increment the x87 stack pointer.
void Assembler::fincstp() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xF7);
}


// FFREE st(i): DD C0+i.
void Assembler::ffree(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xC0, i);
}


// FTST (D9 E4): compare st(0) against 0.0.
void Assembler::ftst() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xE4);
}


// Unordered compares: FUCOMP st(i) (DD E8+i), FUCOMPP (DA E9),
// FUCOMI st(i) (DB E8+i), FUCOMIP (DF E9), FCOMPP (DE D9).
void Assembler::fucomp(int i) {
  EnsureSpace ensure_space(this);
  emit_farith(0xDD, 0xE8, i);
}


void Assembler::fucompp() {
  EnsureSpace ensure_space(this);
  emit(0xDA);
  emit(0xE9);
}


void Assembler::fucomi(int i) {
  EnsureSpace ensure_space(this);
  emit(0xDB);
  emit(0xE8 + i);
}


void Assembler::fucomip() {
  EnsureSpace ensure_space(this);
  emit(0xDF);
  emit(0xE9);
}


void Assembler::fcompp() {
  EnsureSpace ensure_space(this);
  emit(0xDE);
  emit(0xD9);
}


// FNSTSW ax (DF E0): store the x87 status word in ax.
void Assembler::fnstsw_ax() {
  EnsureSpace ensure_space(this);
  emit(0xDF);
  emit(0xE0);
}


// WAIT/FWAIT (9B).
void Assembler::fwait() {
  EnsureSpace ensure_space(this);
  emit(0x9B);
}


// FRNDINT (D9 FC): round st(0) to integer.
void Assembler::frndint() {
  EnsureSpace ensure_space(this);
  emit(0xD9);
  emit(0xFC);
}


// FNCLEX (DB E2): clear x87 exception flags.
void Assembler::fnclex() {
  EnsureSpace ensure_space(this);
  emit(0xDB);
  emit(0xE2);
}


void Assembler::sahf() {
  // TODO(X64): Test for presence. Not all 64-bit intel CPU's have sahf
  // in 64-bit mode. Test CpuID.
  EnsureSpace ensure_space(this);
  emit(0x9E);
}


// Helper for the two-byte x87 register forms: emits b1, then b2 + i.
void Assembler::emit_farith(int b1, int b2, int i) {
  ASSERT(is_uint8(b1) && is_uint8(b2));  // wrong opcode
  ASSERT(is_uint3(i));  // illegal stack offset
  emit(b1);
  emit(b2 + i);
}

// SSE 2 operations.

// MOVD xmm, r32: 66 0F 6E /r.
void Assembler::movd(XMMRegister dst, Register src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x6E);
  emit_sse_operand(dst, src);
}


// MOVD r32, xmm: 66 0F 7E /r.
void Assembler::movd(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(src, dst);
  emit(0x0F);
  emit(0x7E);
  emit_sse_operand(src, dst);
}


// MOVQ xmm, r64: 66 REX.W 0F 6E /r.
void Assembler::movq(XMMRegister dst, Register src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0x6E);
  emit_sse_operand(dst, src);
}


// MOVQ r64, xmm: 66 REX.W 0F 7E /r.
void Assembler::movq(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0x7E);
  emit_sse_operand(src, dst);
}


// MOVDQA m128, xmm: 66 0F 7F /r (aligned store).
void Assembler::movdqa(const Operand& dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_rex_64(src, dst);
  emit(0x0F);
  emit(0x7F);
  emit_sse_operand(src, dst);
}


// MOVDQA xmm, m128: 66 0F 6F /r (aligned load).
void Assembler::movdqa(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0x6F);
  emit_sse_operand(dst, src);
}


// EXTRACTPS r32, xmm, imm8: 66 0F 3A 17 /r ib.
// NOTE(review): EXTRACTPS is an SSE4.1 instruction, but unlike fisttp_s/
// fisttp_d (which assert SSE3) there is no CpuFeatures check here — verify
// that callers guard on SSE4.1 support.
void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
  ASSERT(is_uint2(imm8));
  EnsureSpace ensure_space(this);
  emit(0x66);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x3A);
  emit(0x17);
  emit_sse_operand(dst, src);
  emit(imm8);
}


// MOVSD m64, xmm: F2 0F 11 /r.
void Assembler::movsd(const Operand& dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);  // double
  emit_optional_rex_32(src, dst);
  emit(0x0F);
  emit(0x11);  // store
  emit_sse_operand(src, dst);
}


// MOVSD xmm, xmm/m64: F2 0F 10 /r.
void Assembler::movsd(XMMRegister dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);  // double
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x10);  // load
  emit_sse_operand(dst, src);
}


void Assembler::movsd(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);  // double
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x10);  // load
  emit_sse_operand(dst, src);
}


// MOVSS xmm, m32: F3 0F 10 /r.
void Assembler::movss(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF3);  // single
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x10);  // load
  emit_sse_operand(dst, src);
}


// MOVSS m32, xmm: F3 0F 11 /r.  Note the parameter names are reversed
// relative to the data flow: xmm 'dst' is stored into memory 'src'.
void Assembler::movss(const Operand& src, XMMRegister dst) {
  EnsureSpace ensure_space(this);
  emit(0xF3);  // single
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x11);  // store
  emit_sse_operand(dst, src);
}


// CVTTSS2SI r32, m32/xmm: F3 0F 2C /r (truncating float->int32).
void Assembler::cvttss2si(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2C);
  emit_operand(dst, src);
}


void Assembler::cvttss2si(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2C);
  emit_sse_operand(dst, src);
}


// CVTTSD2SI r32, m64/xmm: F2 0F 2C /r (truncating double->int32).
void Assembler::cvttsd2si(Register dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2C);
  emit_operand(dst, src);
}


void Assembler::cvttsd2si(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2C);
  emit_sse_operand(dst, src);
}


// CVTTSD2SI r64, xmm: F2 REX.W 0F 2C /r (truncating double->int64).
void Assembler::cvttsd2siq(Register dst, XMMRegister src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_rex_64(dst, src);
  emit(0x0F);
  emit(0x2C);
  emit_sse_operand(dst, src);
}


// CVTSI2SD xmm, r/m32: F2 0F 2A /r (int32->double).
void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
  EnsureSpace ensure_space(this);
  emit(0xF2);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2A);
  emit_sse_operand(dst, src);
}


// CVTSI2SS xmm, r32: F3 0F 2A /r (int32->float).
void Assembler::cvtlsi2ss(XMMRegister dst, Register src) {
  EnsureSpace ensure_space(this);
  emit(0xF3);
  emit_optional_rex_32(dst, src);
  emit(0x0F);
  emit(0x2A);
  emit_sse_operand(dst, src);
}


void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
  EnsureSpace ensure_space(this);
2708 emit(0xF2); 2709 emit_rex_64(dst, src); 2710 emit(0x0F); 2711 emit(0x2A); 2712 emit_sse_operand(dst, src); 2713 } 2714 2715 2716 void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) { 2717 EnsureSpace ensure_space(this); 2718 emit(0xF3); 2719 emit_optional_rex_32(dst, src); 2720 emit(0x0F); 2721 emit(0x5A); 2722 emit_sse_operand(dst, src); 2723 } 2724 2725 2726 void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) { 2727 EnsureSpace ensure_space(this); 2728 emit(0xF3); 2729 emit_optional_rex_32(dst, src); 2730 emit(0x0F); 2731 emit(0x5A); 2732 emit_sse_operand(dst, src); 2733 } 2734 2735 2736 void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) { 2737 EnsureSpace ensure_space(this); 2738 emit(0xF2); 2739 emit_optional_rex_32(dst, src); 2740 emit(0x0F); 2741 emit(0x5A); 2742 emit_sse_operand(dst, src); 2743 } 2744 2745 2746 void Assembler::cvtsd2si(Register dst, XMMRegister src) { 2747 EnsureSpace ensure_space(this); 2748 emit(0xF2); 2749 emit_optional_rex_32(dst, src); 2750 emit(0x0F); 2751 emit(0x2D); 2752 emit_sse_operand(dst, src); 2753 } 2754 2755 2756 void Assembler::cvtsd2siq(Register dst, XMMRegister src) { 2757 EnsureSpace ensure_space(this); 2758 emit(0xF2); 2759 emit_rex_64(dst, src); 2760 emit(0x0F); 2761 emit(0x2D); 2762 emit_sse_operand(dst, src); 2763 } 2764 2765 2766 void Assembler::addsd(XMMRegister dst, XMMRegister src) { 2767 EnsureSpace ensure_space(this); 2768 emit(0xF2); 2769 emit_optional_rex_32(dst, src); 2770 emit(0x0F); 2771 emit(0x58); 2772 emit_sse_operand(dst, src); 2773 } 2774 2775 2776 void Assembler::mulsd(XMMRegister dst, XMMRegister src) { 2777 EnsureSpace ensure_space(this); 2778 emit(0xF2); 2779 emit_optional_rex_32(dst, src); 2780 emit(0x0F); 2781 emit(0x59); 2782 emit_sse_operand(dst, src); 2783 } 2784 2785 2786 void Assembler::subsd(XMMRegister dst, XMMRegister src) { 2787 EnsureSpace ensure_space(this); 2788 emit(0xF2); 2789 emit_optional_rex_32(dst, src); 2790 emit(0x0F); 2791 emit(0x5C); 2792 
emit_sse_operand(dst, src); 2793 } 2794 2795 2796 void Assembler::divsd(XMMRegister dst, XMMRegister src) { 2797 EnsureSpace ensure_space(this); 2798 emit(0xF2); 2799 emit_optional_rex_32(dst, src); 2800 emit(0x0F); 2801 emit(0x5E); 2802 emit_sse_operand(dst, src); 2803 } 2804 2805 2806 void Assembler::andpd(XMMRegister dst, XMMRegister src) { 2807 EnsureSpace ensure_space(this); 2808 emit(0x66); 2809 emit_optional_rex_32(dst, src); 2810 emit(0x0F); 2811 emit(0x54); 2812 emit_sse_operand(dst, src); 2813 } 2814 2815 2816 void Assembler::orpd(XMMRegister dst, XMMRegister src) { 2817 EnsureSpace ensure_space(this); 2818 emit(0x66); 2819 emit_optional_rex_32(dst, src); 2820 emit(0x0F); 2821 emit(0x56); 2822 emit_sse_operand(dst, src); 2823 } 2824 2825 2826 void Assembler::xorpd(XMMRegister dst, XMMRegister src) { 2827 EnsureSpace ensure_space(this); 2828 emit(0x66); 2829 emit_optional_rex_32(dst, src); 2830 emit(0x0F); 2831 emit(0x57); 2832 emit_sse_operand(dst, src); 2833 } 2834 2835 2836 void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) { 2837 EnsureSpace ensure_space(this); 2838 emit(0xF2); 2839 emit_optional_rex_32(dst, src); 2840 emit(0x0F); 2841 emit(0x51); 2842 emit_sse_operand(dst, src); 2843 } 2844 2845 2846 void Assembler::ucomisd(XMMRegister dst, XMMRegister src) { 2847 EnsureSpace ensure_space(this); 2848 emit(0x66); 2849 emit_optional_rex_32(dst, src); 2850 emit(0x0f); 2851 emit(0x2e); 2852 emit_sse_operand(dst, src); 2853 } 2854 2855 2856 void Assembler::ucomisd(XMMRegister dst, const Operand& src) { 2857 EnsureSpace ensure_space(this); 2858 emit(0x66); 2859 emit_optional_rex_32(dst, src); 2860 emit(0x0f); 2861 emit(0x2e); 2862 emit_sse_operand(dst, src); 2863 } 2864 2865 2866 void Assembler::movmskpd(Register dst, XMMRegister src) { 2867 EnsureSpace ensure_space(this); 2868 emit(0x66); 2869 emit_optional_rex_32(dst, src); 2870 emit(0x0f); 2871 emit(0x50); 2872 emit_sse_operand(dst, src); 2873 } 2874 2875 2876 void 
Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) { 2877 Register ireg = { reg.code() }; 2878 emit_operand(ireg, adr); 2879 } 2880 2881 2882 void Assembler::emit_sse_operand(XMMRegister dst, XMMRegister src) { 2883 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits()); 2884 } 2885 2886 void Assembler::emit_sse_operand(XMMRegister dst, Register src) { 2887 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits()); 2888 } 2889 2890 void Assembler::emit_sse_operand(Register dst, XMMRegister src) { 2891 emit(0xC0 | (dst.low_bits() << 3) | src.low_bits()); 2892 } 2893 2894 2895 void Assembler::db(uint8_t data) { 2896 EnsureSpace ensure_space(this); 2897 emit(data); 2898 } 2899 2900 2901 void Assembler::dd(uint32_t data) { 2902 EnsureSpace ensure_space(this); 2903 emitl(data); 2904 } 2905 2906 2907 // Relocation information implementations. 2908 2909 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { 2910 ASSERT(rmode != RelocInfo::NONE); 2911 // Don't record external references unless the heap will be serialized. 
2912 if (rmode == RelocInfo::EXTERNAL_REFERENCE) { 2913 #ifdef DEBUG 2914 if (!Serializer::enabled()) { 2915 Serializer::TooLateToEnableNow(); 2916 } 2917 #endif 2918 if (!Serializer::enabled() && !emit_debug_code()) { 2919 return; 2920 } 2921 } 2922 RelocInfo rinfo(pc_, rmode, data); 2923 reloc_info_writer.Write(&rinfo); 2924 } 2925 2926 void Assembler::RecordJSReturn() { 2927 positions_recorder()->WriteRecordedPositions(); 2928 EnsureSpace ensure_space(this); 2929 RecordRelocInfo(RelocInfo::JS_RETURN); 2930 } 2931 2932 2933 void Assembler::RecordDebugBreakSlot() { 2934 positions_recorder()->WriteRecordedPositions(); 2935 EnsureSpace ensure_space(this); 2936 RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT); 2937 } 2938 2939 2940 void Assembler::RecordComment(const char* msg, bool force) { 2941 if (FLAG_code_comments || force) { 2942 EnsureSpace ensure_space(this); 2943 RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg)); 2944 } 2945 } 2946 2947 2948 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask | 2949 1 << RelocInfo::INTERNAL_REFERENCE; 2950 2951 2952 bool RelocInfo::IsCodedSpecially() { 2953 // The deserializer needs to know whether a pointer is specially coded. Being 2954 // specially coded on x64 means that it is a relative 32 bit address, as used 2955 // by branch instructions. 2956 return (1 << rmode_) & kApplyMask; 2957 } 2958 2959 2960 2961 } } // namespace v8::internal 2962 2963 #endif // V8_TARGET_ARCH_X64 2964