// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/ast/scopes.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/gap-resolver.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/osr.h"
#include "src/frames.h"
#include "src/x87/assembler-x87.h"
#include "src/x87/frames-x87.h"
#include "src/x87/macro-assembler-x87.h"

namespace v8 {
namespace internal {
namespace compiler {

#define __ masm()->


// Adds X87 specific methods for decoding operands.
// Translates the register allocator's InstructionOperands into the
// assembler-level Operand/Immediate types used when emitting ia32/x87 code.
class X87OperandConverter : public InstructionOperandConverter {
 public:
  X87OperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  // Assembler operand for instruction input `index`; `extra` is an extra
  // displacement in bytes, only meaningful for stack-slot operands.
  Operand InputOperand(size_t index, int extra = 0) {
    return ToOperand(instr_->InputAt(index), extra);
  }

  // Immediate for instruction input `index` (must be a constant input).
  Immediate InputImmediate(size_t index) {
    return ToImmediate(instr_->InputAt(index));
  }

  // Assembler operand for the instruction's (single) output.
  Operand OutputOperand() { return ToOperand(instr_->Output()); }

  // Converts a register or stack-slot InstructionOperand to an assembler
  // Operand. `extra` must be 0 for register operands (there is nothing to
  // displace).
  Operand ToOperand(InstructionOperand* op, int extra = 0) {
    if (op->IsRegister()) {
      DCHECK(extra == 0);
      return Operand(ToRegister(op));
    }
    DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
    return SlotToOperand(AllocatedOperand::cast(op)->index(), extra);
  }

  // Frame-slot access: addresses the slot relative to esp or ebp depending
  // on the current frame access state.
  Operand SlotToOperand(int slot, int extra = 0) {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
    return Operand(offset.from_stack_pointer() ? esp : ebp,
                   offset.offset() + extra);
  }

  // Upper 32-bit half of a 64-bit FP stack slot (one pointer past the low
  // word on ia32).
  Operand HighOperand(InstructionOperand* op) {
    DCHECK(op->IsFPStackSlot());
    return ToOperand(op, kPointerSize);
  }

  // Converts a constant operand to an Immediate, preserving relocation info
  // for wasm references (so the wasm engine can patch them later) and boxing
  // float constants as heap numbers. kInt64 is unsupported on ia32.
  Immediate ToImmediate(InstructionOperand* operand) {
    Constant constant = ToConstant(operand);
    if (constant.type() == Constant::kInt32 &&
        (constant.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
         constant.rmode() == RelocInfo::WASM_GLOBAL_REFERENCE ||
         constant.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE)) {
      return Immediate(reinterpret_cast<Address>(constant.ToInt32()),
                       constant.rmode());
    }
    switch (constant.type()) {
      case Constant::kInt32:
        return Immediate(constant.ToInt32());
      case Constant::kFloat32:
        return Immediate(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Immediate(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kExternalReference:
        return Immediate(constant.ToExternalReference());
      case Constant::kHeapObject:
        return Immediate(constant.ToHeapObject());
      case Constant::kInt64:
        break;
      case Constant::kRpoNumber:
        return Immediate::CodeRelativeOffset(ToLabel(operand));
    }
    UNREACHABLE();
    return Immediate(-1);
  }

  // Returns the current input slot and advances the cursor; used to consume
  // the variable number of inputs encoded by the addressing mode.
  static size_t NextOffset(size_t* offset) {
    size_t i = *offset;
    (*offset)++;
    return i;
  }

  // Maps an addressing mode to its SIB scale factor; relies on the scaled
  // modes being declared consecutively (x1, x2, x4, x8) starting at `one`.
  static ScaleFactor ScaleFor(AddressingMode one, AddressingMode mode) {
    STATIC_ASSERT(0 == static_cast<int>(times_1));
    STATIC_ASSERT(1 == static_cast<int>(times_2));
    STATIC_ASSERT(2 == static_cast<int>(times_4));
    STATIC_ASSERT(3 == static_cast<int>(times_8));
    int scale = static_cast<int>(mode - one);
    DCHECK(scale >= 0 && scale < 4);
    return static_cast<ScaleFactor>(scale);
  }

  // Decodes the instruction's addressing mode into an assembler memory
  // operand, consuming base/index/displacement inputs starting at *offset
  // (which is advanced past them). Mode naming: M = memory, R = base
  // register, 1/2/4/8 = index scale, I = immediate displacement.
  Operand MemoryOperand(size_t* offset) {
    AddressingMode mode = AddressingModeField::decode(instr_->opcode());
    switch (mode) {
      case kMode_MR: {
        Register base = InputRegister(NextOffset(offset));
        int32_t disp = 0;
        return Operand(base, disp);
      }
      case kMode_MRI: {
        Register base = InputRegister(NextOffset(offset));
        Constant ctant = ToConstant(instr_->InputAt(NextOffset(offset)));
        return Operand(base, ctant.ToInt32(), ctant.rmode());
      }
      case kMode_MR1:
      case kMode_MR2:
      case kMode_MR4:
      case kMode_MR8: {
        Register base = InputRegister(NextOffset(offset));
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_MR1, mode);
        int32_t disp = 0;
        return Operand(base, index, scale, disp);
      }
      case kMode_MR1I:
      case kMode_MR2I:
      case kMode_MR4I:
      case kMode_MR8I: {
        Register base = InputRegister(NextOffset(offset));
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_MR1I, mode);
        Constant ctant = ToConstant(instr_->InputAt(NextOffset(offset)));
        return Operand(base, index, scale, ctant.ToInt32(), ctant.rmode());
      }
      case kMode_M1:
      case kMode_M2:
      case kMode_M4:
      case kMode_M8: {
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_M1, mode);
        int32_t disp = 0;
        return Operand(index, scale, disp);
      }
      case kMode_M1I:
      case kMode_M2I:
      case kMode_M4I:
      case kMode_M8I: {
        Register index = InputRegister(NextOffset(offset));
        ScaleFactor scale = ScaleFor(kMode_M1I, mode);
        Constant ctant = ToConstant(instr_->InputAt(NextOffset(offset)));
        return Operand(index, scale, ctant.ToInt32(), ctant.rmode());
      }
      case kMode_MI: {
        Constant ctant = ToConstant(instr_->InputAt(NextOffset(offset)));
        return Operand(ctant.ToInt32(), ctant.rmode());
      }
      case kMode_None:
        UNREACHABLE();
        return Operand(no_reg, 0);
    }
    UNREACHABLE();
    return Operand(no_reg, 0);
  }

  // Convenience overload when the memory operand's inputs start at a known
  // index and the caller does not need the advanced cursor back.
  Operand MemoryOperand(size_t first_input = 0) {
    return MemoryOperand(&first_input);
  }
};


namespace {

// True if the instruction's input at `index` is an immediate operand.
bool HasImmediateInput(Instruction* instr, size_t index) {
  return instr->InputAt(index)->IsImmediate();
}


// Out-of-line path for a checked integer load whose index was out of
// bounds: the result register is zeroed.
class OutOfLineLoadInteger final : public OutOfLineCode {
 public:
  OutOfLineLoadInteger(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ xor_(result_, result_); }

 private:
  Register const result_;
};


// Out-of-line path for a checked FP load whose index was out of bounds:
// pops the stale top-of-stack and loads a NaN (bit pattern
// 0xffffffff'7fffffff) built on the machine stack.
class OutOfLineLoadFloat final : public OutOfLineCode {
 public:
  OutOfLineLoadFloat(CodeGenerator* gen, X87Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    // The x87 backend keeps results in st(0) only.
    DCHECK(result_.code() == 0);
    USE(result_);
    if (FLAG_debug_code && FLAG_enable_slow_asserts) {
      __ VerifyX87StackDepth(1);
    }
    __ fstp(0);
    // Build the 64-bit NaN pattern on the stack (high word pushed first),
    // load it into st(0), then drop the temporary.
    __ push(Immediate(0xffffffff));
    __ push(Immediate(0x7fffffff));
    __ fld_d(MemOperand(esp, 0));
    __ lea(esp, Operand(esp, kDoubleSize));
  }

 private:
  X87Register const result_;
};


// Placeholder: double-to-int truncation slow path is not implemented for
// the x87 port (the inline TruncateX87TOSToI is expected to suffice).
class OutOfLineTruncateDoubleToI final : public OutOfLineCode {
 public:
  OutOfLineTruncateDoubleToI(CodeGenerator* gen, Register result,
                             X87Register input)
      : OutOfLineCode(gen), result_(result), input_(input) {}

  void Generate() final {
    UNIMPLEMENTED();
    USE(result_);
    USE(input_);
  }

 private:
  Register const result_;
  X87Register const input_;
};


// Out-of-line write-barrier slow path for kArchStoreWithWriteBarrier:
// filters out Smis/uninteresting values, then calls the RecordWriteStub
// with the slot address in scratch1.
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand operand,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        operand_(operand),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode) {}

  void Generate() final {
    // Smis never need a barrier; skip unless the mode guarantees a pointer.
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    // Nothing to record when the target page is not interesting.
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, zero,
                     exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    // The stub expects the address of the written slot in scratch1.
    __ lea(scratch1_, operand_);
    __ CallStub(&stub);
  }

 private:
  Register const object_;
  Operand const operand_;
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
};

}  // namespace


// Checked FP load: compares the offset (input 0) against the length
// (input 1, register or immediate); out-of-bounds branches to the NaN
// out-of-line path, otherwise pops st(0) and loads from the memory operand
// starting at input 2.
#define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr)                          \
  do {                                                                  \
    auto result = i.OutputDoubleRegister();                             \
    auto offset = i.InputRegister(0);                                   \
    DCHECK(result.code() == 0);                                         \
    if (instr->InputAt(1)->IsRegister()) {                              \
      __ cmp(offset, i.InputRegister(1));                               \
    } else {                                                            \
      __ cmp(offset, i.InputImmediate(1));                              \
    }                                                                   \
    OutOfLineCode* ool = new (zone()) OutOfLineLoadFloat(this, result); \
    __ j(above_equal, ool->entry());                                    \
    __ fstp(0);                                                         \
    __ asm_instr(i.MemoryOperand(2));                                   \
    __ bind(ool->exit());                                               \
  } while (false)


// Checked integer load: same bounds check as above; out-of-bounds zeroes
// the result via OutOfLineLoadInteger.
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)                          \
  do {                                                                    \
    auto result = i.OutputRegister();                                     \
    auto offset = i.InputRegister(0);                                     \
    if (instr->InputAt(1)->IsRegister()) {                                \
      __ cmp(offset, i.InputRegister(1));                                 \
    } else {                                                              \
      __ cmp(offset, i.InputImmediate(1));                                \
    }                                                                     \
    OutOfLineCode* ool = new (zone()) OutOfLineLoadInteger(this, result); \
    __ j(above_equal, ool->entry());                                      \
    __ asm_instr(result, i.MemoryOperand(2));                             \
    __ bind(ool->exit());                                                 \
  } while (false)


// Checked FP store: skips the store entirely when the offset (input 0) is
// out of bounds; the value to store is expected in st(0) (input 2).
#define ASSEMBLE_CHECKED_STORE_FLOAT(asm_instr)   \
  do {                                            \
    auto offset = i.InputRegister(0);             \
    if (instr->InputAt(1)->IsRegister()) {        \
      __ cmp(offset, i.InputRegister(1));         \
    } else {                                      \
      __ cmp(offset, i.InputImmediate(1));        \
    }                                             \
    Label done;                                   \
    DCHECK(i.InputDoubleRegister(2).code() == 0); \
    __ j(above_equal, &done, Label::kNear);       \
    __ asm_instr(i.MemoryOperand(3));             \
    __ bind(&done);                               \
  } while (false)


// Checked integer store: skips the store when out of bounds; the stored
// value (input 2) may be a register or an immediate.
#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr)            \
  do {                                                       \
    auto offset = i.InputRegister(0);                        \
    if (instr->InputAt(1)->IsRegister()) {                   \
      __ cmp(offset, i.InputRegister(1));                    \
    } else {                                                 \
      __ cmp(offset, i.InputImmediate(1));                   \
    }                                                        \
    Label done;                                              \
    __ j(above_equal, &done, Label::kNear);                  \
    if (instr->InputAt(2)->IsRegister()) {                   \
      __ asm_instr(i.MemoryOperand(3), i.InputRegister(2));  \
    } else {                                                 \
      __ asm_instr(i.MemoryOperand(3), i.InputImmediate(2)); \
    }                                                        \
    __ bind(&done);                                          \
  } while (false)

// Emits a compare/test in whichever operand combination the instruction
// encodes: memory-vs-reg/imm when an addressing mode is present, otherwise
// reg/operand-vs-reg/imm.
#define ASSEMBLE_COMPARE(asm_instr)                                   \
  do {                                                                \
    if (AddressingModeField::decode(instr->opcode()) != kMode_None) { \
      size_t index = 0;                                               \
      Operand left = i.MemoryOperand(&index);                         \
      if (HasImmediateInput(instr, index)) {                          \
        __ asm_instr(left, i.InputImmediate(index));                  \
      } else {                                                        \
        __ asm_instr(left, i.InputRegister(index));                   \
      }                                                               \
    } else {                                                          \
      if (HasImmediateInput(instr, 1)) {                              \
        if (instr->InputAt(0)->IsRegister()) {                        \
          __ asm_instr(i.InputRegister(0), i.InputImmediate(1));      \
        } else {                                                      \
          __ asm_instr(i.InputOperand(0), i.InputImmediate(1));       \
        }                                                             \
      } else {                                                        \
        if (instr->InputAt(1)->IsRegister()) {                        \
          __ asm_instr(i.InputRegister(0), i.InputRegister(1));       \
        } else {                                                      \
          __ asm_instr(i.InputRegister(0), i.InputOperand(1));        \
        }                                                             \
      }                                                               \
    }                                                                 \
  } while (0)

// Calls the C implementation of a two-argument ieee754 function. The two
// double arguments live on the caller's stack; ebx keeps the original esp
// so they can be copied into the outgoing C argument area, and the result
// comes back in st(0) (x87 C calling convention).
#define ASSEMBLE_IEEE754_BINOP(name)                                          \
  do {                                                                        \
    /* Saves the esp into ebx */                                              \
    __ push(ebx);                                                             \
    __ mov(ebx, esp);                                                         \
    /* Pass one double as argument on the stack. */                           \
    __ PrepareCallCFunction(4, eax);                                          \
    __ fstp(0);                                                               \
    /* Load first operand from original stack */                              \
    __ fld_d(MemOperand(ebx, 4 + kDoubleSize));                               \
    /* Put first operand into stack for function call */                      \
    __ fstp_d(Operand(esp, 0 * kDoubleSize));                                 \
    /* Load second operand from original stack */                             \
    __ fld_d(MemOperand(ebx, 4));                                             \
    /* Put second operand into stack for function call */                     \
    __ fstp_d(Operand(esp, 1 * kDoubleSize));                                 \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(isolate()), \
                     4);                                                      \
    /* Restore the ebx */                                                     \
    __ pop(ebx);                                                              \
    /* Return value is in st(0) on x87. */                                    \
    __ lea(esp, Operand(esp, 2 * kDoubleSize));                               \
  } while (false)

// Single-argument variant of ASSEMBLE_IEEE754_BINOP; see above.
#define ASSEMBLE_IEEE754_UNOP(name)                                           \
  do {                                                                        \
    /* Saves the esp into ebx */                                              \
    __ push(ebx);                                                             \
    __ mov(ebx, esp);                                                         \
    /* Pass one double as argument on the stack. */                           \
    __ PrepareCallCFunction(2, eax);                                          \
    __ fstp(0);                                                               \
    /* Load operand from original stack */                                    \
    __ fld_d(MemOperand(ebx, 4));                                             \
    /* Put operand into stack for function call */                            \
    __ fstp_d(Operand(esp, 0));                                               \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(isolate()), \
                     2);                                                      \
    /* Restore the ebx */                                                     \
    __ pop(ebx);                                                              \
    /* Return value is in st(0) on x87. */                                    \
    __ lea(esp, Operand(esp, kDoubleSize));                                   \
  } while (false)

// Tears down the standard frame: restores esp from ebp and pops the saved
// frame pointer.
void CodeGenerator::AssembleDeconstructFrame() {
  __ mov(esp, ebp);
  __ pop(ebp);
}

// Releases stack slots that will not be needed by the tail-called frame,
// then resets frame addressing to its default mode.
void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta > 0) {
    __ add(esp, Immediate(sp_slot_delta * kPointerSize));
  }
  frame_access_state()->SetFrameAccessToDefault();
}


// Grows the stack if the tail-called frame needs more slots, restores the
// caller's frame pointer, and switches frame access to be esp-relative for
// the duration of the tail-call sequence.
void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ sub(esp, Immediate(-sp_slot_delta * kPointerSize));
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame_access_state()->has_frame()) {
    __ mov(ebp, MemOperand(ebp, 0));
  }
  frame_access_state()->SetFrameAccessToSP();
}

// Before a tail call: if the current frame is an arguments-adaptor frame,
// drops it so the callee sees the actual caller. The three unused Register
// parameters exist only to match the shared CodeGenerator interface; ia32
// has no free temps, so ebx/ecx/edx are saved and restored manually.
void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register, Register,
                                                     Register) {
  // There are not enough temp registers left on ia32 for a call instruction
  // so we pick some scratch registers and save/restore them manually here.
  int scratch_count = 3;
  Register scratch1 = ebx;
  Register scratch2 = ecx;
  Register scratch3 = edx;
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if current frame is an arguments adaptor frame.
  __ cmp(Operand(ebp, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &done, Label::kNear);

  __ push(scratch1);
  __ push(scratch2);
  __ push(scratch3);

  // Load arguments count from current arguments adaptor frame (note, it
  // does not include receiver).
467 Register caller_args_count_reg = scratch1; 468 __ mov(caller_args_count_reg, 469 Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset)); 470 __ SmiUntag(caller_args_count_reg); 471 472 ParameterCount callee_args_count(args_reg); 473 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, 474 scratch3, ReturnAddressState::kOnStack, scratch_count); 475 __ pop(scratch3); 476 __ pop(scratch2); 477 __ pop(scratch1); 478 479 __ bind(&done); 480 } 481 482 // Assembles an instruction after register allocation, producing machine code. 483 CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( 484 Instruction* instr) { 485 X87OperandConverter i(this, instr); 486 InstructionCode opcode = instr->opcode(); 487 ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode); 488 489 switch (arch_opcode) { 490 case kArchCallCodeObject: { 491 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 492 __ VerifyX87StackDepth(1); 493 } 494 __ fstp(0); 495 EnsureSpaceForLazyDeopt(); 496 if (HasImmediateInput(instr, 0)) { 497 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0)); 498 __ call(code, RelocInfo::CODE_TARGET); 499 } else { 500 Register reg = i.InputRegister(0); 501 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag)); 502 __ call(reg); 503 } 504 RecordCallPosition(instr); 505 bool double_result = 506 instr->HasOutput() && instr->Output()->IsFPRegister(); 507 if (double_result) { 508 __ lea(esp, Operand(esp, -kDoubleSize)); 509 __ fstp_d(Operand(esp, 0)); 510 } 511 __ fninit(); 512 if (double_result) { 513 __ fld_d(Operand(esp, 0)); 514 __ lea(esp, Operand(esp, kDoubleSize)); 515 } else { 516 __ fld1(); 517 } 518 frame_access_state()->ClearSPDelta(); 519 break; 520 } 521 case kArchTailCallCodeObjectFromJSFunction: 522 case kArchTailCallCodeObject: { 523 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 524 __ VerifyX87StackDepth(1); 525 } 526 __ fstp(0); 527 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); 528 
AssembleDeconstructActivationRecord(stack_param_delta); 529 if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) { 530 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister, 531 no_reg, no_reg, no_reg); 532 } 533 if (HasImmediateInput(instr, 0)) { 534 Handle<Code> code = Handle<Code>::cast(i.InputHeapObject(0)); 535 __ jmp(code, RelocInfo::CODE_TARGET); 536 } else { 537 Register reg = i.InputRegister(0); 538 __ add(reg, Immediate(Code::kHeaderSize - kHeapObjectTag)); 539 __ jmp(reg); 540 } 541 frame_access_state()->ClearSPDelta(); 542 break; 543 } 544 case kArchTailCallAddress: { 545 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); 546 AssembleDeconstructActivationRecord(stack_param_delta); 547 CHECK(!HasImmediateInput(instr, 0)); 548 Register reg = i.InputRegister(0); 549 __ jmp(reg); 550 frame_access_state()->ClearSPDelta(); 551 break; 552 } 553 case kArchCallJSFunction: { 554 EnsureSpaceForLazyDeopt(); 555 Register func = i.InputRegister(0); 556 if (FLAG_debug_code) { 557 // Check the function's context matches the context argument. 
558 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); 559 __ Assert(equal, kWrongFunctionContext); 560 } 561 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 562 __ VerifyX87StackDepth(1); 563 } 564 __ fstp(0); 565 __ call(FieldOperand(func, JSFunction::kCodeEntryOffset)); 566 RecordCallPosition(instr); 567 bool double_result = 568 instr->HasOutput() && instr->Output()->IsFPRegister(); 569 if (double_result) { 570 __ lea(esp, Operand(esp, -kDoubleSize)); 571 __ fstp_d(Operand(esp, 0)); 572 } 573 __ fninit(); 574 if (double_result) { 575 __ fld_d(Operand(esp, 0)); 576 __ lea(esp, Operand(esp, kDoubleSize)); 577 } else { 578 __ fld1(); 579 } 580 frame_access_state()->ClearSPDelta(); 581 break; 582 } 583 case kArchTailCallJSFunctionFromJSFunction: 584 case kArchTailCallJSFunction: { 585 Register func = i.InputRegister(0); 586 if (FLAG_debug_code) { 587 // Check the function's context matches the context argument. 588 __ cmp(esi, FieldOperand(func, JSFunction::kContextOffset)); 589 __ Assert(equal, kWrongFunctionContext); 590 } 591 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 592 __ VerifyX87StackDepth(1); 593 } 594 __ fstp(0); 595 int stack_param_delta = i.InputInt32(instr->InputCount() - 1); 596 AssembleDeconstructActivationRecord(stack_param_delta); 597 if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) { 598 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister, 599 no_reg, no_reg, no_reg); 600 } 601 __ jmp(FieldOperand(func, JSFunction::kCodeEntryOffset)); 602 frame_access_state()->ClearSPDelta(); 603 break; 604 } 605 case kArchPrepareCallCFunction: { 606 // Frame alignment requires using FP-relative frame addressing. 
607 frame_access_state()->SetFrameAccessToFP(); 608 int const num_parameters = MiscField::decode(instr->opcode()); 609 __ PrepareCallCFunction(num_parameters, i.TempRegister(0)); 610 break; 611 } 612 case kArchPrepareTailCall: 613 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1)); 614 break; 615 case kArchCallCFunction: { 616 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 617 __ VerifyX87StackDepth(1); 618 } 619 __ fstp(0); 620 int const num_parameters = MiscField::decode(instr->opcode()); 621 if (HasImmediateInput(instr, 0)) { 622 ExternalReference ref = i.InputExternalReference(0); 623 __ CallCFunction(ref, num_parameters); 624 } else { 625 Register func = i.InputRegister(0); 626 __ CallCFunction(func, num_parameters); 627 } 628 bool double_result = 629 instr->HasOutput() && instr->Output()->IsFPRegister(); 630 if (double_result) { 631 __ lea(esp, Operand(esp, -kDoubleSize)); 632 __ fstp_d(Operand(esp, 0)); 633 } 634 __ fninit(); 635 if (double_result) { 636 __ fld_d(Operand(esp, 0)); 637 __ lea(esp, Operand(esp, kDoubleSize)); 638 } else { 639 __ fld1(); 640 } 641 frame_access_state()->SetFrameAccessToDefault(); 642 frame_access_state()->ClearSPDelta(); 643 break; 644 } 645 case kArchJmp: 646 AssembleArchJump(i.InputRpo(0)); 647 break; 648 case kArchLookupSwitch: 649 AssembleArchLookupSwitch(instr); 650 break; 651 case kArchTableSwitch: 652 AssembleArchTableSwitch(instr); 653 break; 654 case kArchComment: { 655 Address comment_string = i.InputExternalReference(0).address(); 656 __ RecordComment(reinterpret_cast<const char*>(comment_string)); 657 break; 658 } 659 case kArchDebugBreak: 660 __ int3(); 661 break; 662 case kArchNop: 663 case kArchThrowTerminator: 664 // don't emit code for nops. 
665 break; 666 case kArchDeoptimize: { 667 int deopt_state_id = 668 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore()); 669 int double_register_param_count = 0; 670 int x87_layout = 0; 671 for (size_t i = 0; i < instr->InputCount(); i++) { 672 if (instr->InputAt(i)->IsFPRegister()) { 673 double_register_param_count++; 674 } 675 } 676 // Currently we use only one X87 register. If double_register_param_count 677 // is bigger than 1, it means duplicated double register is added to input 678 // of this instruction. 679 if (double_register_param_count > 0) { 680 x87_layout = (0 << 3) | 1; 681 } 682 // The layout of x87 register stack is loaded on the top of FPU register 683 // stack for deoptimization. 684 __ push(Immediate(x87_layout)); 685 __ fild_s(MemOperand(esp, 0)); 686 __ lea(esp, Operand(esp, kPointerSize)); 687 688 Deoptimizer::BailoutType bailout_type = 689 Deoptimizer::BailoutType(MiscField::decode(instr->opcode())); 690 CodeGenResult result = 691 AssembleDeoptimizerCall(deopt_state_id, bailout_type); 692 if (result != kSuccess) return result; 693 break; 694 } 695 case kArchRet: 696 AssembleReturn(); 697 break; 698 case kArchFramePointer: 699 __ mov(i.OutputRegister(), ebp); 700 break; 701 case kArchStackPointer: 702 __ mov(i.OutputRegister(), esp); 703 break; 704 case kArchParentFramePointer: 705 if (frame_access_state()->has_frame()) { 706 __ mov(i.OutputRegister(), Operand(ebp, 0)); 707 } else { 708 __ mov(i.OutputRegister(), ebp); 709 } 710 break; 711 case kArchTruncateDoubleToI: { 712 if (!instr->InputAt(0)->IsFPRegister()) { 713 __ fld_d(i.InputOperand(0)); 714 } 715 __ TruncateX87TOSToI(i.OutputRegister()); 716 if (!instr->InputAt(0)->IsFPRegister()) { 717 __ fstp(0); 718 } 719 break; 720 } 721 case kArchStoreWithWriteBarrier: { 722 RecordWriteMode mode = 723 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode())); 724 Register object = i.InputRegister(0); 725 size_t index = 0; 726 Operand operand = i.MemoryOperand(&index); 727 
Register value = i.InputRegister(index); 728 Register scratch0 = i.TempRegister(0); 729 Register scratch1 = i.TempRegister(1); 730 auto ool = new (zone()) OutOfLineRecordWrite(this, object, operand, value, 731 scratch0, scratch1, mode); 732 __ mov(operand, value); 733 __ CheckPageFlag(object, scratch0, 734 MemoryChunk::kPointersFromHereAreInterestingMask, 735 not_zero, ool->entry()); 736 __ bind(ool->exit()); 737 break; 738 } 739 case kArchStackSlot: { 740 FrameOffset offset = 741 frame_access_state()->GetFrameOffset(i.InputInt32(0)); 742 Register base; 743 if (offset.from_stack_pointer()) { 744 base = esp; 745 } else { 746 base = ebp; 747 } 748 __ lea(i.OutputRegister(), Operand(base, offset.offset())); 749 break; 750 } 751 case kIeee754Float64Atan: 752 ASSEMBLE_IEEE754_UNOP(atan); 753 break; 754 case kIeee754Float64Atan2: 755 ASSEMBLE_IEEE754_BINOP(atan2); 756 break; 757 case kIeee754Float64Cbrt: 758 ASSEMBLE_IEEE754_UNOP(cbrt); 759 break; 760 case kIeee754Float64Cos: 761 __ X87SetFPUCW(0x027F); 762 ASSEMBLE_IEEE754_UNOP(cos); 763 __ X87SetFPUCW(0x037F); 764 break; 765 case kIeee754Float64Expm1: 766 __ X87SetFPUCW(0x027F); 767 ASSEMBLE_IEEE754_UNOP(expm1); 768 __ X87SetFPUCW(0x037F); 769 break; 770 case kIeee754Float64Exp: 771 ASSEMBLE_IEEE754_UNOP(exp); 772 break; 773 case kIeee754Float64Atanh: 774 ASSEMBLE_IEEE754_UNOP(atanh); 775 break; 776 case kIeee754Float64Log: 777 ASSEMBLE_IEEE754_UNOP(log); 778 break; 779 case kIeee754Float64Log1p: 780 ASSEMBLE_IEEE754_UNOP(log1p); 781 break; 782 case kIeee754Float64Log2: 783 ASSEMBLE_IEEE754_UNOP(log2); 784 break; 785 case kIeee754Float64Log10: 786 ASSEMBLE_IEEE754_UNOP(log10); 787 break; 788 case kIeee754Float64Sin: 789 __ X87SetFPUCW(0x027F); 790 ASSEMBLE_IEEE754_UNOP(sin); 791 __ X87SetFPUCW(0x037F); 792 break; 793 case kIeee754Float64Tan: 794 __ X87SetFPUCW(0x027F); 795 ASSEMBLE_IEEE754_UNOP(tan); 796 __ X87SetFPUCW(0x037F); 797 break; 798 case kX87Add: 799 if (HasImmediateInput(instr, 1)) { 800 __ 
add(i.InputOperand(0), i.InputImmediate(1)); 801 } else { 802 __ add(i.InputRegister(0), i.InputOperand(1)); 803 } 804 break; 805 case kX87And: 806 if (HasImmediateInput(instr, 1)) { 807 __ and_(i.InputOperand(0), i.InputImmediate(1)); 808 } else { 809 __ and_(i.InputRegister(0), i.InputOperand(1)); 810 } 811 break; 812 case kX87Cmp: 813 ASSEMBLE_COMPARE(cmp); 814 break; 815 case kX87Cmp16: 816 ASSEMBLE_COMPARE(cmpw); 817 break; 818 case kX87Cmp8: 819 ASSEMBLE_COMPARE(cmpb); 820 break; 821 case kX87Test: 822 ASSEMBLE_COMPARE(test); 823 break; 824 case kX87Test16: 825 ASSEMBLE_COMPARE(test_w); 826 break; 827 case kX87Test8: 828 ASSEMBLE_COMPARE(test_b); 829 break; 830 case kX87Imul: 831 if (HasImmediateInput(instr, 1)) { 832 __ imul(i.OutputRegister(), i.InputOperand(0), i.InputInt32(1)); 833 } else { 834 __ imul(i.OutputRegister(), i.InputOperand(1)); 835 } 836 break; 837 case kX87ImulHigh: 838 __ imul(i.InputRegister(1)); 839 break; 840 case kX87UmulHigh: 841 __ mul(i.InputRegister(1)); 842 break; 843 case kX87Idiv: 844 __ cdq(); 845 __ idiv(i.InputOperand(1)); 846 break; 847 case kX87Udiv: 848 __ Move(edx, Immediate(0)); 849 __ div(i.InputOperand(1)); 850 break; 851 case kX87Not: 852 __ not_(i.OutputOperand()); 853 break; 854 case kX87Neg: 855 __ neg(i.OutputOperand()); 856 break; 857 case kX87Or: 858 if (HasImmediateInput(instr, 1)) { 859 __ or_(i.InputOperand(0), i.InputImmediate(1)); 860 } else { 861 __ or_(i.InputRegister(0), i.InputOperand(1)); 862 } 863 break; 864 case kX87Xor: 865 if (HasImmediateInput(instr, 1)) { 866 __ xor_(i.InputOperand(0), i.InputImmediate(1)); 867 } else { 868 __ xor_(i.InputRegister(0), i.InputOperand(1)); 869 } 870 break; 871 case kX87Sub: 872 if (HasImmediateInput(instr, 1)) { 873 __ sub(i.InputOperand(0), i.InputImmediate(1)); 874 } else { 875 __ sub(i.InputRegister(0), i.InputOperand(1)); 876 } 877 break; 878 case kX87Shl: 879 if (HasImmediateInput(instr, 1)) { 880 __ shl(i.OutputOperand(), i.InputInt5(1)); 881 } else { 882 __ 
shl_cl(i.OutputOperand()); 883 } 884 break; 885 case kX87Shr: 886 if (HasImmediateInput(instr, 1)) { 887 __ shr(i.OutputOperand(), i.InputInt5(1)); 888 } else { 889 __ shr_cl(i.OutputOperand()); 890 } 891 break; 892 case kX87Sar: 893 if (HasImmediateInput(instr, 1)) { 894 __ sar(i.OutputOperand(), i.InputInt5(1)); 895 } else { 896 __ sar_cl(i.OutputOperand()); 897 } 898 break; 899 case kX87AddPair: { 900 // i.OutputRegister(0) == i.InputRegister(0) ... left low word. 901 // i.InputRegister(1) ... left high word. 902 // i.InputRegister(2) ... right low word. 903 // i.InputRegister(3) ... right high word. 904 bool use_temp = false; 905 if (i.OutputRegister(0).code() == i.InputRegister(1).code() || 906 i.OutputRegister(0).code() == i.InputRegister(3).code()) { 907 // We cannot write to the output register directly, because it would 908 // overwrite an input for adc. We have to use the temp register. 909 use_temp = true; 910 __ Move(i.TempRegister(0), i.InputRegister(0)); 911 __ add(i.TempRegister(0), i.InputRegister(2)); 912 } else { 913 __ add(i.OutputRegister(0), i.InputRegister(2)); 914 } 915 __ adc(i.InputRegister(1), Operand(i.InputRegister(3))); 916 if (i.OutputRegister(1).code() != i.InputRegister(1).code()) { 917 __ Move(i.OutputRegister(1), i.InputRegister(1)); 918 } 919 if (use_temp) { 920 __ Move(i.OutputRegister(0), i.TempRegister(0)); 921 } 922 break; 923 } 924 case kX87SubPair: { 925 // i.OutputRegister(0) == i.InputRegister(0) ... left low word. 926 // i.InputRegister(1) ... left high word. 927 // i.InputRegister(2) ... right low word. 928 // i.InputRegister(3) ... right high word. 929 bool use_temp = false; 930 if (i.OutputRegister(0).code() == i.InputRegister(1).code() || 931 i.OutputRegister(0).code() == i.InputRegister(3).code()) { 932 // We cannot write to the output register directly, because it would 933 // overwrite an input for adc. We have to use the temp register. 
934 use_temp = true; 935 __ Move(i.TempRegister(0), i.InputRegister(0)); 936 __ sub(i.TempRegister(0), i.InputRegister(2)); 937 } else { 938 __ sub(i.OutputRegister(0), i.InputRegister(2)); 939 } 940 __ sbb(i.InputRegister(1), Operand(i.InputRegister(3))); 941 if (i.OutputRegister(1).code() != i.InputRegister(1).code()) { 942 __ Move(i.OutputRegister(1), i.InputRegister(1)); 943 } 944 if (use_temp) { 945 __ Move(i.OutputRegister(0), i.TempRegister(0)); 946 } 947 break; 948 } 949 case kX87MulPair: { 950 __ imul(i.OutputRegister(1), i.InputOperand(0)); 951 __ mov(i.TempRegister(0), i.InputOperand(1)); 952 __ imul(i.TempRegister(0), i.InputOperand(2)); 953 __ add(i.OutputRegister(1), i.TempRegister(0)); 954 __ mov(i.OutputRegister(0), i.InputOperand(0)); 955 // Multiplies the low words and stores them in eax and edx. 956 __ mul(i.InputRegister(2)); 957 __ add(i.OutputRegister(1), i.TempRegister(0)); 958 959 break; 960 } 961 case kX87ShlPair: 962 if (HasImmediateInput(instr, 2)) { 963 __ ShlPair(i.InputRegister(1), i.InputRegister(0), i.InputInt6(2)); 964 } else { 965 // Shift has been loaded into CL by the register allocator. 966 __ ShlPair_cl(i.InputRegister(1), i.InputRegister(0)); 967 } 968 break; 969 case kX87ShrPair: 970 if (HasImmediateInput(instr, 2)) { 971 __ ShrPair(i.InputRegister(1), i.InputRegister(0), i.InputInt6(2)); 972 } else { 973 // Shift has been loaded into CL by the register allocator. 974 __ ShrPair_cl(i.InputRegister(1), i.InputRegister(0)); 975 } 976 break; 977 case kX87SarPair: 978 if (HasImmediateInput(instr, 2)) { 979 __ SarPair(i.InputRegister(1), i.InputRegister(0), i.InputInt6(2)); 980 } else { 981 // Shift has been loaded into CL by the register allocator. 
982 __ SarPair_cl(i.InputRegister(1), i.InputRegister(0)); 983 } 984 break; 985 case kX87Ror: 986 if (HasImmediateInput(instr, 1)) { 987 __ ror(i.OutputOperand(), i.InputInt5(1)); 988 } else { 989 __ ror_cl(i.OutputOperand()); 990 } 991 break; 992 case kX87Lzcnt: 993 __ Lzcnt(i.OutputRegister(), i.InputOperand(0)); 994 break; 995 case kX87Popcnt: 996 __ Popcnt(i.OutputRegister(), i.InputOperand(0)); 997 break; 998 case kX87LoadFloat64Constant: { 999 InstructionOperand* source = instr->InputAt(0); 1000 InstructionOperand* destination = instr->Output(); 1001 DCHECK(source->IsConstant()); 1002 X87OperandConverter g(this, nullptr); 1003 Constant src_constant = g.ToConstant(source); 1004 1005 DCHECK_EQ(Constant::kFloat64, src_constant.type()); 1006 uint64_t src = bit_cast<uint64_t>(src_constant.ToFloat64()); 1007 uint32_t lower = static_cast<uint32_t>(src); 1008 uint32_t upper = static_cast<uint32_t>(src >> 32); 1009 if (destination->IsFPRegister()) { 1010 __ sub(esp, Immediate(kDoubleSize)); 1011 __ mov(MemOperand(esp, 0), Immediate(lower)); 1012 __ mov(MemOperand(esp, kInt32Size), Immediate(upper)); 1013 __ fstp(0); 1014 __ fld_d(MemOperand(esp, 0)); 1015 __ add(esp, Immediate(kDoubleSize)); 1016 } else { 1017 UNREACHABLE(); 1018 } 1019 break; 1020 } 1021 case kX87Float32Cmp: { 1022 __ fld_s(MemOperand(esp, kFloatSize)); 1023 __ fld_s(MemOperand(esp, 0)); 1024 __ FCmp(); 1025 __ lea(esp, Operand(esp, 2 * kFloatSize)); 1026 break; 1027 } 1028 case kX87Float32Add: { 1029 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1030 __ VerifyX87StackDepth(1); 1031 } 1032 __ X87SetFPUCW(0x027F); 1033 __ fstp(0); 1034 __ fld_s(MemOperand(esp, 0)); 1035 __ fld_s(MemOperand(esp, kFloatSize)); 1036 __ faddp(); 1037 // Clear stack. 1038 __ lea(esp, Operand(esp, 2 * kFloatSize)); 1039 // Restore the default value of control word. 
1040 __ X87SetFPUCW(0x037F); 1041 break; 1042 } 1043 case kX87Float32Sub: { 1044 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1045 __ VerifyX87StackDepth(1); 1046 } 1047 __ X87SetFPUCW(0x027F); 1048 __ fstp(0); 1049 __ fld_s(MemOperand(esp, kFloatSize)); 1050 __ fld_s(MemOperand(esp, 0)); 1051 __ fsubp(); 1052 // Clear stack. 1053 __ lea(esp, Operand(esp, 2 * kFloatSize)); 1054 // Restore the default value of control word. 1055 __ X87SetFPUCW(0x037F); 1056 break; 1057 } 1058 case kX87Float32Mul: { 1059 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1060 __ VerifyX87StackDepth(1); 1061 } 1062 __ X87SetFPUCW(0x027F); 1063 __ fstp(0); 1064 __ fld_s(MemOperand(esp, kFloatSize)); 1065 __ fld_s(MemOperand(esp, 0)); 1066 __ fmulp(); 1067 // Clear stack. 1068 __ lea(esp, Operand(esp, 2 * kFloatSize)); 1069 // Restore the default value of control word. 1070 __ X87SetFPUCW(0x037F); 1071 break; 1072 } 1073 case kX87Float32Div: { 1074 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1075 __ VerifyX87StackDepth(1); 1076 } 1077 __ X87SetFPUCW(0x027F); 1078 __ fstp(0); 1079 __ fld_s(MemOperand(esp, kFloatSize)); 1080 __ fld_s(MemOperand(esp, 0)); 1081 __ fdivp(); 1082 // Clear stack. 1083 __ lea(esp, Operand(esp, 2 * kFloatSize)); 1084 // Restore the default value of control word. 1085 __ X87SetFPUCW(0x037F); 1086 break; 1087 } 1088 case kX87Float32Max: { 1089 Label check_nan_left, check_zero, return_left, return_right; 1090 Condition condition = below; 1091 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1092 __ VerifyX87StackDepth(1); 1093 } 1094 __ fstp(0); 1095 __ fld_s(MemOperand(esp, kFloatSize)); 1096 __ fld_s(MemOperand(esp, 0)); 1097 __ fld(1); 1098 __ fld(1); 1099 __ FCmp(); 1100 1101 // At least one NaN. 1102 // Return the second operands if one of the two operands is NaN 1103 __ j(parity_even, &return_right, Label::kNear); 1104 __ j(equal, &check_zero, Label::kNear); // left == right. 
1105 __ j(condition, &return_left, Label::kNear); 1106 __ jmp(&return_right, Label::kNear); 1107 1108 __ bind(&check_zero); 1109 __ fld(0); 1110 __ fldz(); 1111 __ FCmp(); 1112 __ j(not_equal, &return_left, Label::kNear); // left == right != 0. 1113 1114 __ fadd(1); 1115 __ jmp(&return_left, Label::kNear); 1116 1117 __ bind(&return_right); 1118 __ fxch(); 1119 1120 __ bind(&return_left); 1121 __ fstp(0); 1122 __ lea(esp, Operand(esp, 2 * kFloatSize)); 1123 break; 1124 } 1125 case kX87Float32Min: { 1126 Label check_nan_left, check_zero, return_left, return_right; 1127 Condition condition = above; 1128 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1129 __ VerifyX87StackDepth(1); 1130 } 1131 __ fstp(0); 1132 __ fld_s(MemOperand(esp, kFloatSize)); 1133 __ fld_s(MemOperand(esp, 0)); 1134 __ fld(1); 1135 __ fld(1); 1136 __ FCmp(); 1137 // At least one NaN. 1138 // Return the second operands if one of the two operands is NaN 1139 __ j(parity_even, &return_right, Label::kNear); 1140 __ j(equal, &check_zero, Label::kNear); // left == right. 1141 __ j(condition, &return_left, Label::kNear); 1142 __ jmp(&return_right, Label::kNear); 1143 1144 __ bind(&check_zero); 1145 __ fld(0); 1146 __ fldz(); 1147 __ FCmp(); 1148 __ j(not_equal, &return_left, Label::kNear); // left == right != 0. 1149 // At this point, both left and right are either 0 or -0. 1150 // Push st0 and st1 to stack, then pop them to temp registers and OR them, 1151 // load it to left. 
1152 __ push(eax); 1153 __ fld(1); 1154 __ fld(1); 1155 __ sub(esp, Immediate(2 * kPointerSize)); 1156 __ fstp_s(MemOperand(esp, 0)); 1157 __ fstp_s(MemOperand(esp, kPointerSize)); 1158 __ pop(eax); 1159 __ xor_(MemOperand(esp, 0), eax); 1160 __ fstp(0); 1161 __ fld_s(MemOperand(esp, 0)); 1162 __ pop(eax); // restore esp 1163 __ pop(eax); // restore esp 1164 __ jmp(&return_left, Label::kNear); 1165 1166 1167 __ bind(&return_right); 1168 __ fxch(); 1169 1170 __ bind(&return_left); 1171 __ fstp(0); 1172 __ lea(esp, Operand(esp, 2 * kFloatSize)); 1173 break; 1174 } 1175 case kX87Float32Sqrt: { 1176 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1177 __ VerifyX87StackDepth(1); 1178 } 1179 __ fstp(0); 1180 __ fld_s(MemOperand(esp, 0)); 1181 __ fsqrt(); 1182 __ lea(esp, Operand(esp, kFloatSize)); 1183 break; 1184 } 1185 case kX87Float32Abs: { 1186 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1187 __ VerifyX87StackDepth(1); 1188 } 1189 __ fstp(0); 1190 __ fld_s(MemOperand(esp, 0)); 1191 __ fabs(); 1192 __ lea(esp, Operand(esp, kFloatSize)); 1193 break; 1194 } 1195 case kX87Float32Round: { 1196 RoundingMode mode = 1197 static_cast<RoundingMode>(MiscField::decode(instr->opcode())); 1198 // Set the correct round mode in x87 control register 1199 __ X87SetRC((mode << 10)); 1200 1201 if (!instr->InputAt(0)->IsFPRegister()) { 1202 InstructionOperand* input = instr->InputAt(0); 1203 USE(input); 1204 DCHECK(input->IsFPStackSlot()); 1205 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1206 __ VerifyX87StackDepth(1); 1207 } 1208 __ fstp(0); 1209 __ fld_s(i.InputOperand(0)); 1210 } 1211 __ frndint(); 1212 __ X87SetRC(0x0000); 1213 break; 1214 } 1215 case kX87Float64Add: { 1216 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1217 __ VerifyX87StackDepth(1); 1218 } 1219 __ X87SetFPUCW(0x027F); 1220 __ fstp(0); 1221 __ fld_d(MemOperand(esp, 0)); 1222 __ fld_d(MemOperand(esp, kDoubleSize)); 1223 __ faddp(); 1224 // Clear stack. 
      __ lea(esp, Operand(esp, 2 * kDoubleSize));
      // Restore the default value of control word.
      __ X87SetFPUCW(0x037F);
      break;
    }
    case kX87Float64Sub: {
      if (FLAG_debug_code && FLAG_enable_slow_asserts) {
        __ VerifyX87StackDepth(1);
      }
      // NOTE(review): 0x027F presumably selects double (53-bit) precision so
      // the x87 op rounds like an SSE2 double op; 0x037F below restores the
      // default 64-bit precision — confirm against the x87 FPUCW layout.
      __ X87SetFPUCW(0x027F);
      // Free st(0), then compute mem[esp + kDoubleSize] - mem[esp + 0];
      // the difference is left in st(0).
      __ fstp(0);
      __ fld_d(MemOperand(esp, kDoubleSize));
      __ fsub_d(MemOperand(esp, 0));
      // Clear stack.
      __ lea(esp, Operand(esp, 2 * kDoubleSize));
      // Restore the default value of control word.
      __ X87SetFPUCW(0x037F);
      break;
    }
    case kX87Float64Mul: {
      if (FLAG_debug_code && FLAG_enable_slow_asserts) {
        __ VerifyX87StackDepth(1);
      }
      __ X87SetFPUCW(0x027F);
      // Free st(0), then compute mem[esp + kDoubleSize] * mem[esp + 0]
      // into st(0).
      __ fstp(0);
      __ fld_d(MemOperand(esp, kDoubleSize));
      __ fmul_d(MemOperand(esp, 0));
      // Clear stack.
      __ lea(esp, Operand(esp, 2 * kDoubleSize));
      // Restore the default value of control word.
      __ X87SetFPUCW(0x037F);
      break;
    }
    case kX87Float64Div: {
      if (FLAG_debug_code && FLAG_enable_slow_asserts) {
        __ VerifyX87StackDepth(1);
      }
      __ X87SetFPUCW(0x027F);
      // Free st(0), then compute mem[esp + kDoubleSize] / mem[esp + 0]
      // into st(0).
      __ fstp(0);
      __ fld_d(MemOperand(esp, kDoubleSize));
      __ fdiv_d(MemOperand(esp, 0));
      // Clear stack.
      __ lea(esp, Operand(esp, 2 * kDoubleSize));
      // Restore the default value of control word.
1269 __ X87SetFPUCW(0x037F); 1270 break; 1271 } 1272 case kX87Float64Mod: { 1273 FrameScope frame_scope(&masm_, StackFrame::MANUAL); 1274 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1275 __ VerifyX87StackDepth(1); 1276 } 1277 __ mov(eax, esp); 1278 __ PrepareCallCFunction(4, eax); 1279 __ fstp(0); 1280 __ fld_d(MemOperand(eax, 0)); 1281 __ fstp_d(Operand(esp, 1 * kDoubleSize)); 1282 __ fld_d(MemOperand(eax, kDoubleSize)); 1283 __ fstp_d(Operand(esp, 0)); 1284 __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()), 1285 4); 1286 __ lea(esp, Operand(esp, 2 * kDoubleSize)); 1287 break; 1288 } 1289 case kX87Float64Max: { 1290 Label check_zero, return_left, return_right; 1291 Condition condition = below; 1292 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1293 __ VerifyX87StackDepth(1); 1294 } 1295 __ fstp(0); 1296 __ fld_d(MemOperand(esp, kDoubleSize)); 1297 __ fld_d(MemOperand(esp, 0)); 1298 __ fld(1); 1299 __ fld(1); 1300 __ FCmp(); 1301 __ j(parity_even, &return_right, 1302 Label::kNear); // At least one NaN, Return right. 1303 __ j(equal, &check_zero, Label::kNear); // left == right. 1304 __ j(condition, &return_left, Label::kNear); 1305 __ jmp(&return_right, Label::kNear); 1306 1307 __ bind(&check_zero); 1308 __ fld(0); 1309 __ fldz(); 1310 __ FCmp(); 1311 __ j(not_equal, &return_left, Label::kNear); // left == right != 0. 1312 1313 __ bind(&return_right); 1314 __ fxch(); 1315 1316 __ bind(&return_left); 1317 __ fstp(0); 1318 __ lea(esp, Operand(esp, 2 * kDoubleSize)); 1319 break; 1320 } 1321 case kX87Float64Min: { 1322 Label check_zero, return_left, return_right; 1323 Condition condition = above; 1324 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1325 __ VerifyX87StackDepth(1); 1326 } 1327 __ fstp(0); 1328 __ fld_d(MemOperand(esp, kDoubleSize)); 1329 __ fld_d(MemOperand(esp, 0)); 1330 __ fld(1); 1331 __ fld(1); 1332 __ FCmp(); 1333 __ j(parity_even, &return_right, 1334 Label::kNear); // At least one NaN, return right value. 
1335 __ j(equal, &check_zero, Label::kNear); // left == right. 1336 __ j(condition, &return_left, Label::kNear); 1337 __ jmp(&return_right, Label::kNear); 1338 1339 __ bind(&check_zero); 1340 __ fld(0); 1341 __ fldz(); 1342 __ FCmp(); 1343 __ j(not_equal, &return_left, Label::kNear); // left == right != 0. 1344 1345 __ bind(&return_right); 1346 __ fxch(); 1347 1348 __ bind(&return_left); 1349 __ fstp(0); 1350 __ lea(esp, Operand(esp, 2 * kDoubleSize)); 1351 break; 1352 } 1353 case kX87Float64Abs: { 1354 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1355 __ VerifyX87StackDepth(1); 1356 } 1357 __ fstp(0); 1358 __ fld_d(MemOperand(esp, 0)); 1359 __ fabs(); 1360 __ lea(esp, Operand(esp, kDoubleSize)); 1361 break; 1362 } 1363 case kX87Int32ToFloat32: { 1364 InstructionOperand* input = instr->InputAt(0); 1365 DCHECK(input->IsRegister() || input->IsStackSlot()); 1366 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1367 __ VerifyX87StackDepth(1); 1368 } 1369 __ fstp(0); 1370 if (input->IsRegister()) { 1371 Register input_reg = i.InputRegister(0); 1372 __ push(input_reg); 1373 __ fild_s(Operand(esp, 0)); 1374 __ pop(input_reg); 1375 } else { 1376 __ fild_s(i.InputOperand(0)); 1377 } 1378 break; 1379 } 1380 case kX87Uint32ToFloat32: { 1381 InstructionOperand* input = instr->InputAt(0); 1382 DCHECK(input->IsRegister() || input->IsStackSlot()); 1383 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1384 __ VerifyX87StackDepth(1); 1385 } 1386 __ fstp(0); 1387 Label msb_set_src; 1388 Label jmp_return; 1389 // Put input integer into eax(tmporarilly) 1390 __ push(eax); 1391 if (input->IsRegister()) 1392 __ mov(eax, i.InputRegister(0)); 1393 else 1394 __ mov(eax, i.InputOperand(0)); 1395 1396 __ test(eax, eax); 1397 __ j(sign, &msb_set_src, Label::kNear); 1398 __ push(eax); 1399 __ fild_s(Operand(esp, 0)); 1400 __ pop(eax); 1401 1402 __ jmp(&jmp_return, Label::kNear); 1403 __ bind(&msb_set_src); 1404 // Need another temp reg 1405 __ push(ebx); 1406 __ mov(ebx, eax); 1407 
__ shr(eax, 1); 1408 // Recover the least significant bit to avoid rounding errors. 1409 __ and_(ebx, Immediate(1)); 1410 __ or_(eax, ebx); 1411 __ push(eax); 1412 __ fild_s(Operand(esp, 0)); 1413 __ pop(eax); 1414 __ fld(0); 1415 __ faddp(); 1416 // Restore the ebx 1417 __ pop(ebx); 1418 __ bind(&jmp_return); 1419 // Restore the eax 1420 __ pop(eax); 1421 break; 1422 } 1423 case kX87Int32ToFloat64: { 1424 InstructionOperand* input = instr->InputAt(0); 1425 DCHECK(input->IsRegister() || input->IsStackSlot()); 1426 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1427 __ VerifyX87StackDepth(1); 1428 } 1429 __ fstp(0); 1430 if (input->IsRegister()) { 1431 Register input_reg = i.InputRegister(0); 1432 __ push(input_reg); 1433 __ fild_s(Operand(esp, 0)); 1434 __ pop(input_reg); 1435 } else { 1436 __ fild_s(i.InputOperand(0)); 1437 } 1438 break; 1439 } 1440 case kX87Float32ToFloat64: { 1441 InstructionOperand* input = instr->InputAt(0); 1442 if (input->IsFPRegister()) { 1443 __ sub(esp, Immediate(kDoubleSize)); 1444 __ fstp_s(MemOperand(esp, 0)); 1445 __ fld_s(MemOperand(esp, 0)); 1446 __ add(esp, Immediate(kDoubleSize)); 1447 } else { 1448 DCHECK(input->IsFPStackSlot()); 1449 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1450 __ VerifyX87StackDepth(1); 1451 } 1452 __ fstp(0); 1453 __ fld_s(i.InputOperand(0)); 1454 } 1455 break; 1456 } 1457 case kX87Uint32ToFloat64: { 1458 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1459 __ VerifyX87StackDepth(1); 1460 } 1461 __ fstp(0); 1462 __ LoadUint32NoSSE2(i.InputRegister(0)); 1463 break; 1464 } 1465 case kX87Float32ToInt32: { 1466 if (!instr->InputAt(0)->IsFPRegister()) { 1467 __ fld_s(i.InputOperand(0)); 1468 } 1469 __ TruncateX87TOSToI(i.OutputRegister(0)); 1470 if (!instr->InputAt(0)->IsFPRegister()) { 1471 __ fstp(0); 1472 } 1473 break; 1474 } 1475 case kX87Float32ToUint32: { 1476 if (!instr->InputAt(0)->IsFPRegister()) { 1477 __ fld_s(i.InputOperand(0)); 1478 } 1479 Label success; 1480 __ 
TruncateX87TOSToI(i.OutputRegister(0)); 1481 __ test(i.OutputRegister(0), i.OutputRegister(0)); 1482 __ j(positive, &success); 1483 __ push(Immediate(INT32_MIN)); 1484 __ fild_s(Operand(esp, 0)); 1485 __ lea(esp, Operand(esp, kPointerSize)); 1486 __ faddp(); 1487 __ TruncateX87TOSToI(i.OutputRegister(0)); 1488 __ or_(i.OutputRegister(0), Immediate(0x80000000)); 1489 __ bind(&success); 1490 if (!instr->InputAt(0)->IsFPRegister()) { 1491 __ fstp(0); 1492 } 1493 break; 1494 } 1495 case kX87Float64ToInt32: { 1496 if (!instr->InputAt(0)->IsFPRegister()) { 1497 __ fld_d(i.InputOperand(0)); 1498 } 1499 __ TruncateX87TOSToI(i.OutputRegister(0)); 1500 if (!instr->InputAt(0)->IsFPRegister()) { 1501 __ fstp(0); 1502 } 1503 break; 1504 } 1505 case kX87Float64ToFloat32: { 1506 InstructionOperand* input = instr->InputAt(0); 1507 if (input->IsFPRegister()) { 1508 __ sub(esp, Immediate(kDoubleSize)); 1509 __ fstp_s(MemOperand(esp, 0)); 1510 __ fld_s(MemOperand(esp, 0)); 1511 __ add(esp, Immediate(kDoubleSize)); 1512 } else { 1513 DCHECK(input->IsFPStackSlot()); 1514 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1515 __ VerifyX87StackDepth(1); 1516 } 1517 __ fstp(0); 1518 __ fld_d(i.InputOperand(0)); 1519 __ sub(esp, Immediate(kDoubleSize)); 1520 __ fstp_s(MemOperand(esp, 0)); 1521 __ fld_s(MemOperand(esp, 0)); 1522 __ add(esp, Immediate(kDoubleSize)); 1523 } 1524 break; 1525 } 1526 case kX87Float64ToUint32: { 1527 __ push_imm32(-2147483648); 1528 if (!instr->InputAt(0)->IsFPRegister()) { 1529 __ fld_d(i.InputOperand(0)); 1530 } 1531 __ fild_s(Operand(esp, 0)); 1532 __ fld(1); 1533 __ faddp(); 1534 __ TruncateX87TOSToI(i.OutputRegister(0)); 1535 __ add(esp, Immediate(kInt32Size)); 1536 __ add(i.OutputRegister(), Immediate(0x80000000)); 1537 __ fstp(0); 1538 if (!instr->InputAt(0)->IsFPRegister()) { 1539 __ fstp(0); 1540 } 1541 break; 1542 } 1543 case kX87Float64ExtractHighWord32: { 1544 if (instr->InputAt(0)->IsFPRegister()) { 1545 __ sub(esp, Immediate(kDoubleSize)); 1546 __ 
fst_d(MemOperand(esp, 0)); 1547 __ mov(i.OutputRegister(), MemOperand(esp, kDoubleSize / 2)); 1548 __ add(esp, Immediate(kDoubleSize)); 1549 } else { 1550 InstructionOperand* input = instr->InputAt(0); 1551 USE(input); 1552 DCHECK(input->IsFPStackSlot()); 1553 __ mov(i.OutputRegister(), i.InputOperand(0, kDoubleSize / 2)); 1554 } 1555 break; 1556 } 1557 case kX87Float64ExtractLowWord32: { 1558 if (instr->InputAt(0)->IsFPRegister()) { 1559 __ sub(esp, Immediate(kDoubleSize)); 1560 __ fst_d(MemOperand(esp, 0)); 1561 __ mov(i.OutputRegister(), MemOperand(esp, 0)); 1562 __ add(esp, Immediate(kDoubleSize)); 1563 } else { 1564 InstructionOperand* input = instr->InputAt(0); 1565 USE(input); 1566 DCHECK(input->IsFPStackSlot()); 1567 __ mov(i.OutputRegister(), i.InputOperand(0)); 1568 } 1569 break; 1570 } 1571 case kX87Float64InsertHighWord32: { 1572 __ sub(esp, Immediate(kDoubleSize)); 1573 __ fstp_d(MemOperand(esp, 0)); 1574 __ mov(MemOperand(esp, kDoubleSize / 2), i.InputRegister(1)); 1575 __ fld_d(MemOperand(esp, 0)); 1576 __ add(esp, Immediate(kDoubleSize)); 1577 break; 1578 } 1579 case kX87Float64InsertLowWord32: { 1580 __ sub(esp, Immediate(kDoubleSize)); 1581 __ fstp_d(MemOperand(esp, 0)); 1582 __ mov(MemOperand(esp, 0), i.InputRegister(1)); 1583 __ fld_d(MemOperand(esp, 0)); 1584 __ add(esp, Immediate(kDoubleSize)); 1585 break; 1586 } 1587 case kX87Float64Sqrt: { 1588 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1589 __ VerifyX87StackDepth(1); 1590 } 1591 __ X87SetFPUCW(0x027F); 1592 __ fstp(0); 1593 __ fld_d(MemOperand(esp, 0)); 1594 __ fsqrt(); 1595 __ lea(esp, Operand(esp, kDoubleSize)); 1596 __ X87SetFPUCW(0x037F); 1597 break; 1598 } 1599 case kX87Float64Round: { 1600 RoundingMode mode = 1601 static_cast<RoundingMode>(MiscField::decode(instr->opcode())); 1602 // Set the correct round mode in x87 control register 1603 __ X87SetRC((mode << 10)); 1604 1605 if (!instr->InputAt(0)->IsFPRegister()) { 1606 InstructionOperand* input = instr->InputAt(0); 1607 
USE(input); 1608 DCHECK(input->IsFPStackSlot()); 1609 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1610 __ VerifyX87StackDepth(1); 1611 } 1612 __ fstp(0); 1613 __ fld_d(i.InputOperand(0)); 1614 } 1615 __ frndint(); 1616 __ X87SetRC(0x0000); 1617 break; 1618 } 1619 case kX87Float64Cmp: { 1620 __ fld_d(MemOperand(esp, kDoubleSize)); 1621 __ fld_d(MemOperand(esp, 0)); 1622 __ FCmp(); 1623 __ lea(esp, Operand(esp, 2 * kDoubleSize)); 1624 break; 1625 } 1626 case kX87Float64SilenceNaN: { 1627 Label end, return_qnan; 1628 __ fstp(0); 1629 __ push(ebx); 1630 // Load Half word of HoleNan(SNaN) into ebx 1631 __ mov(ebx, MemOperand(esp, 2 * kInt32Size)); 1632 __ cmp(ebx, Immediate(kHoleNanUpper32)); 1633 // Check input is HoleNaN(SNaN)? 1634 __ j(equal, &return_qnan, Label::kNear); 1635 // If input isn't HoleNaN(SNaN), just load it and return 1636 __ fld_d(MemOperand(esp, 1 * kInt32Size)); 1637 __ jmp(&end); 1638 __ bind(&return_qnan); 1639 // If input is HoleNaN(SNaN), Return QNaN 1640 __ push(Immediate(0xffffffff)); 1641 __ push(Immediate(0xfff7ffff)); 1642 __ fld_d(MemOperand(esp, 0)); 1643 __ lea(esp, Operand(esp, kDoubleSize)); 1644 __ bind(&end); 1645 __ pop(ebx); 1646 // Clear stack. 
1647 __ lea(esp, Operand(esp, 1 * kDoubleSize)); 1648 break; 1649 } 1650 case kX87Movsxbl: 1651 __ movsx_b(i.OutputRegister(), i.MemoryOperand()); 1652 break; 1653 case kX87Movzxbl: 1654 __ movzx_b(i.OutputRegister(), i.MemoryOperand()); 1655 break; 1656 case kX87Movb: { 1657 size_t index = 0; 1658 Operand operand = i.MemoryOperand(&index); 1659 if (HasImmediateInput(instr, index)) { 1660 __ mov_b(operand, i.InputInt8(index)); 1661 } else { 1662 __ mov_b(operand, i.InputRegister(index)); 1663 } 1664 break; 1665 } 1666 case kX87Movsxwl: 1667 __ movsx_w(i.OutputRegister(), i.MemoryOperand()); 1668 break; 1669 case kX87Movzxwl: 1670 __ movzx_w(i.OutputRegister(), i.MemoryOperand()); 1671 break; 1672 case kX87Movw: { 1673 size_t index = 0; 1674 Operand operand = i.MemoryOperand(&index); 1675 if (HasImmediateInput(instr, index)) { 1676 __ mov_w(operand, i.InputInt16(index)); 1677 } else { 1678 __ mov_w(operand, i.InputRegister(index)); 1679 } 1680 break; 1681 } 1682 case kX87Movl: 1683 if (instr->HasOutput()) { 1684 __ mov(i.OutputRegister(), i.MemoryOperand()); 1685 } else { 1686 size_t index = 0; 1687 Operand operand = i.MemoryOperand(&index); 1688 if (HasImmediateInput(instr, index)) { 1689 __ mov(operand, i.InputImmediate(index)); 1690 } else { 1691 __ mov(operand, i.InputRegister(index)); 1692 } 1693 } 1694 break; 1695 case kX87Movsd: { 1696 if (instr->HasOutput()) { 1697 X87Register output = i.OutputDoubleRegister(); 1698 USE(output); 1699 DCHECK(output.code() == 0); 1700 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1701 __ VerifyX87StackDepth(1); 1702 } 1703 __ fstp(0); 1704 __ fld_d(i.MemoryOperand()); 1705 } else { 1706 size_t index = 0; 1707 Operand operand = i.MemoryOperand(&index); 1708 __ fst_d(operand); 1709 } 1710 break; 1711 } 1712 case kX87Movss: { 1713 if (instr->HasOutput()) { 1714 X87Register output = i.OutputDoubleRegister(); 1715 USE(output); 1716 DCHECK(output.code() == 0); 1717 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1718 __ 
VerifyX87StackDepth(1); 1719 } 1720 __ fstp(0); 1721 __ fld_s(i.MemoryOperand()); 1722 } else { 1723 size_t index = 0; 1724 Operand operand = i.MemoryOperand(&index); 1725 __ fst_s(operand); 1726 } 1727 break; 1728 } 1729 case kX87BitcastFI: { 1730 __ mov(i.OutputRegister(), MemOperand(esp, 0)); 1731 __ lea(esp, Operand(esp, kFloatSize)); 1732 break; 1733 } 1734 case kX87BitcastIF: { 1735 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 1736 __ VerifyX87StackDepth(1); 1737 } 1738 __ fstp(0); 1739 if (instr->InputAt(0)->IsRegister()) { 1740 __ lea(esp, Operand(esp, -kFloatSize)); 1741 __ mov(MemOperand(esp, 0), i.InputRegister(0)); 1742 __ fld_s(MemOperand(esp, 0)); 1743 __ lea(esp, Operand(esp, kFloatSize)); 1744 } else { 1745 __ fld_s(i.InputOperand(0)); 1746 } 1747 break; 1748 } 1749 case kX87Lea: { 1750 AddressingMode mode = AddressingModeField::decode(instr->opcode()); 1751 // Shorten "leal" to "addl", "subl" or "shll" if the register allocation 1752 // and addressing mode just happens to work out. The "addl"/"subl" forms 1753 // in these cases are faster based on measurements. 
1754 if (mode == kMode_MI) { 1755 __ Move(i.OutputRegister(), Immediate(i.InputInt32(0))); 1756 } else if (i.InputRegister(0).is(i.OutputRegister())) { 1757 if (mode == kMode_MRI) { 1758 int32_t constant_summand = i.InputInt32(1); 1759 if (constant_summand > 0) { 1760 __ add(i.OutputRegister(), Immediate(constant_summand)); 1761 } else if (constant_summand < 0) { 1762 __ sub(i.OutputRegister(), Immediate(-constant_summand)); 1763 } 1764 } else if (mode == kMode_MR1) { 1765 if (i.InputRegister(1).is(i.OutputRegister())) { 1766 __ shl(i.OutputRegister(), 1); 1767 } else { 1768 __ lea(i.OutputRegister(), i.MemoryOperand()); 1769 } 1770 } else if (mode == kMode_M2) { 1771 __ shl(i.OutputRegister(), 1); 1772 } else if (mode == kMode_M4) { 1773 __ shl(i.OutputRegister(), 2); 1774 } else if (mode == kMode_M8) { 1775 __ shl(i.OutputRegister(), 3); 1776 } else { 1777 __ lea(i.OutputRegister(), i.MemoryOperand()); 1778 } 1779 } else { 1780 __ lea(i.OutputRegister(), i.MemoryOperand()); 1781 } 1782 break; 1783 } 1784 case kX87Push: 1785 if (instr->InputAt(0)->IsFPRegister()) { 1786 auto allocated = AllocatedOperand::cast(*instr->InputAt(0)); 1787 if (allocated.representation() == MachineRepresentation::kFloat32) { 1788 __ sub(esp, Immediate(kFloatSize)); 1789 __ fst_s(Operand(esp, 0)); 1790 frame_access_state()->IncreaseSPDelta(kFloatSize / kPointerSize); 1791 } else { 1792 DCHECK(allocated.representation() == MachineRepresentation::kFloat64); 1793 __ sub(esp, Immediate(kDoubleSize)); 1794 __ fst_d(Operand(esp, 0)); 1795 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize); 1796 } 1797 } else if (instr->InputAt(0)->IsFPStackSlot()) { 1798 auto allocated = AllocatedOperand::cast(*instr->InputAt(0)); 1799 if (allocated.representation() == MachineRepresentation::kFloat32) { 1800 __ sub(esp, Immediate(kFloatSize)); 1801 __ fld_s(i.InputOperand(0)); 1802 __ fstp_s(MemOperand(esp, 0)); 1803 frame_access_state()->IncreaseSPDelta(kFloatSize / kPointerSize); 1804 } else { 
1805 DCHECK(allocated.representation() == MachineRepresentation::kFloat64); 1806 __ sub(esp, Immediate(kDoubleSize)); 1807 __ fld_d(i.InputOperand(0)); 1808 __ fstp_d(MemOperand(esp, 0)); 1809 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize); 1810 } 1811 } else if (HasImmediateInput(instr, 0)) { 1812 __ push(i.InputImmediate(0)); 1813 frame_access_state()->IncreaseSPDelta(1); 1814 } else { 1815 __ push(i.InputOperand(0)); 1816 frame_access_state()->IncreaseSPDelta(1); 1817 } 1818 break; 1819 case kX87Poke: { 1820 int const slot = MiscField::decode(instr->opcode()); 1821 if (HasImmediateInput(instr, 0)) { 1822 __ mov(Operand(esp, slot * kPointerSize), i.InputImmediate(0)); 1823 } else { 1824 __ mov(Operand(esp, slot * kPointerSize), i.InputRegister(0)); 1825 } 1826 break; 1827 } 1828 case kX87Xchgb: { 1829 size_t index = 0; 1830 Operand operand = i.MemoryOperand(&index); 1831 __ xchg_b(i.InputRegister(index), operand); 1832 break; 1833 } 1834 case kX87Xchgw: { 1835 size_t index = 0; 1836 Operand operand = i.MemoryOperand(&index); 1837 __ xchg_w(i.InputRegister(index), operand); 1838 break; 1839 } 1840 case kX87Xchgl: { 1841 size_t index = 0; 1842 Operand operand = i.MemoryOperand(&index); 1843 __ xchg(i.InputRegister(index), operand); 1844 break; 1845 } 1846 case kX87PushFloat32: 1847 __ lea(esp, Operand(esp, -kFloatSize)); 1848 if (instr->InputAt(0)->IsFPStackSlot()) { 1849 __ fld_s(i.InputOperand(0)); 1850 __ fstp_s(MemOperand(esp, 0)); 1851 } else if (instr->InputAt(0)->IsFPRegister()) { 1852 __ fst_s(MemOperand(esp, 0)); 1853 } else { 1854 UNREACHABLE(); 1855 } 1856 break; 1857 case kX87PushFloat64: 1858 __ lea(esp, Operand(esp, -kDoubleSize)); 1859 if (instr->InputAt(0)->IsFPStackSlot()) { 1860 __ fld_d(i.InputOperand(0)); 1861 __ fstp_d(MemOperand(esp, 0)); 1862 } else if (instr->InputAt(0)->IsFPRegister()) { 1863 __ fst_d(MemOperand(esp, 0)); 1864 } else { 1865 UNREACHABLE(); 1866 } 1867 break; 1868 case kCheckedLoadInt8: 1869 
ASSEMBLE_CHECKED_LOAD_INTEGER(movsx_b); 1870 break; 1871 case kCheckedLoadUint8: 1872 ASSEMBLE_CHECKED_LOAD_INTEGER(movzx_b); 1873 break; 1874 case kCheckedLoadInt16: 1875 ASSEMBLE_CHECKED_LOAD_INTEGER(movsx_w); 1876 break; 1877 case kCheckedLoadUint16: 1878 ASSEMBLE_CHECKED_LOAD_INTEGER(movzx_w); 1879 break; 1880 case kCheckedLoadWord32: 1881 ASSEMBLE_CHECKED_LOAD_INTEGER(mov); 1882 break; 1883 case kCheckedLoadFloat32: 1884 ASSEMBLE_CHECKED_LOAD_FLOAT(fld_s); 1885 break; 1886 case kCheckedLoadFloat64: 1887 ASSEMBLE_CHECKED_LOAD_FLOAT(fld_d); 1888 break; 1889 case kCheckedStoreWord8: 1890 ASSEMBLE_CHECKED_STORE_INTEGER(mov_b); 1891 break; 1892 case kCheckedStoreWord16: 1893 ASSEMBLE_CHECKED_STORE_INTEGER(mov_w); 1894 break; 1895 case kCheckedStoreWord32: 1896 ASSEMBLE_CHECKED_STORE_INTEGER(mov); 1897 break; 1898 case kCheckedStoreFloat32: 1899 ASSEMBLE_CHECKED_STORE_FLOAT(fst_s); 1900 break; 1901 case kCheckedStoreFloat64: 1902 ASSEMBLE_CHECKED_STORE_FLOAT(fst_d); 1903 break; 1904 case kX87StackCheck: { 1905 ExternalReference const stack_limit = 1906 ExternalReference::address_of_stack_limit(isolate()); 1907 __ cmp(esp, Operand::StaticVariable(stack_limit)); 1908 break; 1909 } 1910 case kCheckedLoadWord64: 1911 case kCheckedStoreWord64: 1912 UNREACHABLE(); // currently unsupported checked int64 load/store. 1913 break; 1914 case kAtomicLoadInt8: 1915 case kAtomicLoadUint8: 1916 case kAtomicLoadInt16: 1917 case kAtomicLoadUint16: 1918 case kAtomicLoadWord32: 1919 case kAtomicStoreWord8: 1920 case kAtomicStoreWord16: 1921 case kAtomicStoreWord32: 1922 UNREACHABLE(); // Won't be generated by instruction selector. 1923 break; 1924 } 1925 return kSuccess; 1926 } // NOLINT(readability/fn_size) 1927 1928 1929 // Assembles a branch after an instruction. 1930 void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) { 1931 X87OperandConverter i(this, instr); 1932 Label::Distance flabel_distance = 1933 branch->fallthru ? 
Label::kNear : Label::kFar; 1934 1935 Label done; 1936 Label tlabel_tmp; 1937 Label flabel_tmp; 1938 Label* tlabel = &tlabel_tmp; 1939 Label* flabel = &flabel_tmp; 1940 1941 Label* tlabel_dst = branch->true_label; 1942 Label* flabel_dst = branch->false_label; 1943 1944 switch (branch->condition) { 1945 case kUnorderedEqual: 1946 __ j(parity_even, flabel, flabel_distance); 1947 // Fall through. 1948 case kEqual: 1949 __ j(equal, tlabel); 1950 break; 1951 case kUnorderedNotEqual: 1952 __ j(parity_even, tlabel); 1953 // Fall through. 1954 case kNotEqual: 1955 __ j(not_equal, tlabel); 1956 break; 1957 case kSignedLessThan: 1958 __ j(less, tlabel); 1959 break; 1960 case kSignedGreaterThanOrEqual: 1961 __ j(greater_equal, tlabel); 1962 break; 1963 case kSignedLessThanOrEqual: 1964 __ j(less_equal, tlabel); 1965 break; 1966 case kSignedGreaterThan: 1967 __ j(greater, tlabel); 1968 break; 1969 case kUnsignedLessThan: 1970 __ j(below, tlabel); 1971 break; 1972 case kUnsignedGreaterThanOrEqual: 1973 __ j(above_equal, tlabel); 1974 break; 1975 case kUnsignedLessThanOrEqual: 1976 __ j(below_equal, tlabel); 1977 break; 1978 case kUnsignedGreaterThan: 1979 __ j(above, tlabel); 1980 break; 1981 case kOverflow: 1982 __ j(overflow, tlabel); 1983 break; 1984 case kNotOverflow: 1985 __ j(no_overflow, tlabel); 1986 break; 1987 default: 1988 UNREACHABLE(); 1989 break; 1990 } 1991 // Add a jump if not falling through to the next block. 1992 if (!branch->fallthru) __ jmp(flabel); 1993 1994 __ jmp(&done); 1995 __ bind(&tlabel_tmp); 1996 FlagsMode mode = FlagsModeField::decode(instr->opcode()); 1997 if (mode == kFlags_deoptimize) { 1998 int double_register_param_count = 0; 1999 int x87_layout = 0; 2000 for (size_t i = 0; i < instr->InputCount(); i++) { 2001 if (instr->InputAt(i)->IsFPRegister()) { 2002 double_register_param_count++; 2003 } 2004 } 2005 // Currently we use only one X87 register. 
    // If double_register_param_count
    // is bigger than 1, it means duplicated double register is added to input
    // of this instruction.
    if (double_register_param_count > 0) {
      x87_layout = (0 << 3) | 1;
    }
    // The layout of x87 register stack is loaded on the top of FPU register
    // stack for deoptimization.
    // NOTE(review): {x87_layout}, {double_register_param_count} and the labels
    // below are defined earlier in this function, outside this excerpt.
    __ push(Immediate(x87_layout));
    __ fild_s(MemOperand(esp, 0));
    __ lea(esp, Operand(esp, kPointerSize));
  }
  __ jmp(tlabel_dst);
  __ bind(&flabel_tmp);
  __ jmp(flabel_dst);
  __ bind(&done);
}


// Jumps to {target} unless it is the next block in assembly order, in which
// case control simply falls through.
void CodeGenerator::AssembleArchJump(RpoNumber target) {
  if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
}


// Assembles boolean materializations after an instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  X87OperandConverter i(this, instr);
  Label done;

  // Materialize a full 32-bit 1 or 0 value. The result register is always the
  // last output of the instruction.
  Label check;
  DCHECK_NE(0u, instr->OutputCount());
  Register reg = i.OutputRegister(instr->OutputCount() - 1);
  Condition cc = no_condition;
  switch (condition) {
    case kUnorderedEqual:
      // Unordered (PF set) compares as "not equal": materialize 0 directly;
      // otherwise fall through to the ordered equality check.
      __ j(parity_odd, &check, Label::kNear);
      __ Move(reg, Immediate(0));
      __ jmp(&done, Label::kNear);
      // Fall through.
    case kEqual:
      cc = equal;
      break;
    case kUnorderedNotEqual:
      // Unordered (PF set) compares as "not equal": materialize 1 directly;
      // otherwise fall through to the ordered inequality check.
      __ j(parity_odd, &check, Label::kNear);
      __ mov(reg, Immediate(1));
      __ jmp(&done, Label::kNear);
      // Fall through.
    case kNotEqual:
      cc = not_equal;
      break;
    case kSignedLessThan:
      cc = less;
      break;
    case kSignedGreaterThanOrEqual:
      cc = greater_equal;
      break;
    case kSignedLessThanOrEqual:
      cc = less_equal;
      break;
    case kSignedGreaterThan:
      cc = greater;
      break;
    case kUnsignedLessThan:
      cc = below;
      break;
    case kUnsignedGreaterThanOrEqual:
      cc = above_equal;
      break;
    case kUnsignedLessThanOrEqual:
      cc = below_equal;
      break;
    case kUnsignedGreaterThan:
      cc = above;
      break;
    case kOverflow:
      cc = overflow;
      break;
    case kNotOverflow:
      cc = no_overflow;
      break;
    default:
      UNREACHABLE();
      break;
  }
  __ bind(&check);
  if (reg.is_byte_register()) {
    // setcc for byte registers (al, bl, cl, dl).
    __ setcc(cc, reg);
    __ movzx_b(reg, reg);
  } else {
    // Emit a branch to set a register to either 1 or 0.
    Label set;
    __ j(cc, &set, Label::kNear);
    __ Move(reg, Immediate(0));
    __ jmp(&done, Label::kNear);
    __ bind(&set);
    __ mov(reg, Immediate(1));
  }
  __ bind(&done);
}


// Emits a compare-and-branch for each (value, target) pair; input 1 holds the
// default target taken when no case value matches.
void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
  X87OperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  for (size_t index = 2; index < instr->InputCount(); index += 2) {
    __ cmp(input, Immediate(i.InputInt32(index + 0)));
    __ j(equal, GetLabel(i.InputRpo(index + 1)));
  }
  AssembleArchJump(i.InputRpo(1));
}


// Emits a bounds check against the case count followed by an indirect jump
// through a label table; input 1 holds the out-of-range (default) target.
void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
  X87OperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  size_t const case_count = instr->InputCount() - 2;
  Label** cases = zone()->NewArray<Label*>(case_count);
  for (size_t index = 0; index < case_count; ++index) {
    cases[index] = GetLabel(i.InputRpo(index + 2));
  }
  Label* const table = AddJumpTable(cases, case_count);
  __ cmp(input, Immediate(case_count));
  __ j(above_equal, GetLabel(i.InputRpo(1)));
  __ jmp(Operand::JumpTable(input, times_4, table));
}

// Emits a call into the deoptimizer for {deoptimization_id}; reports failure
// (instead of crashing) when no more deoptimization entries are available.
CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
      isolate(), deoptimization_id, bailout_type);
  if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
  __ call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
  return kSuccess;
}


// The calling convention for JSFunctions on X87 passes arguments on the
// stack and the JSFunction and context in EDI and ESI, respectively, thus
// the steps of the call look as follows:

// --{ before the call instruction }--------------------------------------------
// | caller frame |
// ^ esp ^ ebp

// --{ push arguments and setup ESI, EDI }--------------------------------------
// | args + receiver | caller frame |
// ^ esp ^ ebp
// [edi = JSFunction, esi = context]

// --{ call [edi + kCodeEntryOffset] }------------------------------------------
// | RET | args + receiver | caller frame |
// ^ esp ^ ebp

// =={ prologue of called function }============================================
// --{ push ebp }---------------------------------------------------------------
// | FP | RET | args + receiver | caller frame |
// ^ esp ^ ebp

// --{ mov ebp, esp }-----------------------------------------------------------
// | FP | RET | args + receiver | caller frame |
// ^ ebp,esp

// --{ push esi }---------------------------------------------------------------
// | CTX | FP | RET | args + receiver | caller frame |
// ^esp ^ ebp

// --{ push edi }---------------------------------------------------------------
// | FNC
| CTX | FP | RET | args + receiver | caller frame | 2177 // ^esp ^ ebp 2178 2179 // --{ subi esp, #N }----------------------------------------------------------- 2180 // | callee frame | FNC | CTX | FP | RET | args + receiver | caller frame | 2181 // ^esp ^ ebp 2182 2183 // =={ body of called function }================================================ 2184 2185 // =={ epilogue of called function }============================================ 2186 // --{ mov esp, ebp }----------------------------------------------------------- 2187 // | FP | RET | args + receiver | caller frame | 2188 // ^ esp,ebp 2189 2190 // --{ pop ebp }----------------------------------------------------------- 2191 // | | RET | args + receiver | caller frame | 2192 // ^ esp ^ ebp 2193 2194 // --{ ret #A+1 }----------------------------------------------------------- 2195 // | | caller frame | 2196 // ^ esp ^ ebp 2197 2198 2199 // Runtime function calls are accomplished by doing a stub call to the 2200 // CEntryStub (a real code object). On X87 passes arguments on the 2201 // stack, the number of arguments in EAX, the address of the runtime function 2202 // in EBX, and the context in ESI. 2203 2204 // --{ before the call instruction }-------------------------------------------- 2205 // | caller frame | 2206 // ^ esp ^ ebp 2207 2208 // --{ push arguments and setup EAX, EBX, and ESI }----------------------------- 2209 // | args + receiver | caller frame | 2210 // ^ esp ^ ebp 2211 // [eax = #args, ebx = runtime function, esi = context] 2212 2213 // --{ call #CEntryStub }------------------------------------------------------- 2214 // | RET | args + receiver | caller frame | 2215 // ^ esp ^ ebp 2216 2217 // =={ body of runtime function }=============================================== 2218 2219 // --{ runtime returns }-------------------------------------------------------- 2220 // | caller frame | 2221 // ^ esp ^ ebp 2222 2223 // Other custom linkages (e.g. 
for calling directly into and out of C++) may 2224 // need to save callee-saved registers on the stack, which is done in the 2225 // function prologue of generated code. 2226 2227 // --{ before the call instruction }-------------------------------------------- 2228 // | caller frame | 2229 // ^ esp ^ ebp 2230 2231 // --{ set up arguments in registers on stack }--------------------------------- 2232 // | args | caller frame | 2233 // ^ esp ^ ebp 2234 // [r0 = arg0, r1 = arg1, ...] 2235 2236 // --{ call code }-------------------------------------------------------------- 2237 // | RET | args | caller frame | 2238 // ^ esp ^ ebp 2239 2240 // =={ prologue of called function }============================================ 2241 // --{ push ebp }--------------------------------------------------------------- 2242 // | FP | RET | args | caller frame | 2243 // ^ esp ^ ebp 2244 2245 // --{ mov ebp, esp }----------------------------------------------------------- 2246 // | FP | RET | args | caller frame | 2247 // ^ ebp,esp 2248 2249 // --{ save registers }--------------------------------------------------------- 2250 // | regs | FP | RET | args | caller frame | 2251 // ^ esp ^ ebp 2252 2253 // --{ subi esp, #N }----------------------------------------------------------- 2254 // | callee frame | regs | FP | RET | args | caller frame | 2255 // ^esp ^ ebp 2256 2257 // =={ body of called function }================================================ 2258 2259 // =={ epilogue of called function }============================================ 2260 // --{ restore registers }------------------------------------------------------ 2261 // | regs | FP | RET | args | caller frame | 2262 // ^ esp ^ ebp 2263 2264 // --{ mov esp, ebp }----------------------------------------------------------- 2265 // | FP | RET | args | caller frame | 2266 // ^ esp,ebp 2267 2268 // --{ pop ebp }---------------------------------------------------------------- 2269 // | RET | args | caller frame | 2270 // ^ esp ^ 
// ebp

// Counts the callee-saved register slots the frame must reserve (the pushes
// themselves happen in AssembleConstructFrame) and resets the x87 FPU to a
// known state before code generation.
void CodeGenerator::FinishFrame(Frame* frame) {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  const RegList saves = descriptor->CalleeSavedRegisters();
  if (saves != 0) {  // Save callee-saved registers.
    DCHECK(!info()->is_osr());
    int pushed = 0;
    for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
      if (!((1 << i) & saves)) continue;
      ++pushed;
    }
    frame->AllocateSavedCalleeRegisterSlots(pushed);
  }

  // Initialize FPU state.
  __ fninit();
  __ fld1();
}

// Emits the frame-construction sequence: the appropriate prologue for the
// call kind, stack-slot allocation, and callee-saved register pushes.
void CodeGenerator::AssembleConstructFrame() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (frame_access_state()->has_frame()) {
    if (descriptor->IsCFunctionCall()) {
      // Plain C frame: push ebp; mov ebp, esp.
      __ push(ebp);
      __ mov(ebp, esp);
    } else if (descriptor->IsJSFunctionCall()) {
      __ Prologue(this->info()->GeneratePreagedPrologue());
    } else {
      __ StubPrologue(info()->GetOutputStackFrameType());
    }
  }

  int shrink_slots = frame()->GetSpillSlotCount();

  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly from
    // the unoptimized frame. Thus, all that needs to be done is to allocate the
    // remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();

    // Initialize FPU state.
    __ fninit();
    __ fld1();
  }

  const RegList saves = descriptor->CalleeSavedRegisters();
  if (shrink_slots > 0) {
    __ sub(esp, Immediate(shrink_slots * kPointerSize));
  }

  if (saves != 0) {  // Save callee-saved registers.
    DCHECK(!info()->is_osr());
    int pushed = 0;
    // Pushed high-to-low so that FinishFrame's slot count matches this order.
    for (int i = Register::kNumRegisters - 1; i >= 0; i--) {
      if (!((1 << i) & saves)) continue;
      __ push(Register::from_code(i));
      ++pushed;
    }
  }
}


// Emits the return sequence: conditionally pops the x87 stack, restores
// callee-saved registers, tears down the frame and returns, popping any
// stack-passed parameters.
void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

  // Clear the FPU stack only if there is no return value in the stack.
  if (FLAG_debug_code && FLAG_enable_slow_asserts) {
    __ VerifyX87StackDepth(1);
  }
  // A floating-point return value lives in st(0); popping it here would
  // destroy it, so only pop when no FP value is returned in register 0.
  bool clear_stack = true;
  for (int i = 0; i < descriptor->ReturnCount(); i++) {
    MachineRepresentation rep = descriptor->GetReturnType(i).representation();
    LinkageLocation loc = descriptor->GetReturnLocation(i);
    if (IsFloatingPoint(rep) && loc == LinkageLocation::ForRegister(0)) {
      clear_stack = false;
      break;
    }
  }
  if (clear_stack) __ fstp(0);

  int pop_count = static_cast<int>(descriptor->StackParameterCount());
  const RegList saves = descriptor->CalleeSavedRegisters();
  // Restore registers (low-to-high, the reverse of the pushes in
  // AssembleConstructFrame).
  if (saves != 0) {
    for (int i = 0; i < Register::kNumRegisters; i++) {
      if (!((1 << i) & saves)) continue;
      __ pop(Register::from_code(i));
    }
  }

  if (descriptor->IsCFunctionCall()) {
    AssembleDeconstructFrame();
  } else if (frame_access_state()->has_frame()) {
    // Canonicalize JSFunction return sites for now.
    if (return_label_.is_bound()) {
      __ jmp(&return_label_);
      return;
    } else {
      __ bind(&return_label_);
      AssembleDeconstructFrame();
    }
  }
  if (pop_count == 0) {
    __ ret(0);
  } else {
    // ebx is used as a scratch register to hold the return address while the
    // parameters are popped.
    __ Ret(pop_count * kPointerSize, ebx);
  }
}


// Emits code moving a value from {source} to {destination} (gap move).
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  X87OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    Operand dst = g.ToOperand(destination);
    __ mov(dst, src);
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Operand src = g.ToOperand(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ mov(dst, src);
    } else {
      // Memory-to-memory move via a push/pop pair (no scratch register).
      Operand dst = g.ToOperand(destination);
      __ push(src);
      __ pop(dst);
    }
  } else if (source->IsConstant()) {
    Constant src_constant = g.ToConstant(source);
    if (src_constant.type() == Constant::kHeapObject) {
      Handle<HeapObject> src = src_constant.ToHeapObject();
      int slot;
      if (IsMaterializableFromFrame(src, &slot)) {
        // The object is already in a frame slot; copy it from there.
        if (destination->IsRegister()) {
          Register dst = g.ToRegister(destination);
          __ mov(dst, g.SlotToOperand(slot));
        } else {
          DCHECK(destination->IsStackSlot());
          Operand dst = g.ToOperand(destination);
          __ push(g.SlotToOperand(slot));
          __ pop(dst);
        }
      } else if (destination->IsRegister()) {
        Register dst = g.ToRegister(destination);
        __ LoadHeapObject(dst, src);
      } else {
        DCHECK(destination->IsStackSlot());
        Operand dst = g.ToOperand(destination);
        AllowDeferredHandleDereference embedding_raw_address;
        if (isolate()->heap()->InNewSpace(*src)) {
          // New-space objects may move; go through PushHeapObject so the
          // reference is handled appropriately.
          __ PushHeapObject(src);
          __ pop(dst);
        } else {
          __ mov(dst, src);
        }
      }
    } else if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ Move(dst, g.ToImmediate(source));
    } else if (destination->IsStackSlot()) {
      Operand dst = g.ToOperand(destination);
      __ Move(dst, g.ToImmediate(source));
    } else if (src_constant.type() == Constant::kFloat32) {
      // TODO(turbofan): Can we do better here?
      // Materialize the float bit pattern through the stack.
      uint32_t src = bit_cast<uint32_t>(src_constant.ToFloat32());
      if (destination->IsFPRegister()) {
        __ sub(esp, Immediate(kInt32Size));
        __ mov(MemOperand(esp, 0), Immediate(src));
        // always only push one value into the x87 stack.
        __ fstp(0);
        __ fld_s(MemOperand(esp, 0));
        __ add(esp, Immediate(kInt32Size));
      } else {
        DCHECK(destination->IsFPStackSlot());
        Operand dst = g.ToOperand(destination);
        __ Move(dst, Immediate(src));
      }
    } else {
      DCHECK_EQ(Constant::kFloat64, src_constant.type());
      // Split the double bit pattern into the two 32-bit halves.
      uint64_t src = bit_cast<uint64_t>(src_constant.ToFloat64());
      uint32_t lower = static_cast<uint32_t>(src);
      uint32_t upper = static_cast<uint32_t>(src >> 32);
      if (destination->IsFPRegister()) {
        __ sub(esp, Immediate(kDoubleSize));
        __ mov(MemOperand(esp, 0), Immediate(lower));
        __ mov(MemOperand(esp, kInt32Size), Immediate(upper));
        // always only push one value into the x87 stack.
        __ fstp(0);
        __ fld_d(MemOperand(esp, 0));
        __ add(esp, Immediate(kDoubleSize));
      } else {
        DCHECK(destination->IsFPStackSlot());
        Operand dst0 = g.ToOperand(destination);
        Operand dst1 = g.HighOperand(destination);
        __ Move(dst0, Immediate(lower));
        __ Move(dst1, Immediate(upper));
      }
    }
  } else if (source->IsFPRegister()) {
    // The value is on top of the x87 stack (st(0)); store a copy to memory.
    DCHECK(destination->IsFPStackSlot());
    Operand dst = g.ToOperand(destination);
    auto allocated = AllocatedOperand::cast(*source);
    switch (allocated.representation()) {
      case MachineRepresentation::kFloat32:
        __ fst_s(dst);
        break;
      case MachineRepresentation::kFloat64:
        __ fst_d(dst);
        break;
      default:
        UNREACHABLE();
    }
  } else if (source->IsFPStackSlot()) {
    DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
    Operand src = g.ToOperand(source);
    auto allocated = AllocatedOperand::cast(*source);
    if (destination->IsFPRegister()) {
      // always only push one value into the x87 stack.
      __ fstp(0);
      switch (allocated.representation()) {
        case MachineRepresentation::kFloat32:
          __ fld_s(src);
          break;
        case MachineRepresentation::kFloat64:
          __ fld_d(src);
          break;
        default:
          UNREACHABLE();
      }
    } else {
      // FP-slot to FP-slot: bounce through st(0).
      Operand dst = g.ToOperand(destination);
      switch (allocated.representation()) {
        case MachineRepresentation::kFloat32:
          __ fld_s(src);
          __ fstp_s(dst);
          break;
        case MachineRepresentation::kFloat64:
          __ fld_d(src);
          __ fstp_d(dst);
          break;
        default:
          UNREACHABLE();
      }
    }
  } else {
    UNREACHABLE();
  }
}


// Emits code exchanging the contents of {source} and {destination}.
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  X87OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister() && destination->IsRegister()) {
    // Register-register.
    Register src = g.ToRegister(source);
    Register dst = g.ToRegister(destination);
    __ xchg(dst, src);
  } else if (source->IsRegister() && destination->IsStackSlot()) {
    // Register-memory.
    __ xchg(g.ToRegister(source), g.ToOperand(destination));
  } else if (source->IsStackSlot() && destination->IsStackSlot()) {
    // Memory-memory.
    // Push both values, then pop them back crosswise. Operands are
    // re-materialized after each push because the SP delta changes the
    // stack-relative encodings.
    Operand dst1 = g.ToOperand(destination);
    __ push(dst1);
    frame_access_state()->IncreaseSPDelta(1);
    Operand src1 = g.ToOperand(source);
    __ push(src1);
    Operand dst2 = g.ToOperand(destination);
    __ pop(dst2);
    frame_access_state()->IncreaseSPDelta(-1);
    Operand src2 = g.ToOperand(source);
    __ pop(src2);
  } else if (source->IsFPRegister() && destination->IsFPRegister()) {
    UNREACHABLE();
  } else if (source->IsFPRegister() && destination->IsFPStackSlot()) {
    // Load the slot on top of st(0), exchange the two, then store the old
    // st(0) back to the slot.
    auto allocated = AllocatedOperand::cast(*source);
    switch (allocated.representation()) {
      case MachineRepresentation::kFloat32:
        __ fld_s(g.ToOperand(destination));
        __ fxch();
        __ fstp_s(g.ToOperand(destination));
        break;
      case MachineRepresentation::kFloat64:
        __ fld_d(g.ToOperand(destination));
        __ fxch();
        __ fstp_d(g.ToOperand(destination));
        break;
      default:
        UNREACHABLE();
    }
  } else if (source->IsFPStackSlot() && destination->IsFPStackSlot()) {
    // Load both slots onto the x87 stack and store them back crosswise:
    // the last value loaded (destination) pops into source first.
    auto allocated = AllocatedOperand::cast(*source);
    switch (allocated.representation()) {
      case MachineRepresentation::kFloat32:
        __ fld_s(g.ToOperand(source));
        __ fld_s(g.ToOperand(destination));
        __ fstp_s(g.ToOperand(source));
        __ fstp_s(g.ToOperand(destination));
        break;
      case MachineRepresentation::kFloat64:
        __ fld_d(g.ToOperand(source));
        __ fld_d(g.ToOperand(destination));
        __ fstp_d(g.ToOperand(source));
        __ fstp_d(g.ToOperand(destination));
        break;
      default:
        UNREACHABLE();
    }
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}


// Emits the jump table referenced by AssembleArchTableSwitch as a sequence
// of 32-bit label addresses.
void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  for (size_t index = 0; index < target_count; ++index) {
    __ dd(targets[index]);
  }
}


// Pads the instruction stream with nops so the deoptimizer can later patch a
// call over the previous lazy-bailout site without overwriting other code.
void CodeGenerator::EnsureSpaceForLazyDeopt() {
  if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
    return;
  }

  int space_needed = Deoptimizer::patch_size();
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  if (current_pc < last_lazy_deopt_pc_ + space_needed) {
    int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
    __ Nop(padding_size);
  }
}

#undef __

}  // namespace compiler
}  // namespace internal
}  // namespace v8