1 // Copyright 2014 the V8 project authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style license that can be 3 // found in the LICENSE file. 4 5 #include "src/base/adapters.h" 6 #include "src/compiler/instruction-selector-impl.h" 7 #include "src/compiler/node-matchers.h" 8 #include "src/compiler/node-properties.h" 9 10 namespace v8 { 11 namespace internal { 12 namespace compiler { 13 14 // Adds X87-specific methods for generating operands. 15 class X87OperandGenerator final : public OperandGenerator { 16 public: 17 explicit X87OperandGenerator(InstructionSelector* selector) 18 : OperandGenerator(selector) {} 19 20 InstructionOperand UseByteRegister(Node* node) { 21 // TODO(titzer): encode byte register use constraints. 22 return UseFixed(node, edx); 23 } 24 25 InstructionOperand DefineAsByteRegister(Node* node) { 26 // TODO(titzer): encode byte register def constraints. 27 return DefineAsRegister(node); 28 } 29 30 bool CanBeMemoryOperand(InstructionCode opcode, Node* node, Node* input, 31 int effect_level) { 32 if (input->opcode() != IrOpcode::kLoad || 33 !selector()->CanCover(node, input)) { 34 return false; 35 } 36 if (effect_level != selector()->GetEffectLevel(input)) { 37 return false; 38 } 39 MachineRepresentation rep = 40 LoadRepresentationOf(input->op()).representation(); 41 switch (opcode) { 42 case kX87Cmp: 43 case kX87Test: 44 return rep == MachineRepresentation::kWord32 || 45 rep == MachineRepresentation::kTagged; 46 case kX87Cmp16: 47 case kX87Test16: 48 return rep == MachineRepresentation::kWord16; 49 case kX87Cmp8: 50 case kX87Test8: 51 return rep == MachineRepresentation::kWord8; 52 default: 53 break; 54 } 55 return false; 56 } 57 58 InstructionOperand CreateImmediate(int imm) { 59 return sequence()->AddImmediate(Constant(imm)); 60 } 61 62 bool CanBeImmediate(Node* node) { 63 switch (node->opcode()) { 64 case IrOpcode::kInt32Constant: 65 case IrOpcode::kNumberConstant: 66 case IrOpcode::kExternalConstant: 67 case 
IrOpcode::kRelocatableInt32Constant: 68 case IrOpcode::kRelocatableInt64Constant: 69 return true; 70 case IrOpcode::kHeapConstant: { 71 // TODO(bmeurer): We must not dereference handles concurrently. If we 72 // really have to this here, then we need to find a way to put this 73 // information on the HeapConstant node already. 74 #if 0 75 // Constants in new space cannot be used as immediates in V8 because 76 // the GC does not scan code objects when collecting the new generation. 77 Handle<HeapObject> value = OpParameter<Handle<HeapObject>>(node); 78 Isolate* isolate = value->GetIsolate(); 79 return !isolate->heap()->InNewSpace(*value); 80 #endif 81 } 82 default: 83 return false; 84 } 85 } 86 87 AddressingMode GenerateMemoryOperandInputs(Node* index, int scale, Node* base, 88 Node* displacement_node, 89 DisplacementMode displacement_mode, 90 InstructionOperand inputs[], 91 size_t* input_count) { 92 AddressingMode mode = kMode_MRI; 93 int32_t displacement = (displacement_node == nullptr) 94 ? 
0 95 : OpParameter<int32_t>(displacement_node); 96 if (displacement_mode == kNegativeDisplacement) { 97 displacement = -displacement; 98 } 99 if (base != nullptr) { 100 if (base->opcode() == IrOpcode::kInt32Constant) { 101 displacement += OpParameter<int32_t>(base); 102 base = nullptr; 103 } 104 } 105 if (base != nullptr) { 106 inputs[(*input_count)++] = UseRegister(base); 107 if (index != nullptr) { 108 DCHECK(scale >= 0 && scale <= 3); 109 inputs[(*input_count)++] = UseRegister(index); 110 if (displacement != 0) { 111 inputs[(*input_count)++] = TempImmediate(displacement); 112 static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I, 113 kMode_MR4I, kMode_MR8I}; 114 mode = kMRnI_modes[scale]; 115 } else { 116 static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2, 117 kMode_MR4, kMode_MR8}; 118 mode = kMRn_modes[scale]; 119 } 120 } else { 121 if (displacement == 0) { 122 mode = kMode_MR; 123 } else { 124 inputs[(*input_count)++] = TempImmediate(displacement); 125 mode = kMode_MRI; 126 } 127 } 128 } else { 129 DCHECK(scale >= 0 && scale <= 3); 130 if (index != nullptr) { 131 inputs[(*input_count)++] = UseRegister(index); 132 if (displacement != 0) { 133 inputs[(*input_count)++] = TempImmediate(displacement); 134 static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I, 135 kMode_M4I, kMode_M8I}; 136 mode = kMnI_modes[scale]; 137 } else { 138 static const AddressingMode kMn_modes[] = {kMode_MR, kMode_M2, 139 kMode_M4, kMode_M8}; 140 mode = kMn_modes[scale]; 141 } 142 } else { 143 inputs[(*input_count)++] = TempImmediate(displacement); 144 return kMode_MI; 145 } 146 } 147 return mode; 148 } 149 150 AddressingMode GetEffectiveAddressMemoryOperand(Node* node, 151 InstructionOperand inputs[], 152 size_t* input_count) { 153 BaseWithIndexAndDisplacement32Matcher m(node, AddressOption::kAllowAll); 154 DCHECK(m.matches()); 155 if ((m.displacement() == nullptr || CanBeImmediate(m.displacement()))) { 156 return GenerateMemoryOperandInputs( 157 
m.index(), m.scale(), m.base(), m.displacement(), 158 m.displacement_mode(), inputs, input_count); 159 } else { 160 inputs[(*input_count)++] = UseRegister(node->InputAt(0)); 161 inputs[(*input_count)++] = UseRegister(node->InputAt(1)); 162 return kMode_MR1; 163 } 164 } 165 166 bool CanBeBetterLeftOperand(Node* node) const { 167 return !selector()->IsLive(node); 168 } 169 }; 170 171 172 void InstructionSelector::VisitLoad(Node* node) { 173 LoadRepresentation load_rep = LoadRepresentationOf(node->op()); 174 175 ArchOpcode opcode = kArchNop; 176 switch (load_rep.representation()) { 177 case MachineRepresentation::kFloat32: 178 opcode = kX87Movss; 179 break; 180 case MachineRepresentation::kFloat64: 181 opcode = kX87Movsd; 182 break; 183 case MachineRepresentation::kBit: // Fall through. 184 case MachineRepresentation::kWord8: 185 opcode = load_rep.IsSigned() ? kX87Movsxbl : kX87Movzxbl; 186 break; 187 case MachineRepresentation::kWord16: 188 opcode = load_rep.IsSigned() ? kX87Movsxwl : kX87Movzxwl; 189 break; 190 case MachineRepresentation::kTaggedSigned: // Fall through. 191 case MachineRepresentation::kTaggedPointer: // Fall through. 192 case MachineRepresentation::kTagged: // Fall through. 193 case MachineRepresentation::kWord32: 194 opcode = kX87Movl; 195 break; 196 case MachineRepresentation::kWord64: // Fall through. 197 case MachineRepresentation::kSimd128: // Fall through. 198 case MachineRepresentation::kSimd1x4: // Fall through. 199 case MachineRepresentation::kSimd1x8: // Fall through. 200 case MachineRepresentation::kSimd1x16: // Fall through. 
201 case MachineRepresentation::kNone: 202 UNREACHABLE(); 203 return; 204 } 205 206 X87OperandGenerator g(this); 207 InstructionOperand outputs[1]; 208 outputs[0] = g.DefineAsRegister(node); 209 InstructionOperand inputs[3]; 210 size_t input_count = 0; 211 AddressingMode mode = 212 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); 213 InstructionCode code = opcode | AddressingModeField::encode(mode); 214 Emit(code, 1, outputs, input_count, inputs); 215 } 216 217 void InstructionSelector::VisitProtectedLoad(Node* node) { 218 // TODO(eholk) 219 UNIMPLEMENTED(); 220 } 221 222 void InstructionSelector::VisitStore(Node* node) { 223 X87OperandGenerator g(this); 224 Node* base = node->InputAt(0); 225 Node* index = node->InputAt(1); 226 Node* value = node->InputAt(2); 227 228 StoreRepresentation store_rep = StoreRepresentationOf(node->op()); 229 WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind(); 230 MachineRepresentation rep = store_rep.representation(); 231 232 if (write_barrier_kind != kNoWriteBarrier) { 233 DCHECK(CanBeTaggedPointer(rep)); 234 AddressingMode addressing_mode; 235 InstructionOperand inputs[3]; 236 size_t input_count = 0; 237 inputs[input_count++] = g.UseUniqueRegister(base); 238 if (g.CanBeImmediate(index)) { 239 inputs[input_count++] = g.UseImmediate(index); 240 addressing_mode = kMode_MRI; 241 } else { 242 inputs[input_count++] = g.UseUniqueRegister(index); 243 addressing_mode = kMode_MR1; 244 } 245 inputs[input_count++] = g.UseUniqueRegister(value); 246 RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny; 247 switch (write_barrier_kind) { 248 case kNoWriteBarrier: 249 UNREACHABLE(); 250 break; 251 case kMapWriteBarrier: 252 record_write_mode = RecordWriteMode::kValueIsMap; 253 break; 254 case kPointerWriteBarrier: 255 record_write_mode = RecordWriteMode::kValueIsPointer; 256 break; 257 case kFullWriteBarrier: 258 record_write_mode = RecordWriteMode::kValueIsAny; 259 break; 260 } 261 InstructionOperand 
temps[] = {g.TempRegister(), g.TempRegister()}; 262 size_t const temp_count = arraysize(temps); 263 InstructionCode code = kArchStoreWithWriteBarrier; 264 code |= AddressingModeField::encode(addressing_mode); 265 code |= MiscField::encode(static_cast<int>(record_write_mode)); 266 Emit(code, 0, nullptr, input_count, inputs, temp_count, temps); 267 } else { 268 ArchOpcode opcode = kArchNop; 269 switch (rep) { 270 case MachineRepresentation::kFloat32: 271 opcode = kX87Movss; 272 break; 273 case MachineRepresentation::kFloat64: 274 opcode = kX87Movsd; 275 break; 276 case MachineRepresentation::kBit: // Fall through. 277 case MachineRepresentation::kWord8: 278 opcode = kX87Movb; 279 break; 280 case MachineRepresentation::kWord16: 281 opcode = kX87Movw; 282 break; 283 case MachineRepresentation::kTaggedSigned: // Fall through. 284 case MachineRepresentation::kTaggedPointer: // Fall through. 285 case MachineRepresentation::kTagged: // Fall through. 286 case MachineRepresentation::kWord32: 287 opcode = kX87Movl; 288 break; 289 case MachineRepresentation::kWord64: // Fall through. 290 case MachineRepresentation::kSimd128: // Fall through. 291 case MachineRepresentation::kSimd1x4: // Fall through. 292 case MachineRepresentation::kSimd1x8: // Fall through. 293 case MachineRepresentation::kSimd1x16: // Fall through. 
294 case MachineRepresentation::kNone: 295 UNREACHABLE(); 296 return; 297 } 298 299 InstructionOperand val; 300 if (g.CanBeImmediate(value)) { 301 val = g.UseImmediate(value); 302 } else if (rep == MachineRepresentation::kWord8 || 303 rep == MachineRepresentation::kBit) { 304 val = g.UseByteRegister(value); 305 } else { 306 val = g.UseRegister(value); 307 } 308 309 InstructionOperand inputs[4]; 310 size_t input_count = 0; 311 AddressingMode addressing_mode = 312 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); 313 InstructionCode code = 314 opcode | AddressingModeField::encode(addressing_mode); 315 inputs[input_count++] = val; 316 Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, 317 inputs); 318 } 319 } 320 321 void InstructionSelector::VisitProtectedStore(Node* node) { 322 // TODO(eholk) 323 UNIMPLEMENTED(); 324 } 325 326 // Architecture supports unaligned access, therefore VisitLoad is used instead 327 void InstructionSelector::VisitUnalignedLoad(Node* node) { UNREACHABLE(); } 328 329 // Architecture supports unaligned access, therefore VisitStore is used instead 330 void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); } 331 332 void InstructionSelector::VisitCheckedLoad(Node* node) { 333 CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op()); 334 X87OperandGenerator g(this); 335 Node* const buffer = node->InputAt(0); 336 Node* const offset = node->InputAt(1); 337 Node* const length = node->InputAt(2); 338 ArchOpcode opcode = kArchNop; 339 switch (load_rep.representation()) { 340 case MachineRepresentation::kWord8: 341 opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8; 342 break; 343 case MachineRepresentation::kWord16: 344 opcode = load_rep.IsSigned() ? 
kCheckedLoadInt16 : kCheckedLoadUint16; 345 break; 346 case MachineRepresentation::kWord32: 347 opcode = kCheckedLoadWord32; 348 break; 349 case MachineRepresentation::kFloat32: 350 opcode = kCheckedLoadFloat32; 351 break; 352 case MachineRepresentation::kFloat64: 353 opcode = kCheckedLoadFloat64; 354 break; 355 case MachineRepresentation::kBit: // Fall through. 356 case MachineRepresentation::kTaggedSigned: // Fall through. 357 case MachineRepresentation::kTaggedPointer: // Fall through. 358 case MachineRepresentation::kTagged: // Fall through. 359 case MachineRepresentation::kWord64: // Fall through. 360 case MachineRepresentation::kSimd128: // Fall through. 361 case MachineRepresentation::kSimd1x4: // Fall through. 362 case MachineRepresentation::kSimd1x8: // Fall through. 363 case MachineRepresentation::kSimd1x16: // Fall through. 364 case MachineRepresentation::kNone: 365 UNREACHABLE(); 366 return; 367 } 368 InstructionOperand offset_operand = g.UseRegister(offset); 369 InstructionOperand length_operand = 370 g.CanBeImmediate(length) ? 
g.UseImmediate(length) : g.UseRegister(length); 371 if (g.CanBeImmediate(buffer)) { 372 Emit(opcode | AddressingModeField::encode(kMode_MRI), 373 g.DefineAsRegister(node), offset_operand, length_operand, 374 offset_operand, g.UseImmediate(buffer)); 375 } else { 376 Emit(opcode | AddressingModeField::encode(kMode_MR1), 377 g.DefineAsRegister(node), offset_operand, length_operand, 378 g.UseRegister(buffer), offset_operand); 379 } 380 } 381 382 383 void InstructionSelector::VisitCheckedStore(Node* node) { 384 MachineRepresentation rep = CheckedStoreRepresentationOf(node->op()); 385 X87OperandGenerator g(this); 386 Node* const buffer = node->InputAt(0); 387 Node* const offset = node->InputAt(1); 388 Node* const length = node->InputAt(2); 389 Node* const value = node->InputAt(3); 390 ArchOpcode opcode = kArchNop; 391 switch (rep) { 392 case MachineRepresentation::kWord8: 393 opcode = kCheckedStoreWord8; 394 break; 395 case MachineRepresentation::kWord16: 396 opcode = kCheckedStoreWord16; 397 break; 398 case MachineRepresentation::kWord32: 399 opcode = kCheckedStoreWord32; 400 break; 401 case MachineRepresentation::kFloat32: 402 opcode = kCheckedStoreFloat32; 403 break; 404 case MachineRepresentation::kFloat64: 405 opcode = kCheckedStoreFloat64; 406 break; 407 case MachineRepresentation::kBit: // Fall through. 408 case MachineRepresentation::kTaggedSigned: // Fall through. 409 case MachineRepresentation::kTaggedPointer: // Fall through. 410 case MachineRepresentation::kTagged: // Fall through. 411 case MachineRepresentation::kWord64: // Fall through. 412 case MachineRepresentation::kSimd128: // Fall through. 413 case MachineRepresentation::kSimd1x4: // Fall through. 414 case MachineRepresentation::kSimd1x8: // Fall through. 415 case MachineRepresentation::kSimd1x16: // Fall through. 416 case MachineRepresentation::kNone: 417 UNREACHABLE(); 418 return; 419 } 420 InstructionOperand value_operand = 421 g.CanBeImmediate(value) ? 
g.UseImmediate(value) 422 : ((rep == MachineRepresentation::kWord8 || 423 rep == MachineRepresentation::kBit) 424 ? g.UseByteRegister(value) 425 : g.UseRegister(value)); 426 InstructionOperand offset_operand = g.UseRegister(offset); 427 InstructionOperand length_operand = 428 g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length); 429 if (g.CanBeImmediate(buffer)) { 430 Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(), 431 offset_operand, length_operand, value_operand, offset_operand, 432 g.UseImmediate(buffer)); 433 } else { 434 Emit(opcode | AddressingModeField::encode(kMode_MR1), g.NoOutput(), 435 offset_operand, length_operand, value_operand, g.UseRegister(buffer), 436 offset_operand); 437 } 438 } 439 440 namespace { 441 442 // Shared routine for multiple binary operations. 443 void VisitBinop(InstructionSelector* selector, Node* node, 444 InstructionCode opcode, FlagsContinuation* cont) { 445 X87OperandGenerator g(selector); 446 Int32BinopMatcher m(node); 447 Node* left = m.left().node(); 448 Node* right = m.right().node(); 449 InstructionOperand inputs[4]; 450 size_t input_count = 0; 451 InstructionOperand outputs[2]; 452 size_t output_count = 0; 453 454 // TODO(turbofan): match complex addressing modes. 
455 if (left == right) { 456 // If both inputs refer to the same operand, enforce allocating a register 457 // for both of them to ensure that we don't end up generating code like 458 // this: 459 // 460 // mov eax, [ebp-0x10] 461 // add eax, [ebp-0x10] 462 // jo label 463 InstructionOperand const input = g.UseRegister(left); 464 inputs[input_count++] = input; 465 inputs[input_count++] = input; 466 } else if (g.CanBeImmediate(right)) { 467 inputs[input_count++] = g.UseRegister(left); 468 inputs[input_count++] = g.UseImmediate(right); 469 } else { 470 if (node->op()->HasProperty(Operator::kCommutative) && 471 g.CanBeBetterLeftOperand(right)) { 472 std::swap(left, right); 473 } 474 inputs[input_count++] = g.UseRegister(left); 475 inputs[input_count++] = g.Use(right); 476 } 477 478 if (cont->IsBranch()) { 479 inputs[input_count++] = g.Label(cont->true_block()); 480 inputs[input_count++] = g.Label(cont->false_block()); 481 } 482 483 outputs[output_count++] = g.DefineSameAsFirst(node); 484 if (cont->IsSet()) { 485 outputs[output_count++] = g.DefineAsRegister(cont->result()); 486 } 487 488 DCHECK_NE(0u, input_count); 489 DCHECK_NE(0u, output_count); 490 DCHECK_GE(arraysize(inputs), input_count); 491 DCHECK_GE(arraysize(outputs), output_count); 492 493 opcode = cont->Encode(opcode); 494 if (cont->IsDeoptimize()) { 495 selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs, 496 cont->kind(), cont->reason(), cont->frame_state()); 497 } else { 498 selector->Emit(opcode, output_count, outputs, input_count, inputs); 499 } 500 } 501 502 503 // Shared routine for multiple binary operations. 
504 void VisitBinop(InstructionSelector* selector, Node* node, 505 InstructionCode opcode) { 506 FlagsContinuation cont; 507 VisitBinop(selector, node, opcode, &cont); 508 } 509 510 } // namespace 511 512 void InstructionSelector::VisitWord32And(Node* node) { 513 VisitBinop(this, node, kX87And); 514 } 515 516 517 void InstructionSelector::VisitWord32Or(Node* node) { 518 VisitBinop(this, node, kX87Or); 519 } 520 521 522 void InstructionSelector::VisitWord32Xor(Node* node) { 523 X87OperandGenerator g(this); 524 Int32BinopMatcher m(node); 525 if (m.right().Is(-1)) { 526 Emit(kX87Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node())); 527 } else { 528 VisitBinop(this, node, kX87Xor); 529 } 530 } 531 532 533 // Shared routine for multiple shift operations. 534 static inline void VisitShift(InstructionSelector* selector, Node* node, 535 ArchOpcode opcode) { 536 X87OperandGenerator g(selector); 537 Node* left = node->InputAt(0); 538 Node* right = node->InputAt(1); 539 540 if (g.CanBeImmediate(right)) { 541 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left), 542 g.UseImmediate(right)); 543 } else { 544 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left), 545 g.UseFixed(right, ecx)); 546 } 547 } 548 549 550 namespace { 551 552 void VisitMulHigh(InstructionSelector* selector, Node* node, 553 ArchOpcode opcode) { 554 X87OperandGenerator g(selector); 555 InstructionOperand temps[] = {g.TempRegister(eax)}; 556 selector->Emit( 557 opcode, g.DefineAsFixed(node, edx), g.UseFixed(node->InputAt(0), eax), 558 g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps); 559 } 560 561 562 void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) { 563 X87OperandGenerator g(selector); 564 InstructionOperand temps[] = {g.TempRegister(edx)}; 565 selector->Emit(opcode, g.DefineAsFixed(node, eax), 566 g.UseFixed(node->InputAt(0), eax), 567 g.UseUnique(node->InputAt(1)), arraysize(temps), temps); 568 } 569 570 571 void 
VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) { 572 X87OperandGenerator g(selector); 573 InstructionOperand temps[] = {g.TempRegister(eax)}; 574 selector->Emit(opcode, g.DefineAsFixed(node, edx), 575 g.UseFixed(node->InputAt(0), eax), 576 g.UseUnique(node->InputAt(1)), arraysize(temps), temps); 577 } 578 579 void EmitLea(InstructionSelector* selector, Node* result, Node* index, 580 int scale, Node* base, Node* displacement, 581 DisplacementMode displacement_mode) { 582 X87OperandGenerator g(selector); 583 InstructionOperand inputs[4]; 584 size_t input_count = 0; 585 AddressingMode mode = 586 g.GenerateMemoryOperandInputs(index, scale, base, displacement, 587 displacement_mode, inputs, &input_count); 588 589 DCHECK_NE(0u, input_count); 590 DCHECK_GE(arraysize(inputs), input_count); 591 592 InstructionOperand outputs[1]; 593 outputs[0] = g.DefineAsRegister(result); 594 595 InstructionCode opcode = AddressingModeField::encode(mode) | kX87Lea; 596 597 selector->Emit(opcode, 1, outputs, input_count, inputs); 598 } 599 600 } // namespace 601 602 603 void InstructionSelector::VisitWord32Shl(Node* node) { 604 Int32ScaleMatcher m(node, true); 605 if (m.matches()) { 606 Node* index = node->InputAt(0); 607 Node* base = m.power_of_two_plus_one() ? index : nullptr; 608 EmitLea(this, node, index, m.scale(), base, nullptr, kPositiveDisplacement); 609 return; 610 } 611 VisitShift(this, node, kX87Shl); 612 } 613 614 615 void InstructionSelector::VisitWord32Shr(Node* node) { 616 VisitShift(this, node, kX87Shr); 617 } 618 619 620 void InstructionSelector::VisitWord32Sar(Node* node) { 621 VisitShift(this, node, kX87Sar); 622 } 623 624 void InstructionSelector::VisitInt32PairAdd(Node* node) { 625 X87OperandGenerator g(this); 626 627 Node* projection1 = NodeProperties::FindProjection(node, 1); 628 if (projection1) { 629 // We use UseUniqueRegister here to avoid register sharing with the temp 630 // register. 
631 InstructionOperand inputs[] = { 632 g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)), 633 g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))}; 634 635 InstructionOperand outputs[] = {g.DefineSameAsFirst(node), 636 g.DefineAsRegister(projection1)}; 637 638 InstructionOperand temps[] = {g.TempRegister()}; 639 640 Emit(kX87AddPair, 2, outputs, 4, inputs, 1, temps); 641 } else { 642 // The high word of the result is not used, so we emit the standard 32 bit 643 // instruction. 644 Emit(kX87Add, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)), 645 g.Use(node->InputAt(2))); 646 } 647 } 648 649 void InstructionSelector::VisitInt32PairSub(Node* node) { 650 X87OperandGenerator g(this); 651 652 Node* projection1 = NodeProperties::FindProjection(node, 1); 653 if (projection1) { 654 // We use UseUniqueRegister here to avoid register sharing with the temp 655 // register. 656 InstructionOperand inputs[] = { 657 g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)), 658 g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))}; 659 660 InstructionOperand outputs[] = {g.DefineSameAsFirst(node), 661 g.DefineAsRegister(projection1)}; 662 663 InstructionOperand temps[] = {g.TempRegister()}; 664 665 Emit(kX87SubPair, 2, outputs, 4, inputs, 1, temps); 666 } else { 667 // The high word of the result is not used, so we emit the standard 32 bit 668 // instruction. 669 Emit(kX87Sub, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)), 670 g.Use(node->InputAt(2))); 671 } 672 } 673 674 void InstructionSelector::VisitInt32PairMul(Node* node) { 675 X87OperandGenerator g(this); 676 677 Node* projection1 = NodeProperties::FindProjection(node, 1); 678 if (projection1) { 679 // InputAt(3) explicitly shares ecx with OutputRegister(1) to save one 680 // register and one mov instruction. 
681 InstructionOperand inputs[] = {g.UseUnique(node->InputAt(0)), 682 g.UseUnique(node->InputAt(1)), 683 g.UseUniqueRegister(node->InputAt(2)), 684 g.UseFixed(node->InputAt(3), ecx)}; 685 686 InstructionOperand outputs[] = { 687 g.DefineAsFixed(node, eax), 688 g.DefineAsFixed(NodeProperties::FindProjection(node, 1), ecx)}; 689 690 InstructionOperand temps[] = {g.TempRegister(edx)}; 691 692 Emit(kX87MulPair, 2, outputs, 4, inputs, 1, temps); 693 } else { 694 // The high word of the result is not used, so we emit the standard 32 bit 695 // instruction. 696 Emit(kX87Imul, g.DefineSameAsFirst(node), g.UseRegister(node->InputAt(0)), 697 g.Use(node->InputAt(2))); 698 } 699 } 700 701 void VisitWord32PairShift(InstructionSelector* selector, InstructionCode opcode, 702 Node* node) { 703 X87OperandGenerator g(selector); 704 705 Node* shift = node->InputAt(2); 706 InstructionOperand shift_operand; 707 if (g.CanBeImmediate(shift)) { 708 shift_operand = g.UseImmediate(shift); 709 } else { 710 shift_operand = g.UseFixed(shift, ecx); 711 } 712 InstructionOperand inputs[] = {g.UseFixed(node->InputAt(0), eax), 713 g.UseFixed(node->InputAt(1), edx), 714 shift_operand}; 715 716 InstructionOperand outputs[2]; 717 InstructionOperand temps[1]; 718 int32_t output_count = 0; 719 int32_t temp_count = 0; 720 outputs[output_count++] = g.DefineAsFixed(node, eax); 721 Node* projection1 = NodeProperties::FindProjection(node, 1); 722 if (projection1) { 723 outputs[output_count++] = g.DefineAsFixed(projection1, edx); 724 } else { 725 temps[temp_count++] = g.TempRegister(edx); 726 } 727 728 selector->Emit(opcode, output_count, outputs, 3, inputs, temp_count, temps); 729 } 730 731 void InstructionSelector::VisitWord32PairShl(Node* node) { 732 VisitWord32PairShift(this, kX87ShlPair, node); 733 } 734 735 void InstructionSelector::VisitWord32PairShr(Node* node) { 736 VisitWord32PairShift(this, kX87ShrPair, node); 737 } 738 739 void InstructionSelector::VisitWord32PairSar(Node* node) { 740 
VisitWord32PairShift(this, kX87SarPair, node); 741 } 742 743 void InstructionSelector::VisitWord32Ror(Node* node) { 744 VisitShift(this, node, kX87Ror); 745 } 746 747 748 void InstructionSelector::VisitWord32Clz(Node* node) { 749 X87OperandGenerator g(this); 750 Emit(kX87Lzcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0))); 751 } 752 753 754 void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); } 755 756 757 void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); } 758 759 void InstructionSelector::VisitWord64ReverseBytes(Node* node) { UNREACHABLE(); } 760 761 void InstructionSelector::VisitWord32ReverseBytes(Node* node) { UNREACHABLE(); } 762 763 void InstructionSelector::VisitWord32Popcnt(Node* node) { 764 X87OperandGenerator g(this); 765 Emit(kX87Popcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0))); 766 } 767 768 769 void InstructionSelector::VisitInt32Add(Node* node) { 770 X87OperandGenerator g(this); 771 772 // Try to match the Add to a lea pattern 773 BaseWithIndexAndDisplacement32Matcher m(node); 774 if (m.matches() && 775 (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) { 776 InstructionOperand inputs[4]; 777 size_t input_count = 0; 778 AddressingMode mode = g.GenerateMemoryOperandInputs( 779 m.index(), m.scale(), m.base(), m.displacement(), m.displacement_mode(), 780 inputs, &input_count); 781 782 DCHECK_NE(0u, input_count); 783 DCHECK_GE(arraysize(inputs), input_count); 784 785 InstructionOperand outputs[1]; 786 outputs[0] = g.DefineAsRegister(node); 787 788 InstructionCode opcode = AddressingModeField::encode(mode) | kX87Lea; 789 Emit(opcode, 1, outputs, input_count, inputs); 790 return; 791 } 792 793 // No lea pattern match, use add 794 VisitBinop(this, node, kX87Add); 795 } 796 797 798 void InstructionSelector::VisitInt32Sub(Node* node) { 799 X87OperandGenerator g(this); 800 Int32BinopMatcher m(node); 801 if (m.left().Is(0)) { 802 Emit(kX87Neg, g.DefineSameAsFirst(node), 
g.Use(m.right().node())); 803 } else { 804 VisitBinop(this, node, kX87Sub); 805 } 806 } 807 808 809 void InstructionSelector::VisitInt32Mul(Node* node) { 810 Int32ScaleMatcher m(node, true); 811 if (m.matches()) { 812 Node* index = node->InputAt(0); 813 Node* base = m.power_of_two_plus_one() ? index : nullptr; 814 EmitLea(this, node, index, m.scale(), base, nullptr, kPositiveDisplacement); 815 return; 816 } 817 X87OperandGenerator g(this); 818 Node* left = node->InputAt(0); 819 Node* right = node->InputAt(1); 820 if (g.CanBeImmediate(right)) { 821 Emit(kX87Imul, g.DefineAsRegister(node), g.Use(left), 822 g.UseImmediate(right)); 823 } else { 824 if (g.CanBeBetterLeftOperand(right)) { 825 std::swap(left, right); 826 } 827 Emit(kX87Imul, g.DefineSameAsFirst(node), g.UseRegister(left), 828 g.Use(right)); 829 } 830 } 831 832 833 void InstructionSelector::VisitInt32MulHigh(Node* node) { 834 VisitMulHigh(this, node, kX87ImulHigh); 835 } 836 837 838 void InstructionSelector::VisitUint32MulHigh(Node* node) { 839 VisitMulHigh(this, node, kX87UmulHigh); 840 } 841 842 843 void InstructionSelector::VisitInt32Div(Node* node) { 844 VisitDiv(this, node, kX87Idiv); 845 } 846 847 848 void InstructionSelector::VisitUint32Div(Node* node) { 849 VisitDiv(this, node, kX87Udiv); 850 } 851 852 853 void InstructionSelector::VisitInt32Mod(Node* node) { 854 VisitMod(this, node, kX87Idiv); 855 } 856 857 858 void InstructionSelector::VisitUint32Mod(Node* node) { 859 VisitMod(this, node, kX87Udiv); 860 } 861 862 863 void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) { 864 X87OperandGenerator g(this); 865 Emit(kX87Float32ToFloat64, g.DefineAsFixed(node, stX_0), 866 g.Use(node->InputAt(0))); 867 } 868 869 870 void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) { 871 X87OperandGenerator g(this); 872 Emit(kX87Int32ToFloat32, g.DefineAsFixed(node, stX_0), 873 g.Use(node->InputAt(0))); 874 } 875 876 877 void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) { 878 
  // Tail of the preceding conversion visitor (its header is above this
  // chunk): converts an unsigned 32-bit integer to float32. As with the
  // other FP-producing visitors here, the result is fixed on stX_0.
  X87OperandGenerator g(this);
  Emit(kX87Uint32ToFloat32, g.DefineAsFixed(node, stX_0),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Int32ToFloat64, g.DefineAsFixed(node, stX_0),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  X87OperandGenerator g(this);
  // NOTE: unlike the sibling conversions, the input is constrained to a
  // register here rather than allowing any operand.
  Emit(kX87Uint32ToFloat64, g.DefineAsFixed(node, stX_0),
       g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}

// Lowers to the same instruction as VisitChangeFloat64ToUint32 above.
void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}

void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ToFloat32, g.DefineAsFixed(node, stX_0),
       g.Use(node->InputAt(0)));
}

void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
  X87OperandGenerator g(this);
  // Uses the architecture-independent double-to-int32 truncation.
  Emit(kArchTruncateDoubleToI, g.DefineAsRegister(node),
       g.Use(node->InputAt(0)));
}

void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  X87OperandGenerator g(this);
  // Push the float onto the FPU stack first; the bitcast instruction itself
  // takes no explicit inputs and produces a plain register result.
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87BitcastFI, g.DefineAsRegister(node), 0, nullptr);
}


void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87BitcastIF, g.DefineAsFixed(node, stX_0), g.Use(node->InputAt(0)));
}


// Binary FP arithmetic below follows a common pattern: both operands are
// first materialized with explicit push instructions (input 0, then
// input 1), and the arithmetic instruction itself takes no inputs and
// defines its result fixed on stX_0. The emission order is significant.
void InstructionSelector::VisitFloat32Add(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Add, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Add(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Add, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat32Sub(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Sub, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

void InstructionSelector::VisitFloat64Sub(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Sub, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

void InstructionSelector::VisitFloat32Mul(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Mul, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Mul(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Mul, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat32Div(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Div, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Div(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Div, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Mod(Node* node) {
  X87OperandGenerator g(this);
  // Emitted as a call (see MarkAsCall); eax is reserved as a temporary.
  InstructionOperand temps[] = {g.TempRegister(eax)};
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Mod, g.DefineAsFixed(node, stX_0), 1, temps)->MarkAsCall();
}

void InstructionSelector::VisitFloat32Max(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Max, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

void InstructionSelector::VisitFloat64Max(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Max, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

void InstructionSelector::VisitFloat32Min(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float32Min, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

void InstructionSelector::VisitFloat64Min(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(kX87Float64Min, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


// Unary FP ops: single operand pushed, result fixed on stX_0.
void InstructionSelector::VisitFloat32Abs(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87Float32Abs, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Abs(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87Float64Abs, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87Float32Sqrt, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87Float64Sqrt, g.DefineAsFixed(node, stX_0), 0, nullptr);
}


// Rounding visitors: the rounding mode is encoded into the instruction's
// MiscField; the result is a use-fixed stX_0 operand.
void InstructionSelector::VisitFloat32RoundDown(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32Round | MiscField::encode(kRoundDown),
       g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64RoundDown(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64Round | MiscField::encode(kRoundDown),
       g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat32RoundUp(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32Round | MiscField::encode(kRoundUp), g.UseFixed(node, stX_0),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64RoundUp(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64Round | MiscField::encode(kRoundUp), g.UseFixed(node, stX_0),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32Round | MiscField::encode(kRoundToZero),
       g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64Round | MiscField::encode(kRoundToZero),
       g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
}


// Round-ties-away is not supported by this backend (and is not advertised
// in SupportedMachineOperatorFlags below).
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  UNREACHABLE();
}


void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float32Round | MiscField::encode(kRoundToNearest),
       g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64Round | MiscField::encode(kRoundToNearest),
       g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
}

void InstructionSelector::VisitFloat32Neg(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87Float32Neg, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

void InstructionSelector::VisitFloat64Neg(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87Float64Neg, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

// Shared lowering for the IEEE-754 binary math operators: both operands are
// pushed onto the FPU stack and the operation is emitted as a call
// (MarkAsCall) whose result is fixed on stX_0.
void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
                                                   InstructionCode opcode) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  Emit(opcode, g.DefineAsFixed(node, stX_0), 0, nullptr)->MarkAsCall();
}

// Unary counterpart of VisitFloat64Ieee754Binop: one operand pushed, the
// op is emitted as a call with the result fixed on stX_0.
void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
                                                  InstructionCode opcode) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(opcode, g.DefineAsFixed(node, stX_0), 0, nullptr)->MarkAsCall();
}

// Emits the instructions that place call arguments on the stack. C calls
// poke arguments into pre-reserved slots; JS-style calls push them.
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  X87OperandGenerator g(this);

  // Prepare for C function call.
  if (descriptor->IsCFunctionCall()) {
    InstructionOperand temps[] = {g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    // Reserve stack space for the parameters; count is carried in MiscField.
    Emit(kArchPrepareCallCFunction |
             MiscField::encode(static_cast<int>(descriptor->ParameterCount())),
         0, nullptr, 0, nullptr, temp_count, temps);

    // Poke any stack arguments. Slot n corresponds to argument n; holes
    // (null nodes) are skipped.
    for (size_t n = 0; n < arguments->size(); ++n) {
      PushParameter input = (*arguments)[n];
      if (input.node()) {
        int const slot = static_cast<int>(n);
        InstructionOperand value = g.CanBeImmediate(input.node())
                                       ? g.UseImmediate(input.node())
                                       : g.UseRegister(input.node());
        Emit(kX87Poke | MiscField::encode(slot), g.NoOutput(), value);
      }
    }
  } else {
    // Push any stack arguments, last argument first so the first argument
    // ends up closest to the stack pointer.
    for (PushParameter input : base::Reversed(*arguments)) {
      // TODO(titzer): handle pushing double parameters.
      if (input.node() == nullptr) continue;
      // Prefer immediates; otherwise force a register on Atom (presumably a
      // tuning choice for that microarchitecture — confirm) and for FP
      // virtual registers, else allow any operand.
      InstructionOperand value =
          g.CanBeImmediate(input.node())
              ? g.UseImmediate(input.node())
              : IsSupported(ATOM) ||
                        sequence()->IsFP(GetVirtualRegister(input.node()))
                    ? g.UseRegister(input.node())
                    : g.Use(input.node());
      Emit(kX87Push, g.NoOutput(), value);
    }
  }
}


// Tail-call target addresses may be encoded as immediates on this target.
bool InstructionSelector::IsTailCallAddressImmediate() { return true; }

// No scratch registers are needed for tail calls from JS functions.
int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 0; }

namespace {

// Emits a flag-setting compare whose left operand is folded into a memory
// addressing mode (caller guarantees `left` is a Load). The continuation
// decides whether the result feeds a branch, deopt, materialized boolean,
// or trap.
void VisitCompareWithMemoryOperand(InstructionSelector* selector,
                                   InstructionCode opcode, Node* left,
                                   InstructionOperand right,
                                   FlagsContinuation* cont) {
  DCHECK(left->opcode() == IrOpcode::kLoad);
  X87OperandGenerator g(selector);
  size_t input_count = 0;
  InstructionOperand inputs[6];
  AddressingMode addressing_mode =
      g.GetEffectiveAddressMemoryOperand(left, inputs, &input_count);
  opcode |= AddressingModeField::encode(addressing_mode);
  opcode = cont->Encode(opcode);
  inputs[input_count++] = right;

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
    selector->Emit(opcode, 0, nullptr, input_count, inputs);
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, 0, nullptr, input_count, inputs,
                             cont->kind(), cont->reason(), cont->frame_state());
  } else if (cont->IsSet()) {
    InstructionOperand output = g.DefineAsRegister(cont->result());
    selector->Emit(opcode, 1, &output, input_count, inputs);
  } else {
    DCHECK(cont->IsTrap());
    inputs[input_count++] = g.UseImmediate(cont->trap_id());
    selector->Emit(opcode, 0, nullptr, input_count, inputs);
  }
}

// Shared routine for multiple compare operations.
// Emits a compare with fully-formed operands; the continuation selects the
// emission shape (branch / deopt / set-boolean / trap).
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  opcode = cont->Encode(opcode);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right, cont->kind(),
                             cont->reason(), cont->frame_state());
  } else if (cont->IsSet()) {
    // Setcc needs a byte-addressable destination register.
    selector->Emit(opcode, g.DefineAsByteRegister(cont->result()), left, right);
  } else {
    DCHECK(cont->IsTrap());
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.UseImmediate(cont->trap_id()));
  }
}


// Shared routine for multiple compare operations. Node-level overload:
// may swap commutative operands so the "better" one is on the left, then
// constrains left to a register and lets right be any operand.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  Node* left, Node* right, FlagsContinuation* cont,
                  bool commutative) {
  X87OperandGenerator g(selector);
  if (commutative && g.CanBeBetterLeftOperand(right)) {
    std::swap(left, right);
  }
  VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont);
}

// Returns the narrow machine type that `node` can be treated as for the
// purposes of a compare against `hint_node`. A constant adopts the hint's
// (load) type when its value fits in that type's range; otherwise the
// node's own load representation is used, or None if it isn't a load.
MachineType MachineTypeForNarrow(Node* node, Node* hint_node) {
  if (hint_node->opcode() == IrOpcode::kLoad) {
    MachineType hint = LoadRepresentationOf(hint_node->op());
    if (node->opcode() == IrOpcode::kInt32Constant ||
        node->opcode() == IrOpcode::kInt64Constant) {
      int64_t constant = node->opcode() == IrOpcode::kInt32Constant
                             ? OpParameter<int32_t>(node)
                             : OpParameter<int64_t>(node);
      if (hint == MachineType::Int8()) {
        if (constant >= std::numeric_limits<int8_t>::min() &&
            constant <= std::numeric_limits<int8_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Uint8()) {
        if (constant >= std::numeric_limits<uint8_t>::min() &&
            constant <= std::numeric_limits<uint8_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Int16()) {
        if (constant >= std::numeric_limits<int16_t>::min() &&
            constant <= std::numeric_limits<int16_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Uint16()) {
        if (constant >= std::numeric_limits<uint16_t>::min() &&
            constant <= std::numeric_limits<uint16_t>::max()) {
          return hint;
        }
      } else if (hint == MachineType::Int32()) {
        return hint;
      } else if (hint == MachineType::Uint32()) {
        if (constant >= 0) return hint;
      }
    }
  }
  return node->opcode() == IrOpcode::kLoad ? LoadRepresentationOf(node->op())
                                           : MachineType::None();
}

// Tries to match the size of the given opcode to that of the operands, if
// possible. Narrowing a signed 8/16-bit compare forces the continuation to
// an unsigned condition when the operands are unsigned.
InstructionCode TryNarrowOpcodeSize(InstructionCode opcode, Node* left,
                                    Node* right, FlagsContinuation* cont) {
  // TODO(epertoso): we can probably get some size information out of phi nodes.
  // If the load representations don't match, both operands will be
  // zero/sign-extended to 32bit.
  MachineType left_type = MachineTypeForNarrow(left, right);
  MachineType right_type = MachineTypeForNarrow(right, left);
  if (left_type == right_type) {
    switch (left_type.representation()) {
      case MachineRepresentation::kBit:
      case MachineRepresentation::kWord8: {
        if (opcode == kX87Test) return kX87Test8;
        if (opcode == kX87Cmp) {
          if (left_type.semantic() == MachineSemantic::kUint32) {
            cont->OverwriteUnsignedIfSigned();
          } else {
            CHECK_EQ(MachineSemantic::kInt32, left_type.semantic());
          }
          return kX87Cmp8;
        }
        break;
      }
      case MachineRepresentation::kWord16:
        if (opcode == kX87Test) return kX87Test16;
        if (opcode == kX87Cmp) {
          if (left_type.semantic() == MachineSemantic::kUint32) {
            cont->OverwriteUnsignedIfSigned();
          } else {
            CHECK_EQ(MachineSemantic::kInt32, left_type.semantic());
          }
          return kX87Cmp16;
        }
        break;
      default:
        break;
    }
  }
  return opcode;
}

// Shared routine for multiple float32 compare operations (inputs commuted).
1364 void VisitFloat32Compare(InstructionSelector* selector, Node* node, 1365 FlagsContinuation* cont) { 1366 X87OperandGenerator g(selector); 1367 selector->Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0))); 1368 selector->Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1))); 1369 if (cont->IsBranch()) { 1370 selector->Emit(cont->Encode(kX87Float32Cmp), g.NoOutput(), 1371 g.Label(cont->true_block()), g.Label(cont->false_block())); 1372 } else if (cont->IsDeoptimize()) { 1373 selector->EmitDeoptimize(cont->Encode(kX87Float32Cmp), g.NoOutput(), 1374 g.Use(node->InputAt(0)), g.Use(node->InputAt(1)), 1375 cont->kind(), cont->reason(), cont->frame_state()); 1376 } else if (cont->IsSet()) { 1377 selector->Emit(cont->Encode(kX87Float32Cmp), 1378 g.DefineAsByteRegister(cont->result())); 1379 } else { 1380 DCHECK(cont->IsTrap()); 1381 selector->Emit(cont->Encode(kX87Float32Cmp), g.NoOutput(), 1382 g.UseImmediate(cont->trap_id())); 1383 } 1384 } 1385 1386 1387 // Shared routine for multiple float64 compare operations (inputs commuted). 
// Float64 counterpart of VisitFloat32Compare above: operands are pushed
// onto the FPU stack, then the compare is emitted in the shape dictated by
// the continuation.
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  selector->Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  selector->Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  if (cont->IsBranch()) {
    selector->Emit(cont->Encode(kX87Float64Cmp), g.NoOutput(),
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(cont->Encode(kX87Float64Cmp), g.NoOutput(),
                             g.Use(node->InputAt(0)), g.Use(node->InputAt(1)),
                             cont->kind(), cont->reason(), cont->frame_state());
  } else if (cont->IsSet()) {
    selector->Emit(cont->Encode(kX87Float64Cmp),
                   g.DefineAsByteRegister(cont->result()));
  } else {
    DCHECK(cont->IsTrap());
    selector->Emit(cont->Encode(kX87Float64Cmp), g.NoOutput(),
                   g.UseImmediate(cont->trap_id()));
  }
}

// Shared routine for multiple word compare operations. Tries to narrow the
// compare to 8/16 bits, fold a load into a memory operand, and place
// immediates on the right, commuting the continuation when operands swap.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  InstructionCode narrowed_opcode =
      TryNarrowOpcodeSize(opcode, left, right, cont);

  // For branches the effect level is taken from the branch's control input
  // rather than the compare node itself.
  int effect_level = selector->GetEffectLevel(node);
  if (cont->IsBranch()) {
    effect_level = selector->GetEffectLevel(
        cont->true_block()->PredecessorAt(0)->control_input());
  }

  // If one of the two inputs is an immediate, make sure it's on the right, or
  // if one of the two inputs is a memory operand, make sure it's on the left.
  if ((!g.CanBeImmediate(right) && g.CanBeImmediate(left)) ||
      (g.CanBeMemoryOperand(narrowed_opcode, node, right, effect_level) &&
       !g.CanBeMemoryOperand(narrowed_opcode, node, left, effect_level))) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  // Match immediates on right side of comparison.
  if (g.CanBeImmediate(right)) {
    if (g.CanBeMemoryOperand(narrowed_opcode, node, left, effect_level)) {
      return VisitCompareWithMemoryOperand(selector, narrowed_opcode, left,
                                           g.UseImmediate(right), cont);
    }
    return VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right),
                        cont);
  }

  // Match memory operands on left side of comparison.
  if (g.CanBeMemoryOperand(narrowed_opcode, node, left, effect_level)) {
    // 8-bit compares need the register operand to be byte-addressable.
    bool needs_byte_register =
        narrowed_opcode == kX87Test8 || narrowed_opcode == kX87Cmp8;
    return VisitCompareWithMemoryOperand(
        selector, narrowed_opcode, left,
        needs_byte_register ? g.UseByteRegister(right) : g.UseRegister(right),
        cont);
  }

  if (g.CanBeBetterLeftOperand(right)) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  return VisitCompare(selector, opcode, left, right, cont,
                      node->op()->HasProperty(Operator::kCommutative));
}

// Word-compare entry point: special-cases the JS stack-limit check pattern
// (Compare(Load(js_stack_limit), LoadStackPointer)) as a dedicated
// kX87StackCheck instruction, otherwise falls through to kX87Cmp.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  if (m.left().IsLoad() && m.right().IsLoadStackPointer()) {
    LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node());
    ExternalReference js_stack_limit =
        ExternalReference::address_of_stack_limit(selector->isolate());
    if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) {
      // Compare(Load(js_stack_limit), LoadStackPointer)
      if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
      InstructionCode opcode = cont->Encode(kX87StackCheck);
      if (cont->IsBranch()) {
        selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()),
                       g.Label(cont->false_block()));
      } else if (cont->IsDeoptimize()) {
        selector->EmitDeoptimize(opcode, 0, nullptr, 0, nullptr, cont->kind(),
                                 cont->reason(), cont->frame_state());
      } else {
        DCHECK(cont->IsSet());
        selector->Emit(opcode, g.DefineAsRegister(cont->result()));
      }
      return;
    }
  }
  VisitWordCompare(selector, node, kX87Cmp, cont);
}


// Shared routine for word comparison with zero. Combines the continuation
// with a covered compare/overflow producer when possible; otherwise emits
// an explicit compare against zero.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  // Try to combine with comparisons against 0 by simply inverting the branch.
  while (value->opcode() == IrOpcode::kWord32Equal &&
         selector->CanCover(user, value)) {
    Int32BinopMatcher m(value);
    if (!m.right().Is(0)) break;

    user = value;
    value = m.left().node();
    cont->Negate();
  }

  if (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, cont);
      // Float compares use unsigned/unordered conditions because the float
      // compare routines push their inputs in commuted order.
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation> is either nullptr, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX87Add, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX87Sub, cont);
              case IrOpcode::kInt32MulWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX87Imul, cont);
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Sub:
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kWord32And:
        return VisitWordCompare(selector, value, kX87Test, cont);
      default:
        break;
    }
  }

  // Continuation could not be combined with a compare, emit compare against 0.
  X87OperandGenerator g(selector);
  VisitCompare(selector, kX87Cmp, g.Use(value), g.TempImmediate(0), cont);
}

}  // namespace


void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
  VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeIf(Node* node) {
  DeoptimizeParameters p = DeoptimizeParametersOf(node->op());
  FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
      kNotEqual, p.kind(), p.reason(), node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
  DeoptimizeParameters p = DeoptimizeParametersOf(node->op());
  FlagsContinuation cont = FlagsContinuation::ForDeoptimize(
      kEqual, p.kind(), p.reason(), node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitTrapIf(Node* node, Runtime::FunctionId func_id) {
  FlagsContinuation cont =
      FlagsContinuation::ForTrap(kNotEqual, func_id, node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitTrapUnless(Node* node,
                                          Runtime::FunctionId func_id) {
  FlagsContinuation cont =
      FlagsContinuation::ForTrap(kEqual, func_id, node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  X87OperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch, choosing by a simple
  // space/time cost model over the case count and value range.
  size_t table_space_cost = 4 + sw.value_range;
  size_t table_time_cost = 3;
  size_t lookup_space_cost = 3 + 2 * sw.case_count;
  size_t lookup_time_cost = sw.case_count;
  // The min_value > INT32_MIN guard keeps the negation below from
  // overflowing.
  if (sw.case_count > 4 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      sw.min_value > std::numeric_limits<int32_t>::min()) {
    InstructionOperand index_operand = value_operand;
    if (sw.min_value) {
      // Rebase the switch value so the table is indexed from zero.
      index_operand = g.TempRegister();
      Emit(kX87Lea | AddressingModeField::encode(kMode_MRI), index_operand,
           value_operand, g.TempImmediate(-sw.min_value));
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}


void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(node);
  // Equality with zero reuses the compare-with-zero combining logic.
  if (m.right().Is(0)) {
    return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
  }
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWordCompare(this, node, &cont);
}


// The <Op>WithOverflow visitors branch on whether the overflow projection
// is used: if so, the binop sets the overflow flag into the continuation;
// otherwise a plain binop is emitted.
void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX87Add, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX87Add, &cont);
}


void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX87Sub, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX87Sub, &cont);
}

void InstructionSelector::VisitInt32MulWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX87Imul, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX87Imul, &cont);
}

void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ExtractLowWord32, g.DefineAsRegister(node),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87Float64ExtractHighWord32, g.DefineAsRegister(node),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  X87OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  Emit(kX87Float64InsertLowWord32, g.UseFixed(node, stX_0), g.UseRegister(left),
       g.UseRegister(right));
}


void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  X87OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  Emit(kX87Float64InsertHighWord32, g.UseFixed(node, stX_0),
       g.UseRegister(left), g.UseRegister(right));
}

void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
  X87OperandGenerator g(this);
  Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  Emit(kX87Float64SilenceNaN, g.DefineAsFixed(node, stX_0), 0, nullptr);
}

// Atomic word-sized loads need no special instruction here; a plain load
// suffices for the representations asserted below.
void InstructionSelector::VisitAtomicLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  DCHECK(load_rep.representation() == MachineRepresentation::kWord8 ||
         load_rep.representation() == MachineRepresentation::kWord16 ||
         load_rep.representation() == MachineRepresentation::kWord32);
  USE(load_rep);
  VisitLoad(node);
}

// Atomic stores are lowered to xchg instructions (xchg with a memory
// operand is implicitly locking on ia32 — see the Intel SDM). All operands
// are unique registers since the instruction both reads and writes memory.
void InstructionSelector::VisitAtomicStore(Node* node) {
  X87OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kX87Xchgb;
      break;
    case MachineRepresentation::kWord16:
      opcode = kX87Xchgw;
      break;
    case MachineRepresentation::kWord32:
      opcode = kX87Xchgl;
      break;
    default:
      UNREACHABLE();
      break;
  }
  AddressingMode addressing_mode;
  InstructionOperand inputs[4];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  if (g.CanBeImmediate(index)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MR1;
  }
  inputs[input_count++] = g.UseUniqueRegister(value);
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  Emit(code, 0, nullptr, input_count, inputs);
}

// static
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  MachineOperatorBuilder::Flags flags =
      MachineOperatorBuilder::kWord32ShiftIsSafe;
  if (CpuFeatures::IsSupported(POPCNT)) {
    flags |= MachineOperatorBuilder::kWord32Popcnt;
  }

  // All four rounding modes are supported for both float32 and float64
  // (see the VisitFloat*Round* visitors above).
  flags |= MachineOperatorBuilder::kFloat32RoundDown |
           MachineOperatorBuilder::kFloat64RoundDown |
           MachineOperatorBuilder::kFloat32RoundUp |
           MachineOperatorBuilder::kFloat64RoundUp |
           MachineOperatorBuilder::kFloat32RoundTruncate |
           MachineOperatorBuilder::kFloat64RoundTruncate |
           MachineOperatorBuilder::kFloat32RoundTiesEven |
           MachineOperatorBuilder::kFloat64RoundTiesEven;
  return flags;
}

// static
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8