1 // Copyright 2010 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 28 #include "v8.h" 29 30 #include "codegen-inl.h" 31 #include "register-allocator-inl.h" 32 #include "scopes.h" 33 34 namespace v8 { 35 namespace internal { 36 37 #define __ ACCESS_MASM(masm()) 38 39 // ------------------------------------------------------------------------- 40 // VirtualFrame implementation. 

// On entry to a function, the virtual frame already contains the receiver,
// the parameters, and a return address.  All frame elements are in memory.
VirtualFrame::VirtualFrame()
    : elements_(parameter_count() + local_count() + kPreallocatedElements),
      stack_pointer_(parameter_count() + 1) {  // 0-based index of TOS.
  // Receiver, parameters, and return address start out as synced memory
  // elements with no number-type information.
  for (int i = 0; i <= stack_pointer_; i++) {
    elements_.Add(FrameElement::MemoryElement(NumberInfo::kUnknown));
  }
  // No frame element lives in a register yet.
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    register_locations_[i] = kIllegalIndex;
  }
}


// Emit the standard JS function prologue: save the caller's frame pointer,
// establish this frame, and store the context and function in their slots.
void VirtualFrame::Enter() {
  // Registers live on entry to a JS frame:
  //   rsp: stack pointer, points to return address from this function.
  //   rbp: base pointer, points to previous JS, ArgumentsAdaptor, or
  //        Trampoline frame.
  //   rsi: context of this function call.
  //   rdi: pointer to this function object.
  Comment cmnt(masm(), "[ Enter JS frame");

#ifdef DEBUG
  if (FLAG_debug_code) {
    // Verify that rdi contains a JS function.  The following code
    // relies on rax being available for use.
    Condition not_smi = NegateCondition(masm()->CheckSmi(rdi));
    __ Check(not_smi,
             "VirtualFrame::Enter - rdi is not a function (smi check).");
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
    __ Check(equal,
             "VirtualFrame::Enter - rdi is not a function (map check).");
  }
#endif

  EmitPush(rbp);

  __ movq(rbp, rsp);

  // Store the context in the frame.  The context is kept in rsi and a
  // copy is stored in the frame.  The external reference to rsi
  // remains.
  EmitPush(rsi);

  // Store the function in the frame.  The frame owns the register
  // reference now (ie, it can keep it in rdi or spill it later).
  Push(rdi);
  SyncElementAt(element_count() - 1);
  cgen()->allocator()->Unuse(rdi);
}


// Emit the standard JS function epilogue: tear down the frame and restore
// the caller's frame pointer.  Also drops the virtual-frame elements above
// the restored stack pointer, freeing any registers they held.
void VirtualFrame::Exit() {
  Comment cmnt(masm(), "[ Exit JS frame");
  // Record the location of the JS exit code for patching when setting
  // break point.
  __ RecordJSReturn();

  // Avoid using the leave instruction here, because it is too
  // short. We need the return sequence to be a least the size of a
  // call instruction to support patching the exit code in the
  // debugger. See GenerateReturnSequence for the full return sequence.
  // TODO(X64): A patched call will be very long now.  Make sure we
  // have enough room.
  __ movq(rsp, rbp);
  stack_pointer_ = frame_pointer();
  for (int i = element_count() - 1; i > stack_pointer_; i--) {
    FrameElement last = elements_.RemoveLast();
    if (last.is_register()) {
      Unuse(last.reg());
    }
  }

  EmitPop(rbp);
}


// Allocate and initialize stack slots for all declared locals, filling them
// with the undefined value.  Chooses between a single push, an unrolled
// push loop, or a generated-code loop depending on local_count().
void VirtualFrame::AllocateStackSlots() {
  int count = local_count();
  if (count > 0) {
    Comment cmnt(masm(), "[ Allocate space for locals");
    // The locals are initialized to a constant (the undefined value), but
    // we sync them with the actual frame to allocate space for spilling
    // them later.  First sync everything above the stack pointer so we can
    // use pushes to allocate and initialize the locals.
    SyncRange(stack_pointer_ + 1, element_count() - 1);
    Handle<Object> undefined = Factory::undefined_value();
    FrameElement initial_value =
        FrameElement::ConstantElement(undefined, FrameElement::SYNCED);
    if (count == 1) {
      __ Push(undefined);
    } else if (count < kLocalVarBound) {
      // For less locals the unrolled loop is more compact.
      __ movq(kScratchRegister, undefined, RelocInfo::EMBEDDED_OBJECT);
      for (int i = 0; i < count; i++) {
        __ push(kScratchRegister);
      }
    } else {
      // For more locals a loop in generated code is more compact.
      Label alloc_locals_loop;
      Result cnt = cgen()->allocator()->Allocate();
      ASSERT(cnt.is_valid());
      __ movq(cnt.reg(), Immediate(count));
      __ movq(kScratchRegister, undefined, RelocInfo::EMBEDDED_OBJECT);
      __ bind(&alloc_locals_loop);
      __ push(kScratchRegister);
      __ decl(cnt.reg());
      __ j(not_zero, &alloc_locals_loop);
    }
    // Record the locals in the virtual frame as synced constant elements.
    for (int i = 0; i < count; i++) {
      elements_.Add(initial_value);
      stack_pointer_++;
    }
  }
}


// Write the context register (rsi) into its reserved frame slot.  The slot
// must currently be a memory element.
void VirtualFrame::SaveContextRegister() {
  ASSERT(elements_[context_index()].is_memory());
  __ movq(Operand(rbp, fp_relative(context_index())), rsi);
}


// Reload the context register (rsi) from its reserved frame slot.
void VirtualFrame::RestoreContextRegister() {
  ASSERT(elements_[context_index()].is_memory());
  __ movq(rsi, Operand(rbp, fp_relative(context_index())));
}


// Push the stack address of the receiver slot onto the frame.
void VirtualFrame::PushReceiverSlotAddress() {
  Result temp = cgen()->allocator()->Allocate();
  ASSERT(temp.is_valid());
  __ lea(temp.reg(), ParameterAt(-1));
  Push(&temp);
}


// Pop the top of the actual stack into reg and drop the corresponding
// virtual-frame element.  Requires the virtual stack pointer to be at the
// top of the frame (i.e. no virtual-only elements above it).
void VirtualFrame::EmitPop(Register reg) {
  ASSERT(stack_pointer_ == element_count() - 1);
  stack_pointer_--;
  elements_.RemoveLast();
  __ pop(reg);
}


// As above, but pops into a memory operand.
void VirtualFrame::EmitPop(const Operand& operand) {
  ASSERT(stack_pointer_ == element_count() - 1);
  stack_pointer_--;
  elements_.RemoveLast();
  __ pop(operand);
}


// Push reg on the actual stack and record a matching synced memory element
// (carrying the given number-type info) in the virtual frame.  All EmitPush
// variants require the virtual stack pointer to be at the top of the frame.
void VirtualFrame::EmitPush(Register reg, NumberInfo::Type info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(reg);
}


void VirtualFrame::EmitPush(const Operand& operand, NumberInfo::Type info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(operand);
}


void VirtualFrame::EmitPush(Immediate immediate, NumberInfo::Type info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ push(immediate);
}


// Pushing a smi lets us record precise kSmi number-type information.
void VirtualFrame::EmitPush(Smi* smi_value) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(NumberInfo::kSmi));
  stack_pointer_++;
  __ Push(smi_value);
}


// Derive the number-type info from the pushed handle where possible.
void VirtualFrame::EmitPush(Handle<Object> value) {
  ASSERT(stack_pointer_ == element_count() - 1);
  NumberInfo::Type info = NumberInfo::kUnknown;
  if (value->IsSmi()) {
    info = NumberInfo::kSmi;
  } else if (value->IsHeapNumber()) {
    info = NumberInfo::kHeapNumber;
  }
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ Push(value);
}


void VirtualFrame::EmitPush(Heap::RootListIndex index, NumberInfo::Type info) {
  ASSERT(stack_pointer_ == element_count() - 1);
  elements_.Add(FrameElement::MemoryElement(info));
  stack_pointer_++;
  __ PushRoot(index);
}


// Drop count elements from the top of the frame.  Lowers the actual stack
// pointer only for elements that were materialized on the stack; registers
// held by dropped elements are returned to the allocator.
void VirtualFrame::Drop(int count) {
  ASSERT(count >= 0);
  ASSERT(height() >= count);
  int num_virtual_elements = (element_count() - 1) - stack_pointer_;

  // Emit code to lower the stack pointer if necessary.
  if (num_virtual_elements < count) {
    int num_dropped = count - num_virtual_elements;
    stack_pointer_ -= num_dropped;
    __ addq(rsp, Immediate(num_dropped * kPointerSize));
  }

  // Discard elements from the virtual frame and free any registers.
  for (int i = 0; i < count; i++) {
    FrameElement dropped = elements_.RemoveLast();
    if (dropped.is_register()) {
      Unuse(dropped.reg());
    }
  }
}


// Invalidate the frame slot at index.  If the slot was the backing store of
// copies, promote the first copy to be the new backing store (loading the
// value into a register if it was in memory), redirect the remaining copies
// to it, and return its index; otherwise return kIllegalIndex.
int VirtualFrame::InvalidateFrameSlotAt(int index) {
  FrameElement original = elements_[index];

  // Is this element the backing store of any copies?
  int new_backing_index = kIllegalIndex;
  if (original.is_copied()) {
    // Verify it is copied, and find first copy.
    for (int i = index + 1; i < element_count(); i++) {
      if (elements_[i].is_copy() && elements_[i].index() == index) {
        new_backing_index = i;
        break;
      }
    }
  }

  if (new_backing_index == kIllegalIndex) {
    // No copies found, return kIllegalIndex.
    if (original.is_register()) {
      Unuse(original.reg());
    }
    elements_[index] = FrameElement::InvalidElement();
    return kIllegalIndex;
  }

  // This is the backing store of copies.
  Register backing_reg;
  if (original.is_memory()) {
    // Memory-backed values must be loaded into a fresh register so the
    // copies still have a live backing location.
    Result fresh = cgen()->allocator()->Allocate();
    ASSERT(fresh.is_valid());
    Use(fresh.reg(), new_backing_index);
    backing_reg = fresh.reg();
    __ movq(backing_reg, Operand(rbp, fp_relative(index)));
  } else {
    // The original was in a register.
    backing_reg = original.reg();
    set_register_location(backing_reg, new_backing_index);
  }
  // Invalidate the element at index.
  elements_[index] = FrameElement::InvalidElement();
  // Set the new backing element, preserving its sync state.
  if (elements_[new_backing_index].is_synced()) {
    elements_[new_backing_index] =
        FrameElement::RegisterElement(backing_reg,
                                      FrameElement::SYNCED,
                                      original.number_info());
  } else {
    elements_[new_backing_index] =
        FrameElement::RegisterElement(backing_reg,
                                      FrameElement::NOT_SYNCED,
                                      original.number_info());
  }
  // Update the other copies.
  for (int i = new_backing_index + 1; i < element_count(); i++) {
    if (elements_[i].is_copy() && elements_[i].index() == index) {
      elements_[i].set_index(new_backing_index);
      elements_[new_backing_index].set_copied();
    }
  }
  return new_backing_index;
}


// Take (destructively move) the value at the given slot to the top of the
// frame, leaving the slot invalid unless copies forced a new backing store.
void VirtualFrame::TakeFrameSlotAt(int index) {
  ASSERT(index >= 0);
  // NOTE(review): elements_[index] is read just below, so this upper bound
  // looks like it should be '<' rather than '<=' (sibling
  // StoreToFrameSlotAt asserts index < element_count()) -- confirm.
  ASSERT(index <= element_count());
  FrameElement original = elements_[index];
  int new_backing_store_index = InvalidateFrameSlotAt(index);
  if (new_backing_store_index != kIllegalIndex) {
    // The slot backed copies; the top of frame becomes a copy of the
    // promoted backing store.
    elements_.Add(CopyElementAt(new_backing_store_index));
    return;
  }

  switch (original.type()) {
    case FrameElement::MEMORY: {
      // Emit code to load the original element's data into a register.
      // Push that register as a FrameElement on top of the frame.
      Result fresh = cgen()->allocator()->Allocate();
      ASSERT(fresh.is_valid());
      FrameElement new_element =
          FrameElement::RegisterElement(fresh.reg(),
                                        FrameElement::NOT_SYNCED,
                                        original.number_info());
      Use(fresh.reg(), element_count());
      elements_.Add(new_element);
      __ movq(fresh.reg(), Operand(rbp, fp_relative(index)));
      break;
    }
    case FrameElement::REGISTER:
      Use(original.reg(), element_count());
      // Fall through.
    case FrameElement::CONSTANT:
    case FrameElement::COPY:
      original.clear_sync();
      elements_.Add(original);
      break;
    case FrameElement::INVALID:
      UNREACHABLE();
      break;
  }
}


void VirtualFrame::StoreToFrameSlotAt(int index) {
  // Store the value on top of the frame to the virtual frame slot at
  // a given index.  The value on top of the frame is left in place.
  // This is a duplicating operation, so it can create copies.
  ASSERT(index >= 0);
  ASSERT(index < element_count());

  int top_index = element_count() - 1;
  FrameElement top = elements_[top_index];
  FrameElement original = elements_[index];
  // Storing a copy over its own backing slot is a no-op.
  if (top.is_copy() && top.index() == index) return;
  ASSERT(top.is_valid());

  InvalidateFrameSlotAt(index);

  // InvalidateFrameSlotAt can potentially change any frame element, due
  // to spilling registers to allocate temporaries in order to preserve
  // the copy-on-write semantics of aliased elements.  Reload top from
  // the frame.
  top = elements_[top_index];

  if (top.is_copy()) {
    // There are two cases based on the relative positions of the
    // stored-to slot and the backing slot of the top element.
    int backing_index = top.index();
    ASSERT(backing_index != index);
    if (backing_index < index) {
      // 1. The top element is a copy of a slot below the stored-to
      // slot.  The stored-to slot becomes an unsynced copy of that
      // same backing slot.
      elements_[index] = CopyElementAt(backing_index);
    } else {
      // 2. The top element is a copy of a slot above the stored-to
      // slot.  The stored-to slot becomes the new (unsynced) backing
      // slot and both the top element and the element at the former
      // backing slot become copies of it.  The sync state of the top
      // and former backing elements is preserved.
      FrameElement backing_element = elements_[backing_index];
      ASSERT(backing_element.is_memory() || backing_element.is_register());
      if (backing_element.is_memory()) {
        // Because sets of copies are canonicalized to be backed by
        // their lowest frame element, and because memory frame
        // elements are backed by the corresponding stack address, we
        // have to move the actual value down in the stack.
        //
        // TODO(209): considering allocating the stored-to slot to the
        // temp register.  Alternatively, allow copies to appear in
        // any order in the frame and lazily move the value down to
        // the slot.
        __ movq(kScratchRegister, Operand(rbp, fp_relative(backing_index)));
        __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
      } else {
        set_register_location(backing_element.reg(), index);
        if (backing_element.is_synced()) {
          // If the element is a register, we will not actually move
          // anything on the stack but only update the virtual frame
          // element.
          backing_element.clear_sync();
        }
      }
      elements_[index] = backing_element;

      // The old backing element becomes a copy of the new backing
      // element.
      FrameElement new_element = CopyElementAt(index);
      elements_[backing_index] = new_element;
      if (backing_element.is_synced()) {
        elements_[backing_index].set_sync();
      }

      // All the copies of the old backing element (including the top
      // element) become copies of the new backing element.
      for (int i = backing_index + 1; i < element_count(); i++) {
        if (elements_[i].is_copy() && elements_[i].index() == backing_index) {
          elements_[i].set_index(index);
        }
      }
    }
    return;
  }

  // Move the top element to the stored-to slot and replace it (the
  // top element) with a copy.
  elements_[index] = top;
  if (top.is_memory()) {
    // TODO(209): consider allocating the stored-to slot to the temp
    // register.  Alternatively, allow copies to appear in any order
    // in the frame and lazily move the value down to the slot.
    FrameElement new_top = CopyElementAt(index);
    new_top.set_sync();
    elements_[top_index] = new_top;

    // The sync state of the former top element is correct (synced).
    // Emit code to move the value down in the frame.
    __ movq(kScratchRegister, Operand(rsp, 0));
    __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
  } else if (top.is_register()) {
    set_register_location(top.reg(), index);
    // The stored-to slot has the (unsynced) register reference and
    // the top element becomes a copy.  The sync state of the top is
    // preserved.
    FrameElement new_top = CopyElementAt(index);
    if (top.is_synced()) {
      new_top.set_sync();
      elements_[index].clear_sync();
    }
    elements_[top_index] = new_top;
  } else {
    // The stored-to slot holds the same value as the top but
    // unsynced.  (We do not have copies of constants yet.)
    ASSERT(top.is_constant());
    elements_[index].clear_sync();
  }
}


// Rewrite the frame so it can be the target of a merge from another code
// path: eliminate constants and copies (spilling or materializing them in
// registers) and reset all number-type information to unknown.
void VirtualFrame::MakeMergable() {
  for (int i = 0; i < element_count(); i++) {
    FrameElement element = elements_[i];

    // In all cases we have to reset the number type information
    // to unknown for a mergable frame because of incoming back edges.
    if (element.is_constant() || element.is_copy()) {
      if (element.is_synced()) {
        // Just spill.
        elements_[i] = FrameElement::MemoryElement(NumberInfo::kUnknown);
      } else {
        // Allocate to a register.
        FrameElement backing_element;  // Invalid if not a copy.
        if (element.is_copy()) {
          backing_element = elements_[element.index()];
        }
        Result fresh = cgen()->allocator()->Allocate();
        ASSERT(fresh.is_valid());  // A register was spilled if all were in use.
        elements_[i] =
            FrameElement::RegisterElement(fresh.reg(),
                                          FrameElement::NOT_SYNCED,
                                          NumberInfo::kUnknown);
        Use(fresh.reg(), i);

        // Emit a move.
        if (element.is_constant()) {
          __ Move(fresh.reg(), element.handle());
        } else {
          ASSERT(element.is_copy());
          // Copies are only backed by register or memory locations.
          if (backing_element.is_register()) {
            // The backing store may have been spilled by allocating,
            // but that's OK.  If it was, the value is right where we
            // want it.
            if (!fresh.reg().is(backing_element.reg())) {
              __ movq(fresh.reg(), backing_element.reg());
            }
          } else {
            ASSERT(backing_element.is_memory());
            __ movq(fresh.reg(), Operand(rbp, fp_relative(element.index())));
          }
        }
      }
      // No need to set the copied flag --- there are no copies.
    } else {
      // Clear the copy flag of non-constant, non-copy elements.
      // They cannot be copied because copies are not allowed.
      // The copy flag is not relied on before the end of this loop,
      // including when registers are spilled.
      elements_[i].clear_copied();
      elements_[i].set_number_info(NumberInfo::kUnknown);
    }
  }
}


// Emit code to transform the current frame into the expected frame.  The
// three helper passes move values X-to-memory, register-to-register, and
// X-to-register, in that order.
void VirtualFrame::MergeTo(VirtualFrame* expected) {
  Comment cmnt(masm(), "[ Merge frame");
  // We should always be merging the code generator's current frame to an
  // expected frame.
  ASSERT(cgen()->frame() == this);

  // Adjust the stack pointer upward (toward the top of the virtual
  // frame) if necessary.
  if (stack_pointer_ < expected->stack_pointer_) {
    int difference = expected->stack_pointer_ - stack_pointer_;
    stack_pointer_ = expected->stack_pointer_;
    __ subq(rsp, Immediate(difference * kPointerSize));
  }

  MergeMoveRegistersToMemory(expected);
  MergeMoveRegistersToRegisters(expected);
  MergeMoveMemoryToRegisters(expected);

  // Adjust the stack pointer downward if necessary.
  if (stack_pointer_ > expected->stack_pointer_) {
    int difference = stack_pointer_ - expected->stack_pointer_;
    stack_pointer_ = expected->stack_pointer_;
    __ addq(rsp, Immediate(difference * kPointerSize));
  }

  // At this point, the frames should be identical.
  ASSERT(Equals(expected));
}


void VirtualFrame::MergeMoveRegistersToMemory(VirtualFrame* expected) {
  ASSERT(stack_pointer_ >= expected->stack_pointer_);

  // Move registers, constants, and copies to memory.  Perform moves
  // from the top downward in the frame in order to leave the backing
  // stores of copies in registers.
  for (int i = element_count() - 1; i >= 0; i--) {
    FrameElement target = expected->elements_[i];
    if (target.is_register()) continue;  // Handle registers later.
    if (target.is_memory()) {
      FrameElement source = elements_[i];
      switch (source.type()) {
        case FrameElement::INVALID:
          // Not a legal merge move.
          UNREACHABLE();
          break;

        case FrameElement::MEMORY:
          // Already in place.
          break;

        case FrameElement::REGISTER:
          Unuse(source.reg());
          if (!source.is_synced()) {
            __ movq(Operand(rbp, fp_relative(i)), source.reg());
          }
          break;

        case FrameElement::CONSTANT:
          if (!source.is_synced()) {
            __ Move(Operand(rbp, fp_relative(i)), source.handle());
          }
          break;

        case FrameElement::COPY:
          if (!source.is_synced()) {
            int backing_index = source.index();
            FrameElement backing_element = elements_[backing_index];
            if (backing_element.is_memory()) {
              __ movq(kScratchRegister,
                      Operand(rbp, fp_relative(backing_index)));
              __ movq(Operand(rbp, fp_relative(i)), kScratchRegister);
            } else {
              ASSERT(backing_element.is_register());
              __ movq(Operand(rbp, fp_relative(i)), backing_element.reg());
            }
          }
          break;
      }
    }
    elements_[i] = target;
  }
}


void VirtualFrame::MergeMoveRegistersToRegisters(VirtualFrame* expected) {
  // We have already done X-to-memory moves.
  ASSERT(stack_pointer_ >= expected->stack_pointer_);

  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    // Move the right value into register i if it is currently in a register.
    int index = expected->register_location(i);
    int use_index = register_location(i);
    // Skip if register i is unused in the target or else if source is
    // not a register (this is not a register-to-register move).
    if (index == kIllegalIndex || !elements_[index].is_register()) continue;

    Register target = RegisterAllocator::ToRegister(i);
    Register source = elements_[index].reg();
    if (index != use_index) {
      if (use_index == kIllegalIndex) {  // Target is currently unused.
        // Copy contents of source from source to target.
        // Set frame element register to target.
        Use(target, index);
        Unuse(source);
        __ movq(target, source);
      } else {
        // Exchange contents of registers source and target.
        // Nothing except the register backing use_index has changed.
        elements_[use_index].set_reg(source);
        set_register_location(target, index);
        set_register_location(source, use_index);
        __ xchg(source, target);
      }
    }

    // Sync the element if the target frame expects it synced but the
    // current element is not.
    if (!elements_[index].is_synced() &&
        expected->elements_[index].is_synced()) {
      __ movq(Operand(rbp, fp_relative(index)), target);
    }
    elements_[index] = expected->elements_[index];
  }
}


void VirtualFrame::MergeMoveMemoryToRegisters(VirtualFrame* expected) {
  // Move memory, constants, and copies to registers.  This is the
  // final step and since it is not done from the bottom up, but in
  // register code order, we have special code to ensure that the backing
  // elements of copies are in their correct locations when we
  // encounter the copies.
  for (int i = 0; i < RegisterAllocator::kNumRegisters; i++) {
    int index = expected->register_location(i);
    if (index != kIllegalIndex) {
      FrameElement source = elements_[index];
      FrameElement target = expected->elements_[index];
      Register target_reg = RegisterAllocator::ToRegister(i);
      ASSERT(target.reg().is(target_reg));
      switch (source.type()) {
        case FrameElement::INVALID:  // Fall through.
          UNREACHABLE();
          break;
        case FrameElement::REGISTER:
          ASSERT(source.Equals(target));
          // Go to next iteration.  Skips Use(target_reg) and syncing
          // below.  It is safe to skip syncing because a target
          // register frame element would only be synced if all source
          // elements were.
          continue;
          break;
        case FrameElement::MEMORY:
          ASSERT(index <= stack_pointer_);
          __ movq(target_reg, Operand(rbp, fp_relative(index)));
          break;

        case FrameElement::CONSTANT:
          __ Move(target_reg, source.handle());
          break;

        case FrameElement::COPY: {
          int backing_index = source.index();
          FrameElement backing = elements_[backing_index];
          ASSERT(backing.is_memory() || backing.is_register());
          if (backing.is_memory()) {
            ASSERT(backing_index <= stack_pointer_);
            // Code optimization if backing store should also move
            // to a register: move backing store to its register first.
            if (expected->elements_[backing_index].is_register()) {
              FrameElement new_backing = expected->elements_[backing_index];
              Register new_backing_reg = new_backing.reg();
              ASSERT(!is_used(new_backing_reg));
              elements_[backing_index] = new_backing;
              Use(new_backing_reg, backing_index);
              __ movq(new_backing_reg,
                      Operand(rbp, fp_relative(backing_index)));
              __ movq(target_reg, new_backing_reg);
            } else {
              __ movq(target_reg, Operand(rbp, fp_relative(backing_index)));
            }
          } else {
            __ movq(target_reg, backing.reg());
          }
        }
      }
      // Ensure the proper sync state.
      if (target.is_synced() && !source.is_synced()) {
        __ movq(Operand(rbp, fp_relative(index)), target_reg);
      }
      Use(target_reg, index);
      elements_[index] = target;
    }
  }
}


// Pop the top frame element and return it as a Result, emitting a hardware
// pop (or stack-pointer adjustment) when the element was materialized.
// Follows copies to their backing store, which may promote a memory backing
// element to a register.
Result VirtualFrame::Pop() {
  FrameElement element = elements_.RemoveLast();
  int index = element_count();
  ASSERT(element.is_valid());

  // Get number type information of the result.
  NumberInfo::Type info;
  if (!element.is_copy()) {
    info = element.number_info();
  } else {
    info = elements_[element.index()].number_info();
  }

  bool pop_needed = (stack_pointer_ == index);
  if (pop_needed) {
    stack_pointer_--;
    if (element.is_memory()) {
      Result temp = cgen()->allocator()->Allocate();
      ASSERT(temp.is_valid());
      __ pop(temp.reg());
      temp.set_number_info(info);
      return temp;
    }

    __ addq(rsp, Immediate(kPointerSize));
  }
  ASSERT(!element.is_memory());

  // The top element is a register, constant, or a copy.  Unuse
  // registers and follow copies to their backing store.
  if (element.is_register()) {
    Unuse(element.reg());
  } else if (element.is_copy()) {
    ASSERT(element.index() < index);
    index = element.index();
    element = elements_[index];
  }
  ASSERT(!element.is_copy());

  // The element is memory, a register, or a constant.
  if (element.is_memory()) {
    // Memory elements could only be the backing store of a copy.
    // Allocate the original to a register.
    ASSERT(index <= stack_pointer_);
    Result temp = cgen()->allocator()->Allocate();
    ASSERT(temp.is_valid());
    Use(temp.reg(), index);
    FrameElement new_element =
        FrameElement::RegisterElement(temp.reg(),
                                      FrameElement::SYNCED,
                                      element.number_info());
    // Preserve the copy flag on the element.
    if (element.is_copied()) new_element.set_copied();
    elements_[index] = new_element;
    __ movq(temp.reg(), Operand(rbp, fp_relative(index)));
    return Result(temp.reg(), info);
  } else if (element.is_register()) {
    return Result(element.reg(), info);
  } else {
    ASSERT(element.is_constant());
    return Result(element.handle());
  }
}


// Call a code stub and return the result (which the stub leaves in rax).
Result VirtualFrame::RawCallStub(CodeStub* stub) {
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallStub(stub);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


// Call a one-argument stub; the argument is passed in rax.
Result VirtualFrame::CallStub(CodeStub* stub, Result* arg) {
  PrepareForCall(0, 0);
  arg->ToRegister(rax);
  arg->Unuse();
  return RawCallStub(stub);
}


// Call a two-argument stub; arg0 goes in rdx, arg1 in rax.  The moves are
// ordered so the two arguments never clobber each other.
Result VirtualFrame::CallStub(CodeStub* stub, Result* arg0, Result* arg1) {
  PrepareForCall(0, 0);

  if (arg0->is_register() && arg0->reg().is(rax)) {
    if (arg1->is_register() && arg1->reg().is(rdx)) {
      // Wrong registers.
      __ xchg(rax, rdx);
    } else {
      // Register rdx is free for arg0, which frees rax for arg1.
      arg0->ToRegister(rdx);
      arg1->ToRegister(rax);
    }
  } else {
    // Register rax is free for arg1, which guarantees rdx is free for
    // arg0.
    arg1->ToRegister(rax);
    arg0->ToRegister(rdx);
  }

  arg0->Unuse();
  arg1->Unuse();
  return RawCallStub(stub);
}


void VirtualFrame::SyncElementBelowStackPointer(int index) {
  // Emit code to write elements below the stack pointer to their
  // (already allocated) stack address.
  ASSERT(index <= stack_pointer_);
  FrameElement element = elements_[index];
  ASSERT(!element.is_synced());
  switch (element.type()) {
    case FrameElement::INVALID:
      break;

    case FrameElement::MEMORY:
      // This function should not be called with synced elements.
      // (memory elements are always synced).
      UNREACHABLE();
      break;

    case FrameElement::REGISTER:
      __ movq(Operand(rbp, fp_relative(index)), element.reg());
      break;

    case FrameElement::CONSTANT:
      __ Move(Operand(rbp, fp_relative(index)), element.handle());
      break;

    case FrameElement::COPY: {
      int backing_index = element.index();
      FrameElement backing_element = elements_[backing_index];
      if (backing_element.is_memory()) {
        __ movq(kScratchRegister, Operand(rbp, fp_relative(backing_index)));
        __ movq(Operand(rbp, fp_relative(index)), kScratchRegister);
      } else {
        ASSERT(backing_element.is_register());
        __ movq(Operand(rbp, fp_relative(index)), backing_element.reg());
      }
      break;
    }
  }
  elements_[index].set_sync();
}


void VirtualFrame::SyncElementByPushing(int index) {
  // Sync an element of the frame that is just above the stack pointer
  // by pushing it.
  ASSERT(index == stack_pointer_ + 1);
  stack_pointer_++;
  FrameElement element = elements_[index];

  switch (element.type()) {
    case FrameElement::INVALID:
      // Push a dummy value to keep the stack layout consistent.
      __ Push(Smi::FromInt(0));
      break;

    case FrameElement::MEMORY:
      // No memory elements exist above the stack pointer.
      UNREACHABLE();
      break;

    case FrameElement::REGISTER:
      __ push(element.reg());
      break;

    case FrameElement::CONSTANT:
      __ Move(kScratchRegister, element.handle());
      __ push(kScratchRegister);
      break;

    case FrameElement::COPY: {
      int backing_index = element.index();
      FrameElement backing = elements_[backing_index];
      ASSERT(backing.is_memory() || backing.is_register());
      if (backing.is_memory()) {
        __ push(Operand(rbp, fp_relative(backing_index)));
      } else {
        __ push(backing.reg());
      }
      break;
    }
  }
  elements_[index].set_sync();
}


// Clear the dirty bits for the range of elements in
// [min(stack_pointer_ + 1,begin), end].
void VirtualFrame::SyncRange(int begin, int end) {
  ASSERT(begin >= 0);
  ASSERT(end < element_count());
  // Sync elements below the range if they have not been materialized
  // on the stack.
  int start = Min(begin, stack_pointer_ + 1);

  // If positive we have to adjust the stack pointer.
  int delta = end - stack_pointer_;
  if (delta > 0) {
    // Allocate stack space for the elements now covered by the range.
    stack_pointer_ = end;
    __ subq(rsp, Immediate(delta * kPointerSize));
  }

  // Write every unsynced element in [start, end] to its stack slot.
  for (int i = start; i <= end; i++) {
    if (!elements_[i].is_synced()) SyncElementBelowStackPointer(i);
  }
}


// Call a JS builtin after spilling the arguments; the builtin's result is
// returned in rax.
Result VirtualFrame::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   int arg_count) {
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ InvokeBuiltin(id, flag);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


//------------------------------------------------------------------------------
// Virtual frame stub and IC calling functions.

// Common tail used by the Call* functions below: emit the call to the code
// object and hand back the value the callee leaves in rax.  The caller is
// responsible for having prepared the frame (e.g. via PrepareForCall).
Result VirtualFrame::RawCallCodeObject(Handle<Code> code,
                                       RelocInfo::Mode rmode) {
  ASSERT(cgen()->HasValidEntryRegisters());
  __ Call(code, rmode);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


// Call a runtime function.  The arg_count arguments on top of the frame are
// spilled and consumed by the call; the result arrives in rax.
Result VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(f, arg_count);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


// Same as above, but identifies the runtime function by id.
Result VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
  PrepareForCall(arg_count, arg_count);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ CallRuntime(id, arg_count);
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
  return result;
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void VirtualFrame::DebugBreak() {
  PrepareForCall(0, 0);
  ASSERT(cgen()->HasValidEntryRegisters());
  __ DebugBreak();
  // The result is not returned to the caller; allocating rax here only
  // asserts that it is available after the break.
  Result result = cgen()->allocator()->Allocate(rax);
  ASSERT(result.is_valid());
}
#endif


Result VirtualFrame::CallLoadIC(RelocInfo::Mode mode) {
  // Name and receiver are on the top of the frame.  The IC expects
  // name in rcx and receiver on the stack.  It does not drop the
  // receiver.
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  Result name = Pop();
  PrepareForCall(1, 0);  // One stack arg, not callee-dropped.
  name.ToRegister(rcx);
  name.Unuse();
  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallKeyedLoadIC(RelocInfo::Mode mode) {
  // Key and receiver are on top of the frame.  The IC expects them on
  // the stack.  It does not drop them.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  PrepareForCall(2, 0);  // Two stack args, neither callee-dropped.
  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallKeyedStoreIC() {
  // Value, key, and receiver are on the top of the frame.  The IC
  // expects value in rax and key and receiver on the stack.  It does
  // not drop the key and receiver.
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  Result value = Pop();
  PrepareForCall(2, 0);  // Two stack args, neither callee-dropped.
  value.ToRegister(rax);
  value.Unuse();
  return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
}


Result VirtualFrame::CallCallIC(RelocInfo::Mode mode,
                                int arg_count,
                                int loop_nesting) {
  // Function name, arguments, and receiver are found on top of the frame
  // and dropped by the call.  The IC expects the name in rcx and the rest
  // on the stack, and drops them all.
  InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
  Handle<Code> ic = cgen()->ComputeCallInitialize(arg_count, in_loop);
  Result name = Pop();
  // Spill args, receiver, and function.  The call will drop args and
  // receiver.
  PrepareForCall(arg_count + 1, arg_count + 1);
  name.ToRegister(rcx);
  name.Unuse();
  return RawCallCodeObject(ic, mode);
}


Result VirtualFrame::CallConstructor(int arg_count) {
  // Arguments, receiver, and function are on top of the frame.  The
  // IC expects arg count in rax, function in rdi, and the arguments
  // and receiver on the stack.
  Handle<Code> ic(Builtins::builtin(Builtins::JSConstructCall));
  // Duplicate the function before preparing the frame.
  PushElementAt(arg_count + 1);
  Result function = Pop();
  PrepareForCall(arg_count + 1, arg_count + 1);  // Spill args and receiver.
  function.ToRegister(rdi);

  // Constructors are called with the number of arguments in register
  // rax for now.  Another option would be to have separate construct
  // call trampolines per different arguments counts encountered.
  Result num_args = cgen()->allocator()->Allocate(rax);
  ASSERT(num_args.is_valid());
  __ movq(num_args.reg(), Immediate(arg_count));

  function.Unuse();
  num_args.Unuse();
  return RawCallCodeObject(ic, RelocInfo::CONSTRUCT_CALL);
}


Result VirtualFrame::CallStoreIC() {
  // Name, value, and receiver are on top of the frame.  The IC
  // expects name in rcx, value in rax, and receiver in rdx.
  // (NOTE: the original comments here used ia32 register names,
  // apparently copied from the ia32 port; they described the wrong
  // registers for x64 and have been corrected below.)
  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
  Result name = Pop();
  Result value = Pop();
  Result receiver = Pop();
  PrepareForCall(0, 0);

  // If name is in rdx or rax it would be clobbered by the fixed-register
  // moves below, so first park it in a register that stays free.  When
  // name is a constant (not in a register) nothing needs to move.
  if (name.is_register() && (name.reg().is(rdx) || name.reg().is(rax))) {
    if (!is_used(rcx)) {
      name.ToRegister(rcx);
    } else if (!is_used(rbx)) {
      name.ToRegister(rbx);
    } else {
      ASSERT(!is_used(rdi));  // Only three results are live, so rdi is free.
      name.ToRegister(rdi);
    }
  }
  // Now name is not in rdx or rax, so we can fix them, then move name to rcx.
  if (value.is_register() && value.reg().is(rdx)) {
    if (receiver.is_register() && receiver.reg().is(rax)) {
      // Wrong registers: value and receiver are exactly swapped.
      __ xchg(rax, rdx);
    } else {
      // Register rax is free for value, which frees rdx for receiver.
      value.ToRegister(rax);
      receiver.ToRegister(rdx);
    }
  } else {
    // Register rdx is free for receiver, which guarantees rax is free for
    // value.
    receiver.ToRegister(rdx);
    value.ToRegister(rax);
  }
  // Receiver and value are in the right place, so rcx is free for name.
  name.ToRegister(rcx);
  name.Unuse();
  value.Unuse();
  receiver.Unuse();
  return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
}


void VirtualFrame::PushTryHandler(HandlerType type) {
  ASSERT(cgen()->HasValidEntryRegisters());
  // Grow the expression stack by handler size less one (the return
  // address is already pushed by a call instruction).
  Adjust(kHandlerSize - 1);
  __ PushTryHandler(IN_JAVASCRIPT, type);
}


#undef __

} } // namespace v8::internal