1 // Copyright 2010 the V8 project authors. All rights reserved. 2 // Redistribution and use in source and binary forms, with or without 3 // modification, are permitted provided that the following conditions are 4 // met: 5 // 6 // * Redistributions of source code must retain the above copyright 7 // notice, this list of conditions and the following disclaimer. 8 // * Redistributions in binary form must reproduce the above 9 // copyright notice, this list of conditions and the following 10 // disclaimer in the documentation and/or other materials provided 11 // with the distribution. 12 // * Neither the name of Google Inc. nor the names of its 13 // contributors may be used to endorse or promote products derived 14 // from this software without specific prior written permission. 15 // 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 28 29 #ifndef V8_MIPS_VIRTUAL_FRAME_MIPS_H_ 30 #define V8_MIPS_VIRTUAL_FRAME_MIPS_H_ 31 32 #include "register-allocator.h" 33 34 namespace v8 { 35 namespace internal { 36 37 // This dummy class is only used to create invalid virtual frames. 
// This dummy class is only used to create invalid virtual frames.
extern class InvalidVirtualFrameInitializer {}* kInvalidVirtualFrameInitializer;


// -------------------------------------------------------------------------
// Virtual frames
//
// The virtual frame is an abstraction of the physical stack frame.  It
// encapsulates the parameters, frame-allocated locals, and the expression
// stack.  It supports push/pop operations on the expression stack, as well
// as random access to the expression stack elements, locals, and
// parameters.

class VirtualFrame : public ZoneObject {
 public:
  class RegisterAllocationScope;
  // A utility class to introduce a scope where the virtual frame is
  // expected to remain spilled.  The constructor spills the code
  // generator's current frame, and keeps it spilled.
  class SpilledScope BASE_EMBEDDED {
   public:
    // Spills |frame| on entry (or asserts it is already spilled when an
    // enclosing SpilledScope is active).  A NULL frame is tolerated so the
    // scope can be opened before a frame exists.
    explicit SpilledScope(VirtualFrame* frame)
        : old_is_spilled_(
            Isolate::Current()->is_virtual_frame_in_spilled_scope()) {
      if (frame != NULL) {
        if (!old_is_spilled_) {
          frame->SpillAll();
        } else {
          frame->AssertIsSpilled();
        }
      }
      Isolate::Current()->set_is_virtual_frame_in_spilled_scope(true);
    }
    // Restores the spilled-scope flag captured on entry.
    ~SpilledScope() {
      Isolate::Current()->set_is_virtual_frame_in_spilled_scope(
          old_is_spilled_);
    }
    // True while any SpilledScope is active on this isolate.
    static bool is_spilled() {
      return Isolate::Current()->is_virtual_frame_in_spilled_scope();
    }

   private:
    // Saved value of the isolate-wide spilled-scope flag (stored as int,
    // used as a boolean).
    int old_is_spilled_;

    SpilledScope() {}

    friend class RegisterAllocationScope;
  };

  class RegisterAllocationScope BASE_EMBEDDED {
   public:
    // A utility class to introduce a scope where the virtual frame
    // is not spilled, ie. where register allocation occurs.  Eventually
    // when RegisterAllocationScope is ubiquitous it can be removed
    // along with the (by then unused) SpilledScope class.
    inline explicit RegisterAllocationScope(CodeGenerator* cgen);
    inline ~RegisterAllocationScope();

   private:
    // The code generator whose frame's spilled state is toggled by this
    // scope, and the flag value to restore on exit.
    CodeGenerator* cgen_;
    bool old_is_spilled_;

    RegisterAllocationScope() {}
  };

  // An illegal index into the virtual frame.
  static const int kIllegalIndex = -1;

  // Construct an initial virtual frame on entry to a JS function.
  inline VirtualFrame();

  // Construct an invalid virtual frame, used by JumpTargets.
  explicit inline VirtualFrame(InvalidVirtualFrameInitializer* dummy);

  // Construct a virtual frame as a clone of an existing one.
  explicit inline VirtualFrame(VirtualFrame* original);

  inline CodeGenerator* cgen() const;
  inline MacroAssembler* masm();

  // The number of elements on the virtual frame.
  int element_count() const { return element_count_; }

  // The height of the virtual expression stack.
  inline int height() const;

  // Returns true if the register with allocation index |num| currently
  // holds a frame value.  Indices 0 and 1 (a0, a1) are tracked through the
  // top-of-stack state; indices 2 to 6 are tracked through
  // register_allocation_map_; anything else is never frame-allocated.
  bool is_used(int num) {
    switch (num) {
      case 0: {  // a0.
        return kA0InUse[top_of_stack_state_];
      }
      case 1: {  // a1.
        return kA1InUse[top_of_stack_state_];
      }
      case 2:
      case 3:
      case 4:
      case 5:
      case 6: {  // a2 to a3, t0 to t2.
        ASSERT(num - kFirstAllocatedRegister < kNumberOfAllocatedRegisters);
        ASSERT(num >= kFirstAllocatedRegister);
        if ((register_allocation_map_ &
             (1 << (num - kFirstAllocatedRegister))) == 0) {
          return false;
        } else {
          return true;
        }
      }
      default: {
        ASSERT(num < kFirstAllocatedRegister ||
               num >= kFirstAllocatedRegister + kNumberOfAllocatedRegisters);
        return false;
      }
    }
  }

  // Add extra in-memory elements to the top of the frame to match an actual
  // frame (eg, the frame after an exception handler is pushed).  No code is
  // emitted.
  void Adjust(int count);

  // Forget elements from the top of the frame to match an actual frame (eg,
  // the frame after a runtime call).  No code is emitted except to bring the
  // frame to a spilled state.
  void Forget(int count);


  // Spill all values from the frame to memory.
  void SpillAll();

  // Assert that no frame elements live in registers: no top-of-stack
  // registers in use and an empty register allocation map.
  void AssertIsSpilled() const {
    ASSERT(top_of_stack_state_ == NO_TOS_REGISTERS);
    ASSERT(register_allocation_map_ == 0);
  }

  // Assert that no enclosing SpilledScope is active.
  void AssertIsNotSpilled() {
    ASSERT(!SpilledScope::is_spilled());
  }

  // Spill all occurrences of a specific register from the frame.
  // Not yet implemented on MIPS.
  void Spill(Register reg) {
    UNIMPLEMENTED();
  }

  // Spill all occurrences of an arbitrary register if possible.  Return the
  // register spilled or no_reg if it was not possible to free any register
  // (ie, they all have frame-external references).  Unimplemented.
  Register SpillAnyRegister();

  // Make this virtual frame have a state identical to an expected virtual
  // frame.  As a side effect, code may be emitted to make this frame match
  // the expected one.  The merge can be made conditional on (r1 cond r2).
  void MergeTo(const VirtualFrame* expected,
               Condition cond = al,
               Register r1 = no_reg,
               const Operand& r2 = Operand(no_reg));

  void MergeTo(VirtualFrame* expected,
               Condition cond = al,
               Register r1 = no_reg,
               const Operand& r2 = Operand(no_reg));

  // Checks whether this frame can be branched to by the other frame.
  // Branching is allowed only when every top-of-stack element this frame
  // believes is a smi is also known to be a smi in the other frame.
  bool IsCompatibleWith(const VirtualFrame* other) const {
    return (tos_known_smi_map_ & (~other->tos_known_smi_map_)) == 0;
  }

  // Discard all known-smi information about the top-of-stack elements.
  inline void ForgetTypeInfo() {
    tos_known_smi_map_ = 0;
  }

  // Detach a frame from its code generator, perhaps temporarily.  This
  // tells the register allocator that it is free to use frame-internal
  // registers.  Used when the code generator's frame is switched from this
  // one to NULL by an unconditional jump.  (No-op on MIPS.)
  void DetachFromCodeGenerator() {
  }

  // (Re)attach a frame to its code generator.  This informs the register
  // allocator that the frame-internal register references are active again.
  // Used when a code generator's frame is switched from NULL to this one by
  // binding a label.  (No-op on MIPS.)
  void AttachToCodeGenerator() {
  }

  // Emit code for the physical JS entry and exit frame sequences.  After
  // calling Enter, the virtual frame is ready for use; and after calling
  // Exit it should not be used.  Note that Enter does not allocate space in
  // the physical frame for storing frame-allocated locals.
  void Enter();
  void Exit();

  // Prepare for returning from the frame by elements in the virtual frame.
  // This avoids generating unnecessary merge code when jumping to the shared
  // return site.  No spill code emitted.  Value to return should be in v0.
  inline void PrepareForReturn();

  // Number of local variables after when we use a loop for allocating.
  static const int kLocalVarBound = 5;

  // Allocate and initialize the frame-allocated locals.
  void AllocateStackSlots();

  // The current top of the expression stack as an assembly operand.
  // Only valid when the frame is spilled.
  MemOperand Top() {
    AssertIsSpilled();
    return MemOperand(sp, 0);
  }

  // An element of the expression stack as an assembly operand.  The index
  // is adjusted for any elements currently held in top-of-stack registers
  // rather than in memory.
  MemOperand ElementAt(int index) {
    int adjusted_index = index - kVirtualElements[top_of_stack_state_];
    ASSERT(adjusted_index >= 0);
    return MemOperand(sp, adjusted_index * kPointerSize);
  }

  // Whether the element |index| down from the top of the stack is known to
  // be a smi.  Only the topmost kTOSKnownSmiMapSize elements are tracked.
  bool KnownSmiAt(int index) {
    if (index >= kTOSKnownSmiMapSize) return false;
    return (tos_known_smi_map_ & (1 << index)) != 0;
  }
  // A frame-allocated local as an assembly operand.
  inline MemOperand LocalAt(int index);

  // Push the address of the receiver slot on the frame.
  void PushReceiverSlotAddress();

  // The function frame slot.
  MemOperand Function() { return MemOperand(fp, kFunctionOffset); }

  // The context frame slot.
  MemOperand Context() { return MemOperand(fp, kContextOffset); }

  // A parameter as an assembly operand.
  inline MemOperand ParameterAt(int index);

  // The receiver frame slot.
  inline MemOperand Receiver();

  // Push a try-catch or try-finally handler on top of the virtual frame.
  void PushTryHandler(HandlerType type);

  // Call stub given the number of arguments it expects on (and
  // removes from) the stack.
  inline void CallStub(CodeStub* stub, int arg_count);

  // Call JS function from top of the stack with arguments
  // taken from the stack.
  void CallJSFunction(int arg_count);

  // Call runtime given the number of arguments expected on (and
  // removed from) the stack.
  void CallRuntime(const Runtime::Function* f, int arg_count);
  void CallRuntime(Runtime::FunctionId id, int arg_count);

#ifdef ENABLE_DEBUGGER_SUPPORT
  void DebugBreak();
#endif

  // Invoke builtin given the number of arguments it expects on (and
  // removes from) the stack.
  void InvokeBuiltin(Builtins::JavaScript id,
                     InvokeJSFlags flag,
                     int arg_count);

  // Call load IC.  Receiver is on the stack and is consumed.  Result is
  // returned in v0.
  void CallLoadIC(Handle<String> name, RelocInfo::Mode mode);

  // Call store IC.  If the load is contextual, value is found on top of the
  // frame.  If not, value and receiver are on the frame.  Both are consumed.
  // Result is returned in v0.
  void CallStoreIC(Handle<String> name, bool is_contextual);

  // Call keyed load IC.  Key and receiver are on the stack.  Both are
  // consumed.  Result is returned in v0.
  void CallKeyedLoadIC();

  // Call keyed store IC.  Value, key and receiver are on the stack.  All
  // three are consumed.  Result is returned in v0 (and a0).
  void CallKeyedStoreIC();

  // Call into an IC stub given the number of arguments it removes
  // from the stack.  Register arguments to the IC stub are implicit,
  // and depend on the type of IC stub.
  void CallCodeObject(Handle<Code> ic,
                      RelocInfo::Mode rmode,
                      int dropped_args);

  // Drop a number of elements from the top of the expression stack.  May
  // emit code to affect the physical frame.  Does not clobber any registers
  // excepting possibly the stack pointer.
  void Drop(int count);

  // Drop one element.
  void Drop() { Drop(1); }

  // Pop an element from the top of the expression stack.  Discards
  // the result.
  void Pop();

  // Pop an element from the top of the expression stack.  The register
  // will be one normally used for the top of stack register allocation
  // so you can't hold on to it if you push on the stack.
  Register PopToRegister(Register but_not_to_this_one = no_reg);

  // Look at the top of the stack.  The register returned is aliased and
  // must be copied to a scratch register before modification.
  Register Peek();

  // Look at the value beneath the top of the stack.  The register returned
  // is aliased and must be copied to a scratch register before modification.
  Register Peek2();

  // Duplicate the top of stack.
  void Dup();

  // Duplicate the two elements on top of stack.
  void Dup2();

  // Flushes all registers, but it puts a copy of the top-of-stack in a0.
  void SpillAllButCopyTOSToA0();

  // Flushes all registers, but it puts a copy of the top-of-stack in a1.
  void SpillAllButCopyTOSToA1();

  // Flushes all registers, but it puts a copy of the top-of-stack in a1
  // and the next value on the stack in a0.
  void SpillAllButCopyTOSToA1A0();

  // Pop and save an element from the top of the expression stack and
  // emit a corresponding pop instruction.
  void EmitPop(Register reg);
  // Same but for multiple registers.
  void EmitMultiPop(RegList regs);
  void EmitMultiPopReversed(RegList regs);


  // Takes the top two elements and puts them in a0 (top element) and a1
  // (second element).
  void PopToA1A0();

  // Takes the top element and puts it in a1.
  void PopToA1();

  // Takes the top element and puts it in a0.
  void PopToA0();

  // Push an element on top of the expression stack and emit a
  // corresponding push instruction.  An optional TypeInfo records whether
  // the pushed value is known to be a smi.
  void EmitPush(Register reg, TypeInfo type_info = TypeInfo::Unknown());
  void EmitPush(Operand operand, TypeInfo type_info = TypeInfo::Unknown());
  void EmitPush(MemOperand operand, TypeInfo type_info = TypeInfo::Unknown());
  void EmitPushRoot(Heap::RootListIndex index);

  // Overwrite the nth thing on the stack.  If the nth position is in a
  // register then this turns into a Move, otherwise an sw.  Afterwards
  // you can still use the register even if it is a register that can be
  // used for TOS (a0 or a1).
  void SetElementAt(Register reg, int this_far_down);

  // Get a register which is free and which must be immediately used to
  // push on the top of the stack.
  Register GetTOSRegister();

  // Same but for multiple registers.
  void EmitMultiPush(RegList regs);
  void EmitMultiPushReversed(RegList regs);

  // Scratch registers handed out by the virtual frame (t4 to t6); these are
  // distinct from the registers used for top-of-stack values and locals.
  static Register scratch0() { return t4; }
  static Register scratch1() { return t5; }
  static Register scratch2() { return t6; }

 private:
  // Fixed frame slot offsets, in bytes relative to fp.
  static const int kLocal0Offset = JavaScriptFrameConstants::kLocal0Offset;
  static const int kFunctionOffset = JavaScriptFrameConstants::kFunctionOffset;
  static const int kContextOffset = StandardFrameConstants::kContextOffset;

  // Size of a try handler, in frame elements.
  static const int kHandlerSize = StackHandlerConstants::kSize / kPointerSize;
  static const int kPreallocatedElements = 5 + 8;  // 8 expression stack slots.

  // 5 states for the top of stack, which can be in memory or in a0 and a1.
  // TOS_STATES is the count of states, used to size the lookup tables below.
  enum TopOfStack { NO_TOS_REGISTERS, A0_TOS, A1_TOS, A1_A0_TOS, A0_A1_TOS,
                    TOS_STATES};
  static const int kMaxTOSRegisters = 2;

  // Per-state lookup tables (defined in the .cc file): which TOS registers
  // are in use, how many elements live in registers rather than memory, the
  // state transitions for pop/push, and the registers holding the top and
  // bottom register-resident elements.
  static const bool kA0InUse[TOS_STATES];
  static const bool kA1InUse[TOS_STATES];
  static const int kVirtualElements[TOS_STATES];
  static const TopOfStack kStateAfterPop[TOS_STATES];
  static const TopOfStack kStateAfterPush[TOS_STATES];
  static const Register kTopRegister[TOS_STATES];
  static const Register kBottomRegister[TOS_STATES];

  // We allocate up to 5 locals in registers.
  static const int kNumberOfAllocatedRegisters = 5;
  // Registers with allocation indices 2 to 6 are allocated to locals
  // (a2, a3 and t0 to t2 on MIPS; see is_used above).
  static const int kFirstAllocatedRegister = 2;

  // The registers locals are allocated to, in allocation-index order
  // (defined in the .cc file).
  static const Register kAllocatedRegisters[kNumberOfAllocatedRegisters];

  // Map an allocation index to its Register.
  static Register AllocatedRegister(int r) {
    ASSERT(r >= 0 && r < kNumberOfAllocatedRegisters);
    return kAllocatedRegisters[r];
  }

  // The number of elements on the stack frame.
  int element_count_;
  // Current top-of-stack register state (3 bits hold the 5 TopOfStack
  // values).
  TopOfStack top_of_stack_state_:3;
  // One bit per allocatable register: set if that register holds a local.
  int register_allocation_map_:kNumberOfAllocatedRegisters;
  static const int kTOSKnownSmiMapSize = 4;
  // Bit i is set if the element i down from the top is known to be a smi.
  unsigned tos_known_smi_map_:kTOSKnownSmiMapSize;

  // The index of the element that is at the processor's stack pointer
  // (the sp register).  For now since everything is in memory it is given
  // by the number of elements on the not-very-virtual stack frame.
  int stack_pointer() { return element_count_ - 1; }

  // The number of frame-allocated locals and parameters respectively.
  inline int parameter_count() const;
  inline int local_count() const;

  // The index of the element that is at the processor's frame pointer
  // (the fp register).  The parameters, receiver, function, and context
  // are below the frame pointer.
  inline int frame_pointer() const;

  // The index of the first parameter.  The receiver lies below the first
  // parameter.
  int param0_index() { return 1; }

  // The index of the context slot in the frame.  It is immediately
  // below the frame pointer.
  inline int context_index();

  // The index of the function slot in the frame.  It is below the frame
  // pointer and context slot.
  inline int function_index();

  // The index of the first local.  Between the frame pointer and the
  // locals lies the return address.
  inline int local0_index() const;

  // The index of the base of the expression stack.
  inline int expression_base_index() const;

  // Convert a frame index into a frame pointer relative offset into the
  // actual stack.
  inline int fp_relative(int index);

  // Spill all elements in registers.  Spill the top spilled_args elements
  // on the frame.  Sync all other frame elements.
  // Then drop dropped_args elements from the virtual frame, to match
  // the effect of an upcoming call that will drop them from the stack.
  void PrepareForCall(int spilled_args, int dropped_args);

  // If all top-of-stack registers are in use then the lowest one is pushed
  // onto the physical stack and made free.
  void EnsureOneFreeTOSRegister();

  // Emit instructions to get the top of stack state from where we are to
  // where we want to be.  The transition can be made conditional on
  // (r1 cond r2).
  void MergeTOSTo(TopOfStack expected_state,
                  Condition cond = al,
                  Register r1 = no_reg,
                  const Operand& r2 = Operand(no_reg));

  inline bool Equals(const VirtualFrame* other);

  // Shrink the frame by |count| elements, shifting the known-smi map so it
  // keeps tracking the same elements (or clearing it if all tracked
  // elements are gone).
  inline void LowerHeight(int count) {
    element_count_ -= count;
    if (count >= kTOSKnownSmiMapSize) {
      tos_known_smi_map_ = 0;
    } else {
      tos_known_smi_map_ >>= count;
    }
  }

  // Grow the frame by |count| elements, merging in the known-smi bits for
  // the newly pushed elements.
  inline void RaiseHeight(int count, unsigned known_smi_map = 0) {
    ASSERT(known_smi_map < (1u << count));
    element_count_ += count;
    if (count >= kTOSKnownSmiMapSize) {
      tos_known_smi_map_ = known_smi_map;
    } else {
      tos_known_smi_map_ = ((tos_known_smi_map_ << count) | known_smi_map);
    }
  }
  friend class JumpTarget;
};


} }  // namespace v8::internal

#endif  // V8_MIPS_VIRTUAL_FRAME_MIPS_H_