// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "codegen-inl.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "parser.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o r1: the JS function object being called (ie, ourselves)
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FullCodeGenerator::Generate(CompilationInfo* info, Mode mode) {
  ASSERT(info_ == NULL);
  info_ = info;
  SetFunctionPosition(function());

  if (mode == PRIMARY) {
    int locals_count = scope()->num_stack_slots();

    __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
    if (locals_count > 0) {
      // Load undefined value here, so the value is ready for the loop
      // below.
      __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    }
    // Adjust fp to point to caller's fp.
    __ add(fp, sp, Operand(2 * kPointerSize));

    { Comment cmnt(masm_, "[ Allocate locals");
      for (int i = 0; i < locals_count; i++) {
        __ push(ip);
      }
    }

    bool function_in_register = true;

    // Possibly allocate a local context.
    if (scope()->num_heap_slots() > 0) {
      Comment cmnt(masm_, "[ Allocate local context");
      // Argument to NewContext is the function, which is in r1.
      __ push(r1);
      __ CallRuntime(Runtime::kNewContext, 1);
      function_in_register = false;
      // Context is returned in both r0 and cp. It replaces the context
      // passed to us. It's saved in the stack and kept live in cp.
      __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
      // Copy any necessary parameters into the context.
      int num_parameters = scope()->num_parameters();
      for (int i = 0; i < num_parameters; i++) {
        Slot* slot = scope()->parameter(i)->slot();
        if (slot != NULL && slot->type() == Slot::CONTEXT) {
          int parameter_offset = StandardFrameConstants::kCallerSPOffset +
              (num_parameters - 1 - i) * kPointerSize;
          // Load parameter from stack.
          __ ldr(r0, MemOperand(fp, parameter_offset));
          // Store it in the context.
          __ mov(r1, Operand(Context::SlotOffset(slot->index())));
          __ str(r0, MemOperand(cp, r1));
          // Update the write barrier. This clobbers all involved
          // registers, so we have to use a third register to avoid
          // clobbering cp.
          __ mov(r2, Operand(cp));
          __ RecordWrite(r2, r1, r0);
        }
      }
    }

    Variable* arguments = scope()->arguments()->AsVariable();
    if (arguments != NULL) {
      // Function uses arguments object.
      Comment cmnt(masm_, "[ Allocate arguments object");
      if (!function_in_register) {
        // Load this again, if it's used by the local context below.
        __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      } else {
        __ mov(r3, r1);
      }
      // Receiver is just before the parameters on the caller's stack.
      int offset = scope()->num_parameters() * kPointerSize;
      __ add(r2, fp,
             Operand(StandardFrameConstants::kCallerSPOffset + offset));
      __ mov(r1, Operand(Smi::FromInt(scope()->num_parameters())));
      __ stm(db_w, sp, r3.bit() | r2.bit() | r1.bit());

      // Arguments to ArgumentsAccessStub:
      //   function, receiver address, parameter count.
      // The stub will rewrite receiver and parameter count if the previous
      // stack frame was an arguments adapter frame.
      ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
      __ CallStub(&stub);
      // Duplicate the value; move-to-slot operation might clobber registers.
      __ mov(r3, r0);
      Move(arguments->slot(), r0, r1, r2);
      Slot* dot_arguments_slot =
          scope()->arguments_shadow()->AsVariable()->slot();
      Move(dot_arguments_slot, r3, r1, r2);
    }
  }

  // Check the stack for overflow or break request.
  // Put the lr setup instruction in the delay slot. The kInstrSize is
  // added to the implicit 8 byte offset that always applies to operations
  // with pc and gives a return address 12 bytes down.
  { Comment cmnt(masm_, "[ Stack check");
    __ LoadRoot(r2, Heap::kStackLimitRootIndex);
    __ add(lr, pc, Operand(Assembler::kInstrSize));
    __ cmp(sp, Operand(r2));
    StackCheckStub stub;
    __ mov(pc,
           Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
                   RelocInfo::CODE_TARGET),
           LeaveCC,
           lo);
  }

  { Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  { Comment cmnt(masm_, "[ Body");
    ASSERT(loop_depth() == 0);
    VisitStatements(function()->body());
    ASSERT(loop_depth() == 0);
  }

  { Comment cmnt(masm_, "[ return <undefined>;");
    // Emit a 'return undefined' in case control fell off the end of the
    // body.
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence(function()->end_position());
}


void FullCodeGenerator::EmitReturnSequence(int position) {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ b(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in r0.
      __ push(r0);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }

    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);

    // Calculate the exact length of the return sequence and make sure that
    // the constant pool is not emitted inside of the return sequence.
    int num_parameters = scope()->num_parameters();
    int32_t sp_delta = (num_parameters + 1) * kPointerSize;
    int return_sequence_length = Assembler::kJSReturnSequenceLength;
    if (!masm_->ImmediateFitsAddrMode1Instruction(sp_delta)) {
      // Additional mov instruction generated.
      return_sequence_length++;
    }
    masm_->BlockConstPoolFor(return_sequence_length);

    CodeGenerator::RecordPositions(masm_, position);
    __ RecordJSReturn();
    __ mov(sp, fp);
    __ ldm(ia_w, sp, fp.bit() | lr.bit());
    __ add(sp, sp, Operand(sp_delta));
    __ Jump(lr);

    // Check that the size of the code used for returning matches what is
    // expected by the debugger. The add instruction above is an addressing
    // mode 1 instruction where there are restrictions on which immediate
    // values can be encoded in the instruction and which immediate values
    // require use of an additional instruction for moving the immediate to
    // a temporary register.
    ASSERT_EQ(return_sequence_length,
              masm_->InstructionsGeneratedSince(&check_exit_codesize));
  }
}


void FullCodeGenerator::Apply(Expression::Context context, Register reg) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();

    case Expression::kEffect:
      // Nothing to do.
      break;

    case Expression::kValue:
      // Move value into place.
      switch (location_) {
        case kAccumulator:
          if (!reg.is(result_register())) __ mov(result_register(), reg);
          break;
        case kStack:
          __ push(reg);
          break;
      }
      break;

    case Expression::kValueTest:
    case Expression::kTestValue:
      // Push an extra copy of the value in case it's needed.
      __ push(reg);
      // Fall through.

    case Expression::kTest:
      // We always call the runtime on ARM, so push the value as argument.
      __ push(reg);
      DoTest(context);
      break;
  }
}


void FullCodeGenerator::Apply(Expression::Context context, Slot* slot) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();
    case Expression::kEffect:
      // Nothing to do.
      break;
    case Expression::kValue:
    case Expression::kTest:
    case Expression::kValueTest:
    case Expression::kTestValue:
      // On ARM we have to move the value into a register to do anything
      // with it.
      Move(result_register(), slot);
      Apply(context, result_register());
      break;
  }
}


void FullCodeGenerator::Apply(Expression::Context context, Literal* lit) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();
    case Expression::kEffect:
      // Nothing to do.
      break;
    case Expression::kValue:
    case Expression::kTest:
    case Expression::kValueTest:
    case Expression::kTestValue:
      // On ARM we have to move the value into a register to do anything
      // with it.
      __ mov(result_register(), Operand(lit->handle()));
      Apply(context, result_register());
      break;
  }
}


void FullCodeGenerator::ApplyTOS(Expression::Context context) {
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();

    case Expression::kEffect:
      __ Drop(1);
      break;

    case Expression::kValue:
      switch (location_) {
        case kAccumulator:
          __ pop(result_register());
          break;
        case kStack:
          break;
      }
      break;

    case Expression::kValueTest:
    case Expression::kTestValue:
      // Duplicate the value on the stack in case it's needed.
      __ ldr(ip, MemOperand(sp));
      __ push(ip);
      // Fall through.

    case Expression::kTest:
      DoTest(context);
      break;
  }
}


void FullCodeGenerator::DropAndApply(int count,
                                     Expression::Context context,
                                     Register reg) {
  ASSERT(count > 0);
  ASSERT(!reg.is(sp));
  switch (context) {
    case Expression::kUninitialized:
      UNREACHABLE();

    case Expression::kEffect:
      __ Drop(count);
      break;

    case Expression::kValue:
      switch (location_) {
        case kAccumulator:
          __ Drop(count);
          if (!reg.is(result_register())) __ mov(result_register(), reg);
          break;
        case kStack:
          if (count > 1) __ Drop(count - 1);
          __ str(reg, MemOperand(sp));
          break;
      }
      break;

    case Expression::kTest:
      if (count > 1) __ Drop(count - 1);
      __ str(reg, MemOperand(sp));
      DoTest(context);
      break;

    case Expression::kValueTest:
    case Expression::kTestValue:
      if (count == 1) {
        __ str(reg, MemOperand(sp));
        __ push(reg);
      } else {  // count > 1
        __ Drop(count - 2);
        __ str(reg, MemOperand(sp, kPointerSize));
        __ str(reg, MemOperand(sp));
      }
      DoTest(context);
      break;
  }
}


void FullCodeGenerator::Apply(Expression::Context context,
                              Label* materialize_true,
                              Label* materialize_false) {
  switch (context) {
    case Expression::kUninitialized:

    case Expression::kEffect:
      ASSERT_EQ(materialize_true, materialize_false);
      __ bind(materialize_true);
      break;

    case Expression::kValue: {
      Label done;
      __ bind(materialize_true);
      __ mov(result_register(), Operand(Factory::true_value()));
      __ jmp(&done);
      __ bind(materialize_false);
      __ mov(result_register(), Operand(Factory::false_value()));
      __ bind(&done);
      switch (location_) {
        case kAccumulator:
          break;
        case kStack:
          __ push(result_register());
          break;
      }
      break;
    }

    case Expression::kTest:
      break;

    case Expression::kValueTest:
      __ bind(materialize_true);
      __ mov(result_register(), Operand(Factory::true_value()));
      switch (location_) {
        case kAccumulator:
          break;
        case kStack:
          __ push(result_register());
          break;
      }
      __ jmp(true_label_);
      break;

    case Expression::kTestValue:
      __ bind(materialize_false);
      __ mov(result_register(), Operand(Factory::false_value()));
      switch (location_) {
        case kAccumulator:
          break;
        case kStack:
          __ push(result_register());
          break;
      }
      __ jmp(false_label_);
      break;
  }
}


void FullCodeGenerator::DoTest(Expression::Context context) {
  // The value to test is pushed on the stack, and duplicated on the stack
  // if necessary (for value/test and test/value contexts).
  ASSERT_NE(NULL, true_label_);
  ASSERT_NE(NULL, false_label_);

  // Call the runtime to find the boolean value of the source and then
  // translate it into control flow to the pair of labels.
  __ CallRuntime(Runtime::kToBool, 1);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(r0, ip);

  // Complete based on the context.
  switch (context) {
    case Expression::kUninitialized:
    case Expression::kEffect:
    case Expression::kValue:
      UNREACHABLE();

    case Expression::kTest:
      __ b(eq, true_label_);
      __ jmp(false_label_);
      break;

    case Expression::kValueTest: {
      Label discard;
      switch (location_) {
        case kAccumulator:
          __ b(ne, &discard);
          __ pop(result_register());
          __ jmp(true_label_);
          break;
        case kStack:
          __ b(eq, true_label_);
          break;
      }
      __ bind(&discard);
      __ Drop(1);
      __ jmp(false_label_);
      break;
    }

    case Expression::kTestValue: {
      Label discard;
      switch (location_) {
        case kAccumulator:
          __ b(eq, &discard);
          __ pop(result_register());
          __ jmp(false_label_);
          break;
        case kStack:
          __ b(ne, false_label_);
          break;
      }
      __ bind(&discard);
      __ Drop(1);
      __ jmp(true_label_);
      break;
    }
  }
}


MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
  switch (slot->type()) {
    case Slot::PARAMETER:
    case Slot::LOCAL:
      return MemOperand(fp, SlotOffset(slot));
    case Slot::CONTEXT: {
      int context_chain_length =
          scope()->ContextChainLength(slot->var()->scope());
      __ LoadContext(scratch, context_chain_length);
      return CodeGenerator::ContextOperand(scratch, slot->index());
    }
    case Slot::LOOKUP:
      UNREACHABLE();
  }
  UNREACHABLE();
  return MemOperand(r0, 0);
}


void FullCodeGenerator::Move(Register destination, Slot* source) {
  // Use destination as scratch.
  MemOperand slot_operand = EmitSlotSearch(source, destination);
  __ ldr(destination, slot_operand);
}


void FullCodeGenerator::Move(Slot* dst,
                             Register src,
                             Register scratch1,
                             Register scratch2) {
  ASSERT(dst->type() != Slot::LOOKUP);  // Not yet implemented.
  ASSERT(!scratch1.is(src) && !scratch2.is(src));
  MemOperand location = EmitSlotSearch(dst, scratch1);
  __ str(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (dst->type() == Slot::CONTEXT) {
    __ mov(scratch2, Operand(Context::SlotOffset(dst->index())));
    __ RecordWrite(scratch1, scratch2, src);
  }
}


void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
  Comment cmnt(masm_, "[ Declaration");
  Variable* var = decl->proxy()->var();
  ASSERT(var != NULL);  // Must have been resolved.
  Slot* slot = var->slot();
  Property* prop = var->AsProperty();

  if (slot != NULL) {
    switch (slot->type()) {
      case Slot::PARAMETER:
      case Slot::LOCAL:
        if (decl->mode() == Variable::CONST) {
          __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
          __ str(ip, MemOperand(fp, SlotOffset(slot)));
        } else if (decl->fun() != NULL) {
          VisitForValue(decl->fun(), kAccumulator);
          __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
        }
        break;

      case Slot::CONTEXT:
        // We bypass the general EmitSlotSearch because we know more about
        // this specific context.
        // The variable in the decl always resides in the current context.
        ASSERT_EQ(0, scope()->ContextChainLength(var->scope()));
        if (FLAG_debug_code) {
          // Check if we have the correct context pointer.
          __ ldr(r1,
                 CodeGenerator::ContextOperand(cp, Context::FCONTEXT_INDEX));
          __ cmp(r1, cp);
          __ Check(eq, "Unexpected declaration in current context.");
        }
        if (decl->mode() == Variable::CONST) {
          __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
          __ str(ip, CodeGenerator::ContextOperand(cp, slot->index()));
          // No write barrier since the_hole_value is in old space.
        } else if (decl->fun() != NULL) {
          VisitForValue(decl->fun(), kAccumulator);
          __ str(result_register(),
                 CodeGenerator::ContextOperand(cp, slot->index()));
          int offset = Context::SlotOffset(slot->index());
          __ mov(r2, Operand(offset));
          // We know that we have written a function, which is not a smi.
          __ mov(r1, Operand(cp));
          __ RecordWrite(r1, r2, result_register());
        }
        break;

      case Slot::LOOKUP: {
        __ mov(r2, Operand(var->name()));
        // Declaration nodes are always introduced in one of two modes.
        ASSERT(decl->mode() == Variable::VAR ||
               decl->mode() == Variable::CONST);
        PropertyAttributes attr =
            (decl->mode() == Variable::VAR) ? NONE : READ_ONLY;
        __ mov(r1, Operand(Smi::FromInt(attr)));
        // Push initial value, if any.
        // Note: For variables we must not push an initial value (such as
        // 'undefined') because we may have a (legal) redeclaration and we
        // must not destroy the current value.
        if (decl->mode() == Variable::CONST) {
          __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
          __ stm(db_w, sp, cp.bit() | r2.bit() | r1.bit() | r0.bit());
        } else if (decl->fun() != NULL) {
          __ stm(db_w, sp, cp.bit() | r2.bit() | r1.bit());
          // Push initial value for function declaration.
          VisitForValue(decl->fun(), kStack);
        } else {
          __ mov(r0, Operand(Smi::FromInt(0)));  // No initial value!
          __ stm(db_w, sp, cp.bit() | r2.bit() | r1.bit() | r0.bit());
        }
        __ CallRuntime(Runtime::kDeclareContextSlot, 4);
        break;
      }
    }

  } else if (prop != NULL) {
    if (decl->fun() != NULL || decl->mode() == Variable::CONST) {
      // We are declaring a function or constant that rewrites to a
      // property. Use (keyed) IC to set the initial value.
      VisitForValue(prop->obj(), kStack);
      VisitForValue(prop->key(), kStack);

      if (decl->fun() != NULL) {
        VisitForValue(decl->fun(), kAccumulator);
      } else {
        __ LoadRoot(result_register(), Heap::kTheHoleValueRootIndex);
      }

      Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
      __ Call(ic, RelocInfo::CODE_TARGET);

      // Value in r0 is ignored (declarations are statements). Receiver
      // and key on stack are discarded.
      __ Drop(2);
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  // The context is the first argument.
  __ mov(r1, Operand(pairs));
  __ mov(r0, Operand(Smi::FromInt(is_eval() ? 1 : 0)));
  __ stm(db_w, sp, cp.bit() | r1.bit() | r0.bit());
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<JSFunction> boilerplate =
      Compiler::BuildBoilerplate(expr, script(), this);
  if (HasStackOverflow()) return;

  ASSERT(boilerplate->IsBoilerplate());

  // Create a new closure.
  __ mov(r0, Operand(boilerplate));
  __ stm(db_w, sp, cp.bit() | r0.bit());
  __ CallRuntime(Runtime::kNewClosure, 2);
  Apply(context_, r0);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr->var(), context_);
}


void FullCodeGenerator::EmitVariableLoad(Variable* var,
                                         Expression::Context context) {
  // Four cases: non-this global variables, lookup slots, all other
  // types of slots, and parameters that rewrite to explicit property
  // accesses on the arguments object.
  Slot* slot = var->slot();
  Property* property = var->AsProperty();

  if (var->is_global() && !var->is_this()) {
    Comment cmnt(masm_, "Global variable");
    // Use inline caching. Variable name is passed in r2 and the global
    // object on the stack.
    __ ldr(ip, CodeGenerator::GlobalObject());
    __ push(ip);
    __ mov(r2, Operand(var->name()));
    Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT);
    DropAndApply(1, context, r0);

  } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
    Comment cmnt(masm_, "Lookup slot");
    __ mov(r1, Operand(var->name()));
    __ stm(db_w, sp, cp.bit() | r1.bit());  // Context and name.
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    Apply(context, r0);

  } else if (slot != NULL) {
    Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
                            ? "Context slot"
                            : "Stack slot");
    Apply(context, slot);

  } else {
    Comment cmnt(masm_, "Rewritten parameter");
    ASSERT_NOT_NULL(property);
    // Rewritten parameter accesses are of the form "slot[literal]".

    // Assert that the object is in a slot.
    Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
    ASSERT_NOT_NULL(object_var);
    Slot* object_slot = object_var->slot();
    ASSERT_NOT_NULL(object_slot);

    // Load the object.
    Move(r2, object_slot);

    // Assert that the key is a smi.
    Literal* key_literal = property->key()->AsLiteral();
    ASSERT_NOT_NULL(key_literal);
    ASSERT(key_literal->handle()->IsSmi());

    // Load the key.
    __ mov(r1, Operand(key_literal->handle()));

    // Push both as arguments to ic.
    __ stm(db_w, sp, r2.bit() | r1.bit());

    // Do a keyed property load.
    Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);

    // Drop key and object left on the stack by IC, and push the result.
    DropAndApply(2, context, r0);
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label done;
  // Registers will be used as follows:
  // r4 = JS function, literals array
  // r3 = literal index
  // r2 = RegExp pattern
  // r1 = RegExp flags
  // r0 = temp + return value (RegExp literal)
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ ldr(r0, FieldMemOperand(r4, literal_offset));
  __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
  __ cmp(r0, ip);
  __ b(ne, &done);
  __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r2, Operand(expr->pattern()));
  __ mov(r1, Operand(expr->flags()));
  __ stm(db_w, sp, r4.bit() | r3.bit() | r2.bit() | r1.bit());
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ bind(&done);
  Apply(context_, r0);
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r2, FieldMemOperand(r2, JSFunction::kLiteralsOffset));
  __ mov(r1, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r0, Operand(expr->constant_properties()));
  __ stm(db_w, sp, r2.bit() | r1.bit() | r0.bit());
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 3);
  } else {
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 3);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in r0.
  bool result_saved = false;

  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(r0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          VisitForValue(value, kAccumulator);
          __ mov(r2, Operand(key->handle()));
          __ ldr(r1, MemOperand(sp));
          Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
          __ Call(ic, RelocInfo::CODE_TARGET);
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForValue(key, kStack);
        VisitForValue(value, kStack);
        __ CallRuntime(Runtime::kSetProperty, 3);
        break;
      case ObjectLiteral::Property::GETTER:
      case ObjectLiteral::Property::SETTER:
        // Duplicate receiver on stack.
        __ ldr(r0, MemOperand(sp));
        __ push(r0);
        VisitForValue(key, kStack);
        __ mov(r1, Operand(property->kind() == ObjectLiteral::Property::SETTER ?
                           Smi::FromInt(1) :
                           Smi::FromInt(0)));
        __ push(r1);
        VisitForValue(value, kStack);
        __ CallRuntime(Runtime::kDefineAccessor, 4);
        break;
    }
  }

  if (result_saved) {
    ApplyTOS(context_);
  } else {
    Apply(context_, r0);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");
  __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
  __ mov(r1, Operand(expr->constant_elements()));
  __ stm(db_w, sp, r3.bit() | r2.bit() | r1.bit());
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else {
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  ZoneList<Expression*>* subexprs = expr->values();
  for (int i = 0, len = subexprs->length(); i < len; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (subexpr->AsLiteral() != NULL ||
        CompileTimeValue::IsCompileTimeValue(subexpr)) {
      continue;
    }

    if (!result_saved) {
      __ push(r0);
      result_saved = true;
    }
    VisitForValue(subexpr, kAccumulator);

    // Store the subexpression value in the array's elements.
    __ ldr(r1, MemOperand(sp));  // Copy of array literal.
    __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
    int offset = FixedArray::kHeaderSize + (i * kPointerSize);
    __ str(result_register(), FieldMemOperand(r1, offset));

    // Update the write barrier for the array store with r0 as the scratch
    // register.
    __ mov(r2, Operand(offset));
    __ RecordWrite(r1, r2, result_register());
  }

  if (result_saved) {
    ApplyTOS(context_);
  } else {
    Apply(context_, r0);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  ASSERT(expr->op() != Token::INIT_CONST);
  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->target()->AsProperty();
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForValue(prop->obj(), kAccumulator);
        __ push(result_register());
      } else {
        VisitForValue(prop->obj(), kStack);
      }
      break;
    case KEYED_PROPERTY:
      VisitForValue(prop->obj(), kStack);
      VisitForValue(prop->key(), kStack);
      break;
  }

  // If we have a compound assignment: Get the value of the LHS expression
  // and store it on top of the stack.
  if (expr->is_compound()) {
    Location saved_location = location_;
    location_ = kStack;
    switch (assign_type) {
      case VARIABLE:
        EmitVariableLoad(expr->target()->AsVariableProxy()->var(),
                         Expression::kValue);
        break;
      case NAMED_PROPERTY:
        EmitNamedPropertyLoad(prop);
        __ push(result_register());
        break;
      case KEYED_PROPERTY:
        EmitKeyedPropertyLoad(prop);
        __ push(result_register());
        break;
    }
    location_ = saved_location;
  }

  // Evaluate RHS expression.
  Expression* rhs = expr->value();
  VisitForValue(rhs, kAccumulator);

  // If we have a compound assignment: Apply operator.
  if (expr->is_compound()) {
    Location saved_location = location_;
    location_ = kAccumulator;
    EmitBinaryOp(expr->binary_op(), Expression::kValue);
    location_ = saved_location;
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             context_);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  __ mov(r2, Operand(key->handle()));
  Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);
}


void FullCodeGenerator::EmitBinaryOp(Token::Value op,
                                     Expression::Context context) {
  __ pop(r1);
  GenericBinaryOpStub stub(op, NO_OVERWRITE);
  __ CallStub(&stub);
  Apply(context, r0);
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Expression::Context context) {
  // Three main cases: global variables, lookup slots, and all other
  // types of slots. Left-hand-side parameters that rewrite to
  // explicit property accesses do not reach here.
  ASSERT(var != NULL);
  ASSERT(var->is_global() || var->slot() != NULL);

  Slot* slot = var->slot();
  if (var->is_global()) {
    ASSERT(!var->is_this());
    // Assignment to a global variable. Use inline caching for the
    // assignment. Right-hand-side value is passed in r0, variable name in
    // r2, and the global object in r1.
    __ mov(r2, Operand(var->name()));
    __ ldr(r1, CodeGenerator::GlobalObject());
    Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
    __ Call(ic, RelocInfo::CODE_TARGET);

  } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
    __ push(result_register());  // Value.
    __ mov(r1, Operand(var->name()));
    __ stm(db_w, sp, cp.bit() | r1.bit());  // Context and name.
    __ CallRuntime(Runtime::kStoreContextSlot, 3);

  } else if (var->slot() != NULL) {
    Slot* slot = var->slot();
    switch (slot->type()) {
      case Slot::LOCAL:
      case Slot::PARAMETER:
        __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
        break;

      case Slot::CONTEXT: {
        MemOperand target = EmitSlotSearch(slot, r1);
        __ str(result_register(), target);

        // RecordWrite may destroy all its register arguments.
        __ mov(r3, result_register());
        int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;

        __ mov(r2, Operand(offset));
        __ RecordWrite(r1, r2, r3);
        break;
      }

      case Slot::LOOKUP:
        UNREACHABLE();
        break;
    }

  } else {
    // Variables rewritten as properties are not treated as variables in
    // assignments.
    UNREACHABLE();
  }
  Apply(context, result_register());
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->AsLiteral() != NULL);

  // If the assignment starts a block of assignments to the same object,
  // change to slow case to avoid the quadratic behavior of repeatedly
  // adding fast properties.
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    __ ldr(ip, MemOperand(sp, kPointerSize));  // Receiver is now under value.
    __ push(ip);
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());
  }

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
  if (expr->ends_initialization_block()) {
    __ ldr(r1, MemOperand(sp));
  } else {
    __ pop(r1);
  }

  Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);

  // If the assignment ends an initialization block, revert to fast case.
  if (expr->ends_initialization_block()) {
    __ push(r0);  // Result of assignment, saved even if not needed.
    __ ldr(ip, MemOperand(sp, kPointerSize));  // Receiver is under value.
    __ push(ip);
    __ CallRuntime(Runtime::kToFastProperties, 1);
    __ pop(r0);
    DropAndApply(1, context_, r0);
  } else {
    Apply(context_, r0);
  }
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.

  // If the assignment starts a block of assignments to the same object,
  // change to slow case to avoid the quadratic behavior of repeatedly
  // adding fast properties.
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    // Receiver is now under the key and value.
    __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
    __ push(ip);
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());
  }

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
  __ Call(ic, RelocInfo::CODE_TARGET);

  // If the assignment ends an initialization block, revert to fast case.
  if (expr->ends_initialization_block()) {
    __ push(r0);  // Result of assignment, saved even if not needed.
    // Receiver is under the key and value.
    __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
    __ push(ip);
    __ CallRuntime(Runtime::kToFastProperties, 1);
    __ pop(r0);
  }

  // Receiver and key are still on stack.
  DropAndApply(2, context_, r0);
}


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  // Evaluate receiver.
  VisitForValue(expr->obj(), kStack);

  if (key->IsPropertyName()) {
    EmitNamedPropertyLoad(expr);
    // Drop receiver left on the stack by IC.
    DropAndApply(1, context_, r0);
  } else {
    VisitForValue(expr->key(), kStack);
    EmitKeyedPropertyLoad(expr);
    // Drop key and receiver left on the stack by IC.
    DropAndApply(2, context_, r0);
  }
}

void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForValue(args->at(i), kStack);
  }
  __ mov(r2, Operand(name));
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  // Call the IC initialization code.
  InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
  Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count, in_loop);
  __ Call(ic, mode);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  Apply(context_, r0);
}


void FullCodeGenerator::EmitCallWithStub(Call* expr) {
  // Code common for calls using the call stub.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForValue(args->at(i), kStack);
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());
  CallFunctionStub stub(arg_count, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
  __ CallStub(&stub);
  // Restore context register.
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  DropAndApply(1, context_, r0);
}


void FullCodeGenerator::VisitCall(Call* expr) {
  Comment cmnt(masm_, "[ Call");
  Expression* fun = expr->expression();
  Variable* var = fun->AsVariableProxy()->AsVariable();

  if (var != NULL && var->is_possibly_eval()) {
    // Call to the identifier 'eval'.
    UNREACHABLE();
  } else if (var != NULL && !var->is_this() && var->is_global()) {
    // Push global object as receiver for the call IC.
    __ ldr(r0, CodeGenerator::GlobalObject());
    __ push(r0);
    EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
  } else if (var != NULL && var->slot() != NULL &&
             var->slot()->type() == Slot::LOOKUP) {
    // Call to a lookup slot.
    UNREACHABLE();
  } else if (fun->AsProperty() != NULL) {
    // Call to an object property.
    Property* prop = fun->AsProperty();
    Literal* key = prop->key()->AsLiteral();
    if (key != NULL && key->handle()->IsSymbol()) {
      // Call to a named property, use call IC.
      VisitForValue(prop->obj(), kStack);
      EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
    } else {
      // Call to a keyed property, use keyed load IC followed by function
      // call.
      VisitForValue(prop->obj(), kStack);
      VisitForValue(prop->key(), kStack);
      // Record source code position for IC call.
      SetSourcePosition(prop->position());
      Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
      __ Call(ic, RelocInfo::CODE_TARGET);
      // Load receiver object into r1.
      if (prop->is_synthetic()) {
        __ ldr(r1, CodeGenerator::GlobalObject());
        __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
      } else {
        __ ldr(r1, MemOperand(sp, kPointerSize));
      }
      // Overwrite (object, key) with (function, receiver).
      __ str(r0, MemOperand(sp, kPointerSize));
      __ str(r1, MemOperand(sp));
      EmitCallWithStub(expr);
    }
  } else {
    // Call to some other expression. If the expression is an anonymous
    // function literal not called in a loop, mark it as one that should
    // also use the fast code generator.
    FunctionLiteral* lit = fun->AsFunctionLiteral();
    if (lit != NULL &&
        lit->name()->Equals(Heap::empty_string()) &&
        loop_depth() == 0) {
      lit->set_try_full_codegen(true);
    }
    VisitForValue(fun, kStack);
    // Load global receiver object.
    __ ldr(r1, CodeGenerator::GlobalObject());
    __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
    __ push(r1);
    // Emit function call.
    EmitCallWithStub(expr);
  }
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.
  // Push function on the stack.
  VisitForValue(expr->expression(), kStack);

  // Push global object (receiver).
  __ ldr(r0, CodeGenerator::GlobalObject());
  __ push(r0);
  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForValue(args->at(i), kStack);
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function, arg_count into r1 and r0.
  __ mov(r0, Operand(arg_count));
  // Function is in sp[arg_count + 1].
  __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));

  Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
  __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);

  // Replace function on TOS with result in r0, or pop it.
  DropAndApply(1, context_, r0);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ ldr(r0, CodeGenerator::GlobalObject());
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
    __ push(r0);
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForValue(args->at(i), kStack);
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function.
    __ mov(r2, Operand(expr->name()));
    Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count,
                                                           NOT_IN_LOOP);
    __ Call(ic, RelocInfo::CODE_TARGET);
    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  } else {
    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
  }
  Apply(context_, r0);
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      switch (context_) {
        case Expression::kUninitialized:
          UNREACHABLE();
          break;
        case Expression::kEffect:
          break;
        case Expression::kValue:
          __ LoadRoot(result_register(), Heap::kUndefinedValueRootIndex);
          switch (location_) {
            case kAccumulator:
              break;
            case kStack:
              __ push(result_register());
              break;
          }
          break;
        case Expression::kTestValue:
          // Value is false so it's needed.
          __ LoadRoot(result_register(), Heap::kUndefinedValueRootIndex);
          switch (location_) {
            case kAccumulator:
              break;
            case kStack:
              __ push(result_register());
              break;
          }
          // Fall through.
        case Expression::kTest:
        case Expression::kValueTest:
          __ jmp(false_label_);
          break;
      }
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      Label materialize_true, materialize_false, done;
      // Initially assume a pure test context. Notice that the labels are
      // swapped.
      Label* if_true = false_label_;
      Label* if_false = true_label_;
      switch (context_) {
        case Expression::kUninitialized:
          UNREACHABLE();
          break;
        case Expression::kEffect:
          if_true = &done;
          if_false = &done;
          break;
        case Expression::kValue:
          if_true = &materialize_false;
          if_false = &materialize_true;
          break;
        case Expression::kTest:
          break;
        case Expression::kValueTest:
          if_false = &materialize_true;
          break;
        case Expression::kTestValue:
          if_true = &materialize_false;
          break;
      }
      VisitForControl(expr->expression(), if_true, if_false);
      Apply(context_, if_false, if_true);  // Labels swapped.
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      VariableProxy* proxy = expr->expression()->AsVariableProxy();
      if (proxy != NULL &&
          !proxy->var()->is_this() &&
          proxy->var()->is_global()) {
        Comment cmnt(masm_, "Global variable");
        __ ldr(r0, CodeGenerator::GlobalObject());
        __ push(r0);
        __ mov(r2, Operand(proxy->name()));
        Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
        // Use a regular load, not a contextual load, to avoid a reference
        // error.
        __ Call(ic, RelocInfo::CODE_TARGET);
        __ str(r0, MemOperand(sp));
      } else if (proxy != NULL &&
                 proxy->var()->slot() != NULL &&
                 proxy->var()->slot()->type() == Slot::LOOKUP) {
        __ mov(r0, Operand(proxy->name()));
        __ stm(db_w, sp, cp.bit() | r0.bit());
        __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
        __ push(r0);
      } else {
        // This expression cannot throw a reference error at the top level.
        VisitForValue(expr->expression(), kStack);
      }

      __ CallRuntime(Runtime::kTypeof, 1);
      Apply(context_, r0);
      break;
    }

    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForValue(expr->expression(), kAccumulator);
      Label no_conversion;
      __ tst(result_register(), Operand(kSmiTagMask));
      __ b(eq, &no_conversion);
      __ push(r0);
      __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS);
      __ bind(&no_conversion);
      Apply(context_, result_register());
      break;
    }

    case Token::SUB: {
      Comment cmt(masm_, "[ UnaryOperation (SUB)");
      bool overwrite =
          (expr->expression()->AsBinaryOperation() != NULL &&
           expr->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
      GenericUnaryOpStub stub(Token::SUB, overwrite);
      // GenericUnaryOpStub expects the argument to be in the
      // accumulator register r0.
      VisitForValue(expr->expression(), kAccumulator);
      __ CallStub(&stub);
      Apply(context_, r0);
      break;
    }

    case Token::BIT_NOT: {
      Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
      bool overwrite =
          (expr->expression()->AsBinaryOperation() != NULL &&
           expr->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
      GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
      // GenericUnaryOpStub expects the argument to be in the
      // accumulator register r0.
      VisitForValue(expr->expression(), kAccumulator);
      // Avoid calling the stub for Smis.
      Label smi, done;
      __ tst(result_register(), Operand(kSmiTagMask));
      __ b(eq, &smi);
      // Non-smi: call stub leaving result in accumulator register.
      __ CallStub(&stub);
      __ b(&done);
      // Perform operation directly on Smis.
      __ bind(&smi);
      __ mvn(result_register(), Operand(result_register()));
      // Bit-clear inverted smi-tag.
      __ bic(result_register(), result_register(), Operand(kSmiTagMask));
      __ bind(&done);
      Apply(context_, result_register());
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");

  // Expression can only be a property, a global or a (parameter or local)
  // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    Location saved_location = location_;
    location_ = kAccumulator;
    EmitVariableLoad(expr->expression()->AsVariableProxy()->var(),
                     Expression::kValue);
    location_ = saved_location;
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && context_ != Expression::kEffect) {
      __ mov(ip, Operand(Smi::FromInt(0)));
      __ push(ip);
    }
    VisitForValue(prop->obj(), kStack);
    if (assign_type == NAMED_PROPERTY) {
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForValue(prop->key(), kStack);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &no_conversion);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_JS);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    switch (context_) {
      case Expression::kUninitialized:
        UNREACHABLE();
      case Expression::kEffect:
        // Do not save result.
        break;
      case Expression::kValue:
      case Expression::kTest:
      case Expression::kValueTest:
      case Expression::kTestValue:
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(r0);
            break;
          case NAMED_PROPERTY:
            __ str(r0, MemOperand(sp, kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ str(r0, MemOperand(sp, 2 * kPointerSize));
            break;
        }
        break;
    }
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (loop_depth() > 0) {
    __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
    __ b(vs, &stub_call);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    __ tst(r0, Operand(kSmiTagMask));
    __ b(eq, &done);
    __ bind(&stub_call);
    // Call stub. Undo operation first.
    __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
  }
  __ mov(r1, Operand(Smi::FromInt(count_value)));
  GenericBinaryOpStub stub(Token::ADD, NO_OVERWRITE);
  __ CallStub(&stub);
  __ bind(&done);

  // Store the value returned in r0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Expression::kEffect);
        // For all contexts except kEffect: We have the result on
        // top of the stack.
        if (context_ != Expression::kEffect) {
          ApplyTOS(context_);
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               context_);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
      __ pop(r1);
      Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
      __ Call(ic, RelocInfo::CODE_TARGET);
      if (expr->is_postfix()) {
        if (context_ != Expression::kEffect) {
          ApplyTOS(context_);
        }
      } else {
        Apply(context_, r0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
      __ Call(ic, RelocInfo::CODE_TARGET);
      if (expr->is_postfix()) {
        __ Drop(2);  // Result is on the stack under the key and the receiver.
        if (context_ != Expression::kEffect) {
          ApplyTOS(context_);
        }
      } else {
        DropAndApply(2, context_, r0);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
  Comment cmnt(masm_, "[ BinaryOperation");
  switch (expr->op()) {
    case Token::COMMA:
      VisitForEffect(expr->left());
      Visit(expr->right());
      break;

    case Token::OR:
    case Token::AND:
      EmitLogicalOperation(expr);
      break;

    case Token::ADD:
    case Token::SUB:
    case Token::DIV:
    case Token::MOD:
    case Token::MUL:
    case Token::BIT_OR:
    case Token::BIT_AND:
    case Token::BIT_XOR:
    case Token::SHL:
    case Token::SHR:
    case Token::SAR:
      VisitForValue(expr->left(), kStack);
      VisitForValue(expr->right(), kAccumulator);
      EmitBinaryOp(expr->op(), context_);
      break;

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false, done;
  // Initially assume we are in a test context.
  Label* if_true = true_label_;
  Label* if_false = false_label_;
  switch (context_) {
    case Expression::kUninitialized:
      UNREACHABLE();
      break;
    case Expression::kEffect:
      if_true = &done;
      if_false = &done;
      break;
    case Expression::kValue:
      if_true = &materialize_true;
      if_false = &materialize_false;
      break;
    case Expression::kTest:
      break;
    case Expression::kValueTest:
      if_true = &materialize_true;
      break;
    case Expression::kTestValue:
      if_false = &materialize_false;
      break;
  }

  VisitForValue(expr->left(), kStack);
  switch (expr->op()) {
    case Token::IN:
      VisitForValue(expr->right(), kStack);
      __ InvokeBuiltin(Builtins::IN, CALL_JS);
      __ LoadRoot(ip, Heap::kTrueValueRootIndex);
      __ cmp(r0, ip);
      __ b(eq, if_true);
      __ jmp(if_false);
      break;

    case Token::INSTANCEOF: {
      VisitForValue(expr->right(), kStack);
      InstanceofStub stub;
      __ CallStub(&stub);
      __ tst(r0, r0);
      __ b(eq, if_true);  // The stub returns 0 for true.
      __ jmp(if_false);
      break;
    }

    default: {
      VisitForValue(expr->right(), kAccumulator);
      Condition cc = eq;
      bool strict = false;
      switch (expr->op()) {
        case Token::EQ_STRICT:
          strict = true;
          // Fall through.
        case Token::EQ:
          cc = eq;
          __ pop(r1);
          break;
        case Token::LT:
          cc = lt;
          __ pop(r1);
          break;
        case Token::GT:
          // Reverse left and right sides to obtain ECMA-262 conversion order.
          cc = lt;
          __ mov(r1, result_register());
          __ pop(r0);
          break;
        case Token::LTE:
          // Reverse left and right sides to obtain ECMA-262 conversion order.
          cc = ge;
          __ mov(r1, result_register());
          __ pop(r0);
          break;
        case Token::GTE:
          cc = ge;
          __ pop(r1);
          break;
        case Token::IN:
        case Token::INSTANCEOF:
        default:
          UNREACHABLE();
      }

      // The comparison stub expects the smi vs. smi case to be handled
      // before it is called.
      Label slow_case;
      __ orr(r2, r0, Operand(r1));
      __ tst(r2, Operand(kSmiTagMask));
      __ b(ne, &slow_case);
      __ cmp(r1, r0);
      __ b(cc, if_true);
      __ jmp(if_false);

      __ bind(&slow_case);
      CompareStub stub(cc, strict);
      __ CallStub(&stub);
      __ cmp(r0, Operand(0));
      __ b(cc, if_true);
      __ jmp(if_false);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  Apply(context_, if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  Apply(context_, r0);
}


Register FullCodeGenerator::result_register() { return r0; }


Register FullCodeGenerator::context_register() { return cp; }


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ldr(dst, CodeGenerator::ContextOperand(cp, context_index));
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ sub(r1, lr, Operand(masm_->CodeObject()));
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  ASSERT_EQ(0, kSmiTag);
  __ add(r1, r1, Operand(r1));  // Convert to smi.
  __ push(r1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(r1));
  // Pop the cooked return address from the stack.
  __ pop(r1);
  // Restore result register from stack.
  __ pop(result_register());
  // Uncook return address and return.
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  __ mov(r1, Operand(r1, ASR, 1));  // Un-smi-tag value.
  __ add(pc, r1, Operand(masm_->CodeObject()));
}


#undef __

} }  // namespace v8::internal