// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X87

#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"
#include "src/x87/code-stubs-x87.h"
#include "src/x87/frames-x87.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
  __ pop(ecx);
  __ mov(MemOperand(esp, eax, times_4, 0), edi);
  __ push(edi);
  __ push(ebx);
  __ push(ecx);
  __ add(eax, Immediate(3));
  __ TailCallRuntime(Runtime::kNewArray);
}

void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           eax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ ret(0);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles()) {
    // Save the FPU state in the 108-byte save area.
    __ sub(esp, Immediate(108));
    __ fnsave(Operand(esp, 0));
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    // Restore the FPU state from the 108-byte save area.
    __ frstor(Operand(esp, 0));
    __ add(esp, Immediate(108));
  }
  __ popad();
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  // Result must be extracted from shifted 32-bit mantissa
  __ sub(ecx, Immediate(delta));
  __ neg(ecx);
  if (stash_exponent_copy) {
    __ mov(result_reg, MemOperand(esp, 0));
  } else {
    __ mov(result_reg, exponent_operand);
  }
  __ and_(result_reg,
          Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
  __ add(result_reg,
         Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
  __ shrd_cl(scratch1, result_reg);
  __ shr_cl(result_reg);
  __ test(ecx, Immediate(32));
  {
    Label skip_mov;
    __ j(equal, &skip_mov, Label::kNear);
    __ mov(scratch1, result_reg);
    __ bind(&skip_mov);
  }

  // If the double was negative, negate the integer result.
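  // (The value is negated unconditionally below; if the sign word then turns
  // out to be positive, the un-negated value is moved back in.)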
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  {
    Label skip_mov;
    __ j(less_equal, &skip_mov, Label::kNear);
    __ mov(result_reg, scratch1);
    __ bind(&skip_mov);
  }

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register scratch = ecx;

  // Load the double_exponent into x87 FPU
  __ fld_d(Operand(esp, 0 * kDoubleSize + 4));
  // Load the double_base into x87 FPU
  __ fld_d(Operand(esp, 1 * kDoubleSize + 4));

  // Call ieee754 runtime directly.
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(4, scratch);
    // Put the double_base parameter in call stack
    __ fstp_d(Operand(esp, 0 * kDoubleSize));
    // Put the double_exponent parameter in call stack
    __ fstp_d(Operand(esp, 1 * kDoubleSize));
    __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
                     4);
  }
  // Return value is in st(0) on ia32.
  __ ret(0);
}

void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Jump straight to the runtime if native RegExp is not selected at compile
  // time, or if the regexp entry in generated code is turned off (at runtime
  // or at compilation).
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or          number_of_captures * 2 <= offsets vector size - 2
  // Multiplying by 2 comes for free since edx is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Move(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  __ mov(edx, eax);  // Make a copy of the original subject string.

  // eax: subject string
  // edx: subject string
  // ecx: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte? If yes, go to (9).
  // (2) Sequential one byte? If yes, go to (5).
  // (3) Sequential or cons? If not, go to (6).
  // (4) Cons string. If the string is flat, replace subject with first string
  //     and go to (1). Otherwise bail out to runtime.
  // (5) One byte sequential. Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (6) Long external string? If not, go to (10).
  // (7) External string. Make it, offset-wise, look like a sequential string.
  // (8) Is the external string one byte? If yes, go to (5).
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string? If yes, bail out to runtime.
  // (11) Sliced or thin string. Replace subject with parent. Go to (1).

  Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
      external_string /* 7 */, check_underlying /* 1 */,
      not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;

  __ bind(&check_underlying);
  // (1) Sequential two byte? If yes, go to (9).
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  __ and_(ebx, kIsNotStringMask |
               kStringRepresentationMask |
               kStringEncodingMask |
               kShortExternalStringMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte? If yes, go to (5).
  // Any other sequential string must be one byte.
  __ and_(ebx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (5).

  // (3) Sequential or cons? If not, go to (6).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kThinStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmp(ebx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (6).

  // (4) Cons string. Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ jmp(&check_underlying);

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (5) One byte sequential. Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // Load previous index and check range before edx is overwritten. We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset));
  __ Move(ecx, Immediate(1));  // Type is one byte.

  // (E) Carry on. String handling is done.
  __ bind(&check_code);
  // edx: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(edx, &runtime);

  // eax: subject string
  // ebx: previous index (smi)
  // edx: code
  // ecx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);

  // Argument 9: Pass current isolate address.
  __ mov(Operand(esp, 8 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 6 * kPointerSize), esi);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global. This does not affect non-global regexps.
  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(
             isolate())));

  // Argument 2: Previous index.
  __ SmiUntag(ebx);
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use ebp, which points exactly to one pointer size below the previous esp.
  // (Because creating a new stack frame pushes the previous ebp onto the stack
  // and thereby moves up esp by one kPointerSize.)
  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  // esi: original subject string
  // eax: underlying subject string
  // ebx: previous index
  // ecx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // edx: code
  // Argument 4: End of string data
  // Argument 3: Start of string data
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
  __ add(esi, edi);  // Calculate input end wrt offset.
  __ SmiUntag(edi);
  __ add(ebx, edi);  // Calculate input start wrt offset.

  // ebx: start index of the input string
  // esi: end index of the input string
  Label setup_two_byte, setup_rest;
  __ test(ecx, ecx);
  __ j(zero, &setup_two_byte, Label::kNear);
  __ SmiUntag(esi);
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);  // esi is smi (powered by 2).
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

  __ bind(&setup_rest);

  // Locate the code entry and call it.
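  // (edx holds a tagged Code object; its first instruction follows the Code
  // header, hence the untagged header-size offset added below.)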
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(edx);

  // Drop arguments and come back to JS mode.
  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  __ cmp(eax, 1);
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success);
  Label failure;
  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
  __ j(equal, &failure);
  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
  // If not exception it can only be retry. Handle that in the runtime system.
  __ j(not_equal, &runtime);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code but
  // the exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ cmp(edx, eax);
  __ j(equal, &runtime);

  // For exception, throw the exception again.
  __ TailCallRuntime(Runtime::kRegExpExecReThrow);

  __ bind(&failure);
  // For failure to match, return null.
  __ mov(eax, factory->null_value());
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(edx, Immediate(2));  // edx was a smi.

  // edx: Number of capture registers
  // Check that the last match info is a FixedArray.
  __ mov(ebx, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(ebx, &runtime);
  // Check that the object has fast elements.
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, factory->fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ SmiUntag(eax);
  __ sub(eax, Immediate(RegExpMatchInfo::kLastMatchOverhead));
  __ cmp(edx, eax);
  __ j(greater, &runtime);

  // ebx: last_match_info backing store (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpMatchInfo::kNumberOfCapturesOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(ecx, eax);
  __ mov(FieldOperand(ebx, RegExpMatchInfo::kLastSubjectOffset), eax);
  __ RecordWriteField(ebx, RegExpMatchInfo::kLastSubjectOffset, eax, edi,
                      kDontSaveFPRegs);
  __ mov(eax, ecx);
  __ mov(FieldOperand(ebx, RegExpMatchInfo::kLastInputOffset), eax);
  __ RecordWriteField(ebx, RegExpMatchInfo::kLastInputOffset, eax, edi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
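  // (The native code stored the capture start/end offsets in a per-isolate
  // static buffer; the loop below copies them, smi-tagged, into the match
  // info.)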
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info backing store (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ sub(edx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx, edx, times_pointer_size,
                      RegExpMatchInfo::kFirstCaptureOffset),
         edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, ebx);
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec);

  // Deferred code for string handling.
  // (6) Long external string? If not, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (7) External string. Short external strings have been ruled out.
  __ bind(&external_string);
  // Reload instance type.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test_b(ebx, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8) Is the external string one byte? If yes, go to (5).
  __ test_b(ebx, Immediate(kStringEncodingMask));
  __ j(not_zero, &seq_one_byte_string);  // Go to (5).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  __ bind(&seq_two_byte_string);
  // Load previous index and check range before edx is overwritten. We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
  __ Move(ecx, Immediate(0));  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string? If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
  __ j(not_zero, &runtime);

  // (11) Sliced or thin string. Replace subject with parent. Go to (1).
  Label thin_string;
  __ cmp(ebx, Immediate(kThinStringTag));
  __ j(equal, &thin_string, Label::kNear);
  // Load offset into edi and replace subject string with parent.
  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
  __ jmp(&check_underlying);  // Go to (1).

  __ bind(&thin_string);
  __ mov(eax, FieldOperand(eax, ThinString::kActualOffset));
  __ jmp(&check_underlying);  // Go to (1).
#endif  // V8_INTERPRETED_REGEXP
}


static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long
  // as hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}


void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label runtime_call, check_unequal_objects;
  Condition cc = GetCondition();

  Label miss;
  CheckInputType(masm, edx, left(), &miss);
  CheckInputType(masm, eax, right(), &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
  __ sub(edx, eax);  // Return on the result of the subtraction.
  __ j(no_overflow, &smi_done, Label::kNear);
  __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
  __ bind(&smi_done);
  __ mov(eax, edx);
  __ ret(0);
  __ bind(&non_smi);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Identical objects can be compared fast, but there are some tricky cases
  // for NaN and undefined.
  Label generic_heap_number_comparison;
  {
    Label not_identical;
    __ cmp(eax, edx);
    __ j(not_equal, &not_identical);

    if (cc != equal) {
      // Check for undefined. undefined OP undefined is false even though
      // undefined == undefined.
      __ cmp(edx, isolate()->factory()->undefined_value());
      Label check_for_nan;
      __ j(not_equal, &check_for_nan, Label::kNear);
      __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Compare heap numbers in a general way,
    // to handle NaNs correctly.
    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->heap_number_map()));
    __ j(equal, &generic_heap_number_comparison, Label::kNear);
    if (cc != equal) {
      __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
      __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
      // Call runtime on identical JSObjects. Otherwise return equal.
      __ cmpb(ecx, Immediate(FIRST_JS_RECEIVER_TYPE));
      __ j(above_equal, &runtime_call, Label::kFar);
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ cmpb(ecx, Immediate(SYMBOL_TYPE));
      __ j(equal, &runtime_call, Label::kFar);
    }
    __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
    __ ret(0);


    __ bind(&not_identical);
  }

  // Strict equality can quickly decide whether objects are equal.
  // Non-strict object equality is slower, so it is handled later in the stub.
  if (cc == equal && strict()) {
    Label slow;  // Fallthrough label.
    Label not_smis;
    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    // If either is a Smi (we know that not both are), then they can only
    // be equal if the other is a HeapNumber. If so, use the slow case.
    STATIC_ASSERT(kSmiTag == 0);
    DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
    __ mov(ecx, Immediate(kSmiTagMask));
    __ and_(ecx, eax);
    __ test(ecx, edx);
    __ j(not_zero, &not_smis, Label::kNear);
    // One operand is a smi.

    // Check whether the non-smi is a heap number.
    STATIC_ASSERT(kSmiTagMask == 1);
    // ecx still holds eax & kSmiTag, which is either zero or one.
    __ sub(ecx, Immediate(0x01));
    __ mov(ebx, edx);
    __ xor_(ebx, eax);
    __ and_(ebx, ecx);  // ebx holds either 0 or eax ^ edx.
    __ xor_(ebx, eax);
    // if eax was smi, ebx is now edx, else eax.

    // Check if the non-smi operand is a heap number.
    __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->heap_number_map()));
    // If heap number, handle it in the slow case.
    __ j(equal, &slow, Label::kNear);
    // Return non-equal (ebx is not zero)
    __ mov(eax, ebx);
    __ ret(0);

    __ bind(&not_smis);
    // If either operand is a JSObject or an oddball value, then they are not
    // equal since their pointers are different.
    // There is no test for undetectability in strict equality.

    // Get the type of the first operand.
    // If the first object is a JS object, we have done pointer comparison.
    Label first_non_object;
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
    __ j(below, &first_non_object, Label::kNear);

    // Return non-zero (eax is not zero)
    Label return_not_equal;
    STATIC_ASSERT(kHeapObjectTag != 0);
    __ bind(&return_not_equal);
    __ ret(0);

    __ bind(&first_non_object);
    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ j(above_equal, &return_not_equal);

    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    // Fall through to the general case.
    __ bind(&slow);
  }

  // Generate the number comparison code.
  Label non_number_comparison;
  Label unordered;
  __ bind(&generic_heap_number_comparison);
  FloatingPointHelper::CheckFloatOperands(
      masm, &non_number_comparison, ebx);
  FloatingPointHelper::LoadFloatOperand(masm, eax);
  FloatingPointHelper::LoadFloatOperand(masm, edx);
  __ FCmp();

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  Label below_label, above_label;
  // Return a result of -1, 0, or 1, based on EFLAGS.
  __ j(below, &below_label, Label::kNear);
  __ j(above, &above_label, Label::kNear);

  __ Move(eax, Immediate(0));
  __ ret(0);

  __ bind(&below_label);
  __ mov(eax, Immediate(Smi::FromInt(-1)));
  __ ret(0);

  __ bind(&above_label);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  __ ret(0);

  // If one of the numbers was NaN, then the result is always false.
  // The cc is never not-equal.
  __ bind(&unordered);
  DCHECK(cc != not_equal);
  if (cc == less || cc == less_equal) {
    __ mov(eax, Immediate(Smi::FromInt(1)));
  } else {
    __ mov(eax, Immediate(Smi::FromInt(-1)));
  }
  __ ret(0);

  // The number comparison code did not provide a valid result.
  __ bind(&non_number_comparison);

  // Fast negative check for internalized-to-internalized equality.
  Label check_for_strings;
  if (cc == equal) {
    BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
    BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);

    // We've already checked for object identity, so if both operands
    // are internalized they aren't equal. Register eax already holds a
    // non-zero value, which indicates not equal, so just return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
                                           &check_unequal_objects);

  // Inline comparison of one-byte strings.
  if (cc == equal) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
                                                    edi);
  }
#ifdef DEBUG
  __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    // Non-strict equality. Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    Label return_equal, return_unequal, undetectable;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ lea(ecx, Operand(eax, edx, times_1, 0));
    __ test(ecx, Immediate(kSmiTagMask));
    __ j(not_zero, &runtime_call);

    __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
    __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));

    __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &undetectable, Label::kNear);
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &return_unequal, Label::kNear);

    __ CmpInstanceType(ebx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);
    __ CmpInstanceType(ecx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);

    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in eax.
    __ ret(0);  // eax, edx were pushed

    __ bind(&undetectable);
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    __ j(zero, &return_unequal, Label::kNear);

    // If both sides are JSReceivers, then the result is false according to
    // the HTML specification, which says that only comparisons with null or
    // undefined are affected by special casing for document.all.
    __ CmpInstanceType(ebx, ODDBALL_TYPE);
    __ j(zero, &return_equal, Label::kNear);
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(not_zero, &return_unequal, Label::kNear);

    __ bind(&return_equal);
    __ Move(eax, Immediate(EQUAL));
    __ ret(0);  // eax, edx were pushed
  }
  __ bind(&runtime_call);

  if (cc == equal) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(esi);
      __ Call(strict() ? isolate()->builtins()->StrictEqual()
                       : isolate()->builtins()->Equal(),
              RelocInfo::CODE_TARGET);
      __ Pop(esi);
    }
    // Turn true into 0 and false into some non-zero value.
    STATIC_ASSERT(EQUAL == 0);
    __ sub(eax, Immediate(isolate()->factory()->true_value()));
    __ Ret();
  } else {
    // Push arguments below the return address.
    __ pop(ecx);
    __ push(edx);
    __ push(eax);
    __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));

    // Restore return address on the stack.
    __ push(ecx);
    // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
    // tagged as a small integer.
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Number-of-arguments register must be smi-tagged to call out.
    __ SmiTag(eax);
    __ push(eax);
    __ push(edi);
    __ push(edx);
    __ push(ebx);
    __ push(esi);

    __ CallStub(stub);

    __ pop(esi);
    __ pop(ebx);
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }
}


static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot. Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function;

  // Load the cache state into ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if ecx is a WeakCell or a Symbol, but it's harmless to read
  // at this position in a symbol (see static asserts in feedback-vector.h).
  Label check_allocation_site;
  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                 Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &check_allocation_site);

  // If the weak cell is cleared, we have a new chance to become monomorphic.
  __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorph
  // sentinel, then we have in the slot either some other function or an
  // AllocationSite.
  __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &miss);

  // Make sure the function is the Array() function
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &megamorphic);
  __ jmp(&done, Label::kFar);

  __ bind(&miss);

  // A monomorphic miss (i.e, here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex);
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(FeedbackVector::MegamorphicSentinel(isolate)));
  __ jmp(&done, Label::kFar);

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);
  // Make sure the function is the Array() function
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &not_array_function);

  // The target function is the Array constructor.
  // Create an AllocationSite if we don't already have it, store it in the
  // slot.
  CreateAllocationSiteStub create_stub(isolate);
  CallStubInRecordCallTarget(masm, &create_stub);
  __ jmp(&done);

  __ bind(&not_array_function);
  CreateWeakCellStub weak_cell_stub(isolate);
  CallStubInRecordCallTarget(masm, &weak_cell_stub);

  __ bind(&done);
  // Increment the call count for all function calls.
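  // (The count lives in the slot after the feedback slot, hence the extra
  // kPointerSize, and is kept as a Smi, so a tagged add bumps it by one.)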
  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(1)));
}


void CallConstructStub::Generate(MacroAssembler* masm) {
  // eax : number of arguments
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi, for RecordCallTarget)
  // edi : constructor function

  Label non_function;
  // Check that function is not a smi.
  __ JumpIfSmi(edi, &non_function);
  // Check that function is a JSFunction.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &non_function);

  GenerateRecordCallTarget(masm);

  Label feedback_register_initialized;
  // Put the AllocationSite from the feedback vector into ebx, or undefined.
  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
  __ j(equal, &feedback_register_initialized);
  __ mov(ebx, isolate()->factory()->undefined_value());
  __ bind(&feedback_register_initialized);

  __ AssertUndefinedOrAllocationSite(ebx);

  // Pass new target to construct stub.
  __ mov(edx, edi);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);

  __ bind(&non_function);
  __ mov(edx, edi);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
                               Register slot) {
  __ add(FieldOperand(feedback_vector, slot, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(1)));
}

void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
  // eax - number of arguments
  // edi - function
  // edx - slot id
  // ebx - vector
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, miss);

  // Reload ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // Increment the call count for monomorphic function calls.
  IncrementCallCount(masm, ebx, edx);

  __ mov(ebx, ecx);
  __ mov(edx, edi);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // Unreachable.
}


void CallICStub::Generate(MacroAssembler* masm) {
  // eax - number of arguments
  // edi - function
  // edx - slot id
  // ebx - vector
  Isolate* isolate = masm->isolate();
  Label extra_checks_or_miss, call, call_function, call_count_incremented;

  // The checks. First, does edi match the recorded monomorphic target?
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(not_equal, &extra_checks_or_miss);

  // The compare above could have been a SMI/SMI comparison. Guard against this
  // convincing us that we have a monomorphic JSFunction.
  __ JumpIfSmi(edi, &extra_checks_or_miss);

  __ bind(&call_function);

  // Increment the call count for monomorphic function calls.
  IncrementCallCount(masm, ebx, edx);

  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                    tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss, not_allocation_site;

  __ cmp(ecx, Immediate(FeedbackVector::MegamorphicSentinel(isolate)));
  __ j(equal, &call);

  // Check if we have an allocation site.
  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                 Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &not_allocation_site);

  // We have an allocation site.
  HandleArrayCase(masm, &miss);

  __ bind(&not_allocation_site);

  // The following cases attempt to handle MISS cases without going to the
  // runtime.
  if (FLAG_trace_ic) {
    __ jmp(&miss);
  }

  __ cmp(ecx, Immediate(FeedbackVector::UninitializedSentinel(isolate)));
  __ j(equal, &uninitialized);

  // We are going megamorphic. If the feedback is a JSFunction, it is fine
  // to handle it here. More complex cases are dealt with in the runtime.
  __ AssertNotSmi(ecx);
  __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &miss);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(FeedbackVector::MegamorphicSentinel(isolate)));

  __ bind(&call);

  // Increment the call count for megamorphic function calls.
  IncrementCallCount(masm, ebx, edx);

  __ bind(&call_count_incremented);

  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&uninitialized);

  // We are going monomorphic, provided we actually have a JSFunction.
  __ JumpIfSmi(edi, &miss);

  // Goto miss case if we do not have a function.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &miss);

  // Make sure the function is not the Array() function, which requires special
  // behavior on MISS.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(equal, &miss);

  // Make sure the function belongs to the same native context.
  __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset));
  __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX));
  __ cmp(ecx, NativeContextOperand());
  __ j(not_equal, &miss);

  // Store the function. Use a stub since we need a frame for allocation.
  // eax - number of arguments
  // ebx - vector
  // edx - slot
  // edi - function
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    CreateWeakCellStub create_stub(isolate);
    __ SmiTag(eax);
    __ push(eax);
    __ push(ebx);
    __ push(edx);
    __ push(edi);
    __ push(esi);
    __ CallStub(&create_stub);
    __ pop(esi);
    __ pop(edi);
    __ pop(edx);
    __ pop(ebx);
    __ pop(eax);
    __ SmiUntag(eax);
  }

  __ jmp(&call_function);

  // We are here because tracing is on or we encountered a MISS case we can't
  // handle here.
  __ bind(&miss);
  GenerateMiss(masm);

  __ jmp(&call_count_incremented);

  // Unreachable
  __ int3();
}


void CallICStub::GenerateMiss(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);

  // Preserve the number of arguments.
  __ SmiTag(eax);
  __ push(eax);

  // Push the function and feedback info.
  __ push(edi);
  __ push(ebx);
  __ push(edx);

  // Call the entry.
  __ CallRuntime(Runtime::kCallIC_Miss);

  // Move result to edi and exit the internal frame.
  __ mov(edi, eax);

  // Restore number of arguments.
  __ pop(eax);
  __ SmiUntag(eax);
}


bool CEntryStub::NeedsImmovableCode() {
  return false;
}


void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
  StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
  CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
  CreateWeakCellStub::GenerateAheadOfTime(isolate);
  BinaryOpICStub::GenerateAheadOfTime(isolate);
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
}


void CodeStub::GenerateFPStubs(Isolate* isolate) {
  CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
  // Stubs might already be in the snapshot, detect that and don't regenerate,
  // which would lead to code stub initialization state being messed up.
  Code* save_doubles_code;
  if (!save_doubles.FindCodeInCache(&save_doubles_code)) {
    save_doubles_code = *(save_doubles.GetCode());
  }
}


void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
}


void CEntryStub::Generate(MacroAssembler* masm) {
  // eax: number of arguments including receiver
  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)
  //
  // If argv_in_register():
  // ecx: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Reserve space on the stack for the three arguments passed to the call. If
  // result size is greater than can be returned in registers, also reserve
  // space for the hidden argument for the result location, and space for the
  // result itself.
  int arg_stack_space = result_size() < 3 ?
      3 : 4 + result_size();

  // Enter the exit frame that transitions from JavaScript to C++.
  if (argv_in_register()) {
    DCHECK(!save_doubles());
    DCHECK(!is_builtin_exit());
    __ EnterApiExitFrame(arg_stack_space);

    // Move argc and argv into the correct registers.
    __ mov(esi, ecx);
    __ mov(edi, eax);
  } else {
    __ EnterExitFrame(
        arg_stack_space, save_doubles(),
        is_builtin_exit() ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  }

  // ebx: pointer to C function (C callee-saved)
  // ebp: frame pointer (restored after C call)
  // esp: stack pointer (restored after C call)
  // edi: number of arguments including receiver (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result size is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }
  // Call C function.
  if (result_size() <= 2) {
    __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
    __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
    __ mov(Operand(esp, 2 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
  } else {
    DCHECK_EQ(3, result_size());
    // Pass a pointer to the result location as the first argument.
    __ lea(eax, Operand(esp, 4 * kPointerSize));
    __ mov(Operand(esp, 0 * kPointerSize), eax);
    __ mov(Operand(esp, 1 * kPointerSize), edi);  // argc.
    __ mov(Operand(esp, 2 * kPointerSize), esi);  // argv.
    __ mov(Operand(esp, 3 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
  }
  __ call(ebx);

  if (result_size() > 2) {
    DCHECK_EQ(3, result_size());
#ifndef _WIN32
    // Restore the "hidden" argument on the stack which was popped by caller.
    __ sub(esp, Immediate(kPointerSize));
#endif
    // Read result values stored on stack. Result is stored above the
    // arguments.
    __ mov(kReturnRegister0, Operand(esp, 4 * kPointerSize));
    __ mov(kReturnRegister1, Operand(esp, 5 * kPointerSize));
    __ mov(kReturnRegister2, Operand(esp, 6 * kPointerSize));
  }
  // Result is in eax, edx:eax or edi:edx:eax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ cmp(eax, isolate()->factory()->exception());
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    __ push(edx);
    __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
    Label okay;
    ExternalReference pending_exception_address(
        Isolate::kPendingExceptionAddress, isolate());
    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    // Cannot use check here as it attempts to generate call into runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles(), !argv_in_register());
  __ ret(0);

  // Handling of exception.
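  // The runtime call below unwinds the stack up to the handler and records the
  // handler's context, code, offset, frame pointer and stack pointer in the
  // per-isolate slots read back further down.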
1528 __ bind(&exception_returned); 1529 1530 ExternalReference pending_handler_context_address( 1531 Isolate::kPendingHandlerContextAddress, isolate()); 1532 ExternalReference pending_handler_code_address( 1533 Isolate::kPendingHandlerCodeAddress, isolate()); 1534 ExternalReference pending_handler_offset_address( 1535 Isolate::kPendingHandlerOffsetAddress, isolate()); 1536 ExternalReference pending_handler_fp_address( 1537 Isolate::kPendingHandlerFPAddress, isolate()); 1538 ExternalReference pending_handler_sp_address( 1539 Isolate::kPendingHandlerSPAddress, isolate()); 1540 1541 // Ask the runtime for help to determine the handler. This will set eax to 1542 // contain the current pending exception, don't clobber it. 1543 ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler, 1544 isolate()); 1545 { 1546 FrameScope scope(masm, StackFrame::MANUAL); 1547 __ PrepareCallCFunction(3, eax); 1548 __ mov(Operand(esp, 0 * kPointerSize), Immediate(0)); // argc. 1549 __ mov(Operand(esp, 1 * kPointerSize), Immediate(0)); // argv. 1550 __ mov(Operand(esp, 2 * kPointerSize), 1551 Immediate(ExternalReference::isolate_address(isolate()))); 1552 __ CallCFunction(find_handler, 3); 1553 } 1554 1555 // Retrieve the handler context, SP and FP. 1556 __ mov(esi, Operand::StaticVariable(pending_handler_context_address)); 1557 __ mov(esp, Operand::StaticVariable(pending_handler_sp_address)); 1558 __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address)); 1559 1560 // If the handler is a JS frame, restore the context to the frame. Note that 1561 // the context will be set to (esi == 0) for non-JS frames. 1562 Label skip; 1563 __ test(esi, esi); 1564 __ j(zero, &skip, Label::kNear); 1565 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); 1566 __ bind(&skip); 1567 1568 // Compute the handler entry address and jump to it. 1569 __ mov(edi, Operand::StaticVariable(pending_handler_code_address)); 1570 __ mov(edx, Operand::StaticVariable(pending_handler_offset_address)); 1571 // Check whether it's a turbofanned exception handler code before jump to it. 1572 Label not_turbo; 1573 __ push(eax); 1574 __ mov(eax, Operand(edi, Code::kKindSpecificFlags1Offset - kHeapObjectTag)); 1575 __ and_(eax, Immediate(1 << Code::kIsTurbofannedBit)); 1576 __ j(zero, ¬_turbo); 1577 __ fninit(); 1578 __ fld1(); 1579 __ bind(¬_turbo); 1580 __ pop(eax); 1581 __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize)); 1582 __ jmp(edi); 1583 } 1584 1585 1586 void JSEntryStub::Generate(MacroAssembler* masm) { 1587 Label invoke, handler_entry, exit; 1588 Label not_outermost_js, not_outermost_js_2; 1589 1590 ProfileEntryHookStub::MaybeCallEntryHook(masm); 1591 1592 // Set up frame. 1593 __ push(ebp); 1594 __ mov(ebp, esp); 1595 1596 // Push marker in two places. 1597 int marker = type(); 1598 __ push(Immediate(Smi::FromInt(marker))); // marker 1599 ExternalReference context_address(Isolate::kContextAddress, isolate()); 1600 __ push(Operand::StaticVariable(context_address)); // context 1601 // Save callee-saved registers (C calling conventions). 1602 __ push(edi); 1603 __ push(esi); 1604 __ push(ebx); 1605 1606 // Save copies of the top frame descriptor on the stack. 1607 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate()); 1608 __ push(Operand::StaticVariable(c_entry_fp)); 1609 1610 // If this is the outermost JS call, set js_entry_sp value. 
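  // Note: js_entry_sp is only written by the outermost JS entry. Nested
  // entries (JS -> C++ -> JS) find it already set, push the
  // INNER_JSENTRY_FRAME marker instead, and leave the slot alone so that it
  // is cleared only when the outermost entry frame exits below.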
1611 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate()); 1612 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0)); 1613 __ j(not_equal, ¬_outermost_js, Label::kNear); 1614 __ mov(Operand::StaticVariable(js_entry_sp), ebp); 1615 __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); 1616 __ jmp(&invoke, Label::kNear); 1617 __ bind(¬_outermost_js); 1618 __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); 1619 1620 // Jump to a faked try block that does the invoke, with a faked catch 1621 // block that sets the pending exception. 1622 __ jmp(&invoke); 1623 __ bind(&handler_entry); 1624 handler_offset_ = handler_entry.pos(); 1625 // Caught exception: Store result (exception) in the pending exception 1626 // field in the JSEnv and return a failure sentinel. 1627 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, 1628 isolate()); 1629 __ mov(Operand::StaticVariable(pending_exception), eax); 1630 __ mov(eax, Immediate(isolate()->factory()->exception())); 1631 __ jmp(&exit); 1632 1633 // Invoke: Link this frame into the handler chain. 1634 __ bind(&invoke); 1635 __ PushStackHandler(); 1636 1637 // Fake a receiver (NULL). 1638 __ push(Immediate(0)); // receiver 1639 1640 // Invoke the function by calling through JS entry trampoline builtin and 1641 // pop the faked function when we return. Notice that we cannot store a 1642 // reference to the trampoline code directly in this stub, because the 1643 // builtin stubs may not have been generated yet. 1644 if (type() == StackFrame::ENTRY_CONSTRUCT) { 1645 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, 1646 isolate()); 1647 __ mov(edx, Immediate(construct_entry)); 1648 } else { 1649 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate()); 1650 __ mov(edx, Immediate(entry)); 1651 } 1652 __ mov(edx, Operand(edx, 0)); // deref address 1653 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); 1654 __ call(edx); 1655 1656 // Unlink this frame from the handler chain. 1657 __ PopStackHandler(); 1658 1659 __ bind(&exit); 1660 // Check if the current stack frame is marked as the outermost JS frame. 1661 __ pop(ebx); 1662 __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); 1663 __ j(not_equal, ¬_outermost_js_2); 1664 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0)); 1665 __ bind(¬_outermost_js_2); 1666 1667 // Restore the top frame descriptor from the stack. 1668 __ pop(Operand::StaticVariable(ExternalReference( 1669 Isolate::kCEntryFPAddress, isolate()))); 1670 1671 // Restore callee-saved registers (C calling conventions). 1672 __ pop(ebx); 1673 __ pop(esi); 1674 __ pop(edi); 1675 __ add(esp, Immediate(2 * kPointerSize)); // remove markers 1676 1677 // Restore frame pointer and return. 1678 __ pop(ebp); 1679 __ ret(0); 1680 } 1681 1682 1683 // ------------------------------------------------------------------------- 1684 // StringCharCodeAtGenerator 1685 1686 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 1687 // If the receiver is a smi trigger the non-string case. 1688 if (check_mode_ == RECEIVER_IS_UNKNOWN) { 1689 __ JumpIfSmi(object_, receiver_not_string_); 1690 1691 // Fetch the instance type of the receiver into result register. 1692 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset)); 1693 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); 1694 // If the receiver is not a string trigger the non-string case. 
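    // String instance types have the string tag bits cleared (kStringTag is
    // zero), so any bit set under kIsNotStringMask means the receiver is not
    // a string.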
1695 __ test(result_, Immediate(kIsNotStringMask)); 1696 __ j(not_zero, receiver_not_string_); 1697 } 1698 1699 // If the index is non-smi trigger the non-smi case. 1700 __ JumpIfNotSmi(index_, &index_not_smi_); 1701 __ bind(&got_smi_index_); 1702 1703 // Check for index out of range. 1704 __ cmp(index_, FieldOperand(object_, String::kLengthOffset)); 1705 __ j(above_equal, index_out_of_range_); 1706 1707 __ SmiUntag(index_); 1708 1709 Factory* factory = masm->isolate()->factory(); 1710 StringCharLoadGenerator::Generate( 1711 masm, factory, object_, index_, result_, &call_runtime_); 1712 1713 __ SmiTag(result_); 1714 __ bind(&exit_); 1715 } 1716 1717 1718 void StringCharCodeAtGenerator::GenerateSlow( 1719 MacroAssembler* masm, EmbedMode embed_mode, 1720 const RuntimeCallHelper& call_helper) { 1721 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); 1722 1723 // Index is not a smi. 1724 __ bind(&index_not_smi_); 1725 // If index is a heap number, try converting it to an integer. 1726 __ CheckMap(index_, 1727 masm->isolate()->factory()->heap_number_map(), 1728 index_not_number_, 1729 DONT_DO_SMI_CHECK); 1730 call_helper.BeforeCall(masm); 1731 if (embed_mode == PART_OF_IC_HANDLER) { 1732 __ push(LoadWithVectorDescriptor::VectorRegister()); 1733 __ push(LoadDescriptor::SlotRegister()); 1734 } 1735 __ push(object_); 1736 __ push(index_); // Consumed by runtime conversion function. 1737 __ CallRuntime(Runtime::kNumberToSmi); 1738 if (!index_.is(eax)) { 1739 // Save the conversion result before the pop instructions below 1740 // have a chance to overwrite it. 1741 __ mov(index_, eax); 1742 } 1743 __ pop(object_); 1744 if (embed_mode == PART_OF_IC_HANDLER) { 1745 __ pop(LoadDescriptor::SlotRegister()); 1746 __ pop(LoadWithVectorDescriptor::VectorRegister()); 1747 } 1748 // Reload the instance type. 1749 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset)); 1750 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); 1751 call_helper.AfterCall(masm); 1752 // If index is still not a smi, it must be out of range. 1753 STATIC_ASSERT(kSmiTag == 0); 1754 __ JumpIfNotSmi(index_, index_out_of_range_); 1755 // Otherwise, return to the fast path. 1756 __ jmp(&got_smi_index_); 1757 1758 // Call runtime. We get here when the receiver is a string and the 1759 // index is a number, but the code of getting the actual character 1760 // is too complex (e.g., when the string needs to be flattened). 1761 __ bind(&call_runtime_); 1762 call_helper.BeforeCall(masm); 1763 __ push(object_); 1764 __ SmiTag(index_); 1765 __ push(index_); 1766 __ CallRuntime(Runtime::kStringCharCodeAtRT); 1767 if (!result_.is(eax)) { 1768 __ mov(result_, eax); 1769 } 1770 call_helper.AfterCall(masm); 1771 __ jmp(&exit_); 1772 1773 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); 1774 } 1775 1776 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm, 1777 Register left, 1778 Register right, 1779 Register scratch1, 1780 Register scratch2) { 1781 Register length = scratch1; 1782 1783 // Compare lengths. 1784 Label strings_not_equal, check_zero_length; 1785 __ mov(length, FieldOperand(left, String::kLengthOffset)); 1786 __ cmp(length, FieldOperand(right, String::kLengthOffset)); 1787 __ j(equal, &check_zero_length, Label::kNear); 1788 __ bind(&strings_not_equal); 1789 __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL))); 1790 __ ret(0); 1791 1792 // Check if the length is zero. 
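  // The lengths are Smis; since kSmiTag == 0, a Smi zero is also a zero
  // machine word, so testing the register against itself is sufficient here.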
1793 Label compare_chars; 1794 __ bind(&check_zero_length); 1795 STATIC_ASSERT(kSmiTag == 0); 1796 __ test(length, length); 1797 __ j(not_zero, &compare_chars, Label::kNear); 1798 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); 1799 __ ret(0); 1800 1801 // Compare characters. 1802 __ bind(&compare_chars); 1803 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2, 1804 &strings_not_equal, Label::kNear); 1805 1806 // Characters are equal. 1807 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); 1808 __ ret(0); 1809 } 1810 1811 1812 void StringHelper::GenerateCompareFlatOneByteStrings( 1813 MacroAssembler* masm, Register left, Register right, Register scratch1, 1814 Register scratch2, Register scratch3) { 1815 Counters* counters = masm->isolate()->counters(); 1816 __ IncrementCounter(counters->string_compare_native(), 1); 1817 1818 // Find minimum length. 1819 Label left_shorter; 1820 __ mov(scratch1, FieldOperand(left, String::kLengthOffset)); 1821 __ mov(scratch3, scratch1); 1822 __ sub(scratch3, FieldOperand(right, String::kLengthOffset)); 1823 1824 Register length_delta = scratch3; 1825 1826 __ j(less_equal, &left_shorter, Label::kNear); 1827 // Right string is shorter. Change scratch1 to be length of right string. 1828 __ sub(scratch1, length_delta); 1829 __ bind(&left_shorter); 1830 1831 Register min_length = scratch1; 1832 1833 // If either length is zero, just compare lengths. 1834 Label compare_lengths; 1835 __ test(min_length, min_length); 1836 __ j(zero, &compare_lengths, Label::kNear); 1837 1838 // Compare characters. 1839 Label result_not_equal; 1840 GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2, 1841 &result_not_equal, Label::kNear); 1842 1843 // Compare lengths - strings up to min-length are equal. 1844 __ bind(&compare_lengths); 1845 __ test(length_delta, length_delta); 1846 Label length_not_equal; 1847 __ j(not_zero, &length_not_equal, Label::kNear); 1848 1849 // Result is EQUAL. 1850 STATIC_ASSERT(EQUAL == 0); 1851 STATIC_ASSERT(kSmiTag == 0); 1852 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); 1853 __ ret(0); 1854 1855 Label result_greater; 1856 Label result_less; 1857 __ bind(&length_not_equal); 1858 __ j(greater, &result_greater, Label::kNear); 1859 __ jmp(&result_less, Label::kNear); 1860 __ bind(&result_not_equal); 1861 __ j(above, &result_greater, Label::kNear); 1862 __ bind(&result_less); 1863 1864 // Result is LESS. 1865 __ Move(eax, Immediate(Smi::FromInt(LESS))); 1866 __ ret(0); 1867 1868 // Result is GREATER. 1869 __ bind(&result_greater); 1870 __ Move(eax, Immediate(Smi::FromInt(GREATER))); 1871 __ ret(0); 1872 } 1873 1874 1875 void StringHelper::GenerateOneByteCharsCompareLoop( 1876 MacroAssembler* masm, Register left, Register right, Register length, 1877 Register scratch, Label* chars_not_equal, 1878 Label::Distance chars_not_equal_near) { 1879 // Change index to run from -length to -1 by adding length to string 1880 // start. This means that loop ends when index reaches zero, which 1881 // doesn't need an additional compare. 1882 __ SmiUntag(length); 1883 __ lea(left, 1884 FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize)); 1885 __ lea(right, 1886 FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize)); 1887 __ neg(length); 1888 Register index = length; // index = -length; 1889 1890 // Compare loop. 
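  // Illustrative sketch of the loop emitted below (not emitted code; assumes
  // both strings are flat one-byte and |length| has already been untagged):
  //   const uint8_t* left_end = left_chars + length;    // one past last char
  //   const uint8_t* right_end = right_chars + length;
  //   for (int i = -length; i != 0; ++i) {
  //     if (left_end[i] != right_end[i]) goto chars_not_equal;
  //   }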
1891 Label loop; 1892 __ bind(&loop); 1893 __ mov_b(scratch, Operand(left, index, times_1, 0)); 1894 __ cmpb(scratch, Operand(right, index, times_1, 0)); 1895 __ j(not_equal, chars_not_equal, chars_not_equal_near); 1896 __ inc(index); 1897 __ j(not_zero, &loop); 1898 } 1899 1900 1901 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { 1902 // ----------- S t a t e ------------- 1903 // -- edx : left 1904 // -- eax : right 1905 // -- esp[0] : return address 1906 // ----------------------------------- 1907 1908 // Load ecx with the allocation site. We stick an undefined dummy value here 1909 // and replace it with the real allocation site later when we instantiate this 1910 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). 1911 __ mov(ecx, isolate()->factory()->undefined_value()); 1912 1913 // Make sure that we actually patched the allocation site. 1914 if (FLAG_debug_code) { 1915 __ test(ecx, Immediate(kSmiTagMask)); 1916 __ Assert(not_equal, kExpectedAllocationSite); 1917 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), 1918 isolate()->factory()->allocation_site_map()); 1919 __ Assert(equal, kExpectedAllocationSite); 1920 } 1921 1922 // Tail call into the stub that handles binary operations with allocation 1923 // sites. 1924 BinaryOpWithAllocationSiteStub stub(isolate(), state()); 1925 __ TailCallStub(&stub); 1926 } 1927 1928 1929 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { 1930 DCHECK_EQ(CompareICState::BOOLEAN, state()); 1931 Label miss; 1932 Label::Distance const miss_distance = 1933 masm->emit_debug_code() ? Label::kFar : Label::kNear; 1934 1935 __ JumpIfSmi(edx, &miss, miss_distance); 1936 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); 1937 __ JumpIfSmi(eax, &miss, miss_distance); 1938 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); 1939 __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance); 1940 __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance); 1941 if (!Token::IsEqualityOp(op())) { 1942 __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset)); 1943 __ AssertSmi(eax); 1944 __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset)); 1945 __ AssertSmi(edx); 1946 __ xchg(eax, edx); 1947 } 1948 __ sub(eax, edx); 1949 __ Ret(); 1950 1951 __ bind(&miss); 1952 GenerateMiss(masm); 1953 } 1954 1955 1956 void CompareICStub::GenerateSmis(MacroAssembler* masm) { 1957 DCHECK(state() == CompareICState::SMI); 1958 Label miss; 1959 __ mov(ecx, edx); 1960 __ or_(ecx, eax); 1961 __ JumpIfNotSmi(ecx, &miss, Label::kNear); 1962 1963 if (GetCondition() == equal) { 1964 // For equality we do not care about the sign of the result. 1965 __ sub(eax, edx); 1966 } else { 1967 Label done; 1968 __ sub(edx, eax); 1969 __ j(no_overflow, &done, Label::kNear); 1970 // Correct sign of result in case of overflow. 1971 __ not_(edx); 1972 __ bind(&done); 1973 __ mov(eax, edx); 1974 } 1975 __ ret(0); 1976 1977 __ bind(&miss); 1978 GenerateMiss(masm); 1979 } 1980 1981 1982 void CompareICStub::GenerateNumbers(MacroAssembler* masm) { 1983 DCHECK(state() == CompareICState::NUMBER); 1984 1985 Label generic_stub, check_left; 1986 Label unordered, maybe_undefined1, maybe_undefined2; 1987 Label miss; 1988 1989 if (left() == CompareICState::SMI) { 1990 __ JumpIfNotSmi(edx, &miss); 1991 } 1992 if (right() == CompareICState::SMI) { 1993 __ JumpIfNotSmi(eax, &miss); 1994 } 1995 1996 // Inlining the double comparison and falling back to the general compare 1997 // stub if NaN is involved or SSE2 or CMOV is unsupported. 
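  // On this x87 port no SSE2 fast path is emitted: once both operands are
  // known to be Smis or heap numbers, the comparison is simply handed off to
  // the GENERIC CompareIC stub bound at the generic_stub label below.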
1998 __ JumpIfSmi(eax, &check_left, Label::kNear); 1999 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), 2000 isolate()->factory()->heap_number_map()); 2001 __ j(not_equal, &maybe_undefined1, Label::kNear); 2002 2003 __ bind(&check_left); 2004 __ JumpIfSmi(edx, &generic_stub, Label::kNear); 2005 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), 2006 isolate()->factory()->heap_number_map()); 2007 __ j(not_equal, &maybe_undefined2, Label::kNear); 2008 2009 __ bind(&unordered); 2010 __ bind(&generic_stub); 2011 CompareICStub stub(isolate(), op(), CompareICState::GENERIC, 2012 CompareICState::GENERIC, CompareICState::GENERIC); 2013 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); 2014 2015 __ bind(&maybe_undefined1); 2016 if (Token::IsOrderedRelationalCompareOp(op())) { 2017 __ cmp(eax, Immediate(isolate()->factory()->undefined_value())); 2018 __ j(not_equal, &miss); 2019 __ JumpIfSmi(edx, &unordered); 2020 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx); 2021 __ j(not_equal, &maybe_undefined2, Label::kNear); 2022 __ jmp(&unordered); 2023 } 2024 2025 __ bind(&maybe_undefined2); 2026 if (Token::IsOrderedRelationalCompareOp(op())) { 2027 __ cmp(edx, Immediate(isolate()->factory()->undefined_value())); 2028 __ j(equal, &unordered); 2029 } 2030 2031 __ bind(&miss); 2032 GenerateMiss(masm); 2033 } 2034 2035 2036 void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) { 2037 DCHECK(state() == CompareICState::INTERNALIZED_STRING); 2038 DCHECK(GetCondition() == equal); 2039 2040 // Registers containing left and right operands respectively. 2041 Register left = edx; 2042 Register right = eax; 2043 Register tmp1 = ecx; 2044 Register tmp2 = ebx; 2045 2046 // Check that both operands are heap objects. 2047 Label miss; 2048 __ mov(tmp1, left); 2049 STATIC_ASSERT(kSmiTag == 0); 2050 __ and_(tmp1, right); 2051 __ JumpIfSmi(tmp1, &miss, Label::kNear); 2052 2053 // Check that both operands are internalized strings. 2054 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); 2055 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); 2056 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); 2057 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); 2058 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); 2059 __ or_(tmp1, tmp2); 2060 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); 2061 __ j(not_zero, &miss, Label::kNear); 2062 2063 // Internalized strings are compared by identity. 2064 Label done; 2065 __ cmp(left, right); 2066 // Make sure eax is non-zero. At this point input operands are 2067 // guaranteed to be non-zero. 2068 DCHECK(right.is(eax)); 2069 __ j(not_equal, &done, Label::kNear); 2070 STATIC_ASSERT(EQUAL == 0); 2071 STATIC_ASSERT(kSmiTag == 0); 2072 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); 2073 __ bind(&done); 2074 __ ret(0); 2075 2076 __ bind(&miss); 2077 GenerateMiss(masm); 2078 } 2079 2080 2081 void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) { 2082 DCHECK(state() == CompareICState::UNIQUE_NAME); 2083 DCHECK(GetCondition() == equal); 2084 2085 // Registers containing left and right operands respectively. 2086 Register left = edx; 2087 Register right = eax; 2088 Register tmp1 = ecx; 2089 Register tmp2 = ebx; 2090 2091 // Check that both operands are heap objects. 2092 Label miss; 2093 __ mov(tmp1, left); 2094 STATIC_ASSERT(kSmiTag == 0); 2095 __ and_(tmp1, right); 2096 __ JumpIfSmi(tmp1, &miss, Label::kNear); 2097 2098 // Check that both operands are unique names. 
This leaves the instance 2099 // types loaded in tmp1 and tmp2. 2100 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); 2101 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); 2102 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); 2103 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); 2104 2105 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear); 2106 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear); 2107 2108 // Unique names are compared by identity. 2109 Label done; 2110 __ cmp(left, right); 2111 // Make sure eax is non-zero. At this point input operands are 2112 // guaranteed to be non-zero. 2113 DCHECK(right.is(eax)); 2114 __ j(not_equal, &done, Label::kNear); 2115 STATIC_ASSERT(EQUAL == 0); 2116 STATIC_ASSERT(kSmiTag == 0); 2117 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); 2118 __ bind(&done); 2119 __ ret(0); 2120 2121 __ bind(&miss); 2122 GenerateMiss(masm); 2123 } 2124 2125 2126 void CompareICStub::GenerateStrings(MacroAssembler* masm) { 2127 DCHECK(state() == CompareICState::STRING); 2128 Label miss; 2129 2130 bool equality = Token::IsEqualityOp(op()); 2131 2132 // Registers containing left and right operands respectively. 2133 Register left = edx; 2134 Register right = eax; 2135 Register tmp1 = ecx; 2136 Register tmp2 = ebx; 2137 Register tmp3 = edi; 2138 2139 // Check that both operands are heap objects. 2140 __ mov(tmp1, left); 2141 STATIC_ASSERT(kSmiTag == 0); 2142 __ and_(tmp1, right); 2143 __ JumpIfSmi(tmp1, &miss); 2144 2145 // Check that both operands are strings. This leaves the instance 2146 // types loaded in tmp1 and tmp2. 2147 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); 2148 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); 2149 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); 2150 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); 2151 __ mov(tmp3, tmp1); 2152 STATIC_ASSERT(kNotStringTag != 0); 2153 __ or_(tmp3, tmp2); 2154 __ test(tmp3, Immediate(kIsNotStringMask)); 2155 __ j(not_zero, &miss); 2156 2157 // Fast check for identical strings. 2158 Label not_same; 2159 __ cmp(left, right); 2160 __ j(not_equal, ¬_same, Label::kNear); 2161 STATIC_ASSERT(EQUAL == 0); 2162 STATIC_ASSERT(kSmiTag == 0); 2163 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); 2164 __ ret(0); 2165 2166 // Handle not identical strings. 2167 __ bind(¬_same); 2168 2169 // Check that both strings are internalized. If they are, we're done 2170 // because we already know they are not identical. But in the case of 2171 // non-equality compare, we still need to determine the order. We 2172 // also know they are both strings. 2173 if (equality) { 2174 Label do_compare; 2175 STATIC_ASSERT(kInternalizedTag == 0); 2176 __ or_(tmp1, tmp2); 2177 __ test(tmp1, Immediate(kIsNotInternalizedMask)); 2178 __ j(not_zero, &do_compare, Label::kNear); 2179 // Make sure eax is non-zero. At this point input operands are 2180 // guaranteed to be non-zero. 2181 DCHECK(right.is(eax)); 2182 __ ret(0); 2183 __ bind(&do_compare); 2184 } 2185 2186 // Check that both strings are sequential one-byte. 2187 Label runtime; 2188 __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime); 2189 2190 // Compare flat one byte strings. Returns when done. 
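  // Equality comparisons only need the cheaper byte-wise equality check,
  // while ordered comparisons (<, <=, >, >=) need the full lexicographic
  // compare that also takes the length difference into account. Both helpers
  // return directly to the caller.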
2191 if (equality) { 2192 StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1, 2193 tmp2); 2194 } else { 2195 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1, 2196 tmp2, tmp3); 2197 } 2198 2199 // Handle more complex cases in runtime. 2200 __ bind(&runtime); 2201 if (equality) { 2202 { 2203 FrameScope scope(masm, StackFrame::INTERNAL); 2204 __ Push(left); 2205 __ Push(right); 2206 __ CallRuntime(Runtime::kStringEqual); 2207 } 2208 __ sub(eax, Immediate(masm->isolate()->factory()->true_value())); 2209 __ Ret(); 2210 } else { 2211 __ pop(tmp1); // Return address. 2212 __ push(left); 2213 __ push(right); 2214 __ push(tmp1); 2215 __ TailCallRuntime(Runtime::kStringCompare); 2216 } 2217 2218 __ bind(&miss); 2219 GenerateMiss(masm); 2220 } 2221 2222 2223 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { 2224 DCHECK_EQ(CompareICState::RECEIVER, state()); 2225 Label miss; 2226 __ mov(ecx, edx); 2227 __ and_(ecx, eax); 2228 __ JumpIfSmi(ecx, &miss, Label::kNear); 2229 2230 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); 2231 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx); 2232 __ j(below, &miss, Label::kNear); 2233 __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx); 2234 __ j(below, &miss, Label::kNear); 2235 2236 DCHECK_EQ(equal, GetCondition()); 2237 __ sub(eax, edx); 2238 __ ret(0); 2239 2240 __ bind(&miss); 2241 GenerateMiss(masm); 2242 } 2243 2244 2245 void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) { 2246 Label miss; 2247 Handle<WeakCell> cell = Map::WeakCellForMap(known_map_); 2248 __ mov(ecx, edx); 2249 __ and_(ecx, eax); 2250 __ JumpIfSmi(ecx, &miss, Label::kNear); 2251 2252 __ GetWeakValue(edi, cell); 2253 __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset)); 2254 __ j(not_equal, &miss, Label::kNear); 2255 __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset)); 2256 __ j(not_equal, &miss, Label::kNear); 2257 2258 if (Token::IsEqualityOp(op())) { 2259 __ sub(eax, edx); 2260 __ ret(0); 2261 } else { 2262 __ PopReturnAddressTo(ecx); 2263 __ Push(edx); 2264 __ Push(eax); 2265 __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition())))); 2266 __ PushReturnAddressFrom(ecx); 2267 __ TailCallRuntime(Runtime::kCompare); 2268 } 2269 2270 __ bind(&miss); 2271 GenerateMiss(masm); 2272 } 2273 2274 2275 void CompareICStub::GenerateMiss(MacroAssembler* masm) { 2276 { 2277 // Call the runtime system in a fresh internal frame. 2278 FrameScope scope(masm, StackFrame::INTERNAL); 2279 __ push(edx); // Preserve edx and eax. 2280 __ push(eax); 2281 __ push(edx); // And also use them as the arguments. 2282 __ push(eax); 2283 __ push(Immediate(Smi::FromInt(op()))); 2284 __ CallRuntime(Runtime::kCompareIC_Miss); 2285 // Compute the entry point of the rewritten stub. 2286 __ lea(edi, FieldOperand(eax, Code::kHeaderSize)); 2287 __ pop(eax); 2288 __ pop(edx); 2289 } 2290 2291 // Do a tail call to the rewritten stub. 2292 __ jmp(edi); 2293 } 2294 2295 2296 // Helper function used to check that the dictionary doesn't contain 2297 // the property. This function may return false negatives, so miss_label 2298 // must always call a backup property check that is complete. 2299 // This function is safe to call if the receiver has fast properties. 2300 // Name must be a unique name and receiver must be a heap object. 
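// Conceptually, each inlined probe below performs the following (an
// illustrative sketch, not emitted code):
//   index = (hash + i + i * i) & (capacity - 1);   // capacity is 2^n
//   key   = entry at kElementsStartOffset + index * kEntrySize
//   key == undefined      -> done  (name cannot be in the dictionary)
//   key == name           -> miss  (name is present)
//   key not a unique name -> miss  (might be equal to the name)
// Keys that are the hole (deleted properties) are skipped.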
2301 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, 2302 Label* miss, 2303 Label* done, 2304 Register properties, 2305 Handle<Name> name, 2306 Register r0) { 2307 DCHECK(name->IsUniqueName()); 2308 2309 // If names of slots in range from 1 to kProbes - 1 for the hash value are 2310 // not equal to the name and kProbes-th slot is not used (its name is the 2311 // undefined value), it guarantees the hash table doesn't contain the 2312 // property. It's true even if some slots represent deleted properties 2313 // (their names are the hole value). 2314 for (int i = 0; i < kInlinedProbes; i++) { 2315 // Compute the masked index: (hash + i + i * i) & mask. 2316 Register index = r0; 2317 // Capacity is smi 2^n. 2318 __ mov(index, FieldOperand(properties, kCapacityOffset)); 2319 __ dec(index); 2320 __ and_(index, 2321 Immediate(Smi::FromInt(name->Hash() + 2322 NameDictionary::GetProbeOffset(i)))); 2323 2324 // Scale the index by multiplying by the entry size. 2325 STATIC_ASSERT(NameDictionary::kEntrySize == 3); 2326 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. 2327 Register entity_name = r0; 2328 // Having undefined at this place means the name is not contained. 2329 STATIC_ASSERT(kSmiTagSize == 1); 2330 __ mov(entity_name, Operand(properties, index, times_half_pointer_size, 2331 kElementsStartOffset - kHeapObjectTag)); 2332 __ cmp(entity_name, masm->isolate()->factory()->undefined_value()); 2333 __ j(equal, done); 2334 2335 // Stop if found the property. 2336 __ cmp(entity_name, Handle<Name>(name)); 2337 __ j(equal, miss); 2338 2339 Label good; 2340 // Check for the hole and skip. 2341 __ cmp(entity_name, masm->isolate()->factory()->the_hole_value()); 2342 __ j(equal, &good, Label::kNear); 2343 2344 // Check if the entry name is not a unique name. 2345 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); 2346 __ JumpIfNotUniqueNameInstanceType( 2347 FieldOperand(entity_name, Map::kInstanceTypeOffset), miss); 2348 __ bind(&good); 2349 } 2350 2351 NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0, 2352 NEGATIVE_LOOKUP); 2353 __ push(Immediate(Handle<Object>(name))); 2354 __ push(Immediate(name->Hash())); 2355 __ CallStub(&stub); 2356 __ test(r0, r0); 2357 __ j(not_zero, miss); 2358 __ jmp(done); 2359 } 2360 2361 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { 2362 // This stub overrides SometimesSetsUpAFrame() to return false. That means 2363 // we cannot call anything that could cause a GC from this stub. 2364 // Stack frame on entry: 2365 // esp[0 * kPointerSize]: return address. 2366 // esp[1 * kPointerSize]: key's hash. 2367 // esp[2 * kPointerSize]: key. 2368 // Registers: 2369 // dictionary_: NameDictionary to probe. 2370 // result_: used as scratch. 2371 // index_: will hold an index of entry if lookup is successful. 2372 // might alias with result_. 2373 // Returns: 2374 // result_ is zero if lookup failed, non zero otherwise. 2375 2376 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; 2377 2378 Register scratch = result(); 2379 2380 __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset)); 2381 __ dec(scratch); 2382 __ SmiUntag(scratch); 2383 __ push(scratch); 2384 2385 // If names of slots in range from 1 to kProbes - 1 for the hash value are 2386 // not equal to the name and kProbes-th slot is not used (its name is the 2387 // undefined value), it guarantees the hash table doesn't contain the 2388 // property. 
  // It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
    if (i > 0) {
      __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(esp, 0));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
                            kElementsStartOffset - kHeapObjectTag));
    __ cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmp(scratch, Operand(esp, 3 * kPointerSize));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bailout as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ mov(result(), Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ mov(result(), Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ mov(result(), Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
  stub.GetCode();
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}


// Takes the input in 3 registers: address_ value_ and object_.  A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed.  The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call.  We patch it back and
  // forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
2469 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); 2470 __ jmp(&skip_to_incremental_compacting, Label::kFar); 2471 2472 if (remembered_set_action() == EMIT_REMEMBERED_SET) { 2473 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), 2474 MacroAssembler::kReturnAtEnd); 2475 } else { 2476 __ ret(0); 2477 } 2478 2479 __ bind(&skip_to_incremental_noncompacting); 2480 GenerateIncremental(masm, INCREMENTAL); 2481 2482 __ bind(&skip_to_incremental_compacting); 2483 GenerateIncremental(masm, INCREMENTAL_COMPACTION); 2484 2485 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY. 2486 // Will be checked in IncrementalMarking::ActivateGeneratedStub. 2487 masm->set_byte_at(0, kTwoByteNopInstruction); 2488 masm->set_byte_at(2, kFiveByteNopInstruction); 2489 } 2490 2491 2492 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { 2493 regs_.Save(masm); 2494 2495 if (remembered_set_action() == EMIT_REMEMBERED_SET) { 2496 Label dont_need_remembered_set; 2497 2498 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); 2499 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. 2500 regs_.scratch0(), 2501 &dont_need_remembered_set); 2502 2503 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), 2504 &dont_need_remembered_set); 2505 2506 // First notify the incremental marker if necessary, then update the 2507 // remembered set. 2508 CheckNeedsToInformIncrementalMarker( 2509 masm, 2510 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, 2511 mode); 2512 InformIncrementalMarker(masm); 2513 regs_.Restore(masm); 2514 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), 2515 MacroAssembler::kReturnAtEnd); 2516 2517 __ bind(&dont_need_remembered_set); 2518 } 2519 2520 CheckNeedsToInformIncrementalMarker( 2521 masm, 2522 kReturnOnNoNeedToInformIncrementalMarker, 2523 mode); 2524 InformIncrementalMarker(masm); 2525 regs_.Restore(masm); 2526 __ ret(0); 2527 } 2528 2529 2530 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { 2531 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode()); 2532 int argument_count = 3; 2533 __ PrepareCallCFunction(argument_count, regs_.scratch0()); 2534 __ mov(Operand(esp, 0 * kPointerSize), regs_.object()); 2535 __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot. 2536 __ mov(Operand(esp, 2 * kPointerSize), 2537 Immediate(ExternalReference::isolate_address(isolate()))); 2538 2539 AllowExternalCallThatCantCauseGC scope(masm); 2540 __ CallCFunction( 2541 ExternalReference::incremental_marking_record_write_function(isolate()), 2542 argument_count); 2543 2544 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode()); 2545 } 2546 2547 2548 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( 2549 MacroAssembler* masm, 2550 OnNoNeedToInformIncrementalMarker on_no_need, 2551 Mode mode) { 2552 Label object_is_black, need_incremental, need_incremental_pop_object; 2553 2554 // Let's look at the color of the object: If it is not black we don't have 2555 // to inform the incremental marker. 2556 __ JumpIfBlack(regs_.object(), 2557 regs_.scratch0(), 2558 regs_.scratch1(), 2559 &object_is_black, 2560 Label::kNear); 2561 2562 regs_.Restore(masm); 2563 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 2564 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), 2565 MacroAssembler::kReturnAtEnd); 2566 } else { 2567 __ ret(0); 2568 } 2569 2570 __ bind(&object_is_black); 2571 2572 // Get the value from the slot. 
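  // What happens next depends on the value loaded below: when compacting, a
  // value that lives on an evacuation candidate page forces us to inform the
  // incremental marker (unless the object's page skips slot recording); in
  // every other case it is enough to make sure the value is not white.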
2573 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); 2574 2575 if (mode == INCREMENTAL_COMPACTION) { 2576 Label ensure_not_white; 2577 2578 __ CheckPageFlag(regs_.scratch0(), // Contains value. 2579 regs_.scratch1(), // Scratch. 2580 MemoryChunk::kEvacuationCandidateMask, 2581 zero, 2582 &ensure_not_white, 2583 Label::kNear); 2584 2585 __ CheckPageFlag(regs_.object(), 2586 regs_.scratch1(), // Scratch. 2587 MemoryChunk::kSkipEvacuationSlotsRecordingMask, 2588 not_zero, 2589 &ensure_not_white, 2590 Label::kNear); 2591 2592 __ jmp(&need_incremental); 2593 2594 __ bind(&ensure_not_white); 2595 } 2596 2597 // We need an extra register for this, so we push the object register 2598 // temporarily. 2599 __ push(regs_.object()); 2600 __ JumpIfWhite(regs_.scratch0(), // The value. 2601 regs_.scratch1(), // Scratch. 2602 regs_.object(), // Scratch. 2603 &need_incremental_pop_object, Label::kNear); 2604 __ pop(regs_.object()); 2605 2606 regs_.Restore(masm); 2607 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 2608 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), 2609 MacroAssembler::kReturnAtEnd); 2610 } else { 2611 __ ret(0); 2612 } 2613 2614 __ bind(&need_incremental_pop_object); 2615 __ pop(regs_.object()); 2616 2617 __ bind(&need_incremental); 2618 2619 // Fall through when we need to inform the incremental marker. 2620 } 2621 2622 2623 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { 2624 CEntryStub ces(isolate(), 1, kSaveFPRegs); 2625 __ call(ces.GetCode(), RelocInfo::CODE_TARGET); 2626 int parameter_count_offset = 2627 StubFailureTrampolineFrameConstants::kArgumentsLengthOffset; 2628 __ mov(ebx, MemOperand(ebp, parameter_count_offset)); 2629 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); 2630 __ pop(ecx); 2631 int additional_offset = 2632 function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0; 2633 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset)); 2634 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack. 2635 } 2636 2637 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { 2638 if (masm->isolate()->function_entry_hook() != NULL) { 2639 ProfileEntryHookStub stub(masm->isolate()); 2640 masm->CallStub(&stub); 2641 } 2642 } 2643 2644 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { 2645 // Save volatile registers. 2646 const int kNumSavedRegisters = 3; 2647 __ push(eax); 2648 __ push(ecx); 2649 __ push(edx); 2650 2651 // Calculate and push the original stack pointer. 2652 __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); 2653 __ push(eax); 2654 2655 // Retrieve our return address and use it to calculate the calling 2656 // function's address. 2657 __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); 2658 __ sub(eax, Immediate(Assembler::kCallInstructionLength)); 2659 __ push(eax); 2660 2661 // Call the entry hook. 2662 DCHECK(isolate()->function_entry_hook() != NULL); 2663 __ call(FUNCTION_ADDR(isolate()->function_entry_hook()), 2664 RelocInfo::RUNTIME_ENTRY); 2665 __ add(esp, Immediate(2 * kPointerSize)); 2666 2667 // Restore ecx. 
2668 __ pop(edx); 2669 __ pop(ecx); 2670 __ pop(eax); 2671 2672 __ ret(0); 2673 } 2674 2675 template <class T> 2676 static void CreateArrayDispatch(MacroAssembler* masm, 2677 AllocationSiteOverrideMode mode) { 2678 if (mode == DISABLE_ALLOCATION_SITES) { 2679 T stub(masm->isolate(), GetInitialFastElementsKind(), mode); 2680 __ TailCallStub(&stub); 2681 } else if (mode == DONT_OVERRIDE) { 2682 int last_index = 2683 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); 2684 for (int i = 0; i <= last_index; ++i) { 2685 Label next; 2686 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 2687 __ cmp(edx, kind); 2688 __ j(not_equal, &next); 2689 T stub(masm->isolate(), kind); 2690 __ TailCallStub(&stub); 2691 __ bind(&next); 2692 } 2693 2694 // If we reached this point there is a problem. 2695 __ Abort(kUnexpectedElementsKindInArrayConstructor); 2696 } else { 2697 UNREACHABLE(); 2698 } 2699 } 2700 2701 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, 2702 AllocationSiteOverrideMode mode) { 2703 // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES) 2704 // edx - kind (if mode != DISABLE_ALLOCATION_SITES) 2705 // eax - number of arguments 2706 // edi - constructor? 2707 // esp[0] - return address 2708 // esp[4] - last argument 2709 Label normal_sequence; 2710 if (mode == DONT_OVERRIDE) { 2711 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); 2712 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); 2713 STATIC_ASSERT(FAST_ELEMENTS == 2); 2714 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); 2715 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4); 2716 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); 2717 2718 // is the low bit set? If so, we are holey and that is good. 2719 __ test_b(edx, Immediate(1)); 2720 __ j(not_zero, &normal_sequence); 2721 } 2722 2723 // look at the first argument 2724 __ mov(ecx, Operand(esp, kPointerSize)); 2725 __ test(ecx, ecx); 2726 __ j(zero, &normal_sequence); 2727 2728 if (mode == DISABLE_ALLOCATION_SITES) { 2729 ElementsKind initial = GetInitialFastElementsKind(); 2730 ElementsKind holey_initial = GetHoleyElementsKind(initial); 2731 2732 ArraySingleArgumentConstructorStub stub_holey( 2733 masm->isolate(), holey_initial, DISABLE_ALLOCATION_SITES); 2734 __ TailCallStub(&stub_holey); 2735 2736 __ bind(&normal_sequence); 2737 ArraySingleArgumentConstructorStub stub(masm->isolate(), initial, 2738 DISABLE_ALLOCATION_SITES); 2739 __ TailCallStub(&stub); 2740 } else if (mode == DONT_OVERRIDE) { 2741 // We are going to create a holey array, but our kind is non-holey. 2742 // Fix kind and retry. 2743 __ inc(edx); 2744 2745 if (FLAG_debug_code) { 2746 Handle<Map> allocation_site_map = 2747 masm->isolate()->factory()->allocation_site_map(); 2748 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); 2749 __ Assert(equal, kExpectedAllocationSite); 2750 } 2751 2752 // Save the resulting elements kind in type info. We can't just store r3 2753 // in the AllocationSite::transition_info field because elements kind is 2754 // restricted to a portion of the field...upper bits need to be left alone. 
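    // Since packed and holey kinds differ by exactly one (see the
    // STATIC_ASSERTs at the top of this function) and ElementsKindBits start
    // at bit 0, adding kFastElementsKindPackedToHoley to the Smi-tagged
    // transition info below turns the recorded packed kind into its holey
    // counterpart without disturbing the upper bits of the field.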
2755 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 2756 __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset), 2757 Immediate(Smi::FromInt(kFastElementsKindPackedToHoley))); 2758 2759 __ bind(&normal_sequence); 2760 int last_index = 2761 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); 2762 for (int i = 0; i <= last_index; ++i) { 2763 Label next; 2764 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 2765 __ cmp(edx, kind); 2766 __ j(not_equal, &next); 2767 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind); 2768 __ TailCallStub(&stub); 2769 __ bind(&next); 2770 } 2771 2772 // If we reached this point there is a problem. 2773 __ Abort(kUnexpectedElementsKindInArrayConstructor); 2774 } else { 2775 UNREACHABLE(); 2776 } 2777 } 2778 2779 template <class T> 2780 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { 2781 int to_index = 2782 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND); 2783 for (int i = 0; i <= to_index; ++i) { 2784 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 2785 T stub(isolate, kind); 2786 stub.GetCode(); 2787 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { 2788 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES); 2789 stub1.GetCode(); 2790 } 2791 } 2792 } 2793 2794 void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) { 2795 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( 2796 isolate); 2797 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( 2798 isolate); 2799 ArrayNArgumentsConstructorStub stub(isolate); 2800 stub.GetCode(); 2801 2802 ElementsKind kinds[2] = {FAST_ELEMENTS, FAST_HOLEY_ELEMENTS}; 2803 for (int i = 0; i < 2; i++) { 2804 // For internal arrays we only need a few things 2805 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]); 2806 stubh1.GetCode(); 2807 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]); 2808 stubh2.GetCode(); 2809 } 2810 } 2811 2812 void ArrayConstructorStub::GenerateDispatchToArrayStub( 2813 MacroAssembler* masm, AllocationSiteOverrideMode mode) { 2814 Label not_zero_case, not_one_case; 2815 __ test(eax, eax); 2816 __ j(not_zero, ¬_zero_case); 2817 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode); 2818 2819 __ bind(¬_zero_case); 2820 __ cmp(eax, 1); 2821 __ j(greater, ¬_one_case); 2822 CreateArrayDispatchOneArgument(masm, mode); 2823 2824 __ bind(¬_one_case); 2825 ArrayNArgumentsConstructorStub stub(masm->isolate()); 2826 __ TailCallStub(&stub); 2827 } 2828 2829 void ArrayConstructorStub::Generate(MacroAssembler* masm) { 2830 // ----------- S t a t e ------------- 2831 // -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE) 2832 // -- ebx : AllocationSite or undefined 2833 // -- edi : constructor 2834 // -- edx : Original constructor 2835 // -- esp[0] : return address 2836 // -- esp[4] : last argument 2837 // ----------------------------------- 2838 if (FLAG_debug_code) { 2839 // The array construct code is only set for the global and natives 2840 // builtin Array functions which always have maps. 2841 2842 // Initial map for the builtin Array function should be a map. 2843 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); 2844 // Will both indicate a NULL and a Smi. 
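    // Both a NULL pointer and a Smi have the low tag bit clear, so the single
    // kSmiTagMask test below rules out either a missing initial map or a Smi
    // left in the slot.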
2845 __ test(ecx, Immediate(kSmiTagMask)); 2846 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction); 2847 __ CmpObjectType(ecx, MAP_TYPE, ecx); 2848 __ Assert(equal, kUnexpectedInitialMapForArrayFunction); 2849 2850 // We should either have undefined in ebx or a valid AllocationSite 2851 __ AssertUndefinedOrAllocationSite(ebx); 2852 } 2853 2854 Label subclassing; 2855 2856 // Enter the context of the Array function. 2857 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); 2858 2859 __ cmp(edx, edi); 2860 __ j(not_equal, &subclassing); 2861 2862 Label no_info; 2863 // If the feedback vector is the undefined value call an array constructor 2864 // that doesn't use AllocationSites. 2865 __ cmp(ebx, isolate()->factory()->undefined_value()); 2866 __ j(equal, &no_info); 2867 2868 // Only look at the lower 16 bits of the transition info. 2869 __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset)); 2870 __ SmiUntag(edx); 2871 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 2872 __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask)); 2873 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); 2874 2875 __ bind(&no_info); 2876 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); 2877 2878 // Subclassing. 2879 __ bind(&subclassing); 2880 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi); 2881 __ add(eax, Immediate(3)); 2882 __ PopReturnAddressTo(ecx); 2883 __ Push(edx); 2884 __ Push(ebx); 2885 __ PushReturnAddressFrom(ecx); 2886 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); 2887 } 2888 2889 void InternalArrayConstructorStub::GenerateCase(MacroAssembler* masm, 2890 ElementsKind kind) { 2891 Label not_zero_case, not_one_case; 2892 Label normal_sequence; 2893 2894 __ test(eax, eax); 2895 __ j(not_zero, ¬_zero_case); 2896 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); 2897 __ TailCallStub(&stub0); 2898 2899 __ bind(¬_zero_case); 2900 __ cmp(eax, 1); 2901 __ j(greater, ¬_one_case); 2902 2903 if (IsFastPackedElementsKind(kind)) { 2904 // We might need to create a holey array 2905 // look at the first argument 2906 __ mov(ecx, Operand(esp, kPointerSize)); 2907 __ test(ecx, ecx); 2908 __ j(zero, &normal_sequence); 2909 2910 InternalArraySingleArgumentConstructorStub stub1_holey( 2911 isolate(), GetHoleyElementsKind(kind)); 2912 __ TailCallStub(&stub1_holey); 2913 } 2914 2915 __ bind(&normal_sequence); 2916 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind); 2917 __ TailCallStub(&stub1); 2918 2919 __ bind(¬_one_case); 2920 ArrayNArgumentsConstructorStub stubN(isolate()); 2921 __ TailCallStub(&stubN); 2922 } 2923 2924 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { 2925 // ----------- S t a t e ------------- 2926 // -- eax : argc 2927 // -- edi : constructor 2928 // -- esp[0] : return address 2929 // -- esp[4] : last argument 2930 // ----------------------------------- 2931 2932 if (FLAG_debug_code) { 2933 // The array construct code is only set for the global and natives 2934 // builtin Array functions which always have maps. 2935 2936 // Initial map for the builtin Array function should be a map. 2937 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); 2938 // Will both indicate a NULL and a Smi. 
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(ecx);

  if (FLAG_debug_code) {
    Label done;
    __ cmp(ecx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmp(ecx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}

void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edi    : function
  //  -- esi    : context
  //  -- ebp    : frame pointer
  //  -- esp[0] : return address
  // -----------------------------------
  __ AssertFunction(edi);

  // Make edx point to the JavaScript frame.
  __ mov(edx, ebp);
  if (skip_stub_frame()) {
    // For Ignition we need to skip the handler/stub frame to reach the
    // JavaScript frame for the function.
    __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
  }
  if (FLAG_debug_code) {
    Label ok;
    __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
    __ j(equal, &ok);
    __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
    __ bind(&ok);
  }

  // Check if we have rest parameters (only possible if we have an
  // arguments adaptor frame below the function frame).
  Label no_rest_parameters;
  __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &no_rest_parameters, Label::kNear);

  // Check if the arguments adaptor frame contains more arguments than
  // specified by the function's internal formal parameter count.
  Label rest_parameters;
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ sub(eax,
         FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ j(greater, &rest_parameters);

  // Return an empty rest parameter array.
  __ bind(&no_rest_parameters);
  {
    // ----------- S t a t e -------------
    //  -- esi    : context
    //  -- esp[0] : return address
    // -----------------------------------

    // Allocate an empty rest parameter array.
    Label allocate, done_allocate;
    __ Allocate(JSArray::kSize, eax, edx, ecx, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the rest parameter array in eax.
    __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
    __ mov(FieldOperand(eax, JSArray::kMapOffset), ecx);
    __ mov(ecx, isolate()->factory()->empty_fixed_array());
    __ mov(FieldOperand(eax, JSArray::kPropertiesOffset), ecx);
    __ mov(FieldOperand(eax, JSArray::kElementsOffset), ecx);
    __ mov(FieldOperand(eax, JSArray::kLengthOffset), Immediate(Smi::kZero));
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ Ret();

    // Fall back to %AllocateInNewSpace.
    __ bind(&allocate);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(Smi::FromInt(JSArray::kSize));
      __ CallRuntime(Runtime::kAllocateInNewSpace);
    }
    __ jmp(&done_allocate);
  }

  __ bind(&rest_parameters);
  {
    // Compute the pointer to the first rest parameter (skipping the receiver).
    __ lea(ebx,
           Operand(ebx, eax, times_half_pointer_size,
                   StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));

    // ----------- S t a t e -------------
    //  -- esi    : context
    //  -- eax    : number of rest parameters (tagged)
    //  -- ebx    : pointer to first rest parameters
    //  -- esp[0] : return address
    // -----------------------------------

    // Allocate space for the rest parameter array plus the backing store.
    Label allocate, done_allocate;
    __ lea(ecx, Operand(eax, times_half_pointer_size,
                        JSArray::kSize + FixedArray::kHeaderSize));
    __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
    __ bind(&done_allocate);

    // Setup the elements array in edx.
    __ mov(FieldOperand(edx, FixedArray::kMapOffset),
           isolate()->factory()->fixed_array_map());
    __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
    {
      Label loop, done_loop;
      __ Move(ecx, Smi::kZero);
      __ bind(&loop);
      __ cmp(ecx, eax);
      __ j(equal, &done_loop, Label::kNear);
      __ mov(edi, Operand(ebx, 0 * kPointerSize));
      __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
                          FixedArray::kHeaderSize),
             edi);
      __ sub(ebx, Immediate(1 * kPointerSize));
      __ add(ecx, Immediate(Smi::FromInt(1)));
      __ jmp(&loop);
      __ bind(&done_loop);
    }

    // Setup the rest parameter array in edi.
    __ lea(edi,
           Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
    __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
    __ mov(FieldOperand(edi, JSArray::kMapOffset), ecx);
    __ mov(FieldOperand(edi, JSArray::kPropertiesOffset),
           isolate()->factory()->empty_fixed_array());
    __ mov(FieldOperand(edi, JSArray::kElementsOffset), edx);
    __ mov(FieldOperand(edi, JSArray::kLengthOffset), eax);
    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
    __ mov(eax, edi);
    __ Ret();

    // Fall back to %AllocateInNewSpace (if not too big).
    Label too_big_for_new_space;
    __ bind(&allocate);
    __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize));
    __ j(greater, &too_big_for_new_space);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ SmiTag(ecx);
      __ Push(eax);
      __ Push(ebx);
      __ Push(ecx);
      __ CallRuntime(Runtime::kAllocateInNewSpace);
      __ mov(edx, eax);
      __ Pop(ebx);
      __ Pop(eax);
    }
    __ jmp(&done_allocate);

    // Fall back to %NewRestParameter.
3119 __ bind(&too_big_for_new_space); 3120 __ PopReturnAddressTo(ecx); 3121 // We reload the function from the caller frame due to register pressure 3122 // within this stub. This is the slow path, hence reloading is preferable. 3123 if (skip_stub_frame()) { 3124 // For Ignition we need to skip the handler/stub frame to reach the 3125 // JavaScript frame for the function. 3126 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); 3127 __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset)); 3128 } else { 3129 __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset)); 3130 } 3131 __ PushReturnAddressFrom(ecx); 3132 __ TailCallRuntime(Runtime::kNewRestParameter); 3133 } 3134 } 3135 3136 void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) { 3137 // ----------- S t a t e ------------- 3138 // -- edi : function 3139 // -- esi : context 3140 // -- ebp : frame pointer 3141 // -- esp[0] : return address 3142 // ----------------------------------- 3143 __ AssertFunction(edi); 3144 3145 // Make ecx point to the JavaScript frame. 3146 __ mov(ecx, ebp); 3147 if (skip_stub_frame()) { 3148 // For Ignition we need to skip the handler/stub frame to reach the 3149 // JavaScript frame for the function. 3150 __ mov(ecx, Operand(ecx, StandardFrameConstants::kCallerFPOffset)); 3151 } 3152 if (FLAG_debug_code) { 3153 Label ok; 3154 __ cmp(edi, Operand(ecx, StandardFrameConstants::kFunctionOffset)); 3155 __ j(equal, &ok); 3156 __ Abort(kInvalidFrameForFastNewSloppyArgumentsStub); 3157 __ bind(&ok); 3158 } 3159 3160 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub. 3161 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); 3162 __ mov(ebx, 3163 FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset)); 3164 __ lea(edx, Operand(ecx, ebx, times_half_pointer_size, 3165 StandardFrameConstants::kCallerSPOffset)); 3166 3167 // ebx : number of parameters (tagged) 3168 // edx : parameters pointer 3169 // edi : function 3170 // ecx : JavaScript frame pointer. 3171 // esp[0] : return address 3172 3173 // Check if the calling frame is an arguments adaptor frame. 3174 Label adaptor_frame, try_allocate, runtime; 3175 __ mov(eax, Operand(ecx, StandardFrameConstants::kCallerFPOffset)); 3176 __ mov(eax, Operand(eax, CommonFrameConstants::kContextOrFrameTypeOffset)); 3177 __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3178 __ j(equal, &adaptor_frame, Label::kNear); 3179 3180 // No adaptor, parameter count = argument count. 3181 __ mov(ecx, ebx); 3182 __ push(ebx); 3183 __ jmp(&try_allocate, Label::kNear); 3184 3185 // We have an adaptor frame. Patch the parameters pointer. 3186 __ bind(&adaptor_frame); 3187 __ push(ebx); 3188 __ mov(edx, Operand(ecx, StandardFrameConstants::kCallerFPOffset)); 3189 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 3190 __ lea(edx, 3191 Operand(edx, ecx, times_2, StandardFrameConstants::kCallerSPOffset)); 3192 3193 // ebx = parameter count (tagged) 3194 // ecx = argument count (smi-tagged) 3195 // Compute the mapped parameter count = min(ebx, ecx) in ebx. 3196 __ cmp(ebx, ecx); 3197 __ j(less_equal, &try_allocate, Label::kNear); 3198 __ mov(ebx, ecx); 3199 3200 // Save mapped parameter count and function. 3201 __ bind(&try_allocate); 3202 __ push(edi); 3203 __ push(ebx); 3204 3205 // Compute the sizes of backing store, parameter map, and arguments object. 3206 // 1. Parameter map, has 2 extra words containing context and backing store. 
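// Taken together, the size accumulated in ebx below works out to roughly
// (counts untagged):
//   kParameterMapHeaderSize + mapped_count * kPointerSize   (only if mapped)
//   + FixedArray::kHeaderSize + argument_count * kPointerSize
//   + JSSloppyArgumentsObject::kSize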
3207 const int kParameterMapHeaderSize = 3208 FixedArray::kHeaderSize + 2 * kPointerSize; 3209 Label no_parameter_map; 3210 __ test(ebx, ebx); 3211 __ j(zero, &no_parameter_map, Label::kNear); 3212 __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize)); 3213 __ bind(&no_parameter_map); 3214 3215 // 2. Backing store. 3216 __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize)); 3217 3218 // 3. Arguments object. 3219 __ add(ebx, Immediate(JSSloppyArgumentsObject::kSize)); 3220 3221 // Do the allocation of all three objects in one go. 3222 __ Allocate(ebx, eax, edi, no_reg, &runtime, NO_ALLOCATION_FLAGS); 3223 3224 // eax = address of new object(s) (tagged) 3225 // ecx = argument count (smi-tagged) 3226 // esp[0] = mapped parameter count (tagged) 3227 // esp[4] = function 3228 // esp[8] = parameter count (tagged) 3229 // Get the arguments map from the current native context into edi. 3230 Label has_mapped_parameters, instantiate; 3231 __ mov(edi, NativeContextOperand()); 3232 __ mov(ebx, Operand(esp, 0 * kPointerSize)); 3233 __ test(ebx, ebx); 3234 __ j(not_zero, &has_mapped_parameters, Label::kNear); 3235 __ mov( 3236 edi, 3237 Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX))); 3238 __ jmp(&instantiate, Label::kNear); 3239 3240 __ bind(&has_mapped_parameters); 3241 __ mov(edi, Operand(edi, Context::SlotOffset( 3242 Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX))); 3243 __ bind(&instantiate); 3244 3245 // eax = address of new object (tagged) 3246 // ebx = mapped parameter count (tagged) 3247 // ecx = argument count (smi-tagged) 3248 // edi = address of arguments map (tagged) 3249 // esp[0] = mapped parameter count (tagged) 3250 // esp[4] = function 3251 // esp[8] = parameter count (tagged) 3252 // Copy the JS object part. 3253 __ mov(FieldOperand(eax, JSObject::kMapOffset), edi); 3254 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), 3255 masm->isolate()->factory()->empty_fixed_array()); 3256 __ mov(FieldOperand(eax, JSObject::kElementsOffset), 3257 masm->isolate()->factory()->empty_fixed_array()); 3258 3259 // Set up the callee in-object property. 3260 STATIC_ASSERT(JSSloppyArgumentsObject::kCalleeIndex == 1); 3261 __ mov(edi, Operand(esp, 1 * kPointerSize)); 3262 __ AssertNotSmi(edi); 3263 __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kCalleeOffset), edi); 3264 3265 // Use the length (smi tagged) and set that as an in-object property too. 3266 __ AssertSmi(ecx); 3267 __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kLengthOffset), ecx); 3268 3269 // Set up the elements pointer in the allocated arguments object. 3270 // If we allocated a parameter map, edi will point there, otherwise to the 3271 // backing store. 3272 __ lea(edi, Operand(eax, JSSloppyArgumentsObject::kSize)); 3273 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); 3274 3275 // eax = address of new object (tagged) 3276 // ebx = mapped parameter count (tagged) 3277 // ecx = argument count (tagged) 3278 // edx = address of receiver argument 3279 // edi = address of parameter map or backing store (tagged) 3280 // esp[0] = mapped parameter count (tagged) 3281 // esp[4] = function 3282 // esp[8] = parameter count (tagged) 3283 // Free two registers. 3284 __ push(edx); 3285 __ push(eax); 3286 3287 // Initialize parameter map. If there are no mapped arguments, we're done. 
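// For illustration, the parameter map built here is a FixedArray with the
// sloppy_arguments_elements_map whose slot 0 holds the context and slot 1
// the backing store; each following slot holds the context slot index (a
// smi) of the corresponding mapped parameter, while the matching backing
// store entry is set to the hole so that loads fall through to the context.
// E.g. with parameter_count == 2 and mapped_count == 2 the two map entries
// receive MIN_CONTEXT_SLOTS + 1 and MIN_CONTEXT_SLOTS + 0, matching the
// reverse order in which parameters live in the context (see the loop
// below).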
3288 Label skip_parameter_map;
3289 __ test(ebx, ebx);
3290 __ j(zero, &skip_parameter_map);
3291
3292 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3293 Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
3294 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
3295 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
3296 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
3297 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
3298 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);
3299
3300 // Copy the parameter slots and the holes in the arguments.
3301 // We need to fill in mapped_parameter_count slots. They index the context,
3302 // where parameters are stored in reverse order, at
3303 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
3304 // The mapped parameters thus need to get indices
3305 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
3306 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
3307 // We loop from right to left.
3308 Label parameters_loop, parameters_test;
3309 __ push(ecx);
3310 __ mov(eax, Operand(esp, 3 * kPointerSize));
3311 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
3312 __ add(ebx, Operand(esp, 5 * kPointerSize));
3313 __ sub(ebx, eax);
3314 __ mov(ecx, isolate()->factory()->the_hole_value());
3315 __ mov(edx, edi);
3316 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
3317 // eax = loop variable (tagged)
3318 // ebx = mapping index (tagged)
3319 // ecx = the hole value
3320 // edx = address of parameter map (tagged)
3321 // edi = address of backing store (tagged)
3322 // esp[0] = argument count (tagged)
3323 // esp[4] = address of new object (tagged)
3324 // esp[8] = address of receiver argument
3325 // esp[12] = mapped parameter count (tagged)
3326 // esp[16] = function
3327 // esp[20] = parameter count (tagged)
3328 __ jmp(&parameters_test, Label::kNear);
3329
3330 __ bind(&parameters_loop);
3331 __ sub(eax, Immediate(Smi::FromInt(1)));
3332 __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
3333 __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
3334 __ add(ebx, Immediate(Smi::FromInt(1)));
3335 __ bind(&parameters_test);
3336 __ test(eax, eax);
3337 __ j(not_zero, &parameters_loop, Label::kNear);
3338 __ pop(ecx);
3339
3340 __ bind(&skip_parameter_map);
3341
3342 // ecx = argument count (tagged)
3343 // edi = address of backing store (tagged)
3344 // esp[0] = address of new object (tagged)
3345 // esp[4] = address of receiver argument
3346 // esp[8] = mapped parameter count (tagged)
3347 // esp[12] = function
3348 // esp[16] = parameter count (tagged)
3349 // Copy arguments header and remaining slots (if there are any).
3350 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3351 Immediate(isolate()->factory()->fixed_array_map()));
3352 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
3353
3354 Label arguments_loop, arguments_test;
3355 __ mov(ebx, Operand(esp, 2 * kPointerSize));
3356 __ mov(edx, Operand(esp, 1 * kPointerSize));
3357 __ sub(edx, ebx); // Is there a smarter way to do negative scaling?
3358 __ sub(edx, ebx); 3359 __ jmp(&arguments_test, Label::kNear); 3360 3361 __ bind(&arguments_loop); 3362 __ sub(edx, Immediate(kPointerSize)); 3363 __ mov(eax, Operand(edx, 0)); 3364 __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax); 3365 __ add(ebx, Immediate(Smi::FromInt(1))); 3366 3367 __ bind(&arguments_test); 3368 __ cmp(ebx, ecx); 3369 __ j(less, &arguments_loop, Label::kNear); 3370 3371 // Restore. 3372 __ pop(eax); // Address of arguments object. 3373 __ Drop(4); 3374 3375 // Return. 3376 __ ret(0); 3377 3378 // Do the runtime call to allocate the arguments object. 3379 __ bind(&runtime); 3380 __ pop(eax); // Remove saved mapped parameter count. 3381 __ pop(edi); // Pop saved function. 3382 __ pop(eax); // Remove saved parameter count. 3383 __ pop(eax); // Pop return address. 3384 __ push(edi); // Push function. 3385 __ push(edx); // Push parameters pointer. 3386 __ push(ecx); // Push parameter count. 3387 __ push(eax); // Push return address. 3388 __ TailCallRuntime(Runtime::kNewSloppyArguments); 3389 } 3390 3391 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { 3392 // ----------- S t a t e ------------- 3393 // -- edi : function 3394 // -- esi : context 3395 // -- ebp : frame pointer 3396 // -- esp[0] : return address 3397 // ----------------------------------- 3398 __ AssertFunction(edi); 3399 3400 // Make edx point to the JavaScript frame. 3401 __ mov(edx, ebp); 3402 if (skip_stub_frame()) { 3403 // For Ignition we need to skip the handler/stub frame to reach the 3404 // JavaScript frame for the function. 3405 __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset)); 3406 } 3407 if (FLAG_debug_code) { 3408 Label ok; 3409 __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset)); 3410 __ j(equal, &ok); 3411 __ Abort(kInvalidFrameForFastNewStrictArgumentsStub); 3412 __ bind(&ok); 3413 } 3414 3415 // Check if we have an arguments adaptor frame below the function frame. 3416 Label arguments_adaptor, arguments_done; 3417 __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset)); 3418 __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset), 3419 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); 3420 __ j(equal, &arguments_adaptor, Label::kNear); 3421 { 3422 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); 3423 __ mov(eax, 3424 FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset)); 3425 __ lea(ebx, 3426 Operand(edx, eax, times_half_pointer_size, 3427 StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize)); 3428 } 3429 __ jmp(&arguments_done, Label::kNear); 3430 __ bind(&arguments_adaptor); 3431 { 3432 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 3433 __ lea(ebx, 3434 Operand(ebx, eax, times_half_pointer_size, 3435 StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize)); 3436 } 3437 __ bind(&arguments_done); 3438 3439 // ----------- S t a t e ------------- 3440 // -- eax : number of arguments (tagged) 3441 // -- ebx : pointer to the first argument 3442 // -- esi : context 3443 // -- esp[0] : return address 3444 // ----------------------------------- 3445 3446 // Allocate space for the strict arguments object plus the backing store. 
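// Unlike the sloppy case above, there is no callee property and no parameter
// map: the allocation is a JSStrictArgumentsObject directly followed by its
// FixedArray backing store, i.e. roughly
//   JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize +
//       argc * kPointerSize
// bytes, which the lea below derives from the tagged argument count in eax.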
3447 Label allocate, done_allocate;
3448 __ lea(ecx,
3449 Operand(eax, times_half_pointer_size,
3450 JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
3451 __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
3452 __ bind(&done_allocate);
3453
3454 // Setup the elements array in edx.
3455 __ mov(FieldOperand(edx, FixedArray::kMapOffset),
3456 isolate()->factory()->fixed_array_map());
3457 __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
3458 {
3459 Label loop, done_loop;
3460 __ Move(ecx, Smi::kZero);
3461 __ bind(&loop);
3462 __ cmp(ecx, eax);
3463 __ j(equal, &done_loop, Label::kNear);
3464 __ mov(edi, Operand(ebx, 0 * kPointerSize));
3465 __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
3466 FixedArray::kHeaderSize),
3467 edi);
3468 __ sub(ebx, Immediate(1 * kPointerSize));
3469 __ add(ecx, Immediate(Smi::FromInt(1)));
3470 __ jmp(&loop);
3471 __ bind(&done_loop);
3472 }
3473
3474 // Setup the strict arguments object in edi.
3475 __ lea(edi,
3476 Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
3477 __ LoadGlobalFunction(Context::STRICT_ARGUMENTS_MAP_INDEX, ecx);
3478 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kMapOffset), ecx);
3479 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kPropertiesOffset),
3480 isolate()->factory()->empty_fixed_array());
3481 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kElementsOffset), edx);
3482 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kLengthOffset), eax);
3483 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
3484 __ mov(eax, edi);
3485 __ Ret();
3486
3487 // Fall back to %AllocateInNewSpace (if not too big).
3488 Label too_big_for_new_space;
3489 __ bind(&allocate);
3490 __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize));
3491 __ j(greater, &too_big_for_new_space);
3492 {
3493 FrameScope scope(masm, StackFrame::INTERNAL);
3494 __ SmiTag(ecx);
3495 __ Push(eax);
3496 __ Push(ebx);
3497 __ Push(ecx);
3498 __ CallRuntime(Runtime::kAllocateInNewSpace);
3499 __ mov(edx, eax);
3500 __ Pop(ebx);
3501 __ Pop(eax);
3502 }
3503 __ jmp(&done_allocate);
3504
3505 // Fall back to %NewStrictArguments.
3506 __ bind(&too_big_for_new_space);
3507 __ PopReturnAddressTo(ecx);
3508 // We reload the function from the caller frame due to register pressure
3509 // within this stub. This is the slow path, hence reloading is preferable.
3510 if (skip_stub_frame()) {
3511 // For Ignition we need to skip the handler/stub frame to reach the
3512 // JavaScript frame for the function.
3513 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3514 __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
3515 } else {
3516 __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
3517 }
3518 __ PushReturnAddressFrom(ecx);
3519 __ TailCallRuntime(Runtime::kNewStrictArguments);
3520 }
3521
3522 // Generates an Operand for saving parameters after PrepareCallApiFunction.
3523 static Operand ApiParameterOperand(int index) {
3524 return Operand(esp, index * kPointerSize);
3525 }
3526
3527
3528 // Prepares stack to put arguments (aligns and so on). Reserves
3529 // space for return value if needed (assumes the return value is a handle).
3530 // Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
3531 // etc. Saves context (esi). If space was reserved for return value then
3532 // stores the pointer to the reserved slot into esi.
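// In sketch form, a typical use in this file (see CallApiCallbackStub below)
// is:
//   PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);
//   __ mov(ApiParameterOperand(0), ...);  // C arguments for the callback
//   CallApiFunctionAndReturn(masm, ...);  // call, unwind and handle errors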
3533 static void PrepareCallApiFunction(MacroAssembler* masm, int argc) { 3534 __ EnterApiExitFrame(argc); 3535 if (__ emit_debug_code()) { 3536 __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue))); 3537 } 3538 } 3539 3540 3541 // Calls an API function. Allocates HandleScope, extracts returned value 3542 // from handle and propagates exceptions. Clobbers ebx, edi and 3543 // caller-save registers. Restores context. On return removes 3544 // stack_space * kPointerSize (GCed). 3545 static void CallApiFunctionAndReturn(MacroAssembler* masm, 3546 Register function_address, 3547 ExternalReference thunk_ref, 3548 Operand thunk_last_arg, int stack_space, 3549 Operand* stack_space_operand, 3550 Operand return_value_operand, 3551 Operand* context_restore_operand) { 3552 Isolate* isolate = masm->isolate(); 3553 3554 ExternalReference next_address = 3555 ExternalReference::handle_scope_next_address(isolate); 3556 ExternalReference limit_address = 3557 ExternalReference::handle_scope_limit_address(isolate); 3558 ExternalReference level_address = 3559 ExternalReference::handle_scope_level_address(isolate); 3560 3561 DCHECK(edx.is(function_address)); 3562 // Allocate HandleScope in callee-save registers. 3563 __ mov(ebx, Operand::StaticVariable(next_address)); 3564 __ mov(edi, Operand::StaticVariable(limit_address)); 3565 __ add(Operand::StaticVariable(level_address), Immediate(1)); 3566 3567 if (FLAG_log_timer_events) { 3568 FrameScope frame(masm, StackFrame::MANUAL); 3569 __ PushSafepointRegisters(); 3570 __ PrepareCallCFunction(1, eax); 3571 __ mov(Operand(esp, 0), 3572 Immediate(ExternalReference::isolate_address(isolate))); 3573 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), 3574 1); 3575 __ PopSafepointRegisters(); 3576 } 3577 3578 3579 Label profiler_disabled; 3580 Label end_profiler_check; 3581 __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate))); 3582 __ cmpb(Operand(eax, 0), Immediate(0)); 3583 __ j(zero, &profiler_disabled); 3584 3585 // Additional parameter is the address of the actual getter function. 3586 __ mov(thunk_last_arg, function_address); 3587 // Call the api function. 3588 __ mov(eax, Immediate(thunk_ref)); 3589 __ call(eax); 3590 __ jmp(&end_profiler_check); 3591 3592 __ bind(&profiler_disabled); 3593 // Call the api function. 3594 __ call(function_address); 3595 __ bind(&end_profiler_check); 3596 3597 if (FLAG_log_timer_events) { 3598 FrameScope frame(masm, StackFrame::MANUAL); 3599 __ PushSafepointRegisters(); 3600 __ PrepareCallCFunction(1, eax); 3601 __ mov(Operand(esp, 0), 3602 Immediate(ExternalReference::isolate_address(isolate))); 3603 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), 3604 1); 3605 __ PopSafepointRegisters(); 3606 } 3607 3608 Label prologue; 3609 // Load the value from ReturnValue 3610 __ mov(eax, return_value_operand); 3611 3612 Label promote_scheduled_exception; 3613 Label delete_allocated_handles; 3614 Label leave_exit_frame; 3615 3616 __ bind(&prologue); 3617 // No more valid handles (the result handle was the last one). Restore 3618 // previous handle scope. 3619 __ mov(Operand::StaticVariable(next_address), ebx); 3620 __ sub(Operand::StaticVariable(level_address), Immediate(1)); 3621 __ Assert(above_equal, kInvalidHandleScopeLevel); 3622 __ cmp(edi, Operand::StaticVariable(limit_address)); 3623 __ j(not_equal, &delete_allocated_handles); 3624 3625 // Leave the API exit frame. 
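// (This label is also the re-entry point of the delete_allocated_handles
// path at the bottom of this function, which jumps back here after the
// handle scope extensions have been deleted.)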
3626 __ bind(&leave_exit_frame); 3627 bool restore_context = context_restore_operand != NULL; 3628 if (restore_context) { 3629 __ mov(esi, *context_restore_operand); 3630 } 3631 if (stack_space_operand != nullptr) { 3632 __ mov(ebx, *stack_space_operand); 3633 } 3634 __ LeaveApiExitFrame(!restore_context); 3635 3636 // Check if the function scheduled an exception. 3637 ExternalReference scheduled_exception_address = 3638 ExternalReference::scheduled_exception_address(isolate); 3639 __ cmp(Operand::StaticVariable(scheduled_exception_address), 3640 Immediate(isolate->factory()->the_hole_value())); 3641 __ j(not_equal, &promote_scheduled_exception); 3642 3643 #if DEBUG 3644 // Check if the function returned a valid JavaScript value. 3645 Label ok; 3646 Register return_value = eax; 3647 Register map = ecx; 3648 3649 __ JumpIfSmi(return_value, &ok, Label::kNear); 3650 __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset)); 3651 3652 __ CmpInstanceType(map, LAST_NAME_TYPE); 3653 __ j(below_equal, &ok, Label::kNear); 3654 3655 __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE); 3656 __ j(above_equal, &ok, Label::kNear); 3657 3658 __ cmp(map, isolate->factory()->heap_number_map()); 3659 __ j(equal, &ok, Label::kNear); 3660 3661 __ cmp(return_value, isolate->factory()->undefined_value()); 3662 __ j(equal, &ok, Label::kNear); 3663 3664 __ cmp(return_value, isolate->factory()->true_value()); 3665 __ j(equal, &ok, Label::kNear); 3666 3667 __ cmp(return_value, isolate->factory()->false_value()); 3668 __ j(equal, &ok, Label::kNear); 3669 3670 __ cmp(return_value, isolate->factory()->null_value()); 3671 __ j(equal, &ok, Label::kNear); 3672 3673 __ Abort(kAPICallReturnedInvalidObject); 3674 3675 __ bind(&ok); 3676 #endif 3677 3678 if (stack_space_operand != nullptr) { 3679 DCHECK_EQ(0, stack_space); 3680 __ pop(ecx); 3681 __ add(esp, ebx); 3682 __ jmp(ecx); 3683 } else { 3684 __ ret(stack_space * kPointerSize); 3685 } 3686 3687 // Re-throw by promoting a scheduled exception. 3688 __ bind(&promote_scheduled_exception); 3689 __ TailCallRuntime(Runtime::kPromoteScheduledException); 3690 3691 // HandleScope limit has changed. Delete allocated extensions. 3692 ExternalReference delete_extensions = 3693 ExternalReference::delete_handle_scope_extensions(isolate); 3694 __ bind(&delete_allocated_handles); 3695 __ mov(Operand::StaticVariable(limit_address), edi); 3696 __ mov(edi, eax); 3697 __ mov(Operand(esp, 0), 3698 Immediate(ExternalReference::isolate_address(isolate))); 3699 __ mov(eax, Immediate(delete_extensions)); 3700 __ call(eax); 3701 __ mov(eax, edi); 3702 __ jmp(&leave_exit_frame); 3703 } 3704 3705 void CallApiCallbackStub::Generate(MacroAssembler* masm) { 3706 // ----------- S t a t e ------------- 3707 // -- edi : callee 3708 // -- ebx : call_data 3709 // -- ecx : holder 3710 // -- edx : api_function_address 3711 // -- esi : context 3712 // -- 3713 // -- esp[0] : return address 3714 // -- esp[4] : last argument 3715 // -- ... 
3716 // -- esp[argc * 4] : first argument
3717 // -- esp[(argc + 1) * 4] : receiver
3718 // -----------------------------------
3719
3720 Register callee = edi;
3721 Register call_data = ebx;
3722 Register holder = ecx;
3723 Register api_function_address = edx;
3724 Register context = esi;
3725 Register return_address = eax;
3726
3727 typedef FunctionCallbackArguments FCA;
3728
3729 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
3730 STATIC_ASSERT(FCA::kCalleeIndex == 5);
3731 STATIC_ASSERT(FCA::kDataIndex == 4);
3732 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
3733 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
3734 STATIC_ASSERT(FCA::kIsolateIndex == 1);
3735 STATIC_ASSERT(FCA::kHolderIndex == 0);
3736 STATIC_ASSERT(FCA::kNewTargetIndex == 7);
3737 STATIC_ASSERT(FCA::kArgsLength == 8);
3738
3739 __ pop(return_address);
3740
3741 // new target
3742 __ PushRoot(Heap::kUndefinedValueRootIndex);
3743
3744 // context save.
3745 __ push(context);
3746
3747 // callee
3748 __ push(callee);
3749
3750 // call data
3751 __ push(call_data);
3752
3753 Register scratch = call_data;
3754 if (!call_data_undefined()) {
3755 // return value
3756 __ push(Immediate(masm->isolate()->factory()->undefined_value()));
3757 // return value default
3758 __ push(Immediate(masm->isolate()->factory()->undefined_value()));
3759 } else {
3760 // return value
3761 __ push(scratch);
3762 // return value default
3763 __ push(scratch);
3764 }
3765 // isolate
3766 __ push(Immediate(reinterpret_cast<int>(masm->isolate())));
3767 // holder
3768 __ push(holder);
3769
3770 __ mov(scratch, esp);
3771
3772 // push return address
3773 __ push(return_address);
3774
3775 if (!is_lazy()) {
3776 // load context from callee
3777 __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
3778 }
3779
3780 // The API function gets a reference to the v8::Arguments. If the CPU
3781 // profiler is enabled, a wrapper function will be called instead and we
3782 // need to pass the address of the callback as an additional parameter, so
3783 // always allocate space for it.
3784 const int kApiArgc = 1 + 1;
3785
3786 // Allocate the v8::Arguments structure in the arguments' space, since
3787 // it's not controlled by GC.
3788 const int kApiStackSpace = 3;
3789
3790 PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);
3791
3792 // FunctionCallbackInfo::implicit_args_.
3793 __ mov(ApiParameterOperand(2), scratch);
3794 __ add(scratch, Immediate((argc() + FCA::kArgsLength - 1) * kPointerSize));
3795 // FunctionCallbackInfo::values_.
3796 __ mov(ApiParameterOperand(3), scratch);
3797 // FunctionCallbackInfo::length_.
3798 __ Move(ApiParameterOperand(4), Immediate(argc()));
3799
3800 // v8::InvocationCallback's argument.
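// The outgoing C call slots are used as follows: ApiParameterOperand(0)
// receives a pointer to the FunctionCallbackInfo assembled in slots 2..4
// (implicit_args_, values_, length_), while ApiParameterOperand(1) stays
// reserved for the callback address in case the CPU profiler thunk is used.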
3801 __ lea(scratch, ApiParameterOperand(2));
3802 __ mov(ApiParameterOperand(0), scratch);
3803
3804 ExternalReference thunk_ref =
3805 ExternalReference::invoke_function_callback(masm->isolate());
3806
3807 Operand context_restore_operand(ebp,
3808 (2 + FCA::kContextSaveIndex) * kPointerSize);
3809 // Stores return the first JS argument.
3810 int return_value_offset = 0;
3811 if (is_store()) {
3812 return_value_offset = 2 + FCA::kArgsLength;
3813 } else {
3814 return_value_offset = 2 + FCA::kReturnValueOffset;
3815 }
3816 Operand return_value_operand(ebp, return_value_offset * kPointerSize);
3817 int stack_space = 0;
3818 Operand length_operand = ApiParameterOperand(4);
3819 Operand* stack_space_operand = &length_operand;
3820 stack_space = argc() + FCA::kArgsLength + 1;
3821 stack_space_operand = nullptr;
3822 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
3823 ApiParameterOperand(1), stack_space,
3824 stack_space_operand, return_value_operand,
3825 &context_restore_operand);
3826 }
3827
3828
3829 void CallApiGetterStub::Generate(MacroAssembler* masm) {
3830 // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
3831 // name below the exit frame to make GC aware of them.
3832 STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
3833 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
3834 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
3835 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
3836 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
3837 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
3838 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
3839 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
3840
3841 Register receiver = ApiGetterDescriptor::ReceiverRegister();
3842 Register holder = ApiGetterDescriptor::HolderRegister();
3843 Register callback = ApiGetterDescriptor::CallbackRegister();
3844 Register scratch = ebx;
3845 DCHECK(!AreAliased(receiver, holder, callback, scratch));
3846
3847 __ pop(scratch); // Pop return address to extend the frame.
3848 __ push(receiver);
3849 __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
3850 __ PushRoot(Heap::kUndefinedValueRootIndex); // ReturnValue
3851 // ReturnValue default value
3852 __ PushRoot(Heap::kUndefinedValueRootIndex);
3853 __ push(Immediate(ExternalReference::isolate_address(isolate())));
3854 __ push(holder);
3855 __ push(Immediate(Smi::kZero)); // should_throw_on_error -> false
3856 __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
3857 __ push(scratch); // Restore return address.
3858
3859 // v8::PropertyCallbackInfo::args_ array and name handle.
3860 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
3861
3862 // Allocate v8::PropertyCallbackInfo object, arguments for callback and
3863 // space for optional callback address parameter (in case CPU profiler is
3864 // active) in non-GCed stack space.
3865 const int kApiArgc = 3 + 1;
3866
3867 // Load address of v8::PropertyCallbackInfo::args_ array.
3868 __ lea(scratch, Operand(esp, 2 * kPointerSize));
3869
3870 PrepareCallApiFunction(masm, kApiArgc);
3871 // Create v8::PropertyCallbackInfo object on the stack and initialize
3872 // its args_ field.
3873 Operand info_object = ApiParameterOperand(3);
3874 __ mov(info_object, scratch);
3875
3876 // Name as handle.
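// The property name was pushed immediately below the args_ array, so its
// handle location is simply args_ minus one word; that address becomes the
// getter's first C argument.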
3877 __ sub(scratch, Immediate(kPointerSize));
3878 __ mov(ApiParameterOperand(0), scratch);
3879 // Arguments pointer.
3880 __ lea(scratch, info_object);
3881 __ mov(ApiParameterOperand(1), scratch);
3882 // Reserve space for optional callback address parameter.
3883 Operand thunk_last_arg = ApiParameterOperand(2);
3884
3885 ExternalReference thunk_ref =
3886 ExternalReference::invoke_accessor_getter_callback(isolate());
3887
3888 __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
3889 Register function_address = edx;
3890 __ mov(function_address,
3891 FieldOperand(scratch, Foreign::kForeignAddressOffset));
3892 // +3 is to skip prologue, return address and name handle.
3893 Operand return_value_operand(
3894 ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
3895 CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
3896 kStackUnwindSpace, nullptr, return_value_operand,
3897 NULL);
3898 }
3899
3900 #undef __
3901
3902 } // namespace internal
3903 } // namespace v8
3904
3905 #endif // V8_TARGET_ARCH_X87
3906