1 // Copyright 2013 the V8 project authors. All rights reserved. 2 // Use of this source code is governed by a BSD-style license that can be 3 // found in the LICENSE file. 4 5 #if V8_TARGET_ARCH_X64 6 7 #include "src/code-stubs.h" 8 #include "src/api-arguments.h" 9 #include "src/bootstrapper.h" 10 #include "src/codegen.h" 11 #include "src/ic/handler-compiler.h" 12 #include "src/ic/ic.h" 13 #include "src/ic/stub-cache.h" 14 #include "src/isolate.h" 15 #include "src/regexp/jsregexp.h" 16 #include "src/regexp/regexp-macro-assembler.h" 17 #include "src/runtime/runtime.h" 18 #include "src/x64/code-stubs-x64.h" 19 20 namespace v8 { 21 namespace internal { 22 23 #define __ ACCESS_MASM(masm) 24 25 void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) { 26 __ popq(rcx); 27 __ movq(MemOperand(rsp, rax, times_8, 0), rdi); 28 __ pushq(rdi); 29 __ pushq(rbx); 30 __ pushq(rcx); 31 __ addq(rax, Immediate(3)); 32 __ TailCallRuntime(Runtime::kNewArray); 33 } 34 35 void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) { 36 Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry; 37 descriptor->Initialize(rax, deopt_handler, -1, JS_FUNCTION_STUB_MODE); 38 } 39 40 void FastFunctionBindStub::InitializeDescriptor( 41 CodeStubDescriptor* descriptor) { 42 Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry; 43 descriptor->Initialize(rax, deopt_handler, -1, JS_FUNCTION_STUB_MODE); 44 } 45 46 void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm, 47 ExternalReference miss) { 48 // Update the static counter each time a new code stub is generated. 49 isolate()->counters()->code_stubs()->Increment(); 50 51 CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor(); 52 int param_count = descriptor.GetRegisterParameterCount(); 53 { 54 // Call the runtime system in a fresh internal frame. 55 FrameScope scope(masm, StackFrame::INTERNAL); 56 DCHECK(param_count == 0 || 57 rax.is(descriptor.GetRegisterParameter(param_count - 1))); 58 // Push arguments 59 for (int i = 0; i < param_count; ++i) { 60 __ Push(descriptor.GetRegisterParameter(i)); 61 } 62 __ CallExternalReference(miss, param_count); 63 } 64 65 __ Ret(); 66 } 67 68 69 void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { 70 __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs); 71 const int argument_count = 1; 72 __ PrepareCallCFunction(argument_count); 73 __ LoadAddress(arg_reg_1, 74 ExternalReference::isolate_address(isolate())); 75 76 AllowExternalCallThatCantCauseGC scope(masm); 77 __ CallCFunction( 78 ExternalReference::store_buffer_overflow_function(isolate()), 79 argument_count); 80 __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs); 81 __ ret(0); 82 } 83 84 85 class FloatingPointHelper : public AllStatic { 86 public: 87 enum ConvertUndefined { 88 CONVERT_UNDEFINED_TO_ZERO, 89 BAILOUT_ON_UNDEFINED 90 }; 91 // Load the operands from rdx and rax into xmm0 and xmm1, as doubles. 92 // If the operands are not both numbers, jump to not_numbers. 93 // Leaves rdx and rax unchanged. SmiOperands assumes both are smis. 94 // NumberOperands assumes both are smis or heap numbers. 95 static void LoadSSE2UnknownOperands(MacroAssembler* masm, 96 Label* not_numbers); 97 }; 98 99 100 void DoubleToIStub::Generate(MacroAssembler* masm) { 101 Register input_reg = this->source(); 102 Register final_result_reg = this->destination(); 103 DCHECK(is_truncating()); 104 105 Label check_negative, process_64_bits, done; 106 107 int double_offset = offset(); 108 109 // Account for return address and saved regs if input is rsp. 110 if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize; 111 112 MemOperand mantissa_operand(MemOperand(input_reg, double_offset)); 113 MemOperand exponent_operand(MemOperand(input_reg, 114 double_offset + kDoubleSize / 2)); 115 116 Register scratch1; 117 Register scratch_candidates[3] = { rbx, rdx, rdi }; 118 for (int i = 0; i < 3; i++) { 119 scratch1 = scratch_candidates[i]; 120 if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break; 121 } 122 123 // Since we must use rcx for shifts below, use some other register (rax) 124 // to calculate the result if ecx is the requested return register. 125 Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg; 126 // Save ecx if it isn't the return register and therefore volatile, or if it 127 // is the return register, then save the temp register we use in its stead 128 // for the result. 129 Register save_reg = final_result_reg.is(rcx) ? rax : rcx; 130 __ pushq(scratch1); 131 __ pushq(save_reg); 132 133 bool stash_exponent_copy = !input_reg.is(rsp); 134 __ movl(scratch1, mantissa_operand); 135 __ Movsd(kScratchDoubleReg, mantissa_operand); 136 __ movl(rcx, exponent_operand); 137 if (stash_exponent_copy) __ pushq(rcx); 138 139 __ andl(rcx, Immediate(HeapNumber::kExponentMask)); 140 __ shrl(rcx, Immediate(HeapNumber::kExponentShift)); 141 __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias)); 142 __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits)); 143 __ j(below, &process_64_bits); 144 145 // Result is entirely in lower 32-bits of mantissa 146 int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize; 147 __ subl(rcx, Immediate(delta)); 148 __ xorl(result_reg, result_reg); 149 __ cmpl(rcx, Immediate(31)); 150 __ j(above, &done); 151 __ shll_cl(scratch1); 152 __ jmp(&check_negative); 153 154 __ bind(&process_64_bits); 155 __ Cvttsd2siq(result_reg, kScratchDoubleReg); 156 __ jmp(&done, Label::kNear); 157 158 // If the double was negative, negate the integer result. 159 __ bind(&check_negative); 160 __ movl(result_reg, scratch1); 161 __ negl(result_reg); 162 if (stash_exponent_copy) { 163 __ cmpl(MemOperand(rsp, 0), Immediate(0)); 164 } else { 165 __ cmpl(exponent_operand, Immediate(0)); 166 } 167 __ cmovl(greater, result_reg, scratch1); 168 169 // Restore registers 170 __ bind(&done); 171 if (stash_exponent_copy) { 172 __ addp(rsp, Immediate(kDoubleSize)); 173 } 174 if (!final_result_reg.is(result_reg)) { 175 DCHECK(final_result_reg.is(rcx)); 176 __ movl(final_result_reg, result_reg); 177 } 178 __ popq(save_reg); 179 __ popq(scratch1); 180 __ ret(0); 181 } 182 183 184 void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm, 185 Label* not_numbers) { 186 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done; 187 // Load operand in rdx into xmm0, or branch to not_numbers. 188 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex); 189 __ JumpIfSmi(rdx, &load_smi_rdx); 190 __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx); 191 __ j(not_equal, not_numbers); // Argument in rdx is not a number. 192 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); 193 // Load operand in rax into xmm1, or branch to not_numbers. 194 __ JumpIfSmi(rax, &load_smi_rax); 195 196 __ bind(&load_nonsmi_rax); 197 __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx); 198 __ j(not_equal, not_numbers); 199 __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); 200 __ jmp(&done); 201 202 __ bind(&load_smi_rdx); 203 __ SmiToInteger32(kScratchRegister, rdx); 204 __ Cvtlsi2sd(xmm0, kScratchRegister); 205 __ JumpIfNotSmi(rax, &load_nonsmi_rax); 206 207 __ bind(&load_smi_rax); 208 __ SmiToInteger32(kScratchRegister, rax); 209 __ Cvtlsi2sd(xmm1, kScratchRegister); 210 __ bind(&done); 211 } 212 213 214 void MathPowStub::Generate(MacroAssembler* masm) { 215 const Register exponent = MathPowTaggedDescriptor::exponent(); 216 DCHECK(exponent.is(rdx)); 217 const Register base = rax; 218 const Register scratch = rcx; 219 const XMMRegister double_result = xmm3; 220 const XMMRegister double_base = xmm2; 221 const XMMRegister double_exponent = xmm1; 222 const XMMRegister double_scratch = xmm4; 223 224 Label call_runtime, done, exponent_not_smi, int_exponent; 225 226 // Save 1 in double_result - we need this several times later on. 227 __ movp(scratch, Immediate(1)); 228 __ Cvtlsi2sd(double_result, scratch); 229 230 if (exponent_type() == ON_STACK) { 231 Label base_is_smi, unpack_exponent; 232 // The exponent and base are supplied as arguments on the stack. 233 // This can only happen if the stub is called from non-optimized code. 234 // Load input parameters from stack. 235 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); 236 __ movp(base, args.GetArgumentOperand(0)); 237 __ movp(exponent, args.GetArgumentOperand(1)); 238 __ JumpIfSmi(base, &base_is_smi, Label::kNear); 239 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset), 240 Heap::kHeapNumberMapRootIndex); 241 __ j(not_equal, &call_runtime); 242 243 __ Movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset)); 244 __ jmp(&unpack_exponent, Label::kNear); 245 246 __ bind(&base_is_smi); 247 __ SmiToInteger32(base, base); 248 __ Cvtlsi2sd(double_base, base); 249 __ bind(&unpack_exponent); 250 251 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear); 252 __ SmiToInteger32(exponent, exponent); 253 __ jmp(&int_exponent); 254 255 __ bind(&exponent_not_smi); 256 __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset), 257 Heap::kHeapNumberMapRootIndex); 258 __ j(not_equal, &call_runtime); 259 __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset)); 260 } else if (exponent_type() == TAGGED) { 261 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear); 262 __ SmiToInteger32(exponent, exponent); 263 __ jmp(&int_exponent); 264 265 __ bind(&exponent_not_smi); 266 __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset)); 267 } 268 269 if (exponent_type() != INTEGER) { 270 Label fast_power, try_arithmetic_simplification; 271 // Detect integer exponents stored as double. 272 __ DoubleToI(exponent, double_exponent, double_scratch, 273 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification, 274 &try_arithmetic_simplification, 275 &try_arithmetic_simplification); 276 __ jmp(&int_exponent); 277 278 __ bind(&try_arithmetic_simplification); 279 __ Cvttsd2si(exponent, double_exponent); 280 // Skip to runtime if possibly NaN (indicated by the indefinite integer). 281 __ cmpl(exponent, Immediate(0x1)); 282 __ j(overflow, &call_runtime); 283 284 if (exponent_type() == ON_STACK) { 285 // Detect square root case. Crankshaft detects constant +/-0.5 at 286 // compile time and uses DoMathPowHalf instead. We then skip this check 287 // for non-constant cases of +/-0.5 as these hardly occur. 288 Label continue_sqrt, continue_rsqrt, not_plus_half; 289 // Test for 0.5. 290 // Load double_scratch with 0.5. 291 __ movq(scratch, V8_UINT64_C(0x3FE0000000000000)); 292 __ Movq(double_scratch, scratch); 293 // Already ruled out NaNs for exponent. 294 __ Ucomisd(double_scratch, double_exponent); 295 __ j(not_equal, ¬_plus_half, Label::kNear); 296 297 // Calculates square root of base. Check for the special case of 298 // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13). 299 // According to IEEE-754, double-precision -Infinity has the highest 300 // 12 bits set and the lowest 52 bits cleared. 301 __ movq(scratch, V8_UINT64_C(0xFFF0000000000000)); 302 __ Movq(double_scratch, scratch); 303 __ Ucomisd(double_scratch, double_base); 304 // Comparing -Infinity with NaN results in "unordered", which sets the 305 // zero flag as if both were equal. However, it also sets the carry flag. 306 __ j(not_equal, &continue_sqrt, Label::kNear); 307 __ j(carry, &continue_sqrt, Label::kNear); 308 309 // Set result to Infinity in the special case. 310 __ Xorpd(double_result, double_result); 311 __ Subsd(double_result, double_scratch); 312 __ jmp(&done); 313 314 __ bind(&continue_sqrt); 315 // sqrtsd returns -0 when input is -0. ECMA spec requires +0. 316 __ Xorpd(double_scratch, double_scratch); 317 __ Addsd(double_scratch, double_base); // Convert -0 to 0. 318 __ Sqrtsd(double_result, double_scratch); 319 __ jmp(&done); 320 321 // Test for -0.5. 322 __ bind(¬_plus_half); 323 // Load double_scratch with -0.5 by substracting 1. 324 __ Subsd(double_scratch, double_result); 325 // Already ruled out NaNs for exponent. 326 __ Ucomisd(double_scratch, double_exponent); 327 __ j(not_equal, &fast_power, Label::kNear); 328 329 // Calculates reciprocal of square root of base. Check for the special 330 // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13). 331 // According to IEEE-754, double-precision -Infinity has the highest 332 // 12 bits set and the lowest 52 bits cleared. 333 __ movq(scratch, V8_UINT64_C(0xFFF0000000000000)); 334 __ Movq(double_scratch, scratch); 335 __ Ucomisd(double_scratch, double_base); 336 // Comparing -Infinity with NaN results in "unordered", which sets the 337 // zero flag as if both were equal. However, it also sets the carry flag. 338 __ j(not_equal, &continue_rsqrt, Label::kNear); 339 __ j(carry, &continue_rsqrt, Label::kNear); 340 341 // Set result to 0 in the special case. 342 __ Xorpd(double_result, double_result); 343 __ jmp(&done); 344 345 __ bind(&continue_rsqrt); 346 // sqrtsd returns -0 when input is -0. ECMA spec requires +0. 347 __ Xorpd(double_exponent, double_exponent); 348 __ Addsd(double_exponent, double_base); // Convert -0 to +0. 349 __ Sqrtsd(double_exponent, double_exponent); 350 __ Divsd(double_result, double_exponent); 351 __ jmp(&done); 352 } 353 354 // Using FPU instructions to calculate power. 355 Label fast_power_failed; 356 __ bind(&fast_power); 357 __ fnclex(); // Clear flags to catch exceptions later. 358 // Transfer (B)ase and (E)xponent onto the FPU register stack. 359 __ subp(rsp, Immediate(kDoubleSize)); 360 __ Movsd(Operand(rsp, 0), double_exponent); 361 __ fld_d(Operand(rsp, 0)); // E 362 __ Movsd(Operand(rsp, 0), double_base); 363 __ fld_d(Operand(rsp, 0)); // B, E 364 365 // Exponent is in st(1) and base is in st(0) 366 // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B) 367 // FYL2X calculates st(1) * log2(st(0)) 368 __ fyl2x(); // X 369 __ fld(0); // X, X 370 __ frndint(); // rnd(X), X 371 __ fsub(1); // rnd(X), X-rnd(X) 372 __ fxch(1); // X - rnd(X), rnd(X) 373 // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1 374 __ f2xm1(); // 2^(X-rnd(X)) - 1, rnd(X) 375 __ fld1(); // 1, 2^(X-rnd(X)) - 1, rnd(X) 376 __ faddp(1); // 2^(X-rnd(X)), rnd(X) 377 // FSCALE calculates st(0) * 2^st(1) 378 __ fscale(); // 2^X, rnd(X) 379 __ fstp(1); 380 // Bail out to runtime in case of exceptions in the status word. 381 __ fnstsw_ax(); 382 __ testb(rax, Immediate(0x5F)); // Check for all but precision exception. 383 __ j(not_zero, &fast_power_failed, Label::kNear); 384 __ fstp_d(Operand(rsp, 0)); 385 __ Movsd(double_result, Operand(rsp, 0)); 386 __ addp(rsp, Immediate(kDoubleSize)); 387 __ jmp(&done); 388 389 __ bind(&fast_power_failed); 390 __ fninit(); 391 __ addp(rsp, Immediate(kDoubleSize)); 392 __ jmp(&call_runtime); 393 } 394 395 // Calculate power with integer exponent. 396 __ bind(&int_exponent); 397 const XMMRegister double_scratch2 = double_exponent; 398 // Back up exponent as we need to check if exponent is negative later. 399 __ movp(scratch, exponent); // Back up exponent. 400 __ Movsd(double_scratch, double_base); // Back up base. 401 __ Movsd(double_scratch2, double_result); // Load double_exponent with 1. 402 403 // Get absolute value of exponent. 404 Label no_neg, while_true, while_false; 405 __ testl(scratch, scratch); 406 __ j(positive, &no_neg, Label::kNear); 407 __ negl(scratch); 408 __ bind(&no_neg); 409 410 __ j(zero, &while_false, Label::kNear); 411 __ shrl(scratch, Immediate(1)); 412 // Above condition means CF==0 && ZF==0. This means that the 413 // bit that has been shifted out is 0 and the result is not 0. 414 __ j(above, &while_true, Label::kNear); 415 __ Movsd(double_result, double_scratch); 416 __ j(zero, &while_false, Label::kNear); 417 418 __ bind(&while_true); 419 __ shrl(scratch, Immediate(1)); 420 __ Mulsd(double_scratch, double_scratch); 421 __ j(above, &while_true, Label::kNear); 422 __ Mulsd(double_result, double_scratch); 423 __ j(not_zero, &while_true); 424 425 __ bind(&while_false); 426 // If the exponent is negative, return 1/result. 427 __ testl(exponent, exponent); 428 __ j(greater, &done); 429 __ Divsd(double_scratch2, double_result); 430 __ Movsd(double_result, double_scratch2); 431 // Test whether result is zero. Bail out to check for subnormal result. 432 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases. 433 __ Xorpd(double_scratch2, double_scratch2); 434 __ Ucomisd(double_scratch2, double_result); 435 // double_exponent aliased as double_scratch2 has already been overwritten 436 // and may not have contained the exponent value in the first place when the 437 // input was a smi. We reset it with exponent value before bailing out. 438 __ j(not_equal, &done); 439 __ Cvtlsi2sd(double_exponent, exponent); 440 441 // Returning or bailing out. 442 if (exponent_type() == ON_STACK) { 443 // The arguments are still on the stack. 444 __ bind(&call_runtime); 445 __ TailCallRuntime(Runtime::kMathPowRT); 446 447 // The stub is called from non-optimized code, which expects the result 448 // as heap number in rax. 449 __ bind(&done); 450 __ AllocateHeapNumber(rax, rcx, &call_runtime); 451 __ Movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result); 452 __ ret(2 * kPointerSize); 453 } else { 454 __ bind(&call_runtime); 455 // Move base to the correct argument register. Exponent is already in xmm1. 456 __ Movsd(xmm0, double_base); 457 DCHECK(double_exponent.is(xmm1)); 458 { 459 AllowExternalCallThatCantCauseGC scope(masm); 460 __ PrepareCallCFunction(2); 461 __ CallCFunction( 462 ExternalReference::power_double_double_function(isolate()), 2); 463 } 464 // Return value is in xmm0. 465 __ Movsd(double_result, xmm0); 466 467 __ bind(&done); 468 __ ret(0); 469 } 470 } 471 472 473 void FunctionPrototypeStub::Generate(MacroAssembler* masm) { 474 Label miss; 475 Register receiver = LoadDescriptor::ReceiverRegister(); 476 // Ensure that the vector and slot registers won't be clobbered before 477 // calling the miss handler. 478 DCHECK(!AreAliased(r8, r9, LoadWithVectorDescriptor::VectorRegister(), 479 LoadDescriptor::SlotRegister())); 480 481 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8, 482 r9, &miss); 483 __ bind(&miss); 484 PropertyAccessCompiler::TailCallBuiltin( 485 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC)); 486 } 487 488 489 void LoadIndexedStringStub::Generate(MacroAssembler* masm) { 490 // Return address is on the stack. 491 Label miss; 492 493 Register receiver = LoadDescriptor::ReceiverRegister(); 494 Register index = LoadDescriptor::NameRegister(); 495 Register scratch = rdi; 496 Register result = rax; 497 DCHECK(!scratch.is(receiver) && !scratch.is(index)); 498 DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) && 499 result.is(LoadDescriptor::SlotRegister())); 500 501 // StringCharAtGenerator doesn't use the result register until it's passed 502 // the different miss possibilities. If it did, we would have a conflict 503 // when FLAG_vector_ics is true. 504 StringCharAtGenerator char_at_generator(receiver, index, scratch, result, 505 &miss, // When not a string. 506 &miss, // When not a number. 507 &miss, // When index out of range. 508 RECEIVER_IS_STRING); 509 char_at_generator.GenerateFast(masm); 510 __ ret(0); 511 512 StubRuntimeCallHelper call_helper; 513 char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper); 514 515 __ bind(&miss); 516 PropertyAccessCompiler::TailCallBuiltin( 517 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC)); 518 } 519 520 521 void RegExpExecStub::Generate(MacroAssembler* masm) { 522 // Just jump directly to runtime if native RegExp is not selected at compile 523 // time or if regexp entry in generated code is turned off runtime switch or 524 // at compilation. 525 #ifdef V8_INTERPRETED_REGEXP 526 __ TailCallRuntime(Runtime::kRegExpExec); 527 #else // V8_INTERPRETED_REGEXP 528 529 // Stack frame on entry. 530 // rsp[0] : return address 531 // rsp[8] : last_match_info (expected JSArray) 532 // rsp[16] : previous index 533 // rsp[24] : subject string 534 // rsp[32] : JSRegExp object 535 536 enum RegExpExecStubArgumentIndices { 537 JS_REG_EXP_OBJECT_ARGUMENT_INDEX, 538 SUBJECT_STRING_ARGUMENT_INDEX, 539 PREVIOUS_INDEX_ARGUMENT_INDEX, 540 LAST_MATCH_INFO_ARGUMENT_INDEX, 541 REG_EXP_EXEC_ARGUMENT_COUNT 542 }; 543 544 StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT, 545 ARGUMENTS_DONT_CONTAIN_RECEIVER); 546 Label runtime; 547 // Ensure that a RegExp stack is allocated. 548 ExternalReference address_of_regexp_stack_memory_address = 549 ExternalReference::address_of_regexp_stack_memory_address(isolate()); 550 ExternalReference address_of_regexp_stack_memory_size = 551 ExternalReference::address_of_regexp_stack_memory_size(isolate()); 552 __ Load(kScratchRegister, address_of_regexp_stack_memory_size); 553 __ testp(kScratchRegister, kScratchRegister); 554 __ j(zero, &runtime); 555 556 // Check that the first argument is a JSRegExp object. 557 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); 558 __ JumpIfSmi(rax, &runtime); 559 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister); 560 __ j(not_equal, &runtime); 561 562 // Check that the RegExp has been compiled (data contains a fixed array). 563 __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset)); 564 if (FLAG_debug_code) { 565 Condition is_smi = masm->CheckSmi(rax); 566 __ Check(NegateCondition(is_smi), 567 kUnexpectedTypeForRegExpDataFixedArrayExpected); 568 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister); 569 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected); 570 } 571 572 // rax: RegExp data (FixedArray) 573 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP. 574 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset)); 575 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP)); 576 __ j(not_equal, &runtime); 577 578 // rax: RegExp data (FixedArray) 579 // Check that the number of captures fit in the static offsets vector buffer. 580 __ SmiToInteger32(rdx, 581 FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset)); 582 // Check (number_of_captures + 1) * 2 <= offsets vector size 583 // Or number_of_captures <= offsets vector size / 2 - 1 584 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2); 585 __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1)); 586 __ j(above, &runtime); 587 588 // Reset offset for possibly sliced string. 589 __ Set(r14, 0); 590 __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX)); 591 __ JumpIfSmi(rdi, &runtime); 592 __ movp(r15, rdi); // Make a copy of the original subject string. 593 // rax: RegExp data (FixedArray) 594 // rdi: subject string 595 // r15: subject string 596 // Handle subject string according to its encoding and representation: 597 // (1) Sequential two byte? If yes, go to (9). 598 // (2) Sequential one byte? If yes, go to (5). 599 // (3) Sequential or cons? If not, go to (6). 600 // (4) Cons string. If the string is flat, replace subject with first string 601 // and go to (1). Otherwise bail out to runtime. 602 // (5) One byte sequential. Load regexp code for one byte. 603 // (E) Carry on. 604 /// [...] 605 606 // Deferred code at the end of the stub: 607 // (6) Long external string? If not, go to (10). 608 // (7) External string. Make it, offset-wise, look like a sequential string. 609 // (8) Is the external string one byte? If yes, go to (5). 610 // (9) Two byte sequential. Load regexp code for two byte. Go to (E). 611 // (10) Short external string or not a string? If yes, bail out to runtime. 612 // (11) Sliced string. Replace subject with parent. Go to (1). 613 614 Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */, 615 external_string /* 7 */, check_underlying /* 1 */, 616 not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */; 617 618 __ bind(&check_underlying); 619 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); 620 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); 621 622 // (1) Sequential two byte? If yes, go to (9). 623 __ andb(rbx, Immediate(kIsNotStringMask | 624 kStringRepresentationMask | 625 kStringEncodingMask | 626 kShortExternalStringMask)); 627 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0); 628 __ j(zero, &seq_two_byte_string); // Go to (9). 629 630 // (2) Sequential one byte? If yes, go to (5). 631 // Any other sequential string must be one byte. 632 __ andb(rbx, Immediate(kIsNotStringMask | 633 kStringRepresentationMask | 634 kShortExternalStringMask)); 635 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (5). 636 637 // (3) Sequential or cons? If not, go to (6). 638 // We check whether the subject string is a cons, since sequential strings 639 // have already been covered. 640 STATIC_ASSERT(kConsStringTag < kExternalStringTag); 641 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag); 642 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag); 643 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag); 644 __ cmpp(rbx, Immediate(kExternalStringTag)); 645 __ j(greater_equal, ¬_seq_nor_cons); // Go to (6). 646 647 // (4) Cons string. Check that it's flat. 648 // Replace subject with first string and reload instance type. 649 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset), 650 Heap::kempty_stringRootIndex); 651 __ j(not_equal, &runtime); 652 __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset)); 653 __ jmp(&check_underlying); 654 655 // (5) One byte sequential. Load regexp code for one byte. 656 __ bind(&seq_one_byte_string); 657 // rax: RegExp data (FixedArray) 658 __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset)); 659 __ Set(rcx, 1); // Type is one byte. 660 661 // (E) Carry on. String handling is done. 662 __ bind(&check_code); 663 // r11: irregexp code 664 // Check that the irregexp code has been generated for the actual string 665 // encoding. If it has, the field contains a code object otherwise it contains 666 // smi (code flushing support) 667 __ JumpIfSmi(r11, &runtime); 668 669 // rdi: sequential subject string (or look-alike, external string) 670 // r15: original subject string 671 // rcx: encoding of subject string (1 if one_byte, 0 if two_byte); 672 // r11: code 673 // Load used arguments before starting to push arguments for call to native 674 // RegExp code to avoid handling changing stack height. 675 // We have to use r15 instead of rdi to load the length because rdi might 676 // have been only made to look like a sequential string when it actually 677 // is an external string. 678 __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX)); 679 __ JumpIfNotSmi(rbx, &runtime); 680 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset)); 681 __ j(above_equal, &runtime); 682 __ SmiToInteger64(rbx, rbx); 683 684 // rdi: subject string 685 // rbx: previous index 686 // rcx: encoding of subject string (1 if one_byte 0 if two_byte); 687 // r11: code 688 // All checks done. Now push arguments for native regexp code. 689 Counters* counters = isolate()->counters(); 690 __ IncrementCounter(counters->regexp_entry_native(), 1); 691 692 // Isolates: note we add an additional parameter here (isolate pointer). 693 static const int kRegExpExecuteArguments = 9; 694 int argument_slots_on_stack = 695 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments); 696 __ EnterApiExitFrame(argument_slots_on_stack); 697 698 // Argument 9: Pass current isolate address. 699 __ LoadAddress(kScratchRegister, 700 ExternalReference::isolate_address(isolate())); 701 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize), 702 kScratchRegister); 703 704 // Argument 8: Indicate that this is a direct call from JavaScript. 705 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize), 706 Immediate(1)); 707 708 // Argument 7: Start (high end) of backtracking stack memory area. 709 __ Move(kScratchRegister, address_of_regexp_stack_memory_address); 710 __ movp(r9, Operand(kScratchRegister, 0)); 711 __ Move(kScratchRegister, address_of_regexp_stack_memory_size); 712 __ addp(r9, Operand(kScratchRegister, 0)); 713 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9); 714 715 // Argument 6: Set the number of capture registers to zero to force global 716 // regexps to behave as non-global. This does not affect non-global regexps. 717 // Argument 6 is passed in r9 on Linux and on the stack on Windows. 718 #ifdef _WIN64 719 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize), 720 Immediate(0)); 721 #else 722 __ Set(r9, 0); 723 #endif 724 725 // Argument 5: static offsets vector buffer. 726 __ LoadAddress( 727 r8, ExternalReference::address_of_static_offsets_vector(isolate())); 728 // Argument 5 passed in r8 on Linux and on the stack on Windows. 729 #ifdef _WIN64 730 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8); 731 #endif 732 733 // rdi: subject string 734 // rbx: previous index 735 // rcx: encoding of subject string (1 if one_byte 0 if two_byte); 736 // r11: code 737 // r14: slice offset 738 // r15: original subject string 739 740 // Argument 2: Previous index. 741 __ movp(arg_reg_2, rbx); 742 743 // Argument 4: End of string data 744 // Argument 3: Start of string data 745 Label setup_two_byte, setup_rest, got_length, length_not_from_slice; 746 // Prepare start and end index of the input. 747 // Load the length from the original sliced string if that is the case. 748 __ addp(rbx, r14); 749 __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset)); 750 __ addp(r14, arg_reg_3); // Using arg3 as scratch. 751 752 // rbx: start index of the input 753 // r14: end index of the input 754 // r15: original subject string 755 __ testb(rcx, rcx); // Last use of rcx as encoding of subject string. 756 __ j(zero, &setup_two_byte, Label::kNear); 757 __ leap(arg_reg_4, 758 FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize)); 759 __ leap(arg_reg_3, 760 FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize)); 761 __ jmp(&setup_rest, Label::kNear); 762 __ bind(&setup_two_byte); 763 __ leap(arg_reg_4, 764 FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize)); 765 __ leap(arg_reg_3, 766 FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize)); 767 __ bind(&setup_rest); 768 769 // Argument 1: Original subject string. 770 // The original subject is in the previous stack frame. Therefore we have to 771 // use rbp, which points exactly to one pointer size below the previous rsp. 772 // (Because creating a new stack frame pushes the previous rbp onto the stack 773 // and thereby moves up rsp by one kPointerSize.) 774 __ movp(arg_reg_1, r15); 775 776 // Locate the code entry and call it. 777 __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag)); 778 __ call(r11); 779 780 __ LeaveApiExitFrame(true); 781 782 // Check the result. 783 Label success; 784 Label exception; 785 __ cmpl(rax, Immediate(1)); 786 // We expect exactly one result since we force the called regexp to behave 787 // as non-global. 788 __ j(equal, &success, Label::kNear); 789 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION)); 790 __ j(equal, &exception); 791 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE)); 792 // If none of the above, it can only be retry. 793 // Handle that in the runtime system. 794 __ j(not_equal, &runtime); 795 796 // For failure return null. 797 __ LoadRoot(rax, Heap::kNullValueRootIndex); 798 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); 799 800 // Load RegExp data. 801 __ bind(&success); 802 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); 803 __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset)); 804 __ SmiToInteger32(rax, 805 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset)); 806 // Calculate number of capture registers (number_of_captures + 1) * 2. 807 __ leal(rdx, Operand(rax, rax, times_1, 2)); 808 809 // rdx: Number of capture registers 810 // Check that the fourth object is a JSArray object. 811 __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX)); 812 __ JumpIfSmi(r15, &runtime); 813 __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister); 814 __ j(not_equal, &runtime); 815 // Check that the JSArray is in fast case. 816 __ movp(rbx, FieldOperand(r15, JSArray::kElementsOffset)); 817 __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset)); 818 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex); 819 __ j(not_equal, &runtime); 820 // Check that the last match info has space for the capture registers and the 821 // additional information. Ensure no overflow in add. 822 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset); 823 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); 824 __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead)); 825 __ cmpl(rdx, rax); 826 __ j(greater, &runtime); 827 828 // rbx: last_match_info backing store (FixedArray) 829 // rdx: number of capture registers 830 // Store the capture count. 831 __ Integer32ToSmi(kScratchRegister, rdx); 832 __ movp(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset), 833 kScratchRegister); 834 // Store last subject and last input. 835 __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX)); 836 __ movp(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax); 837 __ movp(rcx, rax); 838 __ RecordWriteField(rbx, 839 RegExpImpl::kLastSubjectOffset, 840 rax, 841 rdi, 842 kDontSaveFPRegs); 843 __ movp(rax, rcx); 844 __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); 845 __ RecordWriteField(rbx, 846 RegExpImpl::kLastInputOffset, 847 rax, 848 rdi, 849 kDontSaveFPRegs); 850 851 // Get the static offsets vector filled by the native regexp code. 852 __ LoadAddress( 853 rcx, ExternalReference::address_of_static_offsets_vector(isolate())); 854 855 // rbx: last_match_info backing store (FixedArray) 856 // rcx: offsets vector 857 // rdx: number of capture registers 858 Label next_capture, done; 859 // Capture register counter starts from number of capture registers and 860 // counts down until wraping after zero. 861 __ bind(&next_capture); 862 __ subp(rdx, Immediate(1)); 863 __ j(negative, &done, Label::kNear); 864 // Read the value from the static offsets vector buffer and make it a smi. 865 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0)); 866 __ Integer32ToSmi(rdi, rdi); 867 // Store the smi value in the last match info. 868 __ movp(FieldOperand(rbx, 869 rdx, 870 times_pointer_size, 871 RegExpImpl::kFirstCaptureOffset), 872 rdi); 873 __ jmp(&next_capture); 874 __ bind(&done); 875 876 // Return last match info. 877 __ movp(rax, r15); 878 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); 879 880 __ bind(&exception); 881 // Result must now be exception. If there is no pending exception already a 882 // stack overflow (on the backtrack stack) was detected in RegExp code but 883 // haven't created the exception yet. Handle that in the runtime system. 884 // TODO(592): Rerunning the RegExp to get the stack overflow exception. 885 ExternalReference pending_exception_address( 886 Isolate::kPendingExceptionAddress, isolate()); 887 Operand pending_exception_operand = 888 masm->ExternalOperand(pending_exception_address, rbx); 889 __ movp(rax, pending_exception_operand); 890 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); 891 __ cmpp(rax, rdx); 892 __ j(equal, &runtime); 893 894 // For exception, throw the exception again. 895 __ TailCallRuntime(Runtime::kRegExpExecReThrow); 896 897 // Do the runtime call to execute the regexp. 898 __ bind(&runtime); 899 __ TailCallRuntime(Runtime::kRegExpExec); 900 901 // Deferred code for string handling. 902 // (6) Long external string? If not, go to (10). 903 __ bind(¬_seq_nor_cons); 904 // Compare flags are still set from (3). 905 __ j(greater, ¬_long_external, Label::kNear); // Go to (10). 906 907 // (7) External string. Short external strings have been ruled out. 908 __ bind(&external_string); 909 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); 910 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); 911 if (FLAG_debug_code) { 912 // Assert that we do not have a cons or slice (indirect strings) here. 913 // Sequential strings have already been ruled out. 914 __ testb(rbx, Immediate(kIsIndirectStringMask)); 915 __ Assert(zero, kExternalStringExpectedButNotFound); 916 } 917 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); 918 // Move the pointer so that offset-wise, it looks like a sequential string. 919 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); 920 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); 921 STATIC_ASSERT(kTwoByteStringTag == 0); 922 // (8) Is the external string one byte? If yes, go to (5). 923 __ testb(rbx, Immediate(kStringEncodingMask)); 924 __ j(not_zero, &seq_one_byte_string); // Go to (5). 925 926 // rdi: subject string (flat two-byte) 927 // rax: RegExp data (FixedArray) 928 // (9) Two byte sequential. Load regexp code for two byte. Go to (E). 929 __ bind(&seq_two_byte_string); 930 __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset)); 931 __ Set(rcx, 0); // Type is two byte. 932 __ jmp(&check_code); // Go to (E). 933 934 // (10) Not a string or a short external string? If yes, bail out to runtime. 935 __ bind(¬_long_external); 936 // Catch non-string subject or short external string. 937 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0); 938 __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask)); 939 __ j(not_zero, &runtime); 940 941 // (11) Sliced string. Replace subject with parent. Go to (1). 942 // Load offset into r14 and replace subject string with parent. 943 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset)); 944 __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset)); 945 __ jmp(&check_underlying); 946 #endif // V8_INTERPRETED_REGEXP 947 } 948 949 950 static int NegativeComparisonResult(Condition cc) { 951 DCHECK(cc != equal); 952 DCHECK((cc == less) || (cc == less_equal) 953 || (cc == greater) || (cc == greater_equal)); 954 return (cc == greater || cc == greater_equal) ? LESS : GREATER; 955 } 956 957 958 static void CheckInputType(MacroAssembler* masm, Register input, 959 CompareICState::State expected, Label* fail) { 960 Label ok; 961 if (expected == CompareICState::SMI) { 962 __ JumpIfNotSmi(input, fail); 963 } else if (expected == CompareICState::NUMBER) { 964 __ JumpIfSmi(input, &ok); 965 __ CompareMap(input, masm->isolate()->factory()->heap_number_map()); 966 __ j(not_equal, fail); 967 } 968 // We could be strict about internalized/non-internalized here, but as long as 969 // hydrogen doesn't care, the stub doesn't have to care either. 970 __ bind(&ok); 971 } 972 973 974 static void BranchIfNotInternalizedString(MacroAssembler* masm, 975 Label* label, 976 Register object, 977 Register scratch) { 978 __ JumpIfSmi(object, label); 979 __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset)); 980 __ movzxbp(scratch, 981 FieldOperand(scratch, Map::kInstanceTypeOffset)); 982 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); 983 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); 984 __ j(not_zero, label); 985 } 986 987 988 void CompareICStub::GenerateGeneric(MacroAssembler* masm) { 989 Label runtime_call, check_unequal_objects, done; 990 Condition cc = GetCondition(); 991 Factory* factory = isolate()->factory(); 992 993 Label miss; 994 CheckInputType(masm, rdx, left(), &miss); 995 CheckInputType(masm, rax, right(), &miss); 996 997 // Compare two smis. 998 Label non_smi, smi_done; 999 __ JumpIfNotBothSmi(rax, rdx, &non_smi); 1000 __ subp(rdx, rax); 1001 __ j(no_overflow, &smi_done); 1002 __ notp(rdx); // Correct sign in case of overflow. rdx cannot be 0 here. 1003 __ bind(&smi_done); 1004 __ movp(rax, rdx); 1005 __ ret(0); 1006 __ bind(&non_smi); 1007 1008 // The compare stub returns a positive, negative, or zero 64-bit integer 1009 // value in rax, corresponding to result of comparing the two inputs. 1010 // NOTICE! This code is only reached after a smi-fast-case check, so 1011 // it is certain that at least one operand isn't a smi. 1012 1013 // Two identical objects are equal unless they are both NaN or undefined. 1014 { 1015 Label not_identical; 1016 __ cmpp(rax, rdx); 1017 __ j(not_equal, ¬_identical, Label::kNear); 1018 1019 if (cc != equal) { 1020 // Check for undefined. undefined OP undefined is false even though 1021 // undefined == undefined. 1022 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); 1023 Label check_for_nan; 1024 __ j(not_equal, &check_for_nan, Label::kNear); 1025 __ Set(rax, NegativeComparisonResult(cc)); 1026 __ ret(0); 1027 __ bind(&check_for_nan); 1028 } 1029 1030 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(), 1031 // so we do the second best thing - test it ourselves. 1032 Label heap_number; 1033 // If it's not a heap number, then return equal for (in)equality operator. 1034 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), 1035 factory->heap_number_map()); 1036 __ j(equal, &heap_number, Label::kNear); 1037 if (cc != equal) { 1038 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset)); 1039 __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset)); 1040 // Call runtime on identical objects. Otherwise return equal. 1041 __ cmpb(rcx, Immediate(static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE))); 1042 __ j(above_equal, &runtime_call, Label::kFar); 1043 // Call runtime on identical symbols since we need to throw a TypeError. 1044 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SYMBOL_TYPE))); 1045 __ j(equal, &runtime_call, Label::kFar); 1046 // Call runtime on identical SIMD values since we must throw a TypeError. 1047 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SIMD128_VALUE_TYPE))); 1048 __ j(equal, &runtime_call, Label::kFar); 1049 } 1050 __ Set(rax, EQUAL); 1051 __ ret(0); 1052 1053 __ bind(&heap_number); 1054 // It is a heap number, so return equal if it's not NaN. 1055 // For NaN, return 1 for every condition except greater and 1056 // greater-equal. Return -1 for them, so the comparison yields 1057 // false for all conditions except not-equal. 1058 __ Set(rax, EQUAL); 1059 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); 1060 __ Ucomisd(xmm0, xmm0); 1061 __ setcc(parity_even, rax); 1062 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs. 1063 if (cc == greater_equal || cc == greater) { 1064 __ negp(rax); 1065 } 1066 __ ret(0); 1067 1068 __ bind(¬_identical); 1069 } 1070 1071 if (cc == equal) { // Both strict and non-strict. 1072 Label slow; // Fallthrough label. 1073 1074 // If we're doing a strict equality comparison, we don't have to do 1075 // type conversion, so we generate code to do fast comparison for objects 1076 // and oddballs. Non-smi numbers and strings still go through the usual 1077 // slow-case code. 1078 if (strict()) { 1079 // If either is a Smi (we know that not both are), then they can only 1080 // be equal if the other is a HeapNumber. If so, use the slow case. 1081 { 1082 Label not_smis; 1083 __ SelectNonSmi(rbx, rax, rdx, ¬_smis); 1084 1085 // Check if the non-smi operand is a heap number. 1086 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), 1087 factory->heap_number_map()); 1088 // If heap number, handle it in the slow case. 1089 __ j(equal, &slow); 1090 // Return non-equal. ebx (the lower half of rbx) is not zero. 1091 __ movp(rax, rbx); 1092 __ ret(0); 1093 1094 __ bind(¬_smis); 1095 } 1096 1097 // If either operand is a JSObject or an oddball value, then they are not 1098 // equal since their pointers are different 1099 // There is no test for undetectability in strict equality. 1100 1101 // If the first object is a JS object, we have done pointer comparison. 1102 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); 1103 Label first_non_object; 1104 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx); 1105 __ j(below, &first_non_object, Label::kNear); 1106 // Return non-zero (rax (not rax) is not zero) 1107 Label return_not_equal; 1108 STATIC_ASSERT(kHeapObjectTag != 0); 1109 __ bind(&return_not_equal); 1110 __ ret(0); 1111 1112 __ bind(&first_non_object); 1113 // Check for oddballs: true, false, null, undefined. 1114 __ CmpInstanceType(rcx, ODDBALL_TYPE); 1115 __ j(equal, &return_not_equal); 1116 1117 __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx); 1118 __ j(above_equal, &return_not_equal); 1119 1120 // Check for oddballs: true, false, null, undefined. 1121 __ CmpInstanceType(rcx, ODDBALL_TYPE); 1122 __ j(equal, &return_not_equal); 1123 1124 // Fall through to the general case. 1125 } 1126 __ bind(&slow); 1127 } 1128 1129 // Generate the number comparison code. 1130 Label non_number_comparison; 1131 Label unordered; 1132 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison); 1133 __ xorl(rax, rax); 1134 __ xorl(rcx, rcx); 1135 __ Ucomisd(xmm0, xmm1); 1136 1137 // Don't base result on EFLAGS when a NaN is involved. 1138 __ j(parity_even, &unordered, Label::kNear); 1139 // Return a result of -1, 0, or 1, based on EFLAGS. 1140 __ setcc(above, rax); 1141 __ setcc(below, rcx); 1142 __ subp(rax, rcx); 1143 __ ret(0); 1144 1145 // If one of the numbers was NaN, then the result is always false. 1146 // The cc is never not-equal. 1147 __ bind(&unordered); 1148 DCHECK(cc != not_equal); 1149 if (cc == less || cc == less_equal) { 1150 __ Set(rax, 1); 1151 } else { 1152 __ Set(rax, -1); 1153 } 1154 __ ret(0); 1155 1156 // The number comparison code did not provide a valid result. 1157 __ bind(&non_number_comparison); 1158 1159 // Fast negative check for internalized-to-internalized equality. 1160 Label check_for_strings; 1161 if (cc == equal) { 1162 BranchIfNotInternalizedString( 1163 masm, &check_for_strings, rax, kScratchRegister); 1164 BranchIfNotInternalizedString( 1165 masm, &check_for_strings, rdx, kScratchRegister); 1166 1167 // We've already checked for object identity, so if both operands are 1168 // internalized strings they aren't equal. Register rax (not rax) already 1169 // holds a non-zero value, which indicates not equal, so just return. 1170 __ ret(0); 1171 } 1172 1173 __ bind(&check_for_strings); 1174 1175 __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx, 1176 &check_unequal_objects); 1177 1178 // Inline comparison of one-byte strings. 1179 if (cc == equal) { 1180 StringHelper::GenerateFlatOneByteStringEquals(masm, rdx, rax, rcx, rbx); 1181 } else { 1182 StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx, 1183 rdi, r8); 1184 } 1185 1186 #ifdef DEBUG 1187 __ Abort(kUnexpectedFallThroughFromStringComparison); 1188 #endif 1189 1190 __ bind(&check_unequal_objects); 1191 if (cc == equal && !strict()) { 1192 // Not strict equality. Objects are unequal if 1193 // they are both JSObjects and not undetectable, 1194 // and their pointers are different. 1195 Label return_equal, return_unequal, undetectable; 1196 // At most one is a smi, so we can test for smi by adding the two. 1197 // A smi plus a heap object has the low bit set, a heap object plus 1198 // a heap object has the low bit clear. 1199 STATIC_ASSERT(kSmiTag == 0); 1200 STATIC_ASSERT(kSmiTagMask == 1); 1201 __ leap(rcx, Operand(rax, rdx, times_1, 0)); 1202 __ testb(rcx, Immediate(kSmiTagMask)); 1203 __ j(not_zero, &runtime_call, Label::kNear); 1204 1205 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset)); 1206 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); 1207 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), 1208 Immediate(1 << Map::kIsUndetectable)); 1209 __ j(not_zero, &undetectable, Label::kNear); 1210 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), 1211 Immediate(1 << Map::kIsUndetectable)); 1212 __ j(not_zero, &return_unequal, Label::kNear); 1213 1214 __ CmpInstanceType(rbx, FIRST_JS_RECEIVER_TYPE); 1215 __ j(below, &runtime_call, Label::kNear); 1216 __ CmpInstanceType(rcx, FIRST_JS_RECEIVER_TYPE); 1217 __ j(below, &runtime_call, Label::kNear); 1218 1219 __ bind(&return_unequal); 1220 // Return non-equal by returning the non-zero object pointer in rax. 1221 __ ret(0); 1222 1223 __ bind(&undetectable); 1224 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), 1225 Immediate(1 << Map::kIsUndetectable)); 1226 __ j(zero, &return_unequal, Label::kNear); 1227 1228 // If both sides are JSReceivers, then the result is false according to 1229 // the HTML specification, which says that only comparisons with null or 1230 // undefined are affected by special casing for document.all. 1231 __ CmpInstanceType(rbx, ODDBALL_TYPE); 1232 __ j(zero, &return_equal, Label::kNear); 1233 __ CmpInstanceType(rcx, ODDBALL_TYPE); 1234 __ j(not_zero, &return_unequal, Label::kNear); 1235 1236 __ bind(&return_equal); 1237 __ Set(rax, EQUAL); 1238 __ ret(0); 1239 } 1240 __ bind(&runtime_call); 1241 1242 if (cc == equal) { 1243 { 1244 FrameScope scope(masm, StackFrame::INTERNAL); 1245 __ Push(rdx); 1246 __ Push(rax); 1247 __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual); 1248 } 1249 // Turn true into 0 and false into some non-zero value. 1250 STATIC_ASSERT(EQUAL == 0); 1251 __ LoadRoot(rdx, Heap::kTrueValueRootIndex); 1252 __ subp(rax, rdx); 1253 __ Ret(); 1254 } else { 1255 // Push arguments below the return address to prepare jump to builtin. 1256 __ PopReturnAddressTo(rcx); 1257 __ Push(rdx); 1258 __ Push(rax); 1259 __ Push(Smi::FromInt(NegativeComparisonResult(cc))); 1260 __ PushReturnAddressFrom(rcx); 1261 __ TailCallRuntime(Runtime::kCompare); 1262 } 1263 1264 __ bind(&miss); 1265 GenerateMiss(masm); 1266 } 1267 1268 1269 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) { 1270 // rax : number of arguments to the construct function 1271 // rbx : feedback vector 1272 // rdx : slot in feedback vector (Smi) 1273 // rdi : the function to call 1274 FrameScope scope(masm, StackFrame::INTERNAL); 1275 1276 // Number-of-arguments register must be smi-tagged to call out. 1277 __ Integer32ToSmi(rax, rax); 1278 __ Push(rax); 1279 __ Push(rdi); 1280 __ Integer32ToSmi(rdx, rdx); 1281 __ Push(rdx); 1282 __ Push(rbx); 1283 1284 __ CallStub(stub); 1285 1286 __ Pop(rbx); 1287 __ Pop(rdx); 1288 __ Pop(rdi); 1289 __ Pop(rax); 1290 __ SmiToInteger32(rax, rax); 1291 } 1292 1293 1294 static void GenerateRecordCallTarget(MacroAssembler* masm) { 1295 // Cache the called function in a feedback vector slot. Cache states 1296 // are uninitialized, monomorphic (indicated by a JSFunction), and 1297 // megamorphic. 1298 // rax : number of arguments to the construct function 1299 // rbx : feedback vector 1300 // rdx : slot in feedback vector (Smi) 1301 // rdi : the function to call 1302 Isolate* isolate = masm->isolate(); 1303 Label initialize, done, miss, megamorphic, not_array_function; 1304 Label done_initialize_count, done_increment_count; 1305 1306 // Load the cache state into r11. 1307 __ SmiToInteger32(rdx, rdx); 1308 __ movp(r11, 1309 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize)); 1310 1311 // A monomorphic cache hit or an already megamorphic state: invoke the 1312 // function without changing the state. 1313 // We don't know if r11 is a WeakCell or a Symbol, but it's harmless to read 1314 // at this position in a symbol (see static asserts in 1315 // type-feedback-vector.h). 1316 Label check_allocation_site; 1317 __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset)); 1318 __ j(equal, &done_increment_count, Label::kFar); 1319 __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex); 1320 __ j(equal, &done, Label::kFar); 1321 __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset), 1322 Heap::kWeakCellMapRootIndex); 1323 __ j(not_equal, &check_allocation_site); 1324 1325 // If the weak cell is cleared, we have a new chance to become monomorphic. 1326 __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset)); 1327 __ j(equal, &initialize); 1328 __ jmp(&megamorphic); 1329 1330 __ bind(&check_allocation_site); 1331 // If we came here, we need to see if we are the array function. 1332 // If we didn't have a matching function, and we didn't find the megamorph 1333 // sentinel, then we have in the slot either some other function or an 1334 // AllocationSite. 1335 __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex); 1336 __ j(not_equal, &miss); 1337 1338 // Make sure the function is the Array() function 1339 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11); 1340 __ cmpp(rdi, r11); 1341 __ j(not_equal, &megamorphic); 1342 __ jmp(&done_increment_count); 1343 1344 __ bind(&miss); 1345 1346 // A monomorphic miss (i.e, here the cache is not uninitialized) goes 1347 // megamorphic. 1348 __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex); 1349 __ j(equal, &initialize); 1350 // MegamorphicSentinel is an immortal immovable object (undefined) so no 1351 // write-barrier is needed. 1352 __ bind(&megamorphic); 1353 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), 1354 TypeFeedbackVector::MegamorphicSentinel(isolate)); 1355 __ jmp(&done); 1356 1357 // An uninitialized cache is patched with the function or sentinel to 1358 // indicate the ElementsKind if function is the Array constructor. 1359 __ bind(&initialize); 1360 1361 // Make sure the function is the Array() function 1362 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11); 1363 __ cmpp(rdi, r11); 1364 __ j(not_equal, ¬_array_function); 1365 1366 CreateAllocationSiteStub create_stub(isolate); 1367 CallStubInRecordCallTarget(masm, &create_stub); 1368 __ jmp(&done_initialize_count); 1369 1370 __ bind(¬_array_function); 1371 CreateWeakCellStub weak_cell_stub(isolate); 1372 CallStubInRecordCallTarget(masm, &weak_cell_stub); 1373 1374 __ bind(&done_initialize_count); 1375 // Initialize the call counter. 1376 __ SmiToInteger32(rdx, rdx); 1377 __ Move(FieldOperand(rbx, rdx, times_pointer_size, 1378 FixedArray::kHeaderSize + kPointerSize), 1379 Smi::FromInt(1)); 1380 __ jmp(&done); 1381 1382 __ bind(&done_increment_count); 1383 1384 // Increment the call count for monomorphic function calls. 1385 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, 1386 FixedArray::kHeaderSize + kPointerSize), 1387 Smi::FromInt(1)); 1388 1389 __ bind(&done); 1390 __ Integer32ToSmi(rdx, rdx); 1391 } 1392 1393 1394 void CallConstructStub::Generate(MacroAssembler* masm) { 1395 // rax : number of arguments 1396 // rbx : feedback vector 1397 // rdx : slot in feedback vector (Smi) 1398 // rdi : constructor function 1399 1400 Label non_function; 1401 // Check that the constructor is not a smi. 1402 __ JumpIfSmi(rdi, &non_function); 1403 // Check that constructor is a JSFunction. 1404 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11); 1405 __ j(not_equal, &non_function); 1406 1407 GenerateRecordCallTarget(masm); 1408 1409 __ SmiToInteger32(rdx, rdx); 1410 Label feedback_register_initialized; 1411 // Put the AllocationSite from the feedback vector into rbx, or undefined. 1412 __ movp(rbx, 1413 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize)); 1414 __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex); 1415 __ j(equal, &feedback_register_initialized, Label::kNear); 1416 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); 1417 __ bind(&feedback_register_initialized); 1418 1419 __ AssertUndefinedOrAllocationSite(rbx); 1420 1421 // Pass new target to construct stub. 1422 __ movp(rdx, rdi); 1423 1424 // Tail call to the function-specific construct stub (still in the caller 1425 // context at this point). 1426 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 1427 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); 1428 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); 1429 __ jmp(rcx); 1430 1431 __ bind(&non_function); 1432 __ movp(rdx, rdi); 1433 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); 1434 } 1435 1436 1437 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { 1438 // rdi - function 1439 // rdx - slot id 1440 // rbx - vector 1441 // rcx - allocation site (loaded from vector[slot]). 1442 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); 1443 __ cmpp(rdi, r8); 1444 __ j(not_equal, miss); 1445 1446 __ movp(rax, Immediate(arg_count())); 1447 1448 // Increment the call count for monomorphic function calls. 1449 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, 1450 FixedArray::kHeaderSize + kPointerSize), 1451 Smi::FromInt(1)); 1452 1453 __ movp(rbx, rcx); 1454 __ movp(rdx, rdi); 1455 ArrayConstructorStub stub(masm->isolate(), arg_count()); 1456 __ TailCallStub(&stub); 1457 } 1458 1459 1460 void CallICStub::Generate(MacroAssembler* masm) { 1461 // ----------- S t a t e ------------- 1462 // -- rdi - function 1463 // -- rdx - slot id 1464 // -- rbx - vector 1465 // ----------------------------------- 1466 Isolate* isolate = masm->isolate(); 1467 Label extra_checks_or_miss, call, call_function; 1468 int argc = arg_count(); 1469 StackArgumentsAccessor args(rsp, argc); 1470 ParameterCount actual(argc); 1471 1472 // The checks. First, does rdi match the recorded monomorphic target? 1473 __ SmiToInteger32(rdx, rdx); 1474 __ movp(rcx, 1475 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize)); 1476 1477 // We don't know that we have a weak cell. We might have a private symbol 1478 // or an AllocationSite, but the memory is safe to examine. 1479 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to 1480 // FixedArray. 1481 // WeakCell::kValueOffset - contains a JSFunction or Smi(0) 1482 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not 1483 // computed, meaning that it can't appear to be a pointer. If the low bit is 1484 // 0, then hash is computed, but the 0 bit prevents the field from appearing 1485 // to be a pointer. 1486 STATIC_ASSERT(WeakCell::kSize >= kPointerSize); 1487 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset == 1488 WeakCell::kValueOffset && 1489 WeakCell::kValueOffset == Symbol::kHashFieldSlot); 1490 1491 __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset)); 1492 __ j(not_equal, &extra_checks_or_miss); 1493 1494 // The compare above could have been a SMI/SMI comparison. Guard against this 1495 // convincing us that we have a monomorphic JSFunction. 1496 __ JumpIfSmi(rdi, &extra_checks_or_miss); 1497 1498 // Increment the call count for monomorphic function calls. 1499 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, 1500 FixedArray::kHeaderSize + kPointerSize), 1501 Smi::FromInt(1)); 1502 1503 __ bind(&call_function); 1504 __ Set(rax, argc); 1505 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), 1506 tail_call_mode()), 1507 RelocInfo::CODE_TARGET); 1508 1509 __ bind(&extra_checks_or_miss); 1510 Label uninitialized, miss, not_allocation_site; 1511 1512 __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate)); 1513 __ j(equal, &call); 1514 1515 // Check if we have an allocation site. 1516 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), 1517 Heap::kAllocationSiteMapRootIndex); 1518 __ j(not_equal, ¬_allocation_site); 1519 1520 // We have an allocation site. 1521 HandleArrayCase(masm, &miss); 1522 1523 __ bind(¬_allocation_site); 1524 1525 // The following cases attempt to handle MISS cases without going to the 1526 // runtime. 1527 if (FLAG_trace_ic) { 1528 __ jmp(&miss); 1529 } 1530 1531 __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate)); 1532 __ j(equal, &uninitialized); 1533 1534 // We are going megamorphic. If the feedback is a JSFunction, it is fine 1535 // to handle it here. More complex cases are dealt with in the runtime. 1536 __ AssertNotSmi(rcx); 1537 __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx); 1538 __ j(not_equal, &miss); 1539 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), 1540 TypeFeedbackVector::MegamorphicSentinel(isolate)); 1541 1542 __ bind(&call); 1543 __ Set(rax, argc); 1544 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), 1545 RelocInfo::CODE_TARGET); 1546 1547 __ bind(&uninitialized); 1548 1549 // We are going monomorphic, provided we actually have a JSFunction. 1550 __ JumpIfSmi(rdi, &miss); 1551 1552 // Goto miss case if we do not have a function. 1553 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); 1554 __ j(not_equal, &miss); 1555 1556 // Make sure the function is not the Array() function, which requires special 1557 // behavior on MISS. 1558 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, rcx); 1559 __ cmpp(rdi, rcx); 1560 __ j(equal, &miss); 1561 1562 // Make sure the function belongs to the same native context. 1563 __ movp(rcx, FieldOperand(rdi, JSFunction::kContextOffset)); 1564 __ movp(rcx, ContextOperand(rcx, Context::NATIVE_CONTEXT_INDEX)); 1565 __ cmpp(rcx, NativeContextOperand()); 1566 __ j(not_equal, &miss); 1567 1568 // Initialize the call counter. 1569 __ Move(FieldOperand(rbx, rdx, times_pointer_size, 1570 FixedArray::kHeaderSize + kPointerSize), 1571 Smi::FromInt(1)); 1572 1573 // Store the function. Use a stub since we need a frame for allocation. 1574 // rbx - vector 1575 // rdx - slot (needs to be in smi form) 1576 // rdi - function 1577 { 1578 FrameScope scope(masm, StackFrame::INTERNAL); 1579 CreateWeakCellStub create_stub(isolate); 1580 1581 __ Integer32ToSmi(rdx, rdx); 1582 __ Push(rdi); 1583 __ CallStub(&create_stub); 1584 __ Pop(rdi); 1585 } 1586 1587 __ jmp(&call_function); 1588 1589 // We are here because tracing is on or we encountered a MISS case we can't 1590 // handle here. 1591 __ bind(&miss); 1592 GenerateMiss(masm); 1593 1594 __ jmp(&call); 1595 1596 // Unreachable 1597 __ int3(); 1598 } 1599 1600 1601 void CallICStub::GenerateMiss(MacroAssembler* masm) { 1602 FrameScope scope(masm, StackFrame::INTERNAL); 1603 1604 // Push the receiver and the function and feedback info. 1605 __ Push(rdi); 1606 __ Push(rbx); 1607 __ Integer32ToSmi(rdx, rdx); 1608 __ Push(rdx); 1609 1610 // Call the entry. 1611 __ CallRuntime(Runtime::kCallIC_Miss); 1612 1613 // Move result to edi and exit the internal frame. 1614 __ movp(rdi, rax); 1615 } 1616 1617 1618 bool CEntryStub::NeedsImmovableCode() { 1619 return false; 1620 } 1621 1622 1623 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { 1624 CEntryStub::GenerateAheadOfTime(isolate); 1625 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); 1626 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); 1627 // It is important that the store buffer overflow stubs are generated first. 1628 CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate); 1629 CreateAllocationSiteStub::GenerateAheadOfTime(isolate); 1630 CreateWeakCellStub::GenerateAheadOfTime(isolate); 1631 BinaryOpICStub::GenerateAheadOfTime(isolate); 1632 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate); 1633 StoreFastElementStub::GenerateAheadOfTime(isolate); 1634 TypeofStub::GenerateAheadOfTime(isolate); 1635 } 1636 1637 1638 void CodeStub::GenerateFPStubs(Isolate* isolate) { 1639 } 1640 1641 1642 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) { 1643 CEntryStub stub(isolate, 1, kDontSaveFPRegs); 1644 stub.GetCode(); 1645 CEntryStub save_doubles(isolate, 1, kSaveFPRegs); 1646 save_doubles.GetCode(); 1647 } 1648 1649 1650 void CEntryStub::Generate(MacroAssembler* masm) { 1651 // rax: number of arguments including receiver 1652 // rbx: pointer to C function (C callee-saved) 1653 // rbp: frame pointer of calling JS frame (restored after C call) 1654 // rsp: stack pointer (restored after C call) 1655 // rsi: current context (restored) 1656 // 1657 // If argv_in_register(): 1658 // r15: pointer to the first argument 1659 1660 ProfileEntryHookStub::MaybeCallEntryHook(masm); 1661 1662 #ifdef _WIN64 1663 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9. It requires the 1664 // stack to be aligned to 16 bytes. It only allows a single-word to be 1665 // returned in register rax. Larger return sizes must be written to an address 1666 // passed as a hidden first argument. 1667 const Register kCCallArg0 = rcx; 1668 const Register kCCallArg1 = rdx; 1669 const Register kCCallArg2 = r8; 1670 const Register kCCallArg3 = r9; 1671 const int kArgExtraStackSpace = 2; 1672 const int kMaxRegisterResultSize = 1; 1673 #else 1674 // GCC / Clang passes arguments in rdi, rsi, rdx, rcx, r8, r9. Simple results 1675 // are returned in rax, and a struct of two pointers are returned in rax+rdx. 1676 // Larger return sizes must be written to an address passed as a hidden first 1677 // argument. 1678 const Register kCCallArg0 = rdi; 1679 const Register kCCallArg1 = rsi; 1680 const Register kCCallArg2 = rdx; 1681 const Register kCCallArg3 = rcx; 1682 const int kArgExtraStackSpace = 0; 1683 const int kMaxRegisterResultSize = 2; 1684 #endif // _WIN64 1685 1686 // Enter the exit frame that transitions from JavaScript to C++. 1687 int arg_stack_space = 1688 kArgExtraStackSpace + 1689 (result_size() <= kMaxRegisterResultSize ? 0 : result_size()); 1690 if (argv_in_register()) { 1691 DCHECK(!save_doubles()); 1692 __ EnterApiExitFrame(arg_stack_space); 1693 // Move argc into r14 (argv is already in r15). 1694 __ movp(r14, rax); 1695 } else { 1696 __ EnterExitFrame(arg_stack_space, save_doubles()); 1697 } 1698 1699 // rbx: pointer to builtin function (C callee-saved). 1700 // rbp: frame pointer of exit frame (restored after C call). 1701 // rsp: stack pointer (restored after C call). 1702 // r14: number of arguments including receiver (C callee-saved). 1703 // r15: argv pointer (C callee-saved). 1704 1705 // Check stack alignment. 1706 if (FLAG_debug_code) { 1707 __ CheckStackAlignment(); 1708 } 1709 1710 // Call C function. The arguments object will be created by stubs declared by 1711 // DECLARE_RUNTIME_FUNCTION(). 1712 if (result_size() <= kMaxRegisterResultSize) { 1713 // Pass a pointer to the Arguments object as the first argument. 1714 // Return result in single register (rax), or a register pair (rax, rdx). 1715 __ movp(kCCallArg0, r14); // argc. 1716 __ movp(kCCallArg1, r15); // argv. 1717 __ Move(kCCallArg2, ExternalReference::isolate_address(isolate())); 1718 } else { 1719 DCHECK_LE(result_size(), 3); 1720 // Pass a pointer to the result location as the first argument. 1721 __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace)); 1722 // Pass a pointer to the Arguments object as the second argument. 1723 __ movp(kCCallArg1, r14); // argc. 1724 __ movp(kCCallArg2, r15); // argv. 1725 __ Move(kCCallArg3, ExternalReference::isolate_address(isolate())); 1726 } 1727 __ call(rbx); 1728 1729 if (result_size() > kMaxRegisterResultSize) { 1730 // Read result values stored on stack. Result is stored 1731 // above the the two Arguments object slots on Win64. 1732 DCHECK_LE(result_size(), 3); 1733 __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0)); 1734 __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1)); 1735 if (result_size() > 2) { 1736 __ movq(kReturnRegister2, StackSpaceOperand(kArgExtraStackSpace + 2)); 1737 } 1738 } 1739 // Result is in rax, rdx:rax or r8:rdx:rax - do not destroy these registers! 1740 1741 // Check result for exception sentinel. 1742 Label exception_returned; 1743 __ CompareRoot(rax, Heap::kExceptionRootIndex); 1744 __ j(equal, &exception_returned); 1745 1746 // Check that there is no pending exception, otherwise we 1747 // should have returned the exception sentinel. 1748 if (FLAG_debug_code) { 1749 Label okay; 1750 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex); 1751 ExternalReference pending_exception_address( 1752 Isolate::kPendingExceptionAddress, isolate()); 1753 Operand pending_exception_operand = 1754 masm->ExternalOperand(pending_exception_address); 1755 __ cmpp(r14, pending_exception_operand); 1756 __ j(equal, &okay, Label::kNear); 1757 __ int3(); 1758 __ bind(&okay); 1759 } 1760 1761 // Exit the JavaScript to C++ exit frame. 1762 __ LeaveExitFrame(save_doubles(), !argv_in_register()); 1763 __ ret(0); 1764 1765 // Handling of exception. 1766 __ bind(&exception_returned); 1767 1768 ExternalReference pending_handler_context_address( 1769 Isolate::kPendingHandlerContextAddress, isolate()); 1770 ExternalReference pending_handler_code_address( 1771 Isolate::kPendingHandlerCodeAddress, isolate()); 1772 ExternalReference pending_handler_offset_address( 1773 Isolate::kPendingHandlerOffsetAddress, isolate()); 1774 ExternalReference pending_handler_fp_address( 1775 Isolate::kPendingHandlerFPAddress, isolate()); 1776 ExternalReference pending_handler_sp_address( 1777 Isolate::kPendingHandlerSPAddress, isolate()); 1778 1779 // Ask the runtime for help to determine the handler. This will set rax to 1780 // contain the current pending exception, don't clobber it. 1781 ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler, 1782 isolate()); 1783 { 1784 FrameScope scope(masm, StackFrame::MANUAL); 1785 __ movp(arg_reg_1, Immediate(0)); // argc. 1786 __ movp(arg_reg_2, Immediate(0)); // argv. 1787 __ Move(arg_reg_3, ExternalReference::isolate_address(isolate())); 1788 __ PrepareCallCFunction(3); 1789 __ CallCFunction(find_handler, 3); 1790 } 1791 1792 // Retrieve the handler context, SP and FP. 1793 __ movp(rsi, masm->ExternalOperand(pending_handler_context_address)); 1794 __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address)); 1795 __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address)); 1796 1797 // If the handler is a JS frame, restore the context to the frame. Note that 1798 // the context will be set to (rsi == 0) for non-JS frames. 1799 Label skip; 1800 __ testp(rsi, rsi); 1801 __ j(zero, &skip, Label::kNear); 1802 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); 1803 __ bind(&skip); 1804 1805 // Compute the handler entry address and jump to it. 1806 __ movp(rdi, masm->ExternalOperand(pending_handler_code_address)); 1807 __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address)); 1808 __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize)); 1809 __ jmp(rdi); 1810 } 1811 1812 1813 void JSEntryStub::Generate(MacroAssembler* masm) { 1814 Label invoke, handler_entry, exit; 1815 Label not_outermost_js, not_outermost_js_2; 1816 1817 ProfileEntryHookStub::MaybeCallEntryHook(masm); 1818 1819 { // NOLINT. Scope block confuses linter. 1820 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm); 1821 // Set up frame. 1822 __ pushq(rbp); 1823 __ movp(rbp, rsp); 1824 1825 // Push the stack frame type. 1826 int marker = type(); 1827 __ Push(Smi::FromInt(marker)); // context slot 1828 ExternalReference context_address(Isolate::kContextAddress, isolate()); 1829 __ Load(kScratchRegister, context_address); 1830 __ Push(kScratchRegister); // context 1831 // Save callee-saved registers (X64/X32/Win64 calling conventions). 1832 __ pushq(r12); 1833 __ pushq(r13); 1834 __ pushq(r14); 1835 __ pushq(r15); 1836 #ifdef _WIN64 1837 __ pushq(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI. 1838 __ pushq(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI. 1839 #endif 1840 __ pushq(rbx); 1841 1842 #ifdef _WIN64 1843 // On Win64 XMM6-XMM15 are callee-save 1844 __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize)); 1845 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6); 1846 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7); 1847 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8); 1848 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9); 1849 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10); 1850 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11); 1851 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12); 1852 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13); 1853 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14); 1854 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15); 1855 #endif 1856 1857 // Set up the roots and smi constant registers. 1858 // Needs to be done before any further smi loads. 1859 __ InitializeRootRegister(); 1860 } 1861 1862 // Save copies of the top frame descriptor on the stack. 1863 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate()); 1864 { 1865 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); 1866 __ Push(c_entry_fp_operand); 1867 } 1868 1869 // If this is the outermost JS call, set js_entry_sp value. 1870 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate()); 1871 __ Load(rax, js_entry_sp); 1872 __ testp(rax, rax); 1873 __ j(not_zero, ¬_outermost_js); 1874 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); 1875 __ movp(rax, rbp); 1876 __ Store(js_entry_sp, rax); 1877 Label cont; 1878 __ jmp(&cont); 1879 __ bind(¬_outermost_js); 1880 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)); 1881 __ bind(&cont); 1882 1883 // Jump to a faked try block that does the invoke, with a faked catch 1884 // block that sets the pending exception. 1885 __ jmp(&invoke); 1886 __ bind(&handler_entry); 1887 handler_offset_ = handler_entry.pos(); 1888 // Caught exception: Store result (exception) in the pending exception 1889 // field in the JSEnv and return a failure sentinel. 1890 ExternalReference pending_exception(Isolate::kPendingExceptionAddress, 1891 isolate()); 1892 __ Store(pending_exception, rax); 1893 __ LoadRoot(rax, Heap::kExceptionRootIndex); 1894 __ jmp(&exit); 1895 1896 // Invoke: Link this frame into the handler chain. 1897 __ bind(&invoke); 1898 __ PushStackHandler(); 1899 1900 // Clear any pending exceptions. 1901 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex); 1902 __ Store(pending_exception, rax); 1903 1904 // Fake a receiver (NULL). 1905 __ Push(Immediate(0)); // receiver 1906 1907 // Invoke the function by calling through JS entry trampoline builtin and 1908 // pop the faked function when we return. We load the address from an 1909 // external reference instead of inlining the call target address directly 1910 // in the code, because the builtin stubs may not have been generated yet 1911 // at the time this code is generated. 1912 if (type() == StackFrame::ENTRY_CONSTRUCT) { 1913 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline, 1914 isolate()); 1915 __ Load(rax, construct_entry); 1916 } else { 1917 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate()); 1918 __ Load(rax, entry); 1919 } 1920 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize)); 1921 __ call(kScratchRegister); 1922 1923 // Unlink this frame from the handler chain. 1924 __ PopStackHandler(); 1925 1926 __ bind(&exit); 1927 // Check if the current stack frame is marked as the outermost JS frame. 1928 __ Pop(rbx); 1929 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); 1930 __ j(not_equal, ¬_outermost_js_2); 1931 __ Move(kScratchRegister, js_entry_sp); 1932 __ movp(Operand(kScratchRegister, 0), Immediate(0)); 1933 __ bind(¬_outermost_js_2); 1934 1935 // Restore the top frame descriptor from the stack. 1936 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp); 1937 __ Pop(c_entry_fp_operand); 1938 } 1939 1940 // Restore callee-saved registers (X64 conventions). 1941 #ifdef _WIN64 1942 // On Win64 XMM6-XMM15 are callee-save 1943 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0)); 1944 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1)); 1945 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2)); 1946 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3)); 1947 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4)); 1948 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5)); 1949 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6)); 1950 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7)); 1951 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8)); 1952 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9)); 1953 __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize)); 1954 #endif 1955 1956 __ popq(rbx); 1957 #ifdef _WIN64 1958 // Callee save on in Win64 ABI, arguments/volatile in AMD64 ABI. 1959 __ popq(rsi); 1960 __ popq(rdi); 1961 #endif 1962 __ popq(r15); 1963 __ popq(r14); 1964 __ popq(r13); 1965 __ popq(r12); 1966 __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers 1967 1968 // Restore frame pointer and return. 1969 __ popq(rbp); 1970 __ ret(0); 1971 } 1972 1973 1974 // ------------------------------------------------------------------------- 1975 // StringCharCodeAtGenerator 1976 1977 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) { 1978 // If the receiver is a smi trigger the non-string case. 1979 if (check_mode_ == RECEIVER_IS_UNKNOWN) { 1980 __ JumpIfSmi(object_, receiver_not_string_); 1981 1982 // Fetch the instance type of the receiver into result register. 1983 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); 1984 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); 1985 // If the receiver is not a string trigger the non-string case. 1986 __ testb(result_, Immediate(kIsNotStringMask)); 1987 __ j(not_zero, receiver_not_string_); 1988 } 1989 1990 // If the index is non-smi trigger the non-smi case. 1991 __ JumpIfNotSmi(index_, &index_not_smi_); 1992 __ bind(&got_smi_index_); 1993 1994 // Check for index out of range. 1995 __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset)); 1996 __ j(above_equal, index_out_of_range_); 1997 1998 __ SmiToInteger32(index_, index_); 1999 2000 StringCharLoadGenerator::Generate( 2001 masm, object_, index_, result_, &call_runtime_); 2002 2003 __ Integer32ToSmi(result_, result_); 2004 __ bind(&exit_); 2005 } 2006 2007 2008 void StringCharCodeAtGenerator::GenerateSlow( 2009 MacroAssembler* masm, EmbedMode embed_mode, 2010 const RuntimeCallHelper& call_helper) { 2011 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); 2012 2013 Factory* factory = masm->isolate()->factory(); 2014 // Index is not a smi. 2015 __ bind(&index_not_smi_); 2016 // If index is a heap number, try converting it to an integer. 2017 __ CheckMap(index_, 2018 factory->heap_number_map(), 2019 index_not_number_, 2020 DONT_DO_SMI_CHECK); 2021 call_helper.BeforeCall(masm); 2022 if (embed_mode == PART_OF_IC_HANDLER) { 2023 __ Push(LoadWithVectorDescriptor::VectorRegister()); 2024 __ Push(LoadDescriptor::SlotRegister()); 2025 } 2026 __ Push(object_); 2027 __ Push(index_); // Consumed by runtime conversion function. 2028 __ CallRuntime(Runtime::kNumberToSmi); 2029 if (!index_.is(rax)) { 2030 // Save the conversion result before the pop instructions below 2031 // have a chance to overwrite it. 2032 __ movp(index_, rax); 2033 } 2034 __ Pop(object_); 2035 if (embed_mode == PART_OF_IC_HANDLER) { 2036 __ Pop(LoadDescriptor::SlotRegister()); 2037 __ Pop(LoadWithVectorDescriptor::VectorRegister()); 2038 } 2039 // Reload the instance type. 2040 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); 2041 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); 2042 call_helper.AfterCall(masm); 2043 // If index is still not a smi, it must be out of range. 2044 __ JumpIfNotSmi(index_, index_out_of_range_); 2045 // Otherwise, return to the fast path. 2046 __ jmp(&got_smi_index_); 2047 2048 // Call runtime. We get here when the receiver is a string and the 2049 // index is a number, but the code of getting the actual character 2050 // is too complex (e.g., when the string needs to be flattened). 2051 __ bind(&call_runtime_); 2052 call_helper.BeforeCall(masm); 2053 __ Push(object_); 2054 __ Integer32ToSmi(index_, index_); 2055 __ Push(index_); 2056 __ CallRuntime(Runtime::kStringCharCodeAtRT); 2057 if (!result_.is(rax)) { 2058 __ movp(result_, rax); 2059 } 2060 call_helper.AfterCall(masm); 2061 __ jmp(&exit_); 2062 2063 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); 2064 } 2065 2066 2067 // ------------------------------------------------------------------------- 2068 // StringCharFromCodeGenerator 2069 2070 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) { 2071 // Fast case of Heap::LookupSingleCharacterStringFromCode. 2072 __ JumpIfNotSmi(code_, &slow_case_); 2073 __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode)); 2074 __ j(above, &slow_case_); 2075 2076 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); 2077 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2); 2078 __ movp(result_, FieldOperand(result_, index.reg, index.scale, 2079 FixedArray::kHeaderSize)); 2080 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex); 2081 __ j(equal, &slow_case_); 2082 __ bind(&exit_); 2083 } 2084 2085 2086 void StringCharFromCodeGenerator::GenerateSlow( 2087 MacroAssembler* masm, 2088 const RuntimeCallHelper& call_helper) { 2089 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase); 2090 2091 __ bind(&slow_case_); 2092 call_helper.BeforeCall(masm); 2093 __ Push(code_); 2094 __ CallRuntime(Runtime::kStringCharFromCode); 2095 if (!result_.is(rax)) { 2096 __ movp(result_, rax); 2097 } 2098 call_helper.AfterCall(masm); 2099 __ jmp(&exit_); 2100 2101 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase); 2102 } 2103 2104 2105 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm, 2106 Register dest, 2107 Register src, 2108 Register count, 2109 String::Encoding encoding) { 2110 // Nothing to do for zero characters. 2111 Label done; 2112 __ testl(count, count); 2113 __ j(zero, &done, Label::kNear); 2114 2115 // Make count the number of bytes to copy. 2116 if (encoding == String::TWO_BYTE_ENCODING) { 2117 STATIC_ASSERT(2 == sizeof(uc16)); 2118 __ addl(count, count); 2119 } 2120 2121 // Copy remaining characters. 2122 Label loop; 2123 __ bind(&loop); 2124 __ movb(kScratchRegister, Operand(src, 0)); 2125 __ movb(Operand(dest, 0), kScratchRegister); 2126 __ incp(src); 2127 __ incp(dest); 2128 __ decl(count); 2129 __ j(not_zero, &loop); 2130 2131 __ bind(&done); 2132 } 2133 2134 2135 void SubStringStub::Generate(MacroAssembler* masm) { 2136 Label runtime; 2137 2138 // Stack frame on entry. 2139 // rsp[0] : return address 2140 // rsp[8] : to 2141 // rsp[16] : from 2142 // rsp[24] : string 2143 2144 enum SubStringStubArgumentIndices { 2145 STRING_ARGUMENT_INDEX, 2146 FROM_ARGUMENT_INDEX, 2147 TO_ARGUMENT_INDEX, 2148 SUB_STRING_ARGUMENT_COUNT 2149 }; 2150 2151 StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT, 2152 ARGUMENTS_DONT_CONTAIN_RECEIVER); 2153 2154 // Make sure first argument is a string. 2155 __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX)); 2156 STATIC_ASSERT(kSmiTag == 0); 2157 __ testl(rax, Immediate(kSmiTagMask)); 2158 __ j(zero, &runtime); 2159 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx); 2160 __ j(NegateCondition(is_string), &runtime); 2161 2162 // rax: string 2163 // rbx: instance type 2164 // Calculate length of sub string using the smi values. 2165 __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX)); 2166 __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX)); 2167 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime); 2168 2169 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen. 2170 __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset)); 2171 Label not_original_string; 2172 // Shorter than original string's length: an actual substring. 2173 __ j(below, ¬_original_string, Label::kNear); 2174 // Longer than original string's length or negative: unsafe arguments. 2175 __ j(above, &runtime); 2176 // Return original string. 2177 Counters* counters = isolate()->counters(); 2178 __ IncrementCounter(counters->sub_string_native(), 1); 2179 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); 2180 __ bind(¬_original_string); 2181 2182 Label single_char; 2183 __ SmiCompare(rcx, Smi::FromInt(1)); 2184 __ j(equal, &single_char); 2185 2186 __ SmiToInteger32(rcx, rcx); 2187 2188 // rax: string 2189 // rbx: instance type 2190 // rcx: sub string length 2191 // rdx: from index (smi) 2192 // Deal with different string types: update the index if necessary 2193 // and put the underlying string into edi. 2194 Label underlying_unpacked, sliced_string, seq_or_external_string; 2195 // If the string is not indirect, it can only be sequential or external. 2196 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag)); 2197 STATIC_ASSERT(kIsIndirectStringMask != 0); 2198 __ testb(rbx, Immediate(kIsIndirectStringMask)); 2199 __ j(zero, &seq_or_external_string, Label::kNear); 2200 2201 __ testb(rbx, Immediate(kSlicedNotConsMask)); 2202 __ j(not_zero, &sliced_string, Label::kNear); 2203 // Cons string. Check whether it is flat, then fetch first part. 2204 // Flat cons strings have an empty second part. 2205 __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset), 2206 Heap::kempty_stringRootIndex); 2207 __ j(not_equal, &runtime); 2208 __ movp(rdi, FieldOperand(rax, ConsString::kFirstOffset)); 2209 // Update instance type. 2210 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); 2211 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); 2212 __ jmp(&underlying_unpacked, Label::kNear); 2213 2214 __ bind(&sliced_string); 2215 // Sliced string. Fetch parent and correct start index by offset. 2216 __ addp(rdx, FieldOperand(rax, SlicedString::kOffsetOffset)); 2217 __ movp(rdi, FieldOperand(rax, SlicedString::kParentOffset)); 2218 // Update instance type. 2219 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); 2220 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); 2221 __ jmp(&underlying_unpacked, Label::kNear); 2222 2223 __ bind(&seq_or_external_string); 2224 // Sequential or external string. Just move string to the correct register. 2225 __ movp(rdi, rax); 2226 2227 __ bind(&underlying_unpacked); 2228 2229 if (FLAG_string_slices) { 2230 Label copy_routine; 2231 // rdi: underlying subject string 2232 // rbx: instance type of underlying subject string 2233 // rdx: adjusted start index (smi) 2234 // rcx: length 2235 // If coming from the make_two_character_string path, the string 2236 // is too short to be sliced anyways. 2237 __ cmpp(rcx, Immediate(SlicedString::kMinLength)); 2238 // Short slice. Copy instead of slicing. 2239 __ j(less, ©_routine); 2240 // Allocate new sliced string. At this point we do not reload the instance 2241 // type including the string encoding because we simply rely on the info 2242 // provided by the original string. It does not matter if the original 2243 // string's encoding is wrong because we always have to recheck encoding of 2244 // the newly created string's parent anyways due to externalized strings. 2245 Label two_byte_slice, set_slice_header; 2246 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0); 2247 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); 2248 __ testb(rbx, Immediate(kStringEncodingMask)); 2249 __ j(zero, &two_byte_slice, Label::kNear); 2250 __ AllocateOneByteSlicedString(rax, rbx, r14, &runtime); 2251 __ jmp(&set_slice_header, Label::kNear); 2252 __ bind(&two_byte_slice); 2253 __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime); 2254 __ bind(&set_slice_header); 2255 __ Integer32ToSmi(rcx, rcx); 2256 __ movp(FieldOperand(rax, SlicedString::kLengthOffset), rcx); 2257 __ movp(FieldOperand(rax, SlicedString::kHashFieldOffset), 2258 Immediate(String::kEmptyHashField)); 2259 __ movp(FieldOperand(rax, SlicedString::kParentOffset), rdi); 2260 __ movp(FieldOperand(rax, SlicedString::kOffsetOffset), rdx); 2261 __ IncrementCounter(counters->sub_string_native(), 1); 2262 __ ret(3 * kPointerSize); 2263 2264 __ bind(©_routine); 2265 } 2266 2267 // rdi: underlying subject string 2268 // rbx: instance type of underlying subject string 2269 // rdx: adjusted start index (smi) 2270 // rcx: length 2271 // The subject string can only be external or sequential string of either 2272 // encoding at this point. 2273 Label two_byte_sequential, sequential_string; 2274 STATIC_ASSERT(kExternalStringTag != 0); 2275 STATIC_ASSERT(kSeqStringTag == 0); 2276 __ testb(rbx, Immediate(kExternalStringTag)); 2277 __ j(zero, &sequential_string); 2278 2279 // Handle external string. 2280 // Rule out short external strings. 2281 STATIC_ASSERT(kShortExternalStringTag != 0); 2282 __ testb(rbx, Immediate(kShortExternalStringMask)); 2283 __ j(not_zero, &runtime); 2284 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); 2285 // Move the pointer so that offset-wise, it looks like a sequential string. 2286 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize); 2287 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); 2288 2289 __ bind(&sequential_string); 2290 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0); 2291 __ testb(rbx, Immediate(kStringEncodingMask)); 2292 __ j(zero, &two_byte_sequential); 2293 2294 // Allocate the result. 2295 __ AllocateOneByteString(rax, rcx, r11, r14, r15, &runtime); 2296 2297 // rax: result string 2298 // rcx: result string length 2299 { // Locate character of sub string start. 2300 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1); 2301 __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale, 2302 SeqOneByteString::kHeaderSize - kHeapObjectTag)); 2303 } 2304 // Locate first character of result. 2305 __ leap(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize)); 2306 2307 // rax: result string 2308 // rcx: result length 2309 // r14: first character of result 2310 // rsi: character of sub string start 2311 StringHelper::GenerateCopyCharacters( 2312 masm, rdi, r14, rcx, String::ONE_BYTE_ENCODING); 2313 __ IncrementCounter(counters->sub_string_native(), 1); 2314 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); 2315 2316 __ bind(&two_byte_sequential); 2317 // Allocate the result. 2318 __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime); 2319 2320 // rax: result string 2321 // rcx: result string length 2322 { // Locate character of sub string start. 2323 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2); 2324 __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale, 2325 SeqOneByteString::kHeaderSize - kHeapObjectTag)); 2326 } 2327 // Locate first character of result. 2328 __ leap(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize)); 2329 2330 // rax: result string 2331 // rcx: result length 2332 // rdi: first character of result 2333 // r14: character of sub string start 2334 StringHelper::GenerateCopyCharacters( 2335 masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING); 2336 __ IncrementCounter(counters->sub_string_native(), 1); 2337 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); 2338 2339 // Just jump to runtime to create the sub string. 2340 __ bind(&runtime); 2341 __ TailCallRuntime(Runtime::kSubString); 2342 2343 __ bind(&single_char); 2344 // rax: string 2345 // rbx: instance type 2346 // rcx: sub string length (smi) 2347 // rdx: from index (smi) 2348 StringCharAtGenerator generator(rax, rdx, rcx, rax, &runtime, &runtime, 2349 &runtime, RECEIVER_IS_STRING); 2350 generator.GenerateFast(masm); 2351 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize); 2352 generator.SkipSlow(masm, &runtime); 2353 } 2354 2355 void ToStringStub::Generate(MacroAssembler* masm) { 2356 // The ToString stub takes one argument in rax. 2357 Label is_number; 2358 __ JumpIfSmi(rax, &is_number, Label::kNear); 2359 2360 Label not_string; 2361 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi); 2362 // rax: receiver 2363 // rdi: receiver map 2364 __ j(above_equal, ¬_string, Label::kNear); 2365 __ Ret(); 2366 __ bind(¬_string); 2367 2368 Label not_heap_number; 2369 __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex); 2370 __ j(not_equal, ¬_heap_number, Label::kNear); 2371 __ bind(&is_number); 2372 NumberToStringStub stub(isolate()); 2373 __ TailCallStub(&stub); 2374 __ bind(¬_heap_number); 2375 2376 Label not_oddball; 2377 __ CmpInstanceType(rdi, ODDBALL_TYPE); 2378 __ j(not_equal, ¬_oddball, Label::kNear); 2379 __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset)); 2380 __ Ret(); 2381 __ bind(¬_oddball); 2382 2383 __ PopReturnAddressTo(rcx); // Pop return address. 2384 __ Push(rax); // Push argument. 2385 __ PushReturnAddressFrom(rcx); // Push return address. 2386 __ TailCallRuntime(Runtime::kToString); 2387 } 2388 2389 void ToNameStub::Generate(MacroAssembler* masm) { 2390 // The ToName stub takes one argument in rax. 2391 Label is_number; 2392 __ JumpIfSmi(rax, &is_number, Label::kNear); 2393 2394 Label not_name; 2395 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE); 2396 __ CmpObjectType(rax, LAST_NAME_TYPE, rdi); 2397 // rax: receiver 2398 // rdi: receiver map 2399 __ j(above, ¬_name, Label::kNear); 2400 __ Ret(); 2401 __ bind(¬_name); 2402 2403 Label not_heap_number; 2404 __ CompareRoot(rdi, Heap::kHeapNumberMapRootIndex); 2405 __ j(not_equal, ¬_heap_number, Label::kNear); 2406 __ bind(&is_number); 2407 NumberToStringStub stub(isolate()); 2408 __ TailCallStub(&stub); 2409 __ bind(¬_heap_number); 2410 2411 Label not_oddball; 2412 __ CmpInstanceType(rdi, ODDBALL_TYPE); 2413 __ j(not_equal, ¬_oddball, Label::kNear); 2414 __ movp(rax, FieldOperand(rax, Oddball::kToStringOffset)); 2415 __ Ret(); 2416 __ bind(¬_oddball); 2417 2418 __ PopReturnAddressTo(rcx); // Pop return address. 2419 __ Push(rax); // Push argument. 2420 __ PushReturnAddressFrom(rcx); // Push return address. 2421 __ TailCallRuntime(Runtime::kToName); 2422 } 2423 2424 2425 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm, 2426 Register left, 2427 Register right, 2428 Register scratch1, 2429 Register scratch2) { 2430 Register length = scratch1; 2431 2432 // Compare lengths. 2433 Label check_zero_length; 2434 __ movp(length, FieldOperand(left, String::kLengthOffset)); 2435 __ SmiCompare(length, FieldOperand(right, String::kLengthOffset)); 2436 __ j(equal, &check_zero_length, Label::kNear); 2437 __ Move(rax, Smi::FromInt(NOT_EQUAL)); 2438 __ ret(0); 2439 2440 // Check if the length is zero. 2441 Label compare_chars; 2442 __ bind(&check_zero_length); 2443 STATIC_ASSERT(kSmiTag == 0); 2444 __ SmiTest(length); 2445 __ j(not_zero, &compare_chars, Label::kNear); 2446 __ Move(rax, Smi::FromInt(EQUAL)); 2447 __ ret(0); 2448 2449 // Compare characters. 2450 __ bind(&compare_chars); 2451 Label strings_not_equal; 2452 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2, 2453 &strings_not_equal, Label::kNear); 2454 2455 // Characters are equal. 2456 __ Move(rax, Smi::FromInt(EQUAL)); 2457 __ ret(0); 2458 2459 // Characters are not equal. 2460 __ bind(&strings_not_equal); 2461 __ Move(rax, Smi::FromInt(NOT_EQUAL)); 2462 __ ret(0); 2463 } 2464 2465 2466 void StringHelper::GenerateCompareFlatOneByteStrings( 2467 MacroAssembler* masm, Register left, Register right, Register scratch1, 2468 Register scratch2, Register scratch3, Register scratch4) { 2469 // Ensure that you can always subtract a string length from a non-negative 2470 // number (e.g. another length). 2471 STATIC_ASSERT(String::kMaxLength < 0x7fffffff); 2472 2473 // Find minimum length and length difference. 2474 __ movp(scratch1, FieldOperand(left, String::kLengthOffset)); 2475 __ movp(scratch4, scratch1); 2476 __ SmiSub(scratch4, 2477 scratch4, 2478 FieldOperand(right, String::kLengthOffset)); 2479 // Register scratch4 now holds left.length - right.length. 2480 const Register length_difference = scratch4; 2481 Label left_shorter; 2482 __ j(less, &left_shorter, Label::kNear); 2483 // The right string isn't longer that the left one. 2484 // Get the right string's length by subtracting the (non-negative) difference 2485 // from the left string's length. 2486 __ SmiSub(scratch1, scratch1, length_difference); 2487 __ bind(&left_shorter); 2488 // Register scratch1 now holds Min(left.length, right.length). 2489 const Register min_length = scratch1; 2490 2491 Label compare_lengths; 2492 // If min-length is zero, go directly to comparing lengths. 2493 __ SmiTest(min_length); 2494 __ j(zero, &compare_lengths, Label::kNear); 2495 2496 // Compare loop. 2497 Label result_not_equal; 2498 GenerateOneByteCharsCompareLoop( 2499 masm, left, right, min_length, scratch2, &result_not_equal, 2500 // In debug-code mode, SmiTest below might push 2501 // the target label outside the near range. 2502 Label::kFar); 2503 2504 // Completed loop without finding different characters. 2505 // Compare lengths (precomputed). 2506 __ bind(&compare_lengths); 2507 __ SmiTest(length_difference); 2508 Label length_not_equal; 2509 __ j(not_zero, &length_not_equal, Label::kNear); 2510 2511 // Result is EQUAL. 2512 __ Move(rax, Smi::FromInt(EQUAL)); 2513 __ ret(0); 2514 2515 Label result_greater; 2516 Label result_less; 2517 __ bind(&length_not_equal); 2518 __ j(greater, &result_greater, Label::kNear); 2519 __ jmp(&result_less, Label::kNear); 2520 __ bind(&result_not_equal); 2521 // Unequal comparison of left to right, either character or length. 2522 __ j(above, &result_greater, Label::kNear); 2523 __ bind(&result_less); 2524 2525 // Result is LESS. 2526 __ Move(rax, Smi::FromInt(LESS)); 2527 __ ret(0); 2528 2529 // Result is GREATER. 2530 __ bind(&result_greater); 2531 __ Move(rax, Smi::FromInt(GREATER)); 2532 __ ret(0); 2533 } 2534 2535 2536 void StringHelper::GenerateOneByteCharsCompareLoop( 2537 MacroAssembler* masm, Register left, Register right, Register length, 2538 Register scratch, Label* chars_not_equal, Label::Distance near_jump) { 2539 // Change index to run from -length to -1 by adding length to string 2540 // start. This means that loop ends when index reaches zero, which 2541 // doesn't need an additional compare. 2542 __ SmiToInteger32(length, length); 2543 __ leap(left, 2544 FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize)); 2545 __ leap(right, 2546 FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize)); 2547 __ negq(length); 2548 Register index = length; // index = -length; 2549 2550 // Compare loop. 2551 Label loop; 2552 __ bind(&loop); 2553 __ movb(scratch, Operand(left, index, times_1, 0)); 2554 __ cmpb(scratch, Operand(right, index, times_1, 0)); 2555 __ j(not_equal, chars_not_equal, near_jump); 2556 __ incq(index); 2557 __ j(not_zero, &loop); 2558 } 2559 2560 2561 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) { 2562 // ----------- S t a t e ------------- 2563 // -- rdx : left 2564 // -- rax : right 2565 // -- rsp[0] : return address 2566 // ----------------------------------- 2567 2568 // Load rcx with the allocation site. We stick an undefined dummy value here 2569 // and replace it with the real allocation site later when we instantiate this 2570 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate(). 2571 __ Move(rcx, isolate()->factory()->undefined_value()); 2572 2573 // Make sure that we actually patched the allocation site. 2574 if (FLAG_debug_code) { 2575 __ testb(rcx, Immediate(kSmiTagMask)); 2576 __ Assert(not_equal, kExpectedAllocationSite); 2577 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), 2578 isolate()->factory()->allocation_site_map()); 2579 __ Assert(equal, kExpectedAllocationSite); 2580 } 2581 2582 // Tail call into the stub that handles binary operations with allocation 2583 // sites. 2584 BinaryOpWithAllocationSiteStub stub(isolate(), state()); 2585 __ TailCallStub(&stub); 2586 } 2587 2588 2589 void CompareICStub::GenerateBooleans(MacroAssembler* masm) { 2590 DCHECK_EQ(CompareICState::BOOLEAN, state()); 2591 Label miss; 2592 Label::Distance const miss_distance = 2593 masm->emit_debug_code() ? Label::kFar : Label::kNear; 2594 2595 __ JumpIfSmi(rdx, &miss, miss_distance); 2596 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); 2597 __ JumpIfSmi(rax, &miss, miss_distance); 2598 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset)); 2599 __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance); 2600 __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance); 2601 if (!Token::IsEqualityOp(op())) { 2602 __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset)); 2603 __ AssertSmi(rax); 2604 __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset)); 2605 __ AssertSmi(rdx); 2606 __ pushq(rax); 2607 __ movq(rax, rdx); 2608 __ popq(rdx); 2609 } 2610 __ subp(rax, rdx); 2611 __ Ret(); 2612 2613 __ bind(&miss); 2614 GenerateMiss(masm); 2615 } 2616 2617 2618 void CompareICStub::GenerateSmis(MacroAssembler* masm) { 2619 DCHECK(state() == CompareICState::SMI); 2620 Label miss; 2621 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear); 2622 2623 if (GetCondition() == equal) { 2624 // For equality we do not care about the sign of the result. 2625 __ subp(rax, rdx); 2626 } else { 2627 Label done; 2628 __ subp(rdx, rax); 2629 __ j(no_overflow, &done, Label::kNear); 2630 // Correct sign of result in case of overflow. 2631 __ notp(rdx); 2632 __ bind(&done); 2633 __ movp(rax, rdx); 2634 } 2635 __ ret(0); 2636 2637 __ bind(&miss); 2638 GenerateMiss(masm); 2639 } 2640 2641 2642 void CompareICStub::GenerateNumbers(MacroAssembler* masm) { 2643 DCHECK(state() == CompareICState::NUMBER); 2644 2645 Label generic_stub; 2646 Label unordered, maybe_undefined1, maybe_undefined2; 2647 Label miss; 2648 2649 if (left() == CompareICState::SMI) { 2650 __ JumpIfNotSmi(rdx, &miss); 2651 } 2652 if (right() == CompareICState::SMI) { 2653 __ JumpIfNotSmi(rax, &miss); 2654 } 2655 2656 // Load left and right operand. 2657 Label done, left, left_smi, right_smi; 2658 __ JumpIfSmi(rax, &right_smi, Label::kNear); 2659 __ CompareMap(rax, isolate()->factory()->heap_number_map()); 2660 __ j(not_equal, &maybe_undefined1, Label::kNear); 2661 __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); 2662 __ jmp(&left, Label::kNear); 2663 __ bind(&right_smi); 2664 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet. 2665 __ Cvtlsi2sd(xmm1, rcx); 2666 2667 __ bind(&left); 2668 __ JumpIfSmi(rdx, &left_smi, Label::kNear); 2669 __ CompareMap(rdx, isolate()->factory()->heap_number_map()); 2670 __ j(not_equal, &maybe_undefined2, Label::kNear); 2671 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); 2672 __ jmp(&done); 2673 __ bind(&left_smi); 2674 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet. 2675 __ Cvtlsi2sd(xmm0, rcx); 2676 2677 __ bind(&done); 2678 // Compare operands 2679 __ Ucomisd(xmm0, xmm1); 2680 2681 // Don't base result on EFLAGS when a NaN is involved. 2682 __ j(parity_even, &unordered, Label::kNear); 2683 2684 // Return a result of -1, 0, or 1, based on EFLAGS. 2685 // Performing mov, because xor would destroy the flag register. 2686 __ movl(rax, Immediate(0)); 2687 __ movl(rcx, Immediate(0)); 2688 __ setcc(above, rax); // Add one to zero if carry clear and not equal. 2689 __ sbbp(rax, rcx); // Subtract one if below (aka. carry set). 2690 __ ret(0); 2691 2692 __ bind(&unordered); 2693 __ bind(&generic_stub); 2694 CompareICStub stub(isolate(), op(), CompareICState::GENERIC, 2695 CompareICState::GENERIC, CompareICState::GENERIC); 2696 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); 2697 2698 __ bind(&maybe_undefined1); 2699 if (Token::IsOrderedRelationalCompareOp(op())) { 2700 __ Cmp(rax, isolate()->factory()->undefined_value()); 2701 __ j(not_equal, &miss); 2702 __ JumpIfSmi(rdx, &unordered); 2703 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); 2704 __ j(not_equal, &maybe_undefined2, Label::kNear); 2705 __ jmp(&unordered); 2706 } 2707 2708 __ bind(&maybe_undefined2); 2709 if (Token::IsOrderedRelationalCompareOp(op())) { 2710 __ Cmp(rdx, isolate()->factory()->undefined_value()); 2711 __ j(equal, &unordered); 2712 } 2713 2714 __ bind(&miss); 2715 GenerateMiss(masm); 2716 } 2717 2718 2719 void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) { 2720 DCHECK(state() == CompareICState::INTERNALIZED_STRING); 2721 DCHECK(GetCondition() == equal); 2722 2723 // Registers containing left and right operands respectively. 2724 Register left = rdx; 2725 Register right = rax; 2726 Register tmp1 = rcx; 2727 Register tmp2 = rbx; 2728 2729 // Check that both operands are heap objects. 2730 Label miss; 2731 Condition cond = masm->CheckEitherSmi(left, right, tmp1); 2732 __ j(cond, &miss, Label::kNear); 2733 2734 // Check that both operands are internalized strings. 2735 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset)); 2736 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset)); 2737 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); 2738 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); 2739 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0); 2740 __ orp(tmp1, tmp2); 2741 __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); 2742 __ j(not_zero, &miss, Label::kNear); 2743 2744 // Internalized strings are compared by identity. 2745 Label done; 2746 __ cmpp(left, right); 2747 // Make sure rax is non-zero. At this point input operands are 2748 // guaranteed to be non-zero. 2749 DCHECK(right.is(rax)); 2750 __ j(not_equal, &done, Label::kNear); 2751 STATIC_ASSERT(EQUAL == 0); 2752 STATIC_ASSERT(kSmiTag == 0); 2753 __ Move(rax, Smi::FromInt(EQUAL)); 2754 __ bind(&done); 2755 __ ret(0); 2756 2757 __ bind(&miss); 2758 GenerateMiss(masm); 2759 } 2760 2761 2762 void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) { 2763 DCHECK(state() == CompareICState::UNIQUE_NAME); 2764 DCHECK(GetCondition() == equal); 2765 2766 // Registers containing left and right operands respectively. 2767 Register left = rdx; 2768 Register right = rax; 2769 Register tmp1 = rcx; 2770 Register tmp2 = rbx; 2771 2772 // Check that both operands are heap objects. 2773 Label miss; 2774 Condition cond = masm->CheckEitherSmi(left, right, tmp1); 2775 __ j(cond, &miss, Label::kNear); 2776 2777 // Check that both operands are unique names. This leaves the instance 2778 // types loaded in tmp1 and tmp2. 2779 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset)); 2780 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset)); 2781 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); 2782 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); 2783 2784 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear); 2785 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear); 2786 2787 // Unique names are compared by identity. 2788 Label done; 2789 __ cmpp(left, right); 2790 // Make sure rax is non-zero. At this point input operands are 2791 // guaranteed to be non-zero. 2792 DCHECK(right.is(rax)); 2793 __ j(not_equal, &done, Label::kNear); 2794 STATIC_ASSERT(EQUAL == 0); 2795 STATIC_ASSERT(kSmiTag == 0); 2796 __ Move(rax, Smi::FromInt(EQUAL)); 2797 __ bind(&done); 2798 __ ret(0); 2799 2800 __ bind(&miss); 2801 GenerateMiss(masm); 2802 } 2803 2804 2805 void CompareICStub::GenerateStrings(MacroAssembler* masm) { 2806 DCHECK(state() == CompareICState::STRING); 2807 Label miss; 2808 2809 bool equality = Token::IsEqualityOp(op()); 2810 2811 // Registers containing left and right operands respectively. 2812 Register left = rdx; 2813 Register right = rax; 2814 Register tmp1 = rcx; 2815 Register tmp2 = rbx; 2816 Register tmp3 = rdi; 2817 2818 // Check that both operands are heap objects. 2819 Condition cond = masm->CheckEitherSmi(left, right, tmp1); 2820 __ j(cond, &miss); 2821 2822 // Check that both operands are strings. This leaves the instance 2823 // types loaded in tmp1 and tmp2. 2824 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset)); 2825 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset)); 2826 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); 2827 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); 2828 __ movp(tmp3, tmp1); 2829 STATIC_ASSERT(kNotStringTag != 0); 2830 __ orp(tmp3, tmp2); 2831 __ testb(tmp3, Immediate(kIsNotStringMask)); 2832 __ j(not_zero, &miss); 2833 2834 // Fast check for identical strings. 2835 Label not_same; 2836 __ cmpp(left, right); 2837 __ j(not_equal, ¬_same, Label::kNear); 2838 STATIC_ASSERT(EQUAL == 0); 2839 STATIC_ASSERT(kSmiTag == 0); 2840 __ Move(rax, Smi::FromInt(EQUAL)); 2841 __ ret(0); 2842 2843 // Handle not identical strings. 2844 __ bind(¬_same); 2845 2846 // Check that both strings are internalized strings. If they are, we're done 2847 // because we already know they are not identical. We also know they are both 2848 // strings. 2849 if (equality) { 2850 Label do_compare; 2851 STATIC_ASSERT(kInternalizedTag == 0); 2852 __ orp(tmp1, tmp2); 2853 __ testb(tmp1, Immediate(kIsNotInternalizedMask)); 2854 __ j(not_zero, &do_compare, Label::kNear); 2855 // Make sure rax is non-zero. At this point input operands are 2856 // guaranteed to be non-zero. 2857 DCHECK(right.is(rax)); 2858 __ ret(0); 2859 __ bind(&do_compare); 2860 } 2861 2862 // Check that both strings are sequential one-byte. 2863 Label runtime; 2864 __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime); 2865 2866 // Compare flat one-byte strings. Returns when done. 2867 if (equality) { 2868 StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1, 2869 tmp2); 2870 } else { 2871 StringHelper::GenerateCompareFlatOneByteStrings( 2872 masm, left, right, tmp1, tmp2, tmp3, kScratchRegister); 2873 } 2874 2875 // Handle more complex cases in runtime. 2876 __ bind(&runtime); 2877 if (equality) { 2878 { 2879 FrameScope scope(masm, StackFrame::INTERNAL); 2880 __ Push(left); 2881 __ Push(right); 2882 __ CallRuntime(Runtime::kStringEqual); 2883 } 2884 __ LoadRoot(rdx, Heap::kTrueValueRootIndex); 2885 __ subp(rax, rdx); 2886 __ Ret(); 2887 } else { 2888 __ PopReturnAddressTo(tmp1); 2889 __ Push(left); 2890 __ Push(right); 2891 __ PushReturnAddressFrom(tmp1); 2892 __ TailCallRuntime(Runtime::kStringCompare); 2893 } 2894 2895 __ bind(&miss); 2896 GenerateMiss(masm); 2897 } 2898 2899 2900 void CompareICStub::GenerateReceivers(MacroAssembler* masm) { 2901 DCHECK_EQ(CompareICState::RECEIVER, state()); 2902 Label miss; 2903 Condition either_smi = masm->CheckEitherSmi(rdx, rax); 2904 __ j(either_smi, &miss, Label::kNear); 2905 2906 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE); 2907 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx); 2908 __ j(below, &miss, Label::kNear); 2909 __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx); 2910 __ j(below, &miss, Label::kNear); 2911 2912 DCHECK_EQ(equal, GetCondition()); 2913 __ subp(rax, rdx); 2914 __ ret(0); 2915 2916 __ bind(&miss); 2917 GenerateMiss(masm); 2918 } 2919 2920 2921 void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) { 2922 Label miss; 2923 Handle<WeakCell> cell = Map::WeakCellForMap(known_map_); 2924 Condition either_smi = masm->CheckEitherSmi(rdx, rax); 2925 __ j(either_smi, &miss, Label::kNear); 2926 2927 __ GetWeakValue(rdi, cell); 2928 __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi); 2929 __ j(not_equal, &miss, Label::kNear); 2930 __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi); 2931 __ j(not_equal, &miss, Label::kNear); 2932 2933 if (Token::IsEqualityOp(op())) { 2934 __ subp(rax, rdx); 2935 __ ret(0); 2936 } else { 2937 __ PopReturnAddressTo(rcx); 2938 __ Push(rdx); 2939 __ Push(rax); 2940 __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition()))); 2941 __ PushReturnAddressFrom(rcx); 2942 __ TailCallRuntime(Runtime::kCompare); 2943 } 2944 2945 __ bind(&miss); 2946 GenerateMiss(masm); 2947 } 2948 2949 2950 void CompareICStub::GenerateMiss(MacroAssembler* masm) { 2951 { 2952 // Call the runtime system in a fresh internal frame. 2953 FrameScope scope(masm, StackFrame::INTERNAL); 2954 __ Push(rdx); 2955 __ Push(rax); 2956 __ Push(rdx); 2957 __ Push(rax); 2958 __ Push(Smi::FromInt(op())); 2959 __ CallRuntime(Runtime::kCompareIC_Miss); 2960 2961 // Compute the entry point of the rewritten stub. 2962 __ leap(rdi, FieldOperand(rax, Code::kHeaderSize)); 2963 __ Pop(rax); 2964 __ Pop(rdx); 2965 } 2966 2967 // Do a tail call to the rewritten stub. 2968 __ jmp(rdi); 2969 } 2970 2971 2972 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm, 2973 Label* miss, 2974 Label* done, 2975 Register properties, 2976 Handle<Name> name, 2977 Register r0) { 2978 DCHECK(name->IsUniqueName()); 2979 // If names of slots in range from 1 to kProbes - 1 for the hash value are 2980 // not equal to the name and kProbes-th slot is not used (its name is the 2981 // undefined value), it guarantees the hash table doesn't contain the 2982 // property. It's true even if some slots represent deleted properties 2983 // (their names are the hole value). 2984 for (int i = 0; i < kInlinedProbes; i++) { 2985 // r0 points to properties hash. 2986 // Compute the masked index: (hash + i + i * i) & mask. 2987 Register index = r0; 2988 // Capacity is smi 2^n. 2989 __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset)); 2990 __ decl(index); 2991 __ andp(index, 2992 Immediate(name->Hash() + NameDictionary::GetProbeOffset(i))); 2993 2994 // Scale the index by multiplying by the entry size. 2995 STATIC_ASSERT(NameDictionary::kEntrySize == 3); 2996 __ leap(index, Operand(index, index, times_2, 0)); // index *= 3. 2997 2998 Register entity_name = r0; 2999 // Having undefined at this place means the name is not contained. 3000 STATIC_ASSERT(kSmiTagSize == 1); 3001 __ movp(entity_name, Operand(properties, 3002 index, 3003 times_pointer_size, 3004 kElementsStartOffset - kHeapObjectTag)); 3005 __ Cmp(entity_name, masm->isolate()->factory()->undefined_value()); 3006 __ j(equal, done); 3007 3008 // Stop if found the property. 3009 __ Cmp(entity_name, Handle<Name>(name)); 3010 __ j(equal, miss); 3011 3012 Label good; 3013 // Check for the hole and skip. 3014 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); 3015 __ j(equal, &good, Label::kNear); 3016 3017 // Check if the entry name is not a unique name. 3018 __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); 3019 __ JumpIfNotUniqueNameInstanceType( 3020 FieldOperand(entity_name, Map::kInstanceTypeOffset), miss); 3021 __ bind(&good); 3022 } 3023 3024 NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0, 3025 NEGATIVE_LOOKUP); 3026 __ Push(Handle<Object>(name)); 3027 __ Push(Immediate(name->Hash())); 3028 __ CallStub(&stub); 3029 __ testp(r0, r0); 3030 __ j(not_zero, miss); 3031 __ jmp(done); 3032 } 3033 3034 3035 // Probe the name dictionary in the |elements| register. Jump to the 3036 // |done| label if a property with the given name is found leaving the 3037 // index into the dictionary in |r1|. Jump to the |miss| label 3038 // otherwise. 3039 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm, 3040 Label* miss, 3041 Label* done, 3042 Register elements, 3043 Register name, 3044 Register r0, 3045 Register r1) { 3046 DCHECK(!elements.is(r0)); 3047 DCHECK(!elements.is(r1)); 3048 DCHECK(!name.is(r0)); 3049 DCHECK(!name.is(r1)); 3050 3051 __ AssertName(name); 3052 3053 __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset)); 3054 __ decl(r0); 3055 3056 for (int i = 0; i < kInlinedProbes; i++) { 3057 // Compute the masked index: (hash + i + i * i) & mask. 3058 __ movl(r1, FieldOperand(name, Name::kHashFieldOffset)); 3059 __ shrl(r1, Immediate(Name::kHashShift)); 3060 if (i > 0) { 3061 __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i))); 3062 } 3063 __ andp(r1, r0); 3064 3065 // Scale the index by multiplying by the entry size. 3066 STATIC_ASSERT(NameDictionary::kEntrySize == 3); 3067 __ leap(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3 3068 3069 // Check if the key is identical to the name. 3070 __ cmpp(name, Operand(elements, r1, times_pointer_size, 3071 kElementsStartOffset - kHeapObjectTag)); 3072 __ j(equal, done); 3073 } 3074 3075 NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1, 3076 POSITIVE_LOOKUP); 3077 __ Push(name); 3078 __ movl(r0, FieldOperand(name, Name::kHashFieldOffset)); 3079 __ shrl(r0, Immediate(Name::kHashShift)); 3080 __ Push(r0); 3081 __ CallStub(&stub); 3082 3083 __ testp(r0, r0); 3084 __ j(zero, miss); 3085 __ jmp(done); 3086 } 3087 3088 3089 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) { 3090 // This stub overrides SometimesSetsUpAFrame() to return false. That means 3091 // we cannot call anything that could cause a GC from this stub. 3092 // Stack frame on entry: 3093 // rsp[0 * kPointerSize] : return address. 3094 // rsp[1 * kPointerSize] : key's hash. 3095 // rsp[2 * kPointerSize] : key. 3096 // Registers: 3097 // dictionary_: NameDictionary to probe. 3098 // result_: used as scratch. 3099 // index_: will hold an index of entry if lookup is successful. 3100 // might alias with result_. 3101 // Returns: 3102 // result_ is zero if lookup failed, non zero otherwise. 3103 3104 Label in_dictionary, maybe_in_dictionary, not_in_dictionary; 3105 3106 Register scratch = result(); 3107 3108 __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset)); 3109 __ decl(scratch); 3110 __ Push(scratch); 3111 3112 // If names of slots in range from 1 to kProbes - 1 for the hash value are 3113 // not equal to the name and kProbes-th slot is not used (its name is the 3114 // undefined value), it guarantees the hash table doesn't contain the 3115 // property. It's true even if some slots represent deleted properties 3116 // (their names are the null value). 3117 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER, 3118 kPointerSize); 3119 for (int i = kInlinedProbes; i < kTotalProbes; i++) { 3120 // Compute the masked index: (hash + i + i * i) & mask. 3121 __ movp(scratch, args.GetArgumentOperand(1)); 3122 if (i > 0) { 3123 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i))); 3124 } 3125 __ andp(scratch, Operand(rsp, 0)); 3126 3127 // Scale the index by multiplying by the entry size. 3128 STATIC_ASSERT(NameDictionary::kEntrySize == 3); 3129 __ leap(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3. 3130 3131 // Having undefined at this place means the name is not contained. 3132 __ movp(scratch, Operand(dictionary(), index(), times_pointer_size, 3133 kElementsStartOffset - kHeapObjectTag)); 3134 3135 __ Cmp(scratch, isolate()->factory()->undefined_value()); 3136 __ j(equal, ¬_in_dictionary); 3137 3138 // Stop if found the property. 3139 __ cmpp(scratch, args.GetArgumentOperand(0)); 3140 __ j(equal, &in_dictionary); 3141 3142 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) { 3143 // If we hit a key that is not a unique name during negative 3144 // lookup we have to bailout as this key might be equal to the 3145 // key we are looking for. 3146 3147 // Check if the entry name is not a unique name. 3148 __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); 3149 __ JumpIfNotUniqueNameInstanceType( 3150 FieldOperand(scratch, Map::kInstanceTypeOffset), 3151 &maybe_in_dictionary); 3152 } 3153 } 3154 3155 __ bind(&maybe_in_dictionary); 3156 // If we are doing negative lookup then probing failure should be 3157 // treated as a lookup success. For positive lookup probing failure 3158 // should be treated as lookup failure. 3159 if (mode() == POSITIVE_LOOKUP) { 3160 __ movp(scratch, Immediate(0)); 3161 __ Drop(1); 3162 __ ret(2 * kPointerSize); 3163 } 3164 3165 __ bind(&in_dictionary); 3166 __ movp(scratch, Immediate(1)); 3167 __ Drop(1); 3168 __ ret(2 * kPointerSize); 3169 3170 __ bind(¬_in_dictionary); 3171 __ movp(scratch, Immediate(0)); 3172 __ Drop(1); 3173 __ ret(2 * kPointerSize); 3174 } 3175 3176 3177 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime( 3178 Isolate* isolate) { 3179 StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs); 3180 stub1.GetCode(); 3181 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs); 3182 stub2.GetCode(); 3183 } 3184 3185 3186 // Takes the input in 3 registers: address_ value_ and object_. A pointer to 3187 // the value has just been written into the object, now this stub makes sure 3188 // we keep the GC informed. The word in the object where the value has been 3189 // written is in the address register. 3190 void RecordWriteStub::Generate(MacroAssembler* masm) { 3191 Label skip_to_incremental_noncompacting; 3192 Label skip_to_incremental_compacting; 3193 3194 // The first two instructions are generated with labels so as to get the 3195 // offset fixed up correctly by the bind(Label*) call. We patch it back and 3196 // forth between a compare instructions (a nop in this position) and the 3197 // real branch when we start and stop incremental heap marking. 3198 // See RecordWriteStub::Patch for details. 3199 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); 3200 __ jmp(&skip_to_incremental_compacting, Label::kFar); 3201 3202 if (remembered_set_action() == EMIT_REMEMBERED_SET) { 3203 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), 3204 MacroAssembler::kReturnAtEnd); 3205 } else { 3206 __ ret(0); 3207 } 3208 3209 __ bind(&skip_to_incremental_noncompacting); 3210 GenerateIncremental(masm, INCREMENTAL); 3211 3212 __ bind(&skip_to_incremental_compacting); 3213 GenerateIncremental(masm, INCREMENTAL_COMPACTION); 3214 3215 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY. 3216 // Will be checked in IncrementalMarking::ActivateGeneratedStub. 3217 masm->set_byte_at(0, kTwoByteNopInstruction); 3218 masm->set_byte_at(2, kFiveByteNopInstruction); 3219 } 3220 3221 3222 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { 3223 regs_.Save(masm); 3224 3225 if (remembered_set_action() == EMIT_REMEMBERED_SET) { 3226 Label dont_need_remembered_set; 3227 3228 __ movp(regs_.scratch0(), Operand(regs_.address(), 0)); 3229 __ JumpIfNotInNewSpace(regs_.scratch0(), 3230 regs_.scratch0(), 3231 &dont_need_remembered_set); 3232 3233 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), 3234 &dont_need_remembered_set); 3235 3236 // First notify the incremental marker if necessary, then update the 3237 // remembered set. 3238 CheckNeedsToInformIncrementalMarker( 3239 masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode); 3240 InformIncrementalMarker(masm); 3241 regs_.Restore(masm); 3242 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), 3243 MacroAssembler::kReturnAtEnd); 3244 3245 __ bind(&dont_need_remembered_set); 3246 } 3247 3248 CheckNeedsToInformIncrementalMarker( 3249 masm, kReturnOnNoNeedToInformIncrementalMarker, mode); 3250 InformIncrementalMarker(masm); 3251 regs_.Restore(masm); 3252 __ ret(0); 3253 } 3254 3255 3256 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) { 3257 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode()); 3258 Register address = 3259 arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address(); 3260 DCHECK(!address.is(regs_.object())); 3261 DCHECK(!address.is(arg_reg_1)); 3262 __ Move(address, regs_.address()); 3263 __ Move(arg_reg_1, regs_.object()); 3264 // TODO(gc) Can we just set address arg2 in the beginning? 3265 __ Move(arg_reg_2, address); 3266 __ LoadAddress(arg_reg_3, 3267 ExternalReference::isolate_address(isolate())); 3268 int argument_count = 3; 3269 3270 AllowExternalCallThatCantCauseGC scope(masm); 3271 __ PrepareCallCFunction(argument_count); 3272 __ CallCFunction( 3273 ExternalReference::incremental_marking_record_write_function(isolate()), 3274 argument_count); 3275 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode()); 3276 } 3277 3278 3279 void RecordWriteStub::CheckNeedsToInformIncrementalMarker( 3280 MacroAssembler* masm, 3281 OnNoNeedToInformIncrementalMarker on_no_need, 3282 Mode mode) { 3283 Label on_black; 3284 Label need_incremental; 3285 Label need_incremental_pop_object; 3286 3287 __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask)); 3288 __ andp(regs_.scratch0(), regs_.object()); 3289 __ movp(regs_.scratch1(), 3290 Operand(regs_.scratch0(), 3291 MemoryChunk::kWriteBarrierCounterOffset)); 3292 __ subp(regs_.scratch1(), Immediate(1)); 3293 __ movp(Operand(regs_.scratch0(), 3294 MemoryChunk::kWriteBarrierCounterOffset), 3295 regs_.scratch1()); 3296 __ j(negative, &need_incremental); 3297 3298 // Let's look at the color of the object: If it is not black we don't have 3299 // to inform the incremental marker. 3300 __ JumpIfBlack(regs_.object(), 3301 regs_.scratch0(), 3302 regs_.scratch1(), 3303 &on_black, 3304 Label::kNear); 3305 3306 regs_.Restore(masm); 3307 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 3308 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), 3309 MacroAssembler::kReturnAtEnd); 3310 } else { 3311 __ ret(0); 3312 } 3313 3314 __ bind(&on_black); 3315 3316 // Get the value from the slot. 3317 __ movp(regs_.scratch0(), Operand(regs_.address(), 0)); 3318 3319 if (mode == INCREMENTAL_COMPACTION) { 3320 Label ensure_not_white; 3321 3322 __ CheckPageFlag(regs_.scratch0(), // Contains value. 3323 regs_.scratch1(), // Scratch. 3324 MemoryChunk::kEvacuationCandidateMask, 3325 zero, 3326 &ensure_not_white, 3327 Label::kNear); 3328 3329 __ CheckPageFlag(regs_.object(), 3330 regs_.scratch1(), // Scratch. 3331 MemoryChunk::kSkipEvacuationSlotsRecordingMask, 3332 zero, 3333 &need_incremental); 3334 3335 __ bind(&ensure_not_white); 3336 } 3337 3338 // We need an extra register for this, so we push the object register 3339 // temporarily. 3340 __ Push(regs_.object()); 3341 __ JumpIfWhite(regs_.scratch0(), // The value. 3342 regs_.scratch1(), // Scratch. 3343 regs_.object(), // Scratch. 3344 &need_incremental_pop_object, Label::kNear); 3345 __ Pop(regs_.object()); 3346 3347 regs_.Restore(masm); 3348 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { 3349 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), 3350 MacroAssembler::kReturnAtEnd); 3351 } else { 3352 __ ret(0); 3353 } 3354 3355 __ bind(&need_incremental_pop_object); 3356 __ Pop(regs_.object()); 3357 3358 __ bind(&need_incremental); 3359 3360 // Fall through when we need to inform the incremental marker. 3361 } 3362 3363 3364 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) { 3365 CEntryStub ces(isolate(), 1, kSaveFPRegs); 3366 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); 3367 int parameter_count_offset = 3368 StubFailureTrampolineFrameConstants::kArgumentsLengthOffset; 3369 __ movp(rbx, MemOperand(rbp, parameter_count_offset)); 3370 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); 3371 __ PopReturnAddressTo(rcx); 3372 int additional_offset = 3373 function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0; 3374 __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); 3375 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. 3376 } 3377 3378 3379 void LoadICTrampolineStub::Generate(MacroAssembler* masm) { 3380 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); 3381 LoadICStub stub(isolate()); 3382 stub.GenerateForTrampoline(masm); 3383 } 3384 3385 3386 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) { 3387 __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister()); 3388 KeyedLoadICStub stub(isolate()); 3389 stub.GenerateForTrampoline(masm); 3390 } 3391 3392 3393 static void HandleArrayCases(MacroAssembler* masm, Register feedback, 3394 Register receiver_map, Register scratch1, 3395 Register scratch2, Register scratch3, 3396 bool is_polymorphic, Label* miss) { 3397 // feedback initially contains the feedback array 3398 Label next_loop, prepare_next; 3399 Label start_polymorphic; 3400 3401 Register counter = scratch1; 3402 Register length = scratch2; 3403 Register cached_map = scratch3; 3404 3405 __ movp(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0))); 3406 __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); 3407 __ j(not_equal, &start_polymorphic); 3408 3409 // found, now call handler. 3410 Register handler = feedback; 3411 __ movp(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1))); 3412 __ leap(handler, FieldOperand(handler, Code::kHeaderSize)); 3413 __ jmp(handler); 3414 3415 // Polymorphic, we have to loop from 2 to N 3416 __ bind(&start_polymorphic); 3417 __ SmiToInteger32(length, FieldOperand(feedback, FixedArray::kLengthOffset)); 3418 if (!is_polymorphic) { 3419 // If the IC could be monomorphic we have to make sure we don't go past the 3420 // end of the feedback array. 3421 __ cmpl(length, Immediate(2)); 3422 __ j(equal, miss); 3423 } 3424 __ movl(counter, Immediate(2)); 3425 3426 __ bind(&next_loop); 3427 __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size, 3428 FixedArray::kHeaderSize)); 3429 __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); 3430 __ j(not_equal, &prepare_next); 3431 __ movp(handler, FieldOperand(feedback, counter, times_pointer_size, 3432 FixedArray::kHeaderSize + kPointerSize)); 3433 __ leap(handler, FieldOperand(handler, Code::kHeaderSize)); 3434 __ jmp(handler); 3435 3436 __ bind(&prepare_next); 3437 __ addl(counter, Immediate(2)); 3438 __ cmpl(counter, length); 3439 __ j(less, &next_loop); 3440 3441 // We exhausted our array of map handler pairs. 3442 __ jmp(miss); 3443 } 3444 3445 3446 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver, 3447 Register receiver_map, Register feedback, 3448 Register vector, Register integer_slot, 3449 Label* compare_map, Label* load_smi_map, 3450 Label* try_array) { 3451 __ JumpIfSmi(receiver, load_smi_map); 3452 __ movp(receiver_map, FieldOperand(receiver, 0)); 3453 3454 __ bind(compare_map); 3455 __ cmpp(receiver_map, FieldOperand(feedback, WeakCell::kValueOffset)); 3456 __ j(not_equal, try_array); 3457 Register handler = feedback; 3458 __ movp(handler, FieldOperand(vector, integer_slot, times_pointer_size, 3459 FixedArray::kHeaderSize + kPointerSize)); 3460 __ leap(handler, FieldOperand(handler, Code::kHeaderSize)); 3461 __ jmp(handler); 3462 } 3463 3464 3465 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); } 3466 3467 3468 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) { 3469 GenerateImpl(masm, true); 3470 } 3471 3472 3473 void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { 3474 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // rdx 3475 Register name = LoadWithVectorDescriptor::NameRegister(); // rcx 3476 Register vector = LoadWithVectorDescriptor::VectorRegister(); // rbx 3477 Register slot = LoadWithVectorDescriptor::SlotRegister(); // rax 3478 Register feedback = rdi; 3479 Register integer_slot = r8; 3480 Register receiver_map = r9; 3481 3482 __ SmiToInteger32(integer_slot, slot); 3483 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size, 3484 FixedArray::kHeaderSize)); 3485 3486 // Try to quickly handle the monomorphic case without knowing for sure 3487 // if we have a weak cell in feedback. We do know it's safe to look 3488 // at WeakCell::kValueOffset. 3489 Label try_array, load_smi_map, compare_map; 3490 Label not_array, miss; 3491 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, 3492 integer_slot, &compare_map, &load_smi_map, &try_array); 3493 3494 // Is it a fixed array? 3495 __ bind(&try_array); 3496 __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex); 3497 __ j(not_equal, ¬_array); 3498 HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, true, 3499 &miss); 3500 3501 __ bind(¬_array); 3502 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); 3503 __ j(not_equal, &miss); 3504 Code::Flags code_flags = 3505 Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC)); 3506 masm->isolate()->stub_cache()->GenerateProbe( 3507 masm, Code::LOAD_IC, code_flags, receiver, name, feedback, no_reg); 3508 3509 __ bind(&miss); 3510 LoadIC::GenerateMiss(masm); 3511 3512 __ bind(&load_smi_map); 3513 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); 3514 __ jmp(&compare_map); 3515 } 3516 3517 3518 void KeyedLoadICStub::Generate(MacroAssembler* masm) { 3519 GenerateImpl(masm, false); 3520 } 3521 3522 3523 void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) { 3524 GenerateImpl(masm, true); 3525 } 3526 3527 3528 void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { 3529 Register receiver = LoadWithVectorDescriptor::ReceiverRegister(); // rdx 3530 Register key = LoadWithVectorDescriptor::NameRegister(); // rcx 3531 Register vector = LoadWithVectorDescriptor::VectorRegister(); // rbx 3532 Register slot = LoadWithVectorDescriptor::SlotRegister(); // rax 3533 Register feedback = rdi; 3534 Register integer_slot = r8; 3535 Register receiver_map = r9; 3536 3537 __ SmiToInteger32(integer_slot, slot); 3538 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size, 3539 FixedArray::kHeaderSize)); 3540 3541 // Try to quickly handle the monomorphic case without knowing for sure 3542 // if we have a weak cell in feedback. We do know it's safe to look 3543 // at WeakCell::kValueOffset. 3544 Label try_array, load_smi_map, compare_map; 3545 Label not_array, miss; 3546 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, 3547 integer_slot, &compare_map, &load_smi_map, &try_array); 3548 3549 __ bind(&try_array); 3550 // Is it a fixed array? 3551 __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex); 3552 __ j(not_equal, ¬_array); 3553 3554 // We have a polymorphic element handler. 3555 Label polymorphic, try_poly_name; 3556 __ bind(&polymorphic); 3557 HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, true, 3558 &miss); 3559 3560 __ bind(¬_array); 3561 // Is it generic? 3562 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); 3563 __ j(not_equal, &try_poly_name); 3564 Handle<Code> megamorphic_stub = 3565 KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); 3566 __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET); 3567 3568 __ bind(&try_poly_name); 3569 // We might have a name in feedback, and a fixed array in the next slot. 3570 __ cmpp(key, feedback); 3571 __ j(not_equal, &miss); 3572 // If the name comparison succeeded, we know we have a fixed array with 3573 // at least one map/handler pair. 3574 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size, 3575 FixedArray::kHeaderSize + kPointerSize)); 3576 HandleArrayCases(masm, feedback, receiver_map, integer_slot, r11, r15, false, 3577 &miss); 3578 3579 __ bind(&miss); 3580 KeyedLoadIC::GenerateMiss(masm); 3581 3582 __ bind(&load_smi_map); 3583 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); 3584 __ jmp(&compare_map); 3585 } 3586 3587 3588 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) { 3589 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister()); 3590 VectorStoreICStub stub(isolate(), state()); 3591 stub.GenerateForTrampoline(masm); 3592 } 3593 3594 3595 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) { 3596 __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister()); 3597 VectorKeyedStoreICStub stub(isolate(), state()); 3598 stub.GenerateForTrampoline(masm); 3599 } 3600 3601 3602 void VectorStoreICStub::Generate(MacroAssembler* masm) { 3603 GenerateImpl(masm, false); 3604 } 3605 3606 3607 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { 3608 GenerateImpl(masm, true); 3609 } 3610 3611 3612 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { 3613 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // rdx 3614 Register key = VectorStoreICDescriptor::NameRegister(); // rcx 3615 Register vector = VectorStoreICDescriptor::VectorRegister(); // rbx 3616 Register slot = VectorStoreICDescriptor::SlotRegister(); // rdi 3617 DCHECK(VectorStoreICDescriptor::ValueRegister().is(rax)); // rax 3618 Register feedback = r8; 3619 Register integer_slot = r9; 3620 Register receiver_map = r11; 3621 DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map)); 3622 3623 __ SmiToInteger32(integer_slot, slot); 3624 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size, 3625 FixedArray::kHeaderSize)); 3626 3627 // Try to quickly handle the monomorphic case without knowing for sure 3628 // if we have a weak cell in feedback. We do know it's safe to look 3629 // at WeakCell::kValueOffset. 3630 Label try_array, load_smi_map, compare_map; 3631 Label not_array, miss; 3632 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, 3633 integer_slot, &compare_map, &load_smi_map, &try_array); 3634 3635 // Is it a fixed array? 3636 __ bind(&try_array); 3637 __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex); 3638 __ j(not_equal, ¬_array); 3639 HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, true, 3640 &miss); 3641 3642 __ bind(¬_array); 3643 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); 3644 __ j(not_equal, &miss); 3645 3646 Code::Flags code_flags = 3647 Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC)); 3648 masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags, 3649 receiver, key, feedback, no_reg); 3650 3651 __ bind(&miss); 3652 StoreIC::GenerateMiss(masm); 3653 3654 __ bind(&load_smi_map); 3655 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); 3656 __ jmp(&compare_map); 3657 } 3658 3659 3660 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) { 3661 GenerateImpl(masm, false); 3662 } 3663 3664 3665 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) { 3666 GenerateImpl(masm, true); 3667 } 3668 3669 3670 static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm, 3671 Register receiver_map, 3672 Register feedback, Register scratch, 3673 Register scratch1, 3674 Register scratch2, Label* miss) { 3675 // feedback initially contains the feedback array 3676 Label next, next_loop, prepare_next; 3677 Label transition_call; 3678 3679 Register cached_map = scratch; 3680 Register counter = scratch1; 3681 Register length = scratch2; 3682 3683 // Polymorphic, we have to loop from 0 to N - 1 3684 __ movp(counter, Immediate(0)); 3685 __ movp(length, FieldOperand(feedback, FixedArray::kLengthOffset)); 3686 __ SmiToInteger32(length, length); 3687 3688 __ bind(&next_loop); 3689 __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size, 3690 FixedArray::kHeaderSize)); 3691 __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); 3692 __ j(not_equal, &prepare_next); 3693 __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size, 3694 FixedArray::kHeaderSize + kPointerSize)); 3695 __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex); 3696 __ j(not_equal, &transition_call); 3697 __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size, 3698 FixedArray::kHeaderSize + 2 * kPointerSize)); 3699 __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize)); 3700 __ jmp(feedback); 3701 3702 __ bind(&transition_call); 3703 DCHECK(receiver_map.is(VectorStoreTransitionDescriptor::MapRegister())); 3704 __ movp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset)); 3705 // The weak cell may have been cleared. 3706 __ JumpIfSmi(receiver_map, miss); 3707 // Get the handler in value. 3708 __ movp(feedback, FieldOperand(feedback, counter, times_pointer_size, 3709 FixedArray::kHeaderSize + 2 * kPointerSize)); 3710 __ leap(feedback, FieldOperand(feedback, Code::kHeaderSize)); 3711 __ jmp(feedback); 3712 3713 __ bind(&prepare_next); 3714 __ addl(counter, Immediate(3)); 3715 __ cmpl(counter, length); 3716 __ j(less, &next_loop); 3717 3718 // We exhausted our array of map handler pairs. 3719 __ jmp(miss); 3720 } 3721 3722 3723 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) { 3724 Register receiver = VectorStoreICDescriptor::ReceiverRegister(); // rdx 3725 Register key = VectorStoreICDescriptor::NameRegister(); // rcx 3726 Register vector = VectorStoreICDescriptor::VectorRegister(); // rbx 3727 Register slot = VectorStoreICDescriptor::SlotRegister(); // rdi 3728 DCHECK(VectorStoreICDescriptor::ValueRegister().is(rax)); // rax 3729 Register feedback = r8; 3730 Register integer_slot = r9; 3731 Register receiver_map = r11; 3732 DCHECK(!AreAliased(feedback, integer_slot, vector, slot, receiver_map)); 3733 3734 __ SmiToInteger32(integer_slot, slot); 3735 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size, 3736 FixedArray::kHeaderSize)); 3737 3738 // Try to quickly handle the monomorphic case without knowing for sure 3739 // if we have a weak cell in feedback. We do know it's safe to look 3740 // at WeakCell::kValueOffset. 3741 Label try_array, load_smi_map, compare_map; 3742 Label not_array, miss; 3743 HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, 3744 integer_slot, &compare_map, &load_smi_map, &try_array); 3745 3746 // Is it a fixed array? 3747 __ bind(&try_array); 3748 __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex); 3749 __ j(not_equal, ¬_array); 3750 HandlePolymorphicKeyedStoreCase(masm, receiver_map, feedback, integer_slot, 3751 r15, r14, &miss); 3752 3753 __ bind(¬_array); 3754 Label try_poly_name; 3755 __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex); 3756 __ j(not_equal, &try_poly_name); 3757 3758 Handle<Code> megamorphic_stub = 3759 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState()); 3760 __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET); 3761 3762 __ bind(&try_poly_name); 3763 // We might have a name in feedback, and a fixed array in the next slot. 3764 __ cmpp(key, feedback); 3765 __ j(not_equal, &miss); 3766 // If the name comparison succeeded, we know we have a fixed array with 3767 // at least one map/handler pair. 3768 __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size, 3769 FixedArray::kHeaderSize + kPointerSize)); 3770 HandleArrayCases(masm, feedback, receiver_map, integer_slot, r14, r15, false, 3771 &miss); 3772 3773 __ bind(&miss); 3774 KeyedStoreIC::GenerateMiss(masm); 3775 3776 __ bind(&load_smi_map); 3777 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); 3778 __ jmp(&compare_map); 3779 } 3780 3781 3782 void CallICTrampolineStub::Generate(MacroAssembler* masm) { 3783 __ EmitLoadTypeFeedbackVector(rbx); 3784 CallICStub stub(isolate(), state()); 3785 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); 3786 } 3787 3788 3789 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { 3790 if (masm->isolate()->function_entry_hook() != NULL) { 3791 ProfileEntryHookStub stub(masm->isolate()); 3792 masm->CallStub(&stub); 3793 } 3794 } 3795 3796 3797 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { 3798 // This stub can be called from essentially anywhere, so it needs to save 3799 // all volatile and callee-save registers. 3800 const size_t kNumSavedRegisters = 2; 3801 __ pushq(arg_reg_1); 3802 __ pushq(arg_reg_2); 3803 3804 // Calculate the original stack pointer and store it in the second arg. 3805 __ leap(arg_reg_2, 3806 Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize)); 3807 3808 // Calculate the function address to the first arg. 3809 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize)); 3810 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); 3811 3812 // Save the remainder of the volatile registers. 3813 masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); 3814 3815 // Call the entry hook function. 3816 __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()), 3817 Assembler::RelocInfoNone()); 3818 3819 AllowExternalCallThatCantCauseGC scope(masm); 3820 3821 const int kArgumentCount = 2; 3822 __ PrepareCallCFunction(kArgumentCount); 3823 __ CallCFunction(rax, kArgumentCount); 3824 3825 // Restore volatile regs. 3826 masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2); 3827 __ popq(arg_reg_2); 3828 __ popq(arg_reg_1); 3829 3830 __ Ret(); 3831 } 3832 3833 3834 template<class T> 3835 static void CreateArrayDispatch(MacroAssembler* masm, 3836 AllocationSiteOverrideMode mode) { 3837 if (mode == DISABLE_ALLOCATION_SITES) { 3838 T stub(masm->isolate(), GetInitialFastElementsKind(), mode); 3839 __ TailCallStub(&stub); 3840 } else if (mode == DONT_OVERRIDE) { 3841 int last_index = GetSequenceIndexFromFastElementsKind( 3842 TERMINAL_FAST_ELEMENTS_KIND); 3843 for (int i = 0; i <= last_index; ++i) { 3844 Label next; 3845 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 3846 __ cmpl(rdx, Immediate(kind)); 3847 __ j(not_equal, &next); 3848 T stub(masm->isolate(), kind); 3849 __ TailCallStub(&stub); 3850 __ bind(&next); 3851 } 3852 3853 // If we reached this point there is a problem. 3854 __ Abort(kUnexpectedElementsKindInArrayConstructor); 3855 } else { 3856 UNREACHABLE(); 3857 } 3858 } 3859 3860 3861 static void CreateArrayDispatchOneArgument(MacroAssembler* masm, 3862 AllocationSiteOverrideMode mode) { 3863 // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES) 3864 // rdx - kind (if mode != DISABLE_ALLOCATION_SITES) 3865 // rax - number of arguments 3866 // rdi - constructor? 3867 // rsp[0] - return address 3868 // rsp[8] - last argument 3869 3870 Label normal_sequence; 3871 if (mode == DONT_OVERRIDE) { 3872 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0); 3873 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1); 3874 STATIC_ASSERT(FAST_ELEMENTS == 2); 3875 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3); 3876 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4); 3877 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5); 3878 3879 // is the low bit set? If so, we are holey and that is good. 3880 __ testb(rdx, Immediate(1)); 3881 __ j(not_zero, &normal_sequence); 3882 } 3883 3884 // look at the first argument 3885 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); 3886 __ movp(rcx, args.GetArgumentOperand(0)); 3887 __ testp(rcx, rcx); 3888 __ j(zero, &normal_sequence); 3889 3890 if (mode == DISABLE_ALLOCATION_SITES) { 3891 ElementsKind initial = GetInitialFastElementsKind(); 3892 ElementsKind holey_initial = GetHoleyElementsKind(initial); 3893 3894 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(), 3895 holey_initial, 3896 DISABLE_ALLOCATION_SITES); 3897 __ TailCallStub(&stub_holey); 3898 3899 __ bind(&normal_sequence); 3900 ArraySingleArgumentConstructorStub stub(masm->isolate(), 3901 initial, 3902 DISABLE_ALLOCATION_SITES); 3903 __ TailCallStub(&stub); 3904 } else if (mode == DONT_OVERRIDE) { 3905 // We are going to create a holey array, but our kind is non-holey. 3906 // Fix kind and retry (only if we have an allocation site in the slot). 3907 __ incl(rdx); 3908 3909 if (FLAG_debug_code) { 3910 Handle<Map> allocation_site_map = 3911 masm->isolate()->factory()->allocation_site_map(); 3912 __ Cmp(FieldOperand(rbx, 0), allocation_site_map); 3913 __ Assert(equal, kExpectedAllocationSite); 3914 } 3915 3916 // Save the resulting elements kind in type info. We can't just store r3 3917 // in the AllocationSite::transition_info field because elements kind is 3918 // restricted to a portion of the field...upper bits need to be left alone. 3919 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 3920 __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset), 3921 Smi::FromInt(kFastElementsKindPackedToHoley)); 3922 3923 __ bind(&normal_sequence); 3924 int last_index = GetSequenceIndexFromFastElementsKind( 3925 TERMINAL_FAST_ELEMENTS_KIND); 3926 for (int i = 0; i <= last_index; ++i) { 3927 Label next; 3928 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 3929 __ cmpl(rdx, Immediate(kind)); 3930 __ j(not_equal, &next); 3931 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind); 3932 __ TailCallStub(&stub); 3933 __ bind(&next); 3934 } 3935 3936 // If we reached this point there is a problem. 3937 __ Abort(kUnexpectedElementsKindInArrayConstructor); 3938 } else { 3939 UNREACHABLE(); 3940 } 3941 } 3942 3943 3944 template<class T> 3945 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { 3946 int to_index = GetSequenceIndexFromFastElementsKind( 3947 TERMINAL_FAST_ELEMENTS_KIND); 3948 for (int i = 0; i <= to_index; ++i) { 3949 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 3950 T stub(isolate, kind); 3951 stub.GetCode(); 3952 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { 3953 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES); 3954 stub1.GetCode(); 3955 } 3956 } 3957 } 3958 3959 void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) { 3960 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( 3961 isolate); 3962 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( 3963 isolate); 3964 ArrayNArgumentsConstructorStub stub(isolate); 3965 stub.GetCode(); 3966 3967 ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS }; 3968 for (int i = 0; i < 2; i++) { 3969 // For internal arrays we only need a few things 3970 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]); 3971 stubh1.GetCode(); 3972 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]); 3973 stubh2.GetCode(); 3974 } 3975 } 3976 3977 3978 void ArrayConstructorStub::GenerateDispatchToArrayStub( 3979 MacroAssembler* masm, 3980 AllocationSiteOverrideMode mode) { 3981 if (argument_count() == ANY) { 3982 Label not_zero_case, not_one_case; 3983 __ testp(rax, rax); 3984 __ j(not_zero, ¬_zero_case); 3985 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode); 3986 3987 __ bind(¬_zero_case); 3988 __ cmpl(rax, Immediate(1)); 3989 __ j(greater, ¬_one_case); 3990 CreateArrayDispatchOneArgument(masm, mode); 3991 3992 __ bind(¬_one_case); 3993 ArrayNArgumentsConstructorStub stub(masm->isolate()); 3994 __ TailCallStub(&stub); 3995 } else if (argument_count() == NONE) { 3996 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode); 3997 } else if (argument_count() == ONE) { 3998 CreateArrayDispatchOneArgument(masm, mode); 3999 } else if (argument_count() == MORE_THAN_ONE) { 4000 ArrayNArgumentsConstructorStub stub(masm->isolate()); 4001 __ TailCallStub(&stub); 4002 } else { 4003 UNREACHABLE(); 4004 } 4005 } 4006 4007 4008 void ArrayConstructorStub::Generate(MacroAssembler* masm) { 4009 // ----------- S t a t e ------------- 4010 // -- rax : argc 4011 // -- rbx : AllocationSite or undefined 4012 // -- rdi : constructor 4013 // -- rdx : new target 4014 // -- rsp[0] : return address 4015 // -- rsp[8] : last argument 4016 // ----------------------------------- 4017 if (FLAG_debug_code) { 4018 // The array construct code is only set for the global and natives 4019 // builtin Array functions which always have maps. 4020 4021 // Initial map for the builtin Array function should be a map. 4022 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 4023 // Will both indicate a NULL and a Smi. 4024 STATIC_ASSERT(kSmiTag == 0); 4025 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); 4026 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); 4027 __ CmpObjectType(rcx, MAP_TYPE, rcx); 4028 __ Check(equal, kUnexpectedInitialMapForArrayFunction); 4029 4030 // We should either have undefined in rbx or a valid AllocationSite 4031 __ AssertUndefinedOrAllocationSite(rbx); 4032 } 4033 4034 // Enter the context of the Array function. 4035 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); 4036 4037 Label subclassing; 4038 __ cmpp(rdi, rdx); 4039 __ j(not_equal, &subclassing); 4040 4041 Label no_info; 4042 // If the feedback vector is the undefined value call an array constructor 4043 // that doesn't use AllocationSites. 4044 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); 4045 __ j(equal, &no_info); 4046 4047 // Only look at the lower 16 bits of the transition info. 4048 __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset)); 4049 __ SmiToInteger32(rdx, rdx); 4050 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0); 4051 __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask)); 4052 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE); 4053 4054 __ bind(&no_info); 4055 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES); 4056 4057 // Subclassing 4058 __ bind(&subclassing); 4059 switch (argument_count()) { 4060 case ANY: 4061 case MORE_THAN_ONE: { 4062 StackArgumentsAccessor args(rsp, rax); 4063 __ movp(args.GetReceiverOperand(), rdi); 4064 __ addp(rax, Immediate(3)); 4065 break; 4066 } 4067 case NONE: { 4068 StackArgumentsAccessor args(rsp, 0); 4069 __ movp(args.GetReceiverOperand(), rdi); 4070 __ Set(rax, 3); 4071 break; 4072 } 4073 case ONE: { 4074 StackArgumentsAccessor args(rsp, 1); 4075 __ movp(args.GetReceiverOperand(), rdi); 4076 __ Set(rax, 4); 4077 break; 4078 } 4079 } 4080 __ PopReturnAddressTo(rcx); 4081 __ Push(rdx); 4082 __ Push(rbx); 4083 __ PushReturnAddressFrom(rcx); 4084 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); 4085 } 4086 4087 4088 void InternalArrayConstructorStub::GenerateCase( 4089 MacroAssembler* masm, ElementsKind kind) { 4090 Label not_zero_case, not_one_case; 4091 Label normal_sequence; 4092 4093 __ testp(rax, rax); 4094 __ j(not_zero, ¬_zero_case); 4095 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind); 4096 __ TailCallStub(&stub0); 4097 4098 __ bind(¬_zero_case); 4099 __ cmpl(rax, Immediate(1)); 4100 __ j(greater, ¬_one_case); 4101 4102 if (IsFastPackedElementsKind(kind)) { 4103 // We might need to create a holey array 4104 // look at the first argument 4105 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); 4106 __ movp(rcx, args.GetArgumentOperand(0)); 4107 __ testp(rcx, rcx); 4108 __ j(zero, &normal_sequence); 4109 4110 InternalArraySingleArgumentConstructorStub 4111 stub1_holey(isolate(), GetHoleyElementsKind(kind)); 4112 __ TailCallStub(&stub1_holey); 4113 } 4114 4115 __ bind(&normal_sequence); 4116 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind); 4117 __ TailCallStub(&stub1); 4118 4119 __ bind(¬_one_case); 4120 ArrayNArgumentsConstructorStub stubN(isolate()); 4121 __ TailCallStub(&stubN); 4122 } 4123 4124 4125 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) { 4126 // ----------- S t a t e ------------- 4127 // -- rax : argc 4128 // -- rdi : constructor 4129 // -- rsp[0] : return address 4130 // -- rsp[8] : last argument 4131 // ----------------------------------- 4132 4133 if (FLAG_debug_code) { 4134 // The array construct code is only set for the global and natives 4135 // builtin Array functions which always have maps. 4136 4137 // Initial map for the builtin Array function should be a map. 4138 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 4139 // Will both indicate a NULL and a Smi. 4140 STATIC_ASSERT(kSmiTag == 0); 4141 Condition not_smi = NegateCondition(masm->CheckSmi(rcx)); 4142 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); 4143 __ CmpObjectType(rcx, MAP_TYPE, rcx); 4144 __ Check(equal, kUnexpectedInitialMapForArrayFunction); 4145 } 4146 4147 // Figure out the right elements kind 4148 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); 4149 4150 // Load the map's "bit field 2" into |result|. We only need the first byte, 4151 // but the following masking takes care of that anyway. 4152 __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset)); 4153 // Retrieve elements_kind from bit field 2. 4154 __ DecodeField<Map::ElementsKindBits>(rcx); 4155 4156 if (FLAG_debug_code) { 4157 Label done; 4158 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); 4159 __ j(equal, &done); 4160 __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS)); 4161 __ Assert(equal, 4162 kInvalidElementsKindForInternalArrayOrInternalPackedArray); 4163 __ bind(&done); 4164 } 4165 4166 Label fast_elements_case; 4167 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); 4168 __ j(equal, &fast_elements_case); 4169 GenerateCase(masm, FAST_HOLEY_ELEMENTS); 4170 4171 __ bind(&fast_elements_case); 4172 GenerateCase(masm, FAST_ELEMENTS); 4173 } 4174 4175 4176 void FastNewObjectStub::Generate(MacroAssembler* masm) { 4177 // ----------- S t a t e ------------- 4178 // -- rdi : target 4179 // -- rdx : new target 4180 // -- rsi : context 4181 // -- rsp[0] : return address 4182 // ----------------------------------- 4183 __ AssertFunction(rdi); 4184 __ AssertReceiver(rdx); 4185 4186 // Verify that the new target is a JSFunction. 4187 Label new_object; 4188 __ CmpObjectType(rdx, JS_FUNCTION_TYPE, rbx); 4189 __ j(not_equal, &new_object); 4190 4191 // Load the initial map and verify that it's in fact a map. 4192 __ movp(rcx, FieldOperand(rdx, JSFunction::kPrototypeOrInitialMapOffset)); 4193 __ JumpIfSmi(rcx, &new_object); 4194 __ CmpObjectType(rcx, MAP_TYPE, rbx); 4195 __ j(not_equal, &new_object); 4196 4197 // Fall back to runtime if the target differs from the new target's 4198 // initial map constructor. 4199 __ cmpp(rdi, FieldOperand(rcx, Map::kConstructorOrBackPointerOffset)); 4200 __ j(not_equal, &new_object); 4201 4202 // Allocate the JSObject on the heap. 4203 Label allocate, done_allocate; 4204 __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset)); 4205 __ leal(rbx, Operand(rbx, times_pointer_size, 0)); 4206 __ Allocate(rbx, rax, rdi, no_reg, &allocate, NO_ALLOCATION_FLAGS); 4207 __ bind(&done_allocate); 4208 4209 // Initialize the JSObject fields. 4210 __ movp(FieldOperand(rax, JSObject::kMapOffset), rcx); 4211 __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex); 4212 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx); 4213 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx); 4214 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize); 4215 __ leap(rbx, FieldOperand(rax, JSObject::kHeaderSize)); 4216 4217 // ----------- S t a t e ------------- 4218 // -- rax : result (tagged) 4219 // -- rbx : result fields (untagged) 4220 // -- rdi : result end (untagged) 4221 // -- rcx : initial map 4222 // -- rsi : context 4223 // -- rsp[0] : return address 4224 // ----------------------------------- 4225 4226 // Perform in-object slack tracking if requested. 4227 Label slack_tracking; 4228 STATIC_ASSERT(Map::kNoSlackTracking == 0); 4229 __ LoadRoot(r11, Heap::kUndefinedValueRootIndex); 4230 __ testl(FieldOperand(rcx, Map::kBitField3Offset), 4231 Immediate(Map::ConstructionCounter::kMask)); 4232 __ j(not_zero, &slack_tracking, Label::kNear); 4233 { 4234 // Initialize all in-object fields with undefined. 4235 __ InitializeFieldsWithFiller(rbx, rdi, r11); 4236 __ Ret(); 4237 } 4238 __ bind(&slack_tracking); 4239 { 4240 // Decrease generous allocation count. 4241 STATIC_ASSERT(Map::ConstructionCounter::kNext == 32); 4242 __ subl(FieldOperand(rcx, Map::kBitField3Offset), 4243 Immediate(1 << Map::ConstructionCounter::kShift)); 4244 4245 // Initialize the in-object fields with undefined. 4246 __ movzxbl(rdx, FieldOperand(rcx, Map::kUnusedPropertyFieldsOffset)); 4247 __ negp(rdx); 4248 __ leap(rdx, Operand(rdi, rdx, times_pointer_size, 0)); 4249 __ InitializeFieldsWithFiller(rbx, rdx, r11); 4250 4251 // Initialize the remaining (reserved) fields with one pointer filler map. 4252 __ LoadRoot(r11, Heap::kOnePointerFillerMapRootIndex); 4253 __ InitializeFieldsWithFiller(rdx, rdi, r11); 4254 4255 // Check if we can finalize the instance size. 4256 Label finalize; 4257 STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1); 4258 __ testl(FieldOperand(rcx, Map::kBitField3Offset), 4259 Immediate(Map::ConstructionCounter::kMask)); 4260 __ j(zero, &finalize, Label::kNear); 4261 __ Ret(); 4262 4263 // Finalize the instance size. 4264 __ bind(&finalize); 4265 { 4266 FrameScope scope(masm, StackFrame::INTERNAL); 4267 __ Push(rax); 4268 __ Push(rcx); 4269 __ CallRuntime(Runtime::kFinalizeInstanceSize); 4270 __ Pop(rax); 4271 } 4272 __ Ret(); 4273 } 4274 4275 // Fall back to %AllocateInNewSpace. 4276 __ bind(&allocate); 4277 { 4278 FrameScope scope(masm, StackFrame::INTERNAL); 4279 __ Integer32ToSmi(rbx, rbx); 4280 __ Push(rcx); 4281 __ Push(rbx); 4282 __ CallRuntime(Runtime::kAllocateInNewSpace); 4283 __ Pop(rcx); 4284 } 4285 __ movzxbl(rbx, FieldOperand(rcx, Map::kInstanceSizeOffset)); 4286 __ leap(rdi, Operand(rax, rbx, times_pointer_size, 0)); 4287 STATIC_ASSERT(kHeapObjectTag == 1); 4288 __ decp(rdi); // Remove the tag from the end address. 4289 __ jmp(&done_allocate); 4290 4291 // Fall back to %NewObject. 4292 __ bind(&new_object); 4293 __ PopReturnAddressTo(rcx); 4294 __ Push(rdi); 4295 __ Push(rdx); 4296 __ PushReturnAddressFrom(rcx); 4297 __ TailCallRuntime(Runtime::kNewObject); 4298 } 4299 4300 4301 void FastNewRestParameterStub::Generate(MacroAssembler* masm) { 4302 // ----------- S t a t e ------------- 4303 // -- rdi : function 4304 // -- rsi : context 4305 // -- rbp : frame pointer 4306 // -- rsp[0] : return address 4307 // ----------------------------------- 4308 __ AssertFunction(rdi); 4309 4310 // Make rdx point to the JavaScript frame. 4311 __ movp(rdx, rbp); 4312 if (skip_stub_frame()) { 4313 // For Ignition we need to skip the handler/stub frame to reach the 4314 // JavaScript frame for the function. 4315 __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset)); 4316 } 4317 if (FLAG_debug_code) { 4318 Label ok; 4319 __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset)); 4320 __ j(equal, &ok); 4321 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); 4322 __ bind(&ok); 4323 } 4324 4325 // Check if we have rest parameters (only possible if we have an 4326 // arguments adaptor frame below the function frame). 4327 Label no_rest_parameters; 4328 __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset)); 4329 __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset), 4330 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 4331 __ j(not_equal, &no_rest_parameters, Label::kNear); 4332 4333 // Check if the arguments adaptor frame contains more arguments than 4334 // specified by the function's internal formal parameter count. 4335 Label rest_parameters; 4336 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 4337 __ LoadSharedFunctionInfoSpecialField( 4338 rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset); 4339 __ SmiToInteger32( 4340 rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 4341 __ subl(rax, rcx); 4342 __ j(greater, &rest_parameters); 4343 4344 // Return an empty rest parameter array. 4345 __ bind(&no_rest_parameters); 4346 { 4347 // ----------- S t a t e ------------- 4348 // -- rsi : context 4349 // -- rsp[0] : return address 4350 // ----------------------------------- 4351 4352 // Allocate an empty rest parameter array. 4353 Label allocate, done_allocate; 4354 __ Allocate(JSArray::kSize, rax, rdx, rcx, &allocate, NO_ALLOCATION_FLAGS); 4355 __ bind(&done_allocate); 4356 4357 // Setup the rest parameter array in rax. 4358 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx); 4359 __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx); 4360 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); 4361 __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx); 4362 __ movp(FieldOperand(rax, JSArray::kElementsOffset), rcx); 4363 __ movp(FieldOperand(rax, JSArray::kLengthOffset), Immediate(0)); 4364 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize); 4365 __ Ret(); 4366 4367 // Fall back to %AllocateInNewSpace. 4368 __ bind(&allocate); 4369 { 4370 FrameScope scope(masm, StackFrame::INTERNAL); 4371 __ Push(Smi::FromInt(JSArray::kSize)); 4372 __ CallRuntime(Runtime::kAllocateInNewSpace); 4373 } 4374 __ jmp(&done_allocate); 4375 } 4376 4377 __ bind(&rest_parameters); 4378 { 4379 // Compute the pointer to the first rest parameter (skippping the receiver). 4380 __ leap(rbx, Operand(rbx, rax, times_pointer_size, 4381 StandardFrameConstants::kCallerSPOffset - 4382 1 * kPointerSize)); 4383 4384 // ----------- S t a t e ------------- 4385 // -- rdi : function 4386 // -- rsi : context 4387 // -- rax : number of rest parameters 4388 // -- rbx : pointer to first rest parameters 4389 // -- rsp[0] : return address 4390 // ----------------------------------- 4391 4392 // Allocate space for the rest parameter array plus the backing store. 4393 Label allocate, done_allocate; 4394 __ leal(rcx, Operand(rax, times_pointer_size, 4395 JSArray::kSize + FixedArray::kHeaderSize)); 4396 __ Allocate(rcx, rdx, r8, no_reg, &allocate, NO_ALLOCATION_FLAGS); 4397 __ bind(&done_allocate); 4398 4399 // Compute the arguments.length in rdi. 4400 __ Integer32ToSmi(rdi, rax); 4401 4402 // Setup the elements array in rdx. 4403 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex); 4404 __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx); 4405 __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi); 4406 { 4407 Label loop, done_loop; 4408 __ Set(rcx, 0); 4409 __ bind(&loop); 4410 __ cmpl(rcx, rax); 4411 __ j(equal, &done_loop, Label::kNear); 4412 __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize)); 4413 __ movp( 4414 FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize), 4415 kScratchRegister); 4416 __ subp(rbx, Immediate(1 * kPointerSize)); 4417 __ addl(rcx, Immediate(1)); 4418 __ jmp(&loop); 4419 __ bind(&done_loop); 4420 } 4421 4422 // Setup the rest parameter array in rax. 4423 __ leap(rax, 4424 Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize)); 4425 __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, rcx); 4426 __ movp(FieldOperand(rax, JSArray::kMapOffset), rcx); 4427 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); 4428 __ movp(FieldOperand(rax, JSArray::kPropertiesOffset), rcx); 4429 __ movp(FieldOperand(rax, JSArray::kElementsOffset), rdx); 4430 __ movp(FieldOperand(rax, JSArray::kLengthOffset), rdi); 4431 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize); 4432 __ Ret(); 4433 4434 // Fall back to %AllocateInNewSpace (if not too big). 4435 Label too_big_for_new_space; 4436 __ bind(&allocate); 4437 __ cmpl(rcx, Immediate(Page::kMaxRegularHeapObjectSize)); 4438 __ j(greater, &too_big_for_new_space); 4439 { 4440 FrameScope scope(masm, StackFrame::INTERNAL); 4441 __ Integer32ToSmi(rax, rax); 4442 __ Integer32ToSmi(rcx, rcx); 4443 __ Push(rax); 4444 __ Push(rbx); 4445 __ Push(rcx); 4446 __ CallRuntime(Runtime::kAllocateInNewSpace); 4447 __ movp(rdx, rax); 4448 __ Pop(rbx); 4449 __ Pop(rax); 4450 __ SmiToInteger32(rax, rax); 4451 } 4452 __ jmp(&done_allocate); 4453 4454 // Fall back to %NewRestParameter. 4455 __ bind(&too_big_for_new_space); 4456 __ PopReturnAddressTo(kScratchRegister); 4457 __ Push(rdi); 4458 __ PushReturnAddressFrom(kScratchRegister); 4459 __ TailCallRuntime(Runtime::kNewRestParameter); 4460 } 4461 } 4462 4463 4464 void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) { 4465 // ----------- S t a t e ------------- 4466 // -- rdi : function 4467 // -- rsi : context 4468 // -- rbp : frame pointer 4469 // -- rsp[0] : return address 4470 // ----------------------------------- 4471 __ AssertFunction(rdi); 4472 4473 // Make r9 point to the JavaScript frame. 4474 __ movp(r9, rbp); 4475 if (skip_stub_frame()) { 4476 // For Ignition we need to skip the handler/stub frame to reach the 4477 // JavaScript frame for the function. 4478 __ movp(r9, Operand(r9, StandardFrameConstants::kCallerFPOffset)); 4479 } 4480 if (FLAG_debug_code) { 4481 Label ok; 4482 __ cmpp(rdi, Operand(r9, StandardFrameConstants::kFunctionOffset)); 4483 __ j(equal, &ok); 4484 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); 4485 __ bind(&ok); 4486 } 4487 4488 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub. 4489 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 4490 __ LoadSharedFunctionInfoSpecialField( 4491 rcx, rcx, SharedFunctionInfo::kFormalParameterCountOffset); 4492 __ leap(rdx, Operand(r9, rcx, times_pointer_size, 4493 StandardFrameConstants::kCallerSPOffset)); 4494 __ Integer32ToSmi(rcx, rcx); 4495 4496 // rcx : number of parameters (tagged) 4497 // rdx : parameters pointer 4498 // rdi : function 4499 // rsp[0] : return address 4500 // r9 : JavaScript frame pointer. 4501 // Registers used over the whole function: 4502 // rbx: the mapped parameter count (untagged) 4503 // rax: the allocated object (tagged). 4504 Factory* factory = isolate()->factory(); 4505 4506 __ SmiToInteger64(rbx, rcx); 4507 // rbx = parameter count (untagged) 4508 4509 // Check if the calling frame is an arguments adaptor frame. 4510 Label adaptor_frame, try_allocate, runtime; 4511 __ movp(rax, Operand(r9, StandardFrameConstants::kCallerFPOffset)); 4512 __ movp(r8, Operand(rax, CommonFrameConstants::kContextOrFrameTypeOffset)); 4513 __ Cmp(r8, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 4514 __ j(equal, &adaptor_frame); 4515 4516 // No adaptor, parameter count = argument count. 4517 __ movp(r11, rbx); 4518 __ jmp(&try_allocate, Label::kNear); 4519 4520 // We have an adaptor frame. Patch the parameters pointer. 4521 __ bind(&adaptor_frame); 4522 __ SmiToInteger64( 4523 r11, Operand(rax, ArgumentsAdaptorFrameConstants::kLengthOffset)); 4524 __ leap(rdx, Operand(rax, r11, times_pointer_size, 4525 StandardFrameConstants::kCallerSPOffset)); 4526 4527 // rbx = parameter count (untagged) 4528 // r11 = argument count (untagged) 4529 // Compute the mapped parameter count = min(rbx, r11) in rbx. 4530 __ cmpp(rbx, r11); 4531 __ j(less_equal, &try_allocate, Label::kNear); 4532 __ movp(rbx, r11); 4533 4534 __ bind(&try_allocate); 4535 4536 // Compute the sizes of backing store, parameter map, and arguments object. 4537 // 1. Parameter map, has 2 extra words containing context and backing store. 4538 const int kParameterMapHeaderSize = 4539 FixedArray::kHeaderSize + 2 * kPointerSize; 4540 Label no_parameter_map; 4541 __ xorp(r8, r8); 4542 __ testp(rbx, rbx); 4543 __ j(zero, &no_parameter_map, Label::kNear); 4544 __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize)); 4545 __ bind(&no_parameter_map); 4546 4547 // 2. Backing store. 4548 __ leap(r8, Operand(r8, r11, times_pointer_size, FixedArray::kHeaderSize)); 4549 4550 // 3. Arguments object. 4551 __ addp(r8, Immediate(JSSloppyArgumentsObject::kSize)); 4552 4553 // Do the allocation of all three objects in one go. 4554 __ Allocate(r8, rax, r9, no_reg, &runtime, NO_ALLOCATION_FLAGS); 4555 4556 // rax = address of new object(s) (tagged) 4557 // r11 = argument count (untagged) 4558 // Get the arguments map from the current native context into r9. 4559 Label has_mapped_parameters, instantiate; 4560 __ movp(r9, NativeContextOperand()); 4561 __ testp(rbx, rbx); 4562 __ j(not_zero, &has_mapped_parameters, Label::kNear); 4563 4564 const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX; 4565 __ movp(r9, Operand(r9, Context::SlotOffset(kIndex))); 4566 __ jmp(&instantiate, Label::kNear); 4567 4568 const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX; 4569 __ bind(&has_mapped_parameters); 4570 __ movp(r9, Operand(r9, Context::SlotOffset(kAliasedIndex))); 4571 __ bind(&instantiate); 4572 4573 // rax = address of new object (tagged) 4574 // rbx = mapped parameter count (untagged) 4575 // r11 = argument count (untagged) 4576 // r9 = address of arguments map (tagged) 4577 __ movp(FieldOperand(rax, JSObject::kMapOffset), r9); 4578 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); 4579 __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); 4580 __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister); 4581 4582 // Set up the callee in-object property. 4583 __ AssertNotSmi(rdi); 4584 __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kCalleeOffset), rdi); 4585 4586 // Use the length (smi tagged) and set that as an in-object property too. 4587 // Note: r11 is tagged from here on. 4588 __ Integer32ToSmi(r11, r11); 4589 __ movp(FieldOperand(rax, JSSloppyArgumentsObject::kLengthOffset), r11); 4590 4591 // Set up the elements pointer in the allocated arguments object. 4592 // If we allocated a parameter map, rdi will point there, otherwise to the 4593 // backing store. 4594 __ leap(rdi, Operand(rax, JSSloppyArgumentsObject::kSize)); 4595 __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi); 4596 4597 // rax = address of new object (tagged) 4598 // rbx = mapped parameter count (untagged) 4599 // r11 = argument count (tagged) 4600 // rdi = address of parameter map or backing store (tagged) 4601 4602 // Initialize parameter map. If there are no mapped arguments, we're done. 4603 Label skip_parameter_map; 4604 __ testp(rbx, rbx); 4605 __ j(zero, &skip_parameter_map); 4606 4607 __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex); 4608 // rbx contains the untagged argument count. Add 2 and tag to write. 4609 __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); 4610 __ Integer64PlusConstantToSmi(r9, rbx, 2); 4611 __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9); 4612 __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi); 4613 __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); 4614 __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9); 4615 4616 // Copy the parameter slots and the holes in the arguments. 4617 // We need to fill in mapped_parameter_count slots. They index the context, 4618 // where parameters are stored in reverse order, at 4619 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 4620 // The mapped parameter thus need to get indices 4621 // MIN_CONTEXT_SLOTS+parameter_count-1 .. 4622 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count 4623 // We loop from right to left. 4624 Label parameters_loop, parameters_test; 4625 4626 // Load tagged parameter count into r9. 4627 __ Integer32ToSmi(r9, rbx); 4628 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS)); 4629 __ addp(r8, rcx); 4630 __ subp(r8, r9); 4631 __ movp(rcx, rdi); 4632 __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); 4633 __ SmiToInteger64(r9, r9); 4634 // r9 = loop variable (untagged) 4635 // r8 = mapping index (tagged) 4636 // rcx = address of parameter map (tagged) 4637 // rdi = address of backing store (tagged) 4638 __ jmp(¶meters_test, Label::kNear); 4639 4640 __ bind(¶meters_loop); 4641 __ subp(r9, Immediate(1)); 4642 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); 4643 __ movp(FieldOperand(rcx, r9, times_pointer_size, kParameterMapHeaderSize), 4644 r8); 4645 __ movp(FieldOperand(rdi, r9, times_pointer_size, FixedArray::kHeaderSize), 4646 kScratchRegister); 4647 __ SmiAddConstant(r8, r8, Smi::FromInt(1)); 4648 __ bind(¶meters_test); 4649 __ testp(r9, r9); 4650 __ j(not_zero, ¶meters_loop, Label::kNear); 4651 4652 __ bind(&skip_parameter_map); 4653 4654 // r11 = argument count (tagged) 4655 // rdi = address of backing store (tagged) 4656 // Copy arguments header and remaining slots (if there are any). 4657 __ Move(FieldOperand(rdi, FixedArray::kMapOffset), 4658 factory->fixed_array_map()); 4659 __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r11); 4660 4661 Label arguments_loop, arguments_test; 4662 __ movp(r8, rbx); 4663 // Untag r11 for the loop below. 4664 __ SmiToInteger64(r11, r11); 4665 __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0)); 4666 __ subp(rdx, kScratchRegister); 4667 __ jmp(&arguments_test, Label::kNear); 4668 4669 __ bind(&arguments_loop); 4670 __ subp(rdx, Immediate(kPointerSize)); 4671 __ movp(r9, Operand(rdx, 0)); 4672 __ movp(FieldOperand(rdi, r8, 4673 times_pointer_size, 4674 FixedArray::kHeaderSize), 4675 r9); 4676 __ addp(r8, Immediate(1)); 4677 4678 __ bind(&arguments_test); 4679 __ cmpp(r8, r11); 4680 __ j(less, &arguments_loop, Label::kNear); 4681 4682 // Return. 4683 __ ret(0); 4684 4685 // Do the runtime call to allocate the arguments object. 4686 // r11 = argument count (untagged) 4687 __ bind(&runtime); 4688 __ Integer32ToSmi(r11, r11); 4689 __ PopReturnAddressTo(rax); 4690 __ Push(rdi); // Push function. 4691 __ Push(rdx); // Push parameters pointer. 4692 __ Push(r11); // Push parameter count. 4693 __ PushReturnAddressFrom(rax); 4694 __ TailCallRuntime(Runtime::kNewSloppyArguments); 4695 } 4696 4697 4698 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { 4699 // ----------- S t a t e ------------- 4700 // -- rdi : function 4701 // -- rsi : context 4702 // -- rbp : frame pointer 4703 // -- rsp[0] : return address 4704 // ----------------------------------- 4705 __ AssertFunction(rdi); 4706 4707 // Make rdx point to the JavaScript frame. 4708 __ movp(rdx, rbp); 4709 if (skip_stub_frame()) { 4710 // For Ignition we need to skip the handler/stub frame to reach the 4711 // JavaScript frame for the function. 4712 __ movp(rdx, Operand(rdx, StandardFrameConstants::kCallerFPOffset)); 4713 } 4714 if (FLAG_debug_code) { 4715 Label ok; 4716 __ cmpp(rdi, Operand(rdx, StandardFrameConstants::kFunctionOffset)); 4717 __ j(equal, &ok); 4718 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); 4719 __ bind(&ok); 4720 } 4721 4722 // Check if we have an arguments adaptor frame below the function frame. 4723 Label arguments_adaptor, arguments_done; 4724 __ movp(rbx, Operand(rdx, StandardFrameConstants::kCallerFPOffset)); 4725 __ Cmp(Operand(rbx, CommonFrameConstants::kContextOrFrameTypeOffset), 4726 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 4727 __ j(equal, &arguments_adaptor, Label::kNear); 4728 { 4729 __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); 4730 __ LoadSharedFunctionInfoSpecialField( 4731 rax, rax, SharedFunctionInfo::kFormalParameterCountOffset); 4732 __ leap(rbx, Operand(rdx, rax, times_pointer_size, 4733 StandardFrameConstants::kCallerSPOffset - 4734 1 * kPointerSize)); 4735 } 4736 __ jmp(&arguments_done, Label::kNear); 4737 __ bind(&arguments_adaptor); 4738 { 4739 __ SmiToInteger32( 4740 rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); 4741 __ leap(rbx, Operand(rbx, rax, times_pointer_size, 4742 StandardFrameConstants::kCallerSPOffset - 4743 1 * kPointerSize)); 4744 } 4745 __ bind(&arguments_done); 4746 4747 // ----------- S t a t e ------------- 4748 // -- rax : number of arguments 4749 // -- rbx : pointer to the first argument 4750 // -- rdi : function 4751 // -- rsi : context 4752 // -- rsp[0] : return address 4753 // ----------------------------------- 4754 4755 // Allocate space for the strict arguments object plus the backing store. 4756 Label allocate, done_allocate; 4757 __ leal(rcx, Operand(rax, times_pointer_size, JSStrictArgumentsObject::kSize + 4758 FixedArray::kHeaderSize)); 4759 __ Allocate(rcx, rdx, r8, no_reg, &allocate, NO_ALLOCATION_FLAGS); 4760 __ bind(&done_allocate); 4761 4762 // Compute the arguments.length in rdi. 4763 __ Integer32ToSmi(rdi, rax); 4764 4765 // Setup the elements array in rdx. 4766 __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex); 4767 __ movp(FieldOperand(rdx, FixedArray::kMapOffset), rcx); 4768 __ movp(FieldOperand(rdx, FixedArray::kLengthOffset), rdi); 4769 { 4770 Label loop, done_loop; 4771 __ Set(rcx, 0); 4772 __ bind(&loop); 4773 __ cmpl(rcx, rax); 4774 __ j(equal, &done_loop, Label::kNear); 4775 __ movp(kScratchRegister, Operand(rbx, 0 * kPointerSize)); 4776 __ movp( 4777 FieldOperand(rdx, rcx, times_pointer_size, FixedArray::kHeaderSize), 4778 kScratchRegister); 4779 __ subp(rbx, Immediate(1 * kPointerSize)); 4780 __ addl(rcx, Immediate(1)); 4781 __ jmp(&loop); 4782 __ bind(&done_loop); 4783 } 4784 4785 // Setup the strict arguments object in rax. 4786 __ leap(rax, 4787 Operand(rdx, rax, times_pointer_size, FixedArray::kHeaderSize)); 4788 __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, rcx); 4789 __ movp(FieldOperand(rax, JSStrictArgumentsObject::kMapOffset), rcx); 4790 __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex); 4791 __ movp(FieldOperand(rax, JSStrictArgumentsObject::kPropertiesOffset), rcx); 4792 __ movp(FieldOperand(rax, JSStrictArgumentsObject::kElementsOffset), rdx); 4793 __ movp(FieldOperand(rax, JSStrictArgumentsObject::kLengthOffset), rdi); 4794 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize); 4795 __ Ret(); 4796 4797 // Fall back to %AllocateInNewSpace (if not too big). 4798 Label too_big_for_new_space; 4799 __ bind(&allocate); 4800 __ cmpl(rcx, Immediate(Page::kMaxRegularHeapObjectSize)); 4801 __ j(greater, &too_big_for_new_space); 4802 { 4803 FrameScope scope(masm, StackFrame::INTERNAL); 4804 __ Integer32ToSmi(rax, rax); 4805 __ Integer32ToSmi(rcx, rcx); 4806 __ Push(rax); 4807 __ Push(rbx); 4808 __ Push(rcx); 4809 __ CallRuntime(Runtime::kAllocateInNewSpace); 4810 __ movp(rdx, rax); 4811 __ Pop(rbx); 4812 __ Pop(rax); 4813 __ SmiToInteger32(rax, rax); 4814 } 4815 __ jmp(&done_allocate); 4816 4817 // Fall back to %NewStrictArguments. 4818 __ bind(&too_big_for_new_space); 4819 __ PopReturnAddressTo(kScratchRegister); 4820 __ Push(rdi); 4821 __ PushReturnAddressFrom(kScratchRegister); 4822 __ TailCallRuntime(Runtime::kNewStrictArguments); 4823 } 4824 4825 4826 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) { 4827 Register context_reg = rsi; 4828 Register slot_reg = rbx; 4829 Register value_reg = rax; 4830 Register cell_reg = r8; 4831 Register cell_details_reg = rdx; 4832 Register cell_value_reg = r9; 4833 Label fast_heapobject_case, fast_smi_case, slow_case; 4834 4835 if (FLAG_debug_code) { 4836 __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex); 4837 __ Check(not_equal, kUnexpectedValue); 4838 } 4839 4840 // Go up context chain to the script context. 4841 for (int i = 0; i < depth(); ++i) { 4842 __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX)); 4843 context_reg = rdi; 4844 } 4845 4846 // Load the PropertyCell at the specified slot. 4847 __ movp(cell_reg, ContextOperand(context_reg, slot_reg)); 4848 4849 // Load PropertyDetails for the cell (actually only the cell_type, kind and 4850 // READ_ONLY bit of attributes). 4851 __ SmiToInteger32(cell_details_reg, 4852 FieldOperand(cell_reg, PropertyCell::kDetailsOffset)); 4853 __ andl(cell_details_reg, 4854 Immediate(PropertyDetails::PropertyCellTypeField::kMask | 4855 PropertyDetails::KindField::kMask | 4856 PropertyDetails::kAttributesReadOnlyMask)); 4857 4858 // Check if PropertyCell holds mutable data. 4859 Label not_mutable_data; 4860 __ cmpl(cell_details_reg, 4861 Immediate(PropertyDetails::PropertyCellTypeField::encode( 4862 PropertyCellType::kMutable) | 4863 PropertyDetails::KindField::encode(kData))); 4864 __ j(not_equal, ¬_mutable_data); 4865 __ JumpIfSmi(value_reg, &fast_smi_case); 4866 __ bind(&fast_heapobject_case); 4867 __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg); 4868 __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg, 4869 cell_value_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET, 4870 OMIT_SMI_CHECK); 4871 // RecordWriteField clobbers the value register, so we need to reload. 4872 __ movp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset)); 4873 __ Ret(); 4874 __ bind(¬_mutable_data); 4875 4876 // Check if PropertyCell value matches the new value (relevant for Constant, 4877 // ConstantType and Undefined cells). 4878 Label not_same_value; 4879 __ movp(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset)); 4880 __ cmpp(cell_value_reg, value_reg); 4881 __ j(not_equal, ¬_same_value, 4882 FLAG_debug_code ? Label::kFar : Label::kNear); 4883 // Make sure the PropertyCell is not marked READ_ONLY. 4884 __ testl(cell_details_reg, 4885 Immediate(PropertyDetails::kAttributesReadOnlyMask)); 4886 __ j(not_zero, &slow_case); 4887 if (FLAG_debug_code) { 4888 Label done; 4889 // This can only be true for Constant, ConstantType and Undefined cells, 4890 // because we never store the_hole via this stub. 4891 __ cmpl(cell_details_reg, 4892 Immediate(PropertyDetails::PropertyCellTypeField::encode( 4893 PropertyCellType::kConstant) | 4894 PropertyDetails::KindField::encode(kData))); 4895 __ j(equal, &done); 4896 __ cmpl(cell_details_reg, 4897 Immediate(PropertyDetails::PropertyCellTypeField::encode( 4898 PropertyCellType::kConstantType) | 4899 PropertyDetails::KindField::encode(kData))); 4900 __ j(equal, &done); 4901 __ cmpl(cell_details_reg, 4902 Immediate(PropertyDetails::PropertyCellTypeField::encode( 4903 PropertyCellType::kUndefined) | 4904 PropertyDetails::KindField::encode(kData))); 4905 __ Check(equal, kUnexpectedValue); 4906 __ bind(&done); 4907 } 4908 __ Ret(); 4909 __ bind(¬_same_value); 4910 4911 // Check if PropertyCell contains data with constant type (and is not 4912 // READ_ONLY). 4913 __ cmpl(cell_details_reg, 4914 Immediate(PropertyDetails::PropertyCellTypeField::encode( 4915 PropertyCellType::kConstantType) | 4916 PropertyDetails::KindField::encode(kData))); 4917 __ j(not_equal, &slow_case, Label::kNear); 4918 4919 // Now either both old and new values must be SMIs or both must be heap 4920 // objects with same map. 4921 Label value_is_heap_object; 4922 __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear); 4923 __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear); 4924 // Old and new values are SMIs, no need for a write barrier here. 4925 __ bind(&fast_smi_case); 4926 __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg); 4927 __ Ret(); 4928 __ bind(&value_is_heap_object); 4929 __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear); 4930 Register cell_value_map_reg = cell_value_reg; 4931 __ movp(cell_value_map_reg, 4932 FieldOperand(cell_value_reg, HeapObject::kMapOffset)); 4933 __ cmpp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset)); 4934 __ j(equal, &fast_heapobject_case); 4935 4936 // Fallback to the runtime. 4937 __ bind(&slow_case); 4938 __ Integer32ToSmi(slot_reg, slot_reg); 4939 __ PopReturnAddressTo(kScratchRegister); 4940 __ Push(slot_reg); 4941 __ Push(value_reg); 4942 __ Push(kScratchRegister); 4943 __ TailCallRuntime(is_strict(language_mode()) 4944 ? Runtime::kStoreGlobalViaContext_Strict 4945 : Runtime::kStoreGlobalViaContext_Sloppy); 4946 } 4947 4948 4949 static int Offset(ExternalReference ref0, ExternalReference ref1) { 4950 int64_t offset = (ref0.address() - ref1.address()); 4951 // Check that fits into int. 4952 DCHECK(static_cast<int>(offset) == offset); 4953 return static_cast<int>(offset); 4954 } 4955 4956 4957 // Prepares stack to put arguments (aligns and so on). WIN64 calling 4958 // convention requires to put the pointer to the return value slot into 4959 // rcx (rcx must be preserverd until CallApiFunctionAndReturn). Saves 4960 // context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize 4961 // inside the exit frame (not GCed) accessible via StackSpaceOperand. 4962 static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) { 4963 __ EnterApiExitFrame(arg_stack_space); 4964 } 4965 4966 4967 // Calls an API function. Allocates HandleScope, extracts returned value 4968 // from handle and propagates exceptions. Clobbers r14, r15, rbx and 4969 // caller-save registers. Restores context. On return removes 4970 // stack_space * kPointerSize (GCed). 4971 static void CallApiFunctionAndReturn(MacroAssembler* masm, 4972 Register function_address, 4973 ExternalReference thunk_ref, 4974 Register thunk_last_arg, int stack_space, 4975 Operand* stack_space_operand, 4976 Operand return_value_operand, 4977 Operand* context_restore_operand) { 4978 Label prologue; 4979 Label promote_scheduled_exception; 4980 Label delete_allocated_handles; 4981 Label leave_exit_frame; 4982 Label write_back; 4983 4984 Isolate* isolate = masm->isolate(); 4985 Factory* factory = isolate->factory(); 4986 ExternalReference next_address = 4987 ExternalReference::handle_scope_next_address(isolate); 4988 const int kNextOffset = 0; 4989 const int kLimitOffset = Offset( 4990 ExternalReference::handle_scope_limit_address(isolate), next_address); 4991 const int kLevelOffset = Offset( 4992 ExternalReference::handle_scope_level_address(isolate), next_address); 4993 ExternalReference scheduled_exception_address = 4994 ExternalReference::scheduled_exception_address(isolate); 4995 4996 DCHECK(rdx.is(function_address) || r8.is(function_address)); 4997 // Allocate HandleScope in callee-save registers. 4998 Register prev_next_address_reg = r14; 4999 Register prev_limit_reg = rbx; 5000 Register base_reg = r15; 5001 __ Move(base_reg, next_address); 5002 __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset)); 5003 __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset)); 5004 __ addl(Operand(base_reg, kLevelOffset), Immediate(1)); 5005 5006 if (FLAG_log_timer_events) { 5007 FrameScope frame(masm, StackFrame::MANUAL); 5008 __ PushSafepointRegisters(); 5009 __ PrepareCallCFunction(1); 5010 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate)); 5011 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), 5012 1); 5013 __ PopSafepointRegisters(); 5014 } 5015 5016 Label profiler_disabled; 5017 Label end_profiler_check; 5018 __ Move(rax, ExternalReference::is_profiling_address(isolate)); 5019 __ cmpb(Operand(rax, 0), Immediate(0)); 5020 __ j(zero, &profiler_disabled); 5021 5022 // Third parameter is the address of the actual getter function. 5023 __ Move(thunk_last_arg, function_address); 5024 __ Move(rax, thunk_ref); 5025 __ jmp(&end_profiler_check); 5026 5027 __ bind(&profiler_disabled); 5028 // Call the api function! 5029 __ Move(rax, function_address); 5030 5031 __ bind(&end_profiler_check); 5032 5033 // Call the api function! 5034 __ call(rax); 5035 5036 if (FLAG_log_timer_events) { 5037 FrameScope frame(masm, StackFrame::MANUAL); 5038 __ PushSafepointRegisters(); 5039 __ PrepareCallCFunction(1); 5040 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate)); 5041 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), 5042 1); 5043 __ PopSafepointRegisters(); 5044 } 5045 5046 // Load the value from ReturnValue 5047 __ movp(rax, return_value_operand); 5048 __ bind(&prologue); 5049 5050 // No more valid handles (the result handle was the last one). Restore 5051 // previous handle scope. 5052 __ subl(Operand(base_reg, kLevelOffset), Immediate(1)); 5053 __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg); 5054 __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset)); 5055 __ j(not_equal, &delete_allocated_handles); 5056 5057 // Leave the API exit frame. 5058 __ bind(&leave_exit_frame); 5059 bool restore_context = context_restore_operand != NULL; 5060 if (restore_context) { 5061 __ movp(rsi, *context_restore_operand); 5062 } 5063 if (stack_space_operand != nullptr) { 5064 __ movp(rbx, *stack_space_operand); 5065 } 5066 __ LeaveApiExitFrame(!restore_context); 5067 5068 // Check if the function scheduled an exception. 5069 __ Move(rdi, scheduled_exception_address); 5070 __ Cmp(Operand(rdi, 0), factory->the_hole_value()); 5071 __ j(not_equal, &promote_scheduled_exception); 5072 5073 #if DEBUG 5074 // Check if the function returned a valid JavaScript value. 5075 Label ok; 5076 Register return_value = rax; 5077 Register map = rcx; 5078 5079 __ JumpIfSmi(return_value, &ok, Label::kNear); 5080 __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset)); 5081 5082 __ CmpInstanceType(map, LAST_NAME_TYPE); 5083 __ j(below_equal, &ok, Label::kNear); 5084 5085 __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE); 5086 __ j(above_equal, &ok, Label::kNear); 5087 5088 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); 5089 __ j(equal, &ok, Label::kNear); 5090 5091 __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex); 5092 __ j(equal, &ok, Label::kNear); 5093 5094 __ CompareRoot(return_value, Heap::kTrueValueRootIndex); 5095 __ j(equal, &ok, Label::kNear); 5096 5097 __ CompareRoot(return_value, Heap::kFalseValueRootIndex); 5098 __ j(equal, &ok, Label::kNear); 5099 5100 __ CompareRoot(return_value, Heap::kNullValueRootIndex); 5101 __ j(equal, &ok, Label::kNear); 5102 5103 __ Abort(kAPICallReturnedInvalidObject); 5104 5105 __ bind(&ok); 5106 #endif 5107 5108 if (stack_space_operand != nullptr) { 5109 DCHECK_EQ(stack_space, 0); 5110 __ PopReturnAddressTo(rcx); 5111 __ addq(rsp, rbx); 5112 __ jmp(rcx); 5113 } else { 5114 __ ret(stack_space * kPointerSize); 5115 } 5116 5117 // Re-throw by promoting a scheduled exception. 5118 __ bind(&promote_scheduled_exception); 5119 __ TailCallRuntime(Runtime::kPromoteScheduledException); 5120 5121 // HandleScope limit has changed. Delete allocated extensions. 5122 __ bind(&delete_allocated_handles); 5123 __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg); 5124 __ movp(prev_limit_reg, rax); 5125 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate)); 5126 __ LoadAddress(rax, 5127 ExternalReference::delete_handle_scope_extensions(isolate)); 5128 __ call(rax); 5129 __ movp(rax, prev_limit_reg); 5130 __ jmp(&leave_exit_frame); 5131 } 5132 5133 void CallApiCallbackStub::Generate(MacroAssembler* masm) { 5134 // ----------- S t a t e ------------- 5135 // -- rdi : callee 5136 // -- rbx : call_data 5137 // -- rcx : holder 5138 // -- rdx : api_function_address 5139 // -- rsi : context 5140 // -- rax : number of arguments if argc is a register 5141 // -- rsp[0] : return address 5142 // -- rsp[8] : last argument 5143 // -- ... 5144 // -- rsp[argc * 8] : first argument 5145 // -- rsp[(argc + 1) * 8] : receiver 5146 // ----------------------------------- 5147 5148 Register callee = rdi; 5149 Register call_data = rbx; 5150 Register holder = rcx; 5151 Register api_function_address = rdx; 5152 Register context = rsi; 5153 Register return_address = r8; 5154 5155 typedef FunctionCallbackArguments FCA; 5156 5157 STATIC_ASSERT(FCA::kContextSaveIndex == 6); 5158 STATIC_ASSERT(FCA::kCalleeIndex == 5); 5159 STATIC_ASSERT(FCA::kDataIndex == 4); 5160 STATIC_ASSERT(FCA::kReturnValueOffset == 3); 5161 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2); 5162 STATIC_ASSERT(FCA::kIsolateIndex == 1); 5163 STATIC_ASSERT(FCA::kHolderIndex == 0); 5164 STATIC_ASSERT(FCA::kNewTargetIndex == 7); 5165 STATIC_ASSERT(FCA::kArgsLength == 8); 5166 5167 __ PopReturnAddressTo(return_address); 5168 5169 // new target 5170 __ PushRoot(Heap::kUndefinedValueRootIndex); 5171 5172 // context save 5173 __ Push(context); 5174 5175 // callee 5176 __ Push(callee); 5177 5178 // call data 5179 __ Push(call_data); 5180 Register scratch = call_data; 5181 if (!this->call_data_undefined()) { 5182 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); 5183 } 5184 // return value 5185 __ Push(scratch); 5186 // return value default 5187 __ Push(scratch); 5188 // isolate 5189 __ Move(scratch, ExternalReference::isolate_address(masm->isolate())); 5190 __ Push(scratch); 5191 // holder 5192 __ Push(holder); 5193 5194 __ movp(scratch, rsp); 5195 // Push return address back on stack. 5196 __ PushReturnAddressFrom(return_address); 5197 5198 if (!this->is_lazy()) { 5199 // load context from callee 5200 __ movp(context, FieldOperand(callee, JSFunction::kContextOffset)); 5201 } 5202 5203 // Allocate the v8::Arguments structure in the arguments' space since 5204 // it's not controlled by GC. 5205 const int kApiStackSpace = 3; 5206 5207 PrepareCallApiFunction(masm, kApiStackSpace); 5208 5209 // FunctionCallbackInfo::implicit_args_. 5210 int argc = this->argc(); 5211 __ movp(StackSpaceOperand(0), scratch); 5212 __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize)); 5213 // FunctionCallbackInfo::values_. 5214 __ movp(StackSpaceOperand(1), scratch); 5215 // FunctionCallbackInfo::length_. 5216 __ Set(StackSpaceOperand(2), argc); 5217 5218 #if defined(__MINGW64__) || defined(_WIN64) 5219 Register arguments_arg = rcx; 5220 Register callback_arg = rdx; 5221 #else 5222 Register arguments_arg = rdi; 5223 Register callback_arg = rsi; 5224 #endif 5225 5226 // It's okay if api_function_address == callback_arg 5227 // but not arguments_arg 5228 DCHECK(!api_function_address.is(arguments_arg)); 5229 5230 // v8::InvocationCallback's argument. 5231 __ leap(arguments_arg, StackSpaceOperand(0)); 5232 5233 ExternalReference thunk_ref = 5234 ExternalReference::invoke_function_callback(masm->isolate()); 5235 5236 // Accessor for FunctionCallbackInfo and first js arg. 5237 StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1, 5238 ARGUMENTS_DONT_CONTAIN_RECEIVER); 5239 Operand context_restore_operand = args_from_rbp.GetArgumentOperand( 5240 FCA::kArgsLength - FCA::kContextSaveIndex); 5241 Operand length_operand = StackSpaceOperand(2); 5242 Operand return_value_operand = args_from_rbp.GetArgumentOperand( 5243 this->is_store() ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset); 5244 int stack_space = 0; 5245 Operand* stack_space_operand = &length_operand; 5246 stack_space = argc + FCA::kArgsLength + 1; 5247 stack_space_operand = nullptr; 5248 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg, 5249 stack_space, stack_space_operand, 5250 return_value_operand, &context_restore_operand); 5251 } 5252 5253 5254 void CallApiGetterStub::Generate(MacroAssembler* masm) { 5255 #if defined(__MINGW64__) || defined(_WIN64) 5256 Register getter_arg = r8; 5257 Register accessor_info_arg = rdx; 5258 Register name_arg = rcx; 5259 #else 5260 Register getter_arg = rdx; 5261 Register accessor_info_arg = rsi; 5262 Register name_arg = rdi; 5263 #endif 5264 Register api_function_address = r8; 5265 Register receiver = ApiGetterDescriptor::ReceiverRegister(); 5266 Register holder = ApiGetterDescriptor::HolderRegister(); 5267 Register callback = ApiGetterDescriptor::CallbackRegister(); 5268 Register scratch = rax; 5269 DCHECK(!AreAliased(receiver, holder, callback, scratch)); 5270 5271 // Build v8::PropertyCallbackInfo::args_ array on the stack and push property 5272 // name below the exit frame to make GC aware of them. 5273 STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0); 5274 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1); 5275 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2); 5276 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3); 5277 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4); 5278 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5); 5279 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6); 5280 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7); 5281 5282 // Insert additional parameters into the stack frame above return address. 5283 __ PopReturnAddressTo(scratch); 5284 __ Push(receiver); 5285 __ Push(FieldOperand(callback, AccessorInfo::kDataOffset)); 5286 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); 5287 __ Push(kScratchRegister); // return value 5288 __ Push(kScratchRegister); // return value default 5289 __ PushAddress(ExternalReference::isolate_address(isolate())); 5290 __ Push(holder); 5291 __ Push(Smi::FromInt(0)); // should_throw_on_error -> false 5292 __ Push(FieldOperand(callback, AccessorInfo::kNameOffset)); 5293 __ PushReturnAddressFrom(scratch); 5294 5295 // v8::PropertyCallbackInfo::args_ array and name handle. 5296 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1; 5297 5298 // Allocate v8::PropertyCallbackInfo in non-GCed stack space. 5299 const int kArgStackSpace = 1; 5300 5301 // Load address of v8::PropertyAccessorInfo::args_ array. 5302 __ leap(scratch, Operand(rsp, 2 * kPointerSize)); 5303 5304 PrepareCallApiFunction(masm, kArgStackSpace); 5305 // Create v8::PropertyCallbackInfo object on the stack and initialize 5306 // it's args_ field. 5307 Operand info_object = StackSpaceOperand(0); 5308 __ movp(info_object, scratch); 5309 5310 __ leap(name_arg, Operand(scratch, -kPointerSize)); 5311 // The context register (rsi) has been saved in PrepareCallApiFunction and 5312 // could be used to pass arguments. 5313 __ leap(accessor_info_arg, info_object); 5314 5315 ExternalReference thunk_ref = 5316 ExternalReference::invoke_accessor_getter_callback(isolate()); 5317 5318 // It's okay if api_function_address == getter_arg 5319 // but not accessor_info_arg or name_arg 5320 DCHECK(!api_function_address.is(accessor_info_arg)); 5321 DCHECK(!api_function_address.is(name_arg)); 5322 __ movp(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset)); 5323 __ movp(api_function_address, 5324 FieldOperand(scratch, Foreign::kForeignAddressOffset)); 5325 5326 // +3 is to skip prolog, return address and name handle. 5327 Operand return_value_operand( 5328 rbp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize); 5329 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, 5330 kStackUnwindSpace, nullptr, return_value_operand, 5331 NULL); 5332 } 5333 5334 #undef __ 5335 5336 } // namespace internal 5337 } // namespace v8 5338 5339 #endif // V8_TARGET_ARCH_X64 5340