
Lines Matching refs:__

42 #define __ ACCESS_MASM(masm)
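
In V8, ACCESS_MASM(masm) normally expands to "masm->", so every "__ op(...)" line below is a call on the surrounding MacroAssembler (for example, "__ push(r1);" becomes "masm->push(r1);"). A minimal, self-contained sketch of the same shorthand, using a hypothetical Assembler class rather than V8's real MacroAssembler:

    #include <cstdio>

    // Hypothetical stand-in for V8's MacroAssembler; only two ops shown.
    class Assembler {
     public:
      void push(int reg) { std::printf("push r%d\n", reg); }
      void add(int dst, int src, int imm) {
        std::printf("add r%d, r%d, #%d\n", dst, src, imm);
      }
    };

    // Same convention as the V8 sources: __ abbreviates a call on masm.
    #define ACCESS_MASM(masm) masm->
    #define __ ACCESS_MASM(masm)

    static void Generate(Assembler* masm) {
      __ push(1);       // expands to masm->push(1);
      __ add(0, 0, 2);  // expands to masm->add(0, 0, 2);
    }

    #undef __
    #undef ACCESS_MASM

    int main() {
      Assembler masm;
      Generate(&masm);
      return 0;
    }
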
63 __ push(r1);
70 __ add(r0, r0, Operand(num_extra_args + 1));
71 __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
80 __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
81 __ ldr(result,
84 __ ldr(result,
95 __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
96 __ ldr(result,
99 __ ldr(result,
117 __ LoadInitialArrayMap(array_function, scratch2, scratch1);
125 __ AllocateInNewSpace(size,
137 __ str(scratch1, FieldMemOperand(result, JSObject::kMapOffset));
138 __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
139 __ str(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset));
141 __ mov(scratch3, Operand(0, RelocInfo::NONE));
142 __ str(scratch3, FieldMemOperand(result, JSArray::kLengthOffset));
145 __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));
153 __ add(scratch1, result, Operand(JSArray::kSize));
154 __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));
157 __ sub(scratch1, scratch1, Operand(kHeapObjectTag));
164 __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
166 __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
167 __ mov(scratch3, Operand(Smi::FromInt(initial_capacity)));
169 __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
173 __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
177 __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
181 __ add(scratch2, scratch1, Operand(initial_capacity * kPointerSize));
182 __ b(&entry);
183 __ bind(&loop);
184 __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
185 __ bind(&entry);
186 __ cmp(scratch1, scratch2);
187 __ b(lt, &loop);
211 __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage);
214 __ tst(array_size, array_size);
215 __ Assert(ne, "array size is unexpectedly 0");
221 __ mov(elements_array_end,
223 __ add(elements_array_end,
226 __ AllocateInNewSpace(
239 __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
240 __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
241 __ str(elements_array_storage,
244 __ str(array_size, FieldMemOperand(result, JSArray::kLengthOffset));
250 __ add(elements_array_storage, result, Operand(JSArray::kSize));
251 __ str(elements_array_storage,
256 __ sub(elements_array_storage,
264 __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
266 __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
269 __ str(array_size,
277 __ add(elements_array_end,
287 __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
288 __ jmp(&entry);
289 __ bind(&loop);
290 __ str(scratch1,
292 __ bind(&entry);
293 __ cmp(elements_array_storage, elements_array_end);
294 __ b(lt, &loop);
319 __ cmp(r0, Operand(0, RelocInfo::NONE));
320 __ b(ne, &argc_one_or_more);
323 __ bind(&empty_array);
331 __ IncrementCounter(counters->array_function_native(), 1, r3, r4);
333 __ mov(r0, r2);
334 __ add(sp, sp, Operand(kPointerSize));
335 __ Jump(lr);
339 __ bind(&argc_one_or_more);
340 __ cmp(r0, Operand(1));
341 __ b(ne, &argc_two_or_more);
343 __ ldr(r2, MemOperand(sp)); // Get the argument from the stack.
344 __ tst(r2, r2);
345 __ b(ne, &not_empty_array);
346 __ Drop(1); // Adjust stack.
347 __ mov(r0, Operand(0)); // Treat this as a call with argc of zero.
348 __ b(&empty_array);
350 __ bind(&not_empty_array);
351 __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
352 __ b(ne, call_generic_code);
357 __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
358 __ b(ge, call_generic_code);
374 __ IncrementCounter(counters->array_function_native(), 1, r2, r4);
376 __ mov(r0, r3);
377 __ add(sp, sp, Operand(2 * kPointerSize));
378 __ Jump(lr);
381 __ bind(&argc_two_or_more);
382 __ mov(r2, Operand(r0, LSL, kSmiTagSize)); // Convert argc to a smi.
398 __ IncrementCounter(counters->array_function_native(), 1, r2, r6);
410 __ mov(r7, sp);
411 __ jmp(&entry);
412 __ bind(&loop);
413 __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex));
415 __ JumpIfNotSmi(r2, &has_non_smi_element);
417 __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
418 __ bind(&entry);
419 __ cmp(r4, r5);
420 __ b(lt, &loop);
422 __ bind(&finish);
423 __ mov(sp, r7);
430 __ add(sp, sp, Operand(kPointerSize));
431 __ mov(r0, r3);
432 __ Jump(lr);
434 __ bind(&has_non_smi_element);
436 __ CheckMap(
438 __ bind(&cant_transition_map);
439 __ UndoAllocationInNewSpace(r3, r4);
440 __ b(call_generic_code);
442 __ bind(&not_double);
445 __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
446 __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
451 __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
452 __ RecordWriteField(r3,
461 __ sub(r7, r7, Operand(kPointerSize));
462 __ bind(&loop2);
463 __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex));
464 __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
465 __ cmp(r4, r5);
466 __ b(lt, &loop2);
467 __ b(&finish);
484 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
485 __ tst(r2, Operand(kSmiTagMask));
486 __ Assert(ne, "Unexpected initial map for InternalArray function");
487 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
488 __ Assert(eq, "Unexpected initial map for InternalArray function");
497 __ bind(&generic_array_code);
501 __ Jump(array_code, RelocInfo::CODE_TARGET);
518 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
519 __ tst(r2, Operand(kSmiTagMask));
520 __ Assert(ne, "Unexpected initial map for Array function");
521 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
522 __ Assert(eq, "Unexpected initial map for Array function");
530 __ bind(&generic_array_code);
534 __ Jump(array_code, RelocInfo::CODE_TARGET);
551 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
552 __ tst(r2, Operand(kSmiTagMask));
553 __ Assert(ne, "Unexpected initial map for Array function");
554 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
555 __ Assert(eq, "Unexpected initial map for Array function");
563 __ bind(&generic_constructor);
566 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
579 __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);
583 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
584 __ cmp(function, Operand(r2));
585 __ Assert(eq, "Unexpected String function");
590 __ cmp(r0, Operand(0, RelocInfo::NONE));
591 __ b(eq, &no_arguments);
593 __ sub(r0, r0, Operand(1));
594 __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
596 __ Drop(2);
609 __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
610 __ bind(&argument_is_string);
619 __ AllocateInNewSpace(JSValue::kSize,
628 __ LoadGlobalFunctionInitialMap(function, map, r4);
630 __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
631 __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
632 __ Assert(eq, "Unexpected string wrapper instance size");
633 __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
634 __ cmp(r4, Operand(0, RelocInfo::NONE));
635 __ Assert(eq, "Unexpected unused properties of string wrapper");
637 __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));
639 __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
640 __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
641 __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));
643 __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));
648 __ Ret();
653 __ bind(&not_cached);
654 __ JumpIfSmi(r0, &convert_argument);
657 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
658 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
660 __ tst(r3, Operand(kIsNotStringMask));
661 __ b(ne, &convert_argument);
662 __ mov(argument, r0);
663 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
664 __ b(&argument_is_string);
667 __ bind(&convert_argument);
668 __ push(function); // Preserve the function.
669 __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
672 __ push(r0);
673 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
675 __ pop(function);
676 __ mov(argument, r0);
677 __ b(&argument_is_string);
681 __ bind(&no_arguments);
682 __ LoadRoot(argument, Heap::kEmptyStringRootIndex);
683 __ Drop(1);
684 __ b(&argument_is_string);
688 __ bind(&gc_required);
689 __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
692 __ push(argument);
693 __ CallRuntime(Runtime::kNewStringWrapper, 1);
695 __ Ret();
719 __ mov(r0, Operand(r0, LSL, kSmiTagSize));
720 __ push(r0); // Smi-tagged arguments count.
721 __ push(r1); // Constructor function.
731 __ mov(r2, Operand(debug_step_in_fp));
732 __ ldr(r2, MemOperand(r2));
733 __ tst(r2, r2);
734 __ b(ne, &rt_call);
739 __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
740 __ JumpIfSmi(r2, &rt_call);
741 __ CompareObjectType(r2, r3, r4, MAP_TYPE);
742 __ b(ne, &rt_call);
749 __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
750 __ b(eq, &rt_call);
755 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
758 __ ldrb(r4, constructor_count);
759 __ sub(r4, r4, Operand(1), SetCC);
760 __ strb(r4, constructor_count);
761 __ b(ne, &allocate);
763 __ Push(r1, r2);
765 __ push(r1); // constructor
767 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
769 __ pop(r2);
770 __ pop(r1);
772 __ bind(&allocate);
778 __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
779 __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);
787 __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
788 __ mov(r5, r4);
790 __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
792 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
794 __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
802 __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
804 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
806 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
807 __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
809 __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
812 __ cmp(r0, r6);
813 __ Assert(le, "Unexpected number of pre-allocated property fields.");
815 __ InitializeFieldsWithFiller(r5, r0, r7);
817 __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
819 __ InitializeFieldsWithFiller(r5, r6, r7);
825 __ add(r4, r4, Operand(kHeapObjectTag));
832 __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
835 __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
836 __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
838 __ add(r3, r3, Operand(r6));
839 __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
841 __ sub(r3, r3, Operand(r6), SetCC);
844 __ b(eq, &allocated);
845 __ Assert(pl, "Property allocation count failed.");
853 __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
854 __ AllocateInNewSpace(
867 __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
868 __ mov(r2, r5);
870 __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
872 __ mov(r0, Operand(r3, LSL, kSmiTagSize));
873 __ str(r0, MemOperand(r2, kPointerSize, PostIndex));
881 __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2)); // End of object.
885 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
887 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
888 __ cmp(r7, r8);
889 __ Assert(eq, "Undefined value not loaded.");
891 __ b(&entry);
892 __ bind(&loop);
893 __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
894 __ bind(&entry);
895 __ cmp(r2, r6);
896 __ b(lt, &loop);
904 __ add(r5, r5, Operand(kHeapObjectTag)); // Add the heap tag.
905 __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));
910 __ jmp(&allocated);
916 __ bind(&undo_allocation);
917 __ UndoAllocationInNewSpace(r4, r5);
922 __ bind(&rt_call);
923 __ push(r1); // argument for Runtime_NewObject
924 __ CallRuntime(Runtime::kNewObject, 1);
925 __ mov(r4, r0);
929 __ bind(&allocated);
930 __ push(r4);
931 __ push(r4);
938 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
939 __ ldr(r3, MemOperand(sp, 3 * kPointerSize));
942 __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
945 __ mov(r0, Operand(r3, LSR, kSmiTagSize));
957 __ b(&entry);
958 __ bind(&loop);
959 __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
960 __ push(ip);
961 __ bind(&entry);
962 __ sub(r3, r3, Operand(2), SetCC);
963 __ b(ge, &loop);
969 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
973 __ InvokeCode(code, expected, expected,
977 __ InvokeFunction(r1, actual, CALL_FUNCTION,
991 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1003 __ JumpIfSmi(r0, &use_receiver);
1007 __ CompareObjectType(r0, r3, r3, FIRST_SPEC_OBJECT_TYPE);
1008 __ b(ge, &exit);
1012 __ bind(&use_receiver);
1013 __ ldr(r0, MemOperand(sp));
1017 __ bind(&exit);
1022 __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
1027 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
1028 __ add(sp, sp, Operand(kPointerSize));
1029 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
1030 __ Jump(lr);
1060 __ mov(cp, Operand(0, RelocInfo::NONE));
1067 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1069 __ InitializeRootRegister();
1072 __ push(r1);
1073 __ push(r2);
1080 __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
1082 __ b(&entry);
1083 __ bind(&loop);
1084 __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex)); // read next parameter
1085 __ ldr(r0, MemOperand(r0)); // dereference handle
1086 __ push(r0); // push parameter
1087 __ bind(&entry);
1088 __ cmp(r4, r2);
1089 __ b(ne, &loop);
1093 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
1094 __ mov(r5, Operand(r4));
1095 __ mov(r6, Operand(r4));
1096 __ mov(r7, Operand(r4));
1098 __ mov(r9, Operand(r4));
1102 __ mov(r0, Operand(r3));
1105 __ CallStub(&stub);
1108 __ InvokeFunction(r1, actual, CALL_FUNCTION,
1115 __ Jump(lr);
1137 __ push(r1);
1139 __ push(r5);
1142 __ push(r1);
1143 __ CallRuntime(Runtime::kLazyCompile, 1);
1145 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
1148 __ pop(r5);
1150 __ pop(r1);
1156 __ Jump(r2);
1166 __ push(r1);
1168 __ push(r5);
1171 __ push(r1);
1172 __ CallRuntime(Runtime::kLazyRecompile, 1);
1174 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
1177 __ pop(r5);
1179 __ pop(r1);
1185 __ Jump(r2);
1194 __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
1195 __ push(r0);
1196 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
1200 __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
1201 __ SmiUntag(r6);
1204 __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
1205 __ b(ne, &with_tos_register);
1206 __ add(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1207 __ Ret();
1209 __ bind(&with_tos_register);
1210 __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
1211 __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
1212 __ b(ne, &unknown_state);
1213 __ add(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1214 __ Ret();
1216 __ bind(&unknown_state);
1217 __ stop("no cases left");
1236 __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
1239 __ CallRuntime(Runtime::kNotifyOSR, 0);
1241 __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
1242 __ Ret();
1249 __ Abort("Unreachable code: Cannot optimize without VFP3 support.");
1255 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1258 __ push(r0);
1259 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1265 __ cmp(r0, Operand(Smi::FromInt(-1)));
1266 __ b(ne, &skip);
1267 __ Ret();
1269 __ bind(&skip);
1271 __ SmiUntag(r0);
1272 __ push(r0);
1285 __ cmp(r0, Operand(0));
1286 __ b(ne, &done);
1287 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
1288 __ push(r2);
1289 __ add(r0, r0, Operand(1));
1290 __ bind(&done);
1297 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1298 __ JumpIfSmi(r1, &non_function);
1299 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1300 __ b(ne, &slow);
1306 __ mov(r4, Operand(0, RelocInfo::NONE)); // indicate regular JS_FUNCTION
1309 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1312 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1313 __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
1314 __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1316 __ b(ne, &shift_arguments);
1319 __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1320 __ b(ne, &shift_arguments);
1323 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
1324 __ ldr(r2, MemOperand(r2, -kPointerSize));
1328 __ JumpIfSmi(r2, &convert_to_object);
1330 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1331 __ cmp(r2, r3);
1332 __ b(eq, &use_global_receiver);
1333 __ LoadRoot(r3, Heap::kNullValueRootIndex);
1334 __ cmp(r2, r3);
1335 __ b(eq, &use_global_receiver);
1338 __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
1339 __ b(ge, &shift_arguments);
1341 __ bind(&convert_to_object);
1346 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); // Smi-tagged.
1347 __ push(r0);
1349 __ push(r2);
1350 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1351 __ mov(r2, r0);
1353 __ pop(r0);
1354 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
1360 __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
1361 __ mov(r4, Operand(0, RelocInfo::NONE));
1362 __
1366 __ bind(&use_global_receiver);
1369 __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
1370 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
1371 __ ldr(r2, FieldMemOperand(r2, kGlobalIndex));
1372 __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
1374 __ bind(&patch_receiver);
1375 __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
1376 __ str(r2, MemOperand(r3, -kPointerSize));
1378 __ jmp(&shift_arguments);
1382 __ bind(&slow);
1383 __ mov(r4, Operand(1, RelocInfo::NONE)); // indicate function proxy
1384 __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
1385 __ b(eq, &shift_arguments);
1386 __ bind(&non_function);
1387 __ mov(r4, Operand(2, RelocInfo::NONE)); // indicate non-function
1396 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
1397 __ str(r1, MemOperand(r2, -kPointerSize));
1405 __ bind(&shift_arguments);
1408 __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
1410 __ bind(&loop);
1411 __ ldr(ip, MemOperand(r2, -kPointerSize));
1412 __ str(ip, MemOperand(r2));
1413 __ sub(r2, r2, Operand(kPointerSize));
1414 __ cmp(r2, sp);
1415 __ b(ne, &loop);
1418 __ sub(r0, r0, Operand(1));
1419 __ pop();
1428 __ tst(r4, r4);
1429 __ b(eq, &function);
1431 __ mov(r2, Operand(0, RelocInfo::NONE));
1432 __ SetCallKind(r5, CALL_AS_METHOD);
1433 __ cmp(r4, Operand(1));
1434 __ b(ne, &non_proxy);
1436 __ push(r1); // re-add proxy object as additional argument
1437 __ add(r0, r0, Operand(1));
1438 __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
1439 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1442 __ bind(&non_proxy);
1443 __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
1444 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1446 __ bind(&function);
1454 __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1455 __ ldr(r2,
1457 __ mov(r2, Operand(r2, ASR, kSmiTagSize));
1458 __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
1459 __ SetCallKind(r5, CALL_AS_METHOD);
1460 __ cmp(r2, r0); // Check formal and actual parameter counts.
1461 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1466 __ InvokeCode(r3, expected, expected, JUMP_FUNCTION,
1481 __ ldr(r0, MemOperand(fp, kFunctionOffset)); // get the function
1482 __ push(r0);
1483 __ ldr(r0, MemOperand(fp, kArgsOffset)); // get the args array
1484 __ push(r0);
1485 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1491 __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
1494 __ sub(r2, sp, r2);
1496 __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1497 __ b(gt, &okay); // Signed comparison.
1500 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1501 __ push(r1);
1502 __ push(r0);
1503 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
1507 __ bind(&okay);
1508 __ push(r0); // limit
1509 __ mov(r1, Operand(0, RelocInfo::NONE)); // initial index
1510 __ push(r1);
1513 __ ldr(r0, MemOperand(fp, kRecvOffset));
1517 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1518 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1519 __ b(ne, &push_receiver);
1522 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
1524 __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
1529 __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
1530 __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1532 __ b(ne, &push_receiver);
1535 __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1536 __ b(ne, &push_receiver);
1539 __ JumpIfSmi(r0, &call_to_object);
1540 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1541 __ cmp(r0, r1);
1542 __ b(eq, &use_global_receiver);
1543 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
1544 __ cmp(r0, r1);
1545 __ b(eq, &use_global_receiver);
1550 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1551 __ b(ge, &push_receiver);
1555 __ bind(&call_to_object);
1556 __ push(r0);
1557 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1558 __ b(&push_receiver);
1561 __ bind(&use_global_receiver);
1564 __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
1565 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
1566 __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
1567 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
1571 __ bind(&push_receiver);
1572 __ push(r0);
1576 __ ldr(r0, MemOperand(fp, kIndexOffset));
1577 __ b(&entry);
1582 __ bind(&loop);
1583 __ ldr(r1, MemOperand(fp, kArgsOffset));
1584 __ push(r1);
1585 __ push(r0);
1588 __ CallRuntime(Runtime::kGetProperty, 2);
1589 __ push(r0);
1592 __ ldr(r0, MemOperand(fp, kIndexOffset));
1593 __ add(r0, r0, Operand(1 << kSmiTagSize));
1594 __ str(r0, MemOperand(fp, kIndexOffset));
1598 __ bind(&entry);
1599 __ ldr(r1, MemOperand(fp, kLimitOffset));
1600 __ cmp(r0, r1);
1601 __ b(ne, &loop);
1606 __ mov(r0, Operand(r0, ASR, kSmiTagSize));
1607 __ ldr(r1, MemOperand(fp, kFunctionOffset));
1608 __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
1609 __ b(ne, &call_proxy);
1610 __ InvokeFunction(r1, actual, CALL_FUNCTION,
1614 __ add(sp, sp, Operand(3 * kPointerSize));
1615 __ Jump(lr);
1618 __ bind(&call_proxy);
1619 __ push(r1); // add function proxy as last argument
1620 __ add(r0, r0, Operand(1));
1621 __ mov(r2, Operand(0, RelocInfo::NONE));
1622 __ SetCallKind(r5, CALL_AS_METHOD);
1623 __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
1624 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1629 __ add(sp, sp, Operand(3 * kPointerSize));
1630 __ Jump(lr);
1635 __ mov(r0, Operand(r0, LSL, kSmiTagSize));
1636 __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1637 __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
1638 __ add(fp, sp, Operand(3 * kPointerSize));
1648 __ ldr(r1, MemOperand(fp, -3 * kPointerSize));
1649 __ mov(sp, fp);
1650 __ ldm(ia_w, sp, fp.bit() | lr.bit());
1651 __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
1652 __ add(sp, sp, Operand(kPointerSize)); // adjust for receiver
1668 __ cmp(r0, r2);
1669 __ b(lt, &too_few);
1670 __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1671 __ b(eq, &dont_adapt_arguments);
1674 __ bind(&enough);
1682 __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1684 __ add(r0, r0, Operand(2 * kPointerSize));
1685 __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));
1694 __ bind(&copy);
1695 __ ldr(ip, MemOperand(r0, 0));
1696 __ push(ip);
1697 __ cmp(r0, r2); // Compare before moving to next argument.
1698 __ sub(r0, r0, Operand(kPointerSize));
1699 __ b(ne, &copy);
1701 __ b(&invoke);
1705 __ bind(&too_few);
1713 __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1721 __ bind(&copy);
1723 __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
1724 __ push(ip);
1725 __ cmp(r0, fp); // Compare before moving to next argument.
1726 __ sub(r0, r0, Operand(kPointerSize));
1727 __ b(ne, &copy);
1733 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1734 __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
1735 __ sub(r2, r2, Operand(4 * kPointerSize)); // Adjust for frame.
1738 __ bind(&fill);
1739 __ push(ip);
1740 __ cmp(sp, r2);
1741 __ b(ne, &fill);
1745 __ bind(&invoke);
1746 __ Call(r3);
1753 __ Jump(lr);
1759 __ bind(&dont_adapt_arguments);
1760 __ Jump(r3);
1764 #undef __