Lines Matching full:scratch
136 Register scratch = GetRegisterThatIsNotOneOf(input_reg, result_reg);
138 GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch);
140 GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch, scratch_low);
143 __ push(scratch);
154 scratch,
162 __ TestIfInt32(scratch, result_reg, r0);
176 __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask);
177 // Load scratch with exponent - 1. This is faster than loading
180 __ subi(scratch, scratch, Operand(HeapNumber::kExponentBias + 1));
185 __ cmpi(scratch, Operand(83));
191 // Scratch contains exponent - 1.
192 // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
193 __ subfic(scratch, scratch, Operand(51));
194 __ cmpi(scratch, Operand::Zero());
198 __ srw(scratch_low, scratch_low, scratch);
199 // Scratch contains: 52 - exponent.
201 // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
202 __ subfic(scratch, scratch, Operand(32));
208 __ slw(r0, result_reg, scratch);
218 // On entry, scratch contains: 52 - exponent.
219 __ neg(scratch, scratch);
220 __ slw(result_reg, scratch_low, scratch);
241 __ pop(scratch);
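
The exponent arithmetic spelled out in the comments above ("52 - exponent", then "32 - scratch = 32 - 52 + exponent = exponent - 20") is a bit-level double-to-int32 truncation done with shifts instead of an FPU conversion. A minimal portable C++ sketch of the same idea, assuming ordinary IEEE-754 doubles and ignoring the stub's register constraints (names are illustrative, not V8's):

#include <cstdint>
#include <cstring>

// Truncate a double to the low 32 bits of its integer part using only the
// raw sign/exponent/mantissa fields, mirroring the shift-based slow path.
int32_t TruncateDoubleToInt32(double input) {
  uint64_t bits;
  std::memcpy(&bits, &input, sizeof(bits));

  int32_t exponent = static_cast<int32_t>((bits >> 52) & 0x7FF) - 1023;
  if (exponent < 0) return 0;    // |input| < 1 (or a denormal): result is 0.
  if (exponent >= 84) return 0;  // Low 32 integer bits are all zero (NaN/Inf too).

  // 53-bit significand with the implicit leading 1 restored.
  uint64_t significand = (bits & 0x000FFFFFFFFFFFFFULL) | (1ULL << 52);
  uint32_t result = (exponent <= 52)
                        ? static_cast<uint32_t>(significand >> (52 - exponent))
                        : static_cast<uint32_t>(significand << (exponent - 52));
  // Apply the sign modulo 2^32, which is all the int32 result needs.
  if (bits >> 63) result = 0u - result;
  return static_cast<int32_t>(result);
}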
557 Register scratch,
565 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,
745 const Register scratch = r4;
748 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
783 const Register scratch = r11;
797 __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
798 __ LoadP(scratch, FieldMemOperand(base, JSObject::kMapOffset));
799 __ cmp(scratch, heapnumbermap);
806 __ ConvertIntToDouble(scratch, double_base);
809 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
810 __ LoadP(scratch, FieldMemOperand(exponent, JSObject::kMapOffset));
811 __ cmp(scratch, heapnumbermap);
818 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
826 __ TryDoubleToInt32Exact(scratch, double_exponent, scratch2,
837 __ LoadDoubleLiteral(double_scratch, 0.5, scratch);
843 __ LoadDoubleLiteral(double_scratch, -V8_INFINITY, scratch);
856 __ LoadDoubleLiteral(double_scratch, -0.5, scratch);
862 __ LoadDoubleLiteral(double_scratch, -V8_INFINITY, scratch);
871 __ LoadDoubleLiteral(double_result, 1.0, scratch);
881 __ PrepareCallCFunction(0, 2, scratch);
895 // Get two copies of exponent in the registers scratch and exponent.
897 __ mr(scratch, exponent);
899 // Exponent has previously been stored into scratch as untagged integer.
900 __ mr(exponent, scratch);
907 __ cmpi(scratch, Operand::Zero());
909 __ neg(scratch2, scratch);
910 __ isel(lt, scratch, scratch2, scratch);
914 __ neg(scratch, scratch);
920 __ andi(scratch2, scratch, Operand(1));
924 __ ShiftRightArithImm(scratch, scratch, 1, SetRC);
954 __ AllocateHeapNumber(heapnumber, scratch, scratch2, heapnumbermap,
959 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
966 __ PrepareCallCFunction(0, 2, scratch);
976 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
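
The integer-exponent hits above (copy the exponent into scratch, take its absolute value via neg/isel, test the low bit with andi, arithmetic-shift right) are a square-and-multiply loop. A rough C++ equivalent, leaving out the cases the stub hands off to the runtime pow() (names are illustrative, not V8's):

#include <cmath>

// Square-and-multiply over |exponent|; invert at the end for a negative
// exponent. The real stub bails out to libm pow() when the result
// over- or underflows, which this sketch does not reproduce.
double PowIntExponent(double base, int exponent) {
  long long n = exponent;
  if (n < 0) n = -n;                // work on |exponent|
  double result = 1.0;
  double running = base;
  while (n != 0) {
    if (n & 1) result *= running;   // low bit set: multiply into result
    running *= running;             // square for the next bit
    n >>= 1;                        // ShiftRightArithImm in the stub
  }
  return exponent < 0 ? 1.0 / result : result;
}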
1383 Register const scratch = r8;
1408 __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
1416 __ CompareObjectType(function, function_map, scratch, JS_FUNCTION_TYPE);
1420 __ lbz(scratch, FieldMemOperand(function_map, Map::kBitFieldOffset));
1421 __ TestBit(scratch, Map::kHasNonInstancePrototype, r0);
1434 __ CompareObjectType(function_prototype, scratch, scratch, MAP_TYPE);
1450 Register const null = scratch;
1481 __ LoadSmiLiteral(scratch, Smi::FromInt(0));
1482 __ StoreRoot(scratch, Heap::kInstanceofCacheFunctionRootIndex);
1514 Register scratch = r8;
1516 DCHECK(!scratch.is(receiver) && !scratch.is(index));
1517 DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
1523 StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
1808 // r8 = temporary scratch (a.o., for address calculation)
1809 // r10 = temporary scratch (a.o., for address calculation)
1833 // r11 = scratch
2902 Register scratch,
2925 __ lbz(scratch, MemOperand(src));
2927 __ stb(scratch, MemOperand(dest));
4015 Register scratch = r9;
4046 __ ShiftLeftImm(scratch, index, Operand(1));
4047 __ add(index, index, scratch); // index *= 3.
4049 __ ShiftLeftImm(scratch, index, Operand(kPointerSizeLog2));
4050 __ add(index, dictionary, scratch);
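
The hits at 4046-4050 turn a probe index into a byte offset: the index is first tripled ("index *= 3" in the comment above) because each dictionary entry spans three pointer-sized slots, then scaled by kPointerSizeLog2 and added to the dictionary base. A hypothetical stand-alone version of that address arithmetic:

#include <cstdint>

// Illustrative only: byte offset of a probed entry when each entry occupies
// three pointer-sized slots, matching the shift-and-add sequence above.
uintptr_t EntryByteOffset(uintptr_t index, unsigned kPointerSizeLog2) {
  index += index << 1;               // index *= 3 (ShiftLeftImm by 1 + add)
  return index << kPointerSizeLog2;  // scale slots to bytes
}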
4231 regs_.scratch1(), // Scratch.
4236 regs_.scratch1(), // Scratch.
4247 regs_.scratch1(), // Scratch.
4248 regs_.object(), // Scratch.
4249 regs_.address(), // Scratch.
4389 Register scratch, Label* compare_map,
4394 Register cached_map = scratch;
5183 Register scratch = r9;
5227 __ RecordWriteField(cell, PropertyCell::kValueOffset, r6, scratch,
5289 __ LoadP(scratch, FieldMemOperand(value, HeapObject::kMapOffset));
5290 __ cmp(cell_value_map, scratch);
5330 Register scratch = r6;
5332 __ mov(scratch, Operand(ExternalReference::is_profiling_address(isolate)));
5333 __ lbz(scratch, MemOperand(scratch, 0));
5334 __ cmpi(scratch, Operand::Zero());
5337 __ mov(scratch, Operand(thunk_ref));
5338 __ isel(eq, scratch, function_address, scratch);
5343 __ mov(scratch, Operand(thunk_ref));
5346 __ mr(scratch, function_address);
5376 stub.GenerateCall(masm, scratch);
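
The hits at 5330-5346 choose the call target from a profiling flag (the isel form when conditional select is available, the mov/mr form otherwise). A C-level paraphrase of the value scratch ends up holding, not V8 code:

#include <cstdint>

// Route the call through the profiling thunk only when the profiler flag
// byte is non-zero; otherwise call the API function directly.
using Address = const void*;
Address SelectCallTarget(const uint8_t* is_profiling_flag,
                         Address thunk_ref, Address function_address) {
  return (*is_profiling_flag != 0) ? thunk_ref : function_address;
}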
5498 Register scratch = call_data;
5500 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5503 __ push(scratch);
5505 __ push(scratch);
5507 __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate())));
5508 __ push(scratch);
5513 __ mr(scratch, sp);
5529 DCHECK(!api_function_address.is(r3) && !scratch.is(r3));
5534 __ StoreP(scratch, MemOperand(r3, 0 * kPointerSize));
5537 __ addi(ip, scratch,
5550 __ add(r0, scratch, ip);