
Lines Matching defs:__

38 #define __ ACCESS_MASM(masm)
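// Presumably ElementsTransitionGenerator::GenerateMapChangeElementsTransition:
// lines 86-87 install the transitioned map in the receiver and record the
// write so the write barrier sees the new map pointer.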
86 __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
87 __ RecordWriteField(a2,
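// Presumably ElementsTransitionGenerator::GenerateSmiToDouble (lines 115-237):
// transition a smi-only elements store to a FixedDoubleArray. Skip to
// &only_change_map when the elements are the empty fixed array; otherwise
// allocate the double array, install the new map and elements pointer, and
// convert each element in the loop further down.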
115 __ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset));
116 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
117 __ Branch(&only_change_map, eq, at, Operand(t0));
119 __ push(ra);
120 __ lw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset));
125 __ sll(scratch, t1, 2);
126 __ Addu(scratch, scratch, FixedDoubleArray::kHeaderSize);
127 __ AllocateInNewSpace(scratch, t2, t3, t5, &gc_required, NO_ALLOCATION_FLAGS);
130 __ LoadRoot(t5, Heap::kFixedDoubleArrayMapRootIndex);
131 __ sw(t1, MemOperand(t2, FixedDoubleArray::kLengthOffset));
132 __ sw(t5, MemOperand(t2, HeapObject::kMapOffset));
135 __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
136 __ RecordWriteField(a2,
145 __ Addu(a3, t2, Operand(kHeapObjectTag));
146 __ sw(a3, FieldMemOperand(a2, JSObject::kElementsOffset));
147 __ RecordWriteField(a2,
158 __ Addu(a3, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
159 __ Addu(t3, t2, Operand(FixedDoubleArray::kHeaderSize));
160 __ sll(t2, t1, 2);
161 __ Addu(t2, t2, t3);
162 __ li(t0, Operand(kHoleNanLower32));
163 __ li(t1, Operand(kHoleNanUpper32));
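// t0/t1 now hold the lower/upper 32-bit halves of the hole NaN; the
// &convert_hole path below stores them as the mantissa/exponent of a hole entry.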
169 if (!fpu_supported) __ Push(a1, a0);
171 __ Branch(&entry);
173 __ bind(&only_change_map);
174 __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
175 __ RecordWriteField(a2,
183 __ Branch(&done);
186 __ bind(&gc_required);
187 __ pop(ra);
188 __ Branch(fail);
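// Conversion loop: load each element, untag it, convert it to a double, and
// store it into the new FixedDoubleArray. The FPU path converts in f0; the
// non-FPU path stores the two 32-bit halves from core registers a0/a1.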
191 __ bind(&loop);
192 __ lw(t5, MemOperand(a3));
193 __ Addu(a3, a3, kIntSize);
195 __ UntagAndJumpIfNotSmi(t5, t5, &convert_hole);
200 __ mtc1(t5, f0);
201 __ cvt_d_w(f0, f0);
202 __ sdc1(f0, MemOperand(t3));
203 __ Addu(t3, t3, kDoubleSize);
213 __ sw(a0, MemOperand(t3)); // mantissa
214 __ sw(a1, MemOperand(t3, kIntSize)); // exponent
215 __ Addu(t3, t3, kDoubleSize);
217 __ Branch(&entry);
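// Hole path: re-tag the scratch value, assert it really is the hole, then
// store the hole NaN halves (t0/t1) as mantissa/exponent in the double array.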
220 __ bind(&convert_hole);
223 __ SmiTag(t5);
224 __ Or(t5, t5, Operand(1));
225 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
226 __ Assert(eq, "object found in smi-only array", at, Operand(t5));
228 __ sw(t0, MemOperand(t3)); // mantissa
229 __ sw(t1, MemOperand(t3, kIntSize)); // exponent
230 __ Addu(t3, t3, kDoubleSize);
232 __ bind(&entry);
233 __ Branch(&loop, lt, t3, Operand(t2));
235 if (!fpu_supported) __ Pop(a1, a0);
236 __ pop(ra);
237 __ bind(&done);
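// Presumably ElementsTransitionGenerator::GenerateDoubleToObject (lines
// 255-347): transition a FixedDoubleArray back to a FixedArray of objects.
// Each non-hole double is boxed into a freshly allocated HeapNumber; hole
// entries become the-hole value.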
255 __ lw(t0, FieldMemOperand(a2, JSObject::kElementsOffset));
256 __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
257 __ Branch(&only_change_map, eq, at, Operand(t0));
259 __ MultiPush(a0.bit() | a1.bit() | a2.bit() | a3.bit() | ra.bit());
261 __ lw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset));
266 __ sll(a0, t1, 1);
267 __ Addu(a0, a0, FixedDoubleArray::kHeaderSize);
268 __ AllocateInNewSpace(a0, t2, t3, t5, &gc_required, NO_ALLOCATION_FLAGS);
271 __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex);
272 __ sw(t1, MemOperand(t2, FixedDoubleArray::kLengthOffset));
273 __ sw(t5, MemOperand(t2, HeapObject::kMapOffset));
276 __ Addu(t0, t0, Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag + 4));
277 __ Addu(a3, t2, Operand(FixedArray::kHeaderSize));
278 __ Addu(t2, t2, Operand(kHeapObjectTag));
279 __ sll(t1, t1, 1);
280 __ Addu(t1, a3, t1);
281 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex);
282 __ LoadRoot(t5, Heap::kHeapNumberMapRootIndex);
290 __ Branch(&entry);
293 __ bind(&gc_required);
294 __ MultiPop(a0.bit() | a1.bit() | a2.bit() | a3.bit() | ra.bit());
296 __ Branch(fail);
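// Conversion loop: read the exponent word of each double; an upper word equal
// to kHoleNanUpper32 marks a hole, anything else is boxed into a new
// HeapNumber and written to the FixedArray with a write barrier.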
298 __ bind(&loop);
299 __ lw(a1, MemOperand(t0));
300 __ Addu(t0, t0, kDoubleSize);
303 __ Branch(&convert_hole, eq, a1, Operand(kHoleNanUpper32));
306 __ AllocateHeapNumber(a2, a0, t6, t5, &gc_required);
308 __ lw(a0, MemOperand(t0, -12));
309 __ sw(a0, FieldMemOperand(a2, HeapNumber::kMantissaOffset));
310 __ sw(a1, FieldMemOperand(a2, HeapNumber::kExponentOffset));
311 __ mov(a0, a3);
312 __ sw(a2, MemOperand(a3));
313 __ Addu(a3, a3, kIntSize);
314 __ RecordWrite(t2,
321 __ Branch(&entry);
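// Hole path: store the-hole value (preloaded in t3) into the FixedArray slot.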
324 __ bind(&convert_hole);
325 __ sw(t3, MemOperand(a3));
326 __ Addu(a3, a3, kIntSize);
328 __ bind(&entry);
329 __ Branch(&loop, lt, a3, Operand(t1));
331 __ MultiPop(a2.bit() | a3.bit() | a0.bit() | a1.bit());
333 __ sw(t2, FieldMemOperand(a2, JSObject::kElementsOffset));
334 __ RecordWriteField(a2,
342 __ pop(ra);
344 __ bind(&only_change_map);
346 __ sw(a3, FieldMemOperand(a2, HeapObject::kMapOffset));
347 __ RecordWriteField(a2,
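// Presumably StringCharLoadGenerator::Generate (lines 364-446): load the
// character at `index` from `string` into `result`, unwrapping sliced and
// cons strings, dereferencing external strings, and finally doing a two-byte
// (lhu) or ASCII (lbu) load depending on the encoding.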
364 __ lw(result, FieldMemOperand(string, HeapObject::kMapOffset));
365 __ lbu(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
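// result now holds the instance type; indirect (sliced or cons) strings are
// unwrapped first, direct strings branch to &check_sequential.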
369 __ And(at, result, Operand(kIsIndirectStringMask));
370 __ Branch(&check_sequential, eq, at, Operand(zero_reg));
374 __ And(at, result, Operand(kSlicedNotConsMask));
375 __ Branch(&cons_string, eq, at, Operand(zero_reg));
379 __ lw(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
380 __ lw(string, FieldMemOperand(string, SlicedString::kParentOffset));
381 __ sra(at, result, kSmiTagSize);
382 __ Addu(index, index, at);
383 __ jmp(&indirect_string_loaded);
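// Cons strings are handled only when the second part is the empty string;
// otherwise bail out to the runtime.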
390 __ bind(&cons_string);
391 __ lw(result, FieldMemOperand(string, ConsString::kSecondOffset));
392 __ LoadRoot(at, Heap::kEmptyStringRootIndex);
393 __ Branch(call_runtime, ne, result, Operand(at));
395 __ lw(string, FieldMemOperand(string, ConsString::kFirstOffset));
397 __ bind(&indirect_string_loaded);
398 __ lw(result, FieldMemOperand(string, HeapObject::kMapOffset));
399 __ lbu(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
405 __ bind(&check_sequential);
407 __ And(at, result, Operand(kStringRepresentationMask));
408 __ Branch(&external_string, ne, at, Operand(zero_reg));
412 __ Addu(string,
415 __ jmp(&check_encoding);
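// External strings: assert the string is not also indirect, bail out to the
// runtime for short external strings, then load the resource data pointer
// before the shared encoding check.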
418 __ bind(&external_string);
422 __ And(at, result, Operand(kIsIndirectStringMask));
423 __ Assert(eq, "external string expected, but not found",
428 __ And(at, result, Operand(kShortExternalStringMask));
429 __ Branch(call_runtime, ne, at, Operand(zero_reg));
430 __ lw(string, FieldMemOperand(string, ExternalString::kResourceDataOffset));
433 __ bind(&check_encoding);
435 __ And(at, result, Operand(kStringEncodingMask));
436 __ Branch(&ascii, ne, at, Operand(zero_reg));
438 __ sll(at, index, 1);
439 __ Addu(at, string, at);
440 __ lhu(result, MemOperand(at));
441 __ jmp(&done);
442 __ bind(&ascii);
444 __ Addu(at, string, index);
445 __ lbu(result, MemOperand(at));
446 __ bind(&done);
449 #undef __