Home | History | Annotate | Download | only in arm64

Lines Matching full:scratch

56 void Arm64Assembler::GetCurrentThread(FrameOffset offset, ManagedRegister /* scratch */) {
155 Arm64ManagedRegister scratch = m_scratch.AsArm64();
156 CHECK(scratch.IsCoreRegister()) << scratch;
157 LoadImmediate(scratch.AsCoreRegister(), imm);
158 StoreWToOffset(kStoreWord, scratch.AsOverlappingCoreRegisterLow(), SP,
164 Arm64ManagedRegister scratch = m_scratch.AsArm64();
165 CHECK(scratch.IsCoreRegister()) << scratch;
166 LoadImmediate(scratch.AsCoreRegister(), imm);
167 StoreToOffset(scratch.AsCoreRegister(), ETR, offs.Int32Value());
173 Arm64ManagedRegister scratch = m_scratch.AsArm64();
174 CHECK(scratch.IsCoreRegister()) << scratch;
175 AddConstant(scratch.AsCoreRegister(), SP, fr_offs.Int32Value());
176 StoreToOffset(scratch.AsCoreRegister(), ETR, tr_offs.Int32Value());
189 Arm64ManagedRegister scratch = m_scratch.AsArm64();
191 LoadFromOffset(scratch.AsCoreRegister(), SP, in_off.Int32Value());
192 StoreToOffset(scratch.AsCoreRegister(), SP, dest_off.Int32Value() + 8);
351 Arm64ManagedRegister scratch = m_scratch.AsArm64();
352 CHECK(scratch.IsCoreRegister()) << scratch;
353 LoadFromOffset(scratch.AsCoreRegister(), ETR, tr_offs.Int32Value());
354 StoreToOffset(scratch.AsCoreRegister(), SP, fr_offs.Int32Value());
360 Arm64ManagedRegister scratch = m_scratch.AsArm64();
361 CHECK(scratch.IsCoreRegister()) << scratch;
362 LoadFromOffset(scratch.AsCoreRegister(), SP, fr_offs.Int32Value());
363 StoreToOffset(scratch.AsCoreRegister(), ETR, tr_offs.Int32Value());
368 Arm64ManagedRegister scratch = m_scratch.AsArm64();
369 CHECK(scratch.IsCoreRegister()) << scratch;
370 LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(),
372 StoreWToOffset(kStoreWord, scratch.AsOverlappingCoreRegisterLow(),
378 Arm64ManagedRegister scratch = m_scratch.AsArm64();
379 CHECK(scratch.IsCoreRegister()) << scratch;
382 LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(), SP, src.Int32Value());
383 StoreWToOffset(kStoreWord, scratch.AsOverlappingCoreRegisterLow(), SP, dest.Int32Value());
385 LoadFromOffset(scratch.AsCoreRegister(), SP, src.Int32Value());
386 StoreToOffset(scratch.AsCoreRegister(), SP, dest.Int32Value());
394 Arm64ManagedRegister scratch = m_scratch.AsArm64();
397 CHECK(scratch.IsCoreRegister() || scratch.IsWRegister()) << scratch;
400 LoadWFromOffset(kLoadWord, scratch.AsWRegister(), base.AsCoreRegister(),
402 StoreWToOffset(kStoreWord, scratch.AsWRegister(), SP, dest.Int32Value());
404 LoadFromOffset(scratch.AsCoreRegister(), base.AsCoreRegister(), src_offset.Int32Value());
405 StoreToOffset(scratch.AsCoreRegister(), SP, dest.Int32Value());
413 Arm64ManagedRegister scratch = m_scratch.AsArm64();
416 CHECK(scratch.IsCoreRegister() || scratch.IsWRegister()) << scratch;
419 LoadWFromOffset(kLoadWord, scratch.AsWRegister(), SP, src.Int32Value());
420 StoreWToOffset(kStoreWord, scratch.AsWRegister(), base.AsCoreRegister(),
423 LoadFromOffset(scratch.AsCoreRegister(), SP, src.Int32Value());
424 StoreToOffset(scratch.AsCoreRegister(), base.AsCoreRegister(), dest_offs.Int32Value());
438 Arm64ManagedRegister scratch = m_scratch.AsArm64();
443 CHECK(scratch.IsCoreRegister() || scratch.IsWRegister()) << scratch;
446 if (scratch.IsWRegister()) {
447 LoadWFromOffset(kLoadWord, scratch.AsWRegister(), src.AsCoreRegister(),
449 StoreWToOffset(kStoreWord, scratch.AsWRegister(), dest.AsCoreRegister(),
452 LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(), src.AsCoreRegister(),
454 StoreWToOffset(kStoreWord, scratch.AsOverlappingCoreRegisterLow(), dest.AsCoreRegister(),
458 LoadFromOffset(scratch.AsCoreRegister(), src.AsCoreRegister(), src_offset.Int32Value());
459 StoreToOffset(scratch.AsCoreRegister(), dest.AsCoreRegister(), dest_offset.Int32Value());
467 ManagedRegister /*scratch*/, size_t /*size*/) {
510 Arm64ManagedRegister scratch = m_scratch.AsArm64();
512 CHECK(scratch.IsCoreRegister()) << scratch;
513 LoadFromOffset(scratch.AsCoreRegister(), base.AsCoreRegister(), offs.Int32Value());
514 ___ Blr(reg_x(scratch.AsCoreRegister()));
519 Arm64ManagedRegister scratch = m_scratch.AsArm64();
521 CHECK(scratch.IsCoreRegister()) << scratch;
522 // Remove base and scratch from the temp list - higher level API uses IP1, IP0.
524 temps.Exclude(reg_x(base.AsCoreRegister()), reg_x(scratch.AsCoreRegister()));
525 ___ Ldr(reg_x(scratch.AsCoreRegister()), MEM_OP(reg_x(base.AsCoreRegister()), offs.Int32Value()));
526 ___ Br(reg_x(scratch.AsCoreRegister()));
530 Arm64ManagedRegister scratch = m_scratch.AsArm64();
531 CHECK(scratch.IsCoreRegister()) << scratch;
533 LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(), SP, base.Int32Value());
534 LoadFromOffset(scratch.AsCoreRegister(), scratch.AsCoreRegister(), offs.Int32Value());
535 ___ Blr(reg_x(scratch.AsCoreRegister()));
538 void Arm64Assembler::CallFromThread64(ThreadOffset<8> /*offset*/, ManagedRegister /*scratch*/) {
570 Arm64ManagedRegister scratch = m_scratch.AsArm64();
571 CHECK(scratch.IsCoreRegister()) << scratch;
573 LoadWFromOffset(kLoadWord, scratch.AsOverlappingCoreRegisterLow(), SP,
577 // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
578 ___ Cmp(reg_w(scratch.AsOverlappingCoreRegisterLow()), 0);
580 AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), NE);
582 AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), AL);
584 StoreToOffset(scratch.AsCoreRegister(), SP, out_off.Int32Value());
605 Arm64ManagedRegister scratch = m_scratch.AsArm64();
606 Arm64Exception *current_exception = new Arm64Exception(scratch, stack_adjust);
608 LoadFromOffset(scratch.AsCoreRegister(), ETR, Thread::ExceptionOffset<8>().Int32Value());
609 ___ Cbnz(reg_x(scratch.AsCoreRegister()), current_exception->Entry());