    Searched refs:lir (Results 1 - 25 of 28)


  /dalvik/vm/compiler/codegen/mips/
ArchUtility.cpp 34 static void buildInsnString(const char *fmt, MipsLIR *lir, char* buf,
53 operand = lir->operands[nc-'0'];
118 (int) baseAddr + lir->generic.offset + 4 +
120 lir->generic.target);
127 int offset_1 = lir->operands[0];
128 int offset_2 = NEXT_LIR(lir)->operands[0];
130 ((((intptr_t) baseAddr + lir->generic.offset + 4) &
165 void dvmDumpResourceMask(LIR *lir, u8 mask, const char *prefix)
169 MipsLIR *mipsLIR = (MipsLIR *) lir;
    [all...]
CodegenCommon.cpp 35 static void setMemRefType(MipsLIR *lir, bool isLoad, int memType)
40 assert(EncodingMap[lir->opcode].flags & (IS_LOAD | IS_STORE));
43 maskPtr = &lir->useMask;
45 maskPtr = &lir->defMask;
63 assert(!(EncodingMap[lir->opcode].flags & IS_STORE));
77 static void annotateDalvikRegAccess(MipsLIR *lir, int regId, bool isLoad)
80 setMemRefType(lir, isLoad, kDalvikReg);
86 lir->aliasInfo = regId;
87 if (DOUBLEREG(lir->operands[0])) {
88 lir->aliasInfo |= 0x80000000
    [all...]
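
The setMemRefType()/annotateDalvikRegAccess() pair above routes a memory-reference kind into either the use mask (loads) or the def mask (stores) and records which Dalvik register is touched. A minimal sketch of that pattern, using simplified stand-in types and an assumed bit layout rather than the real MipsLIR/ArmLIR headers:

    // Simplified stand-ins for the MipsLIR/ArmLIR types; the low-bit layout
    // of the mask is an assumption for illustration only.
    #include <cstdint>
    #include <cassert>

    typedef uint64_t ResourceMask;

    enum MemRefKind { kLiteral, kDalvikReg, kHeapRef };  // hypothetical subset

    struct FakeLIR {
        ResourceMask useMask;   // resources read by this instruction
        ResourceMask defMask;   // resources written by this instruction
        int aliasInfo;          // extra alias data (e.g. Dalvik vreg number)
    };

    // Assume the low bits of a mask encode the memory-reference kind.
    static const ResourceMask kMemKindBits = 0x7;

    static void setMemRefType(FakeLIR* lir, bool isLoad, MemRefKind memType) {
        // Loads touch the use mask, stores touch the def mask.
        ResourceMask* maskPtr = isLoad ? &lir->useMask : &lir->defMask;
        *maskPtr &= ~kMemKindBits;          // clear any previous kind
        *maskPtr |= (ResourceMask)memType;  // record the new kind
    }

    static void annotateDalvikRegAccess(FakeLIR* lir, int regId, bool isLoad) {
        setMemRefType(lir, isLoad, kDalvikReg);
        lir->aliasInfo = regId;             // remember which vreg is touched
    }

    int main() {
        FakeLIR lir = {};
        annotateDalvikRegAccess(&lir, /*regId=*/5, /*isLoad=*/true);
        assert((lir.useMask & kMemKindBits) == kDalvikReg);
        return 0;
    }
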
Assemble.cpp 431 * Assemble the LIR into binary instruction format. Note that we may
440 MipsLIR *lir; local
442 for (lir = (MipsLIR *) cUnit->firstLIRInsn; lir; lir = NEXT_LIR(lir)) {
443 if (lir->opcode < 0) {
448 if (lir->flags.isNop) {
452 if (lir->opcode == kMipsB || lir->opcode == kMipsBal)
    [all...]
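
The Assemble.cpp loop above walks the whole LIR list and emits machine code only for real instructions, skipping pseudo opcodes (opcode < 0) and instructions nop'd out by optimization. A minimal, self-contained sketch of that walk, with simplified stand-in types and a print statement in place of actual encoding:

    #include <cstdio>

    struct MipsLIR {
        MipsLIR* next;
        int opcode;        // < 0 means a pseudo op (label, barrier, ...)
        bool isNop;        // true if optimization removed the instruction
    };

    static void assembleAll(MipsLIR* firstLIRInsn) {
        for (MipsLIR* lir = firstLIRInsn; lir; lir = lir->next) {
            if (lir->opcode < 0) {
                continue;            // pseudo ops produce no machine code
            }
            if (lir->isNop) {
                continue;            // optimized away, skip encoding
            }
            printf("encode opcode %d\n", lir->opcode);  // stand-in for encoding
        }
    }

    int main() {
        MipsLIR c = {nullptr, 7, false};
        MipsLIR b = {&c, 3, true};      // nop'd out
        MipsLIR a = {&b, -1, false};    // pseudo op
        assembleAll(&a);                // prints only opcode 7
        return 0;
    }
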
Codegen.h 78 extern void dvmCompilerSetupResourceMasks(MipsLIR *lir);
MipsLIR.h 139 struct LIR *defStart; // Starting inst in last def sequence
140 struct LIR *defEnd; // Ending inst in last def sequence
590 * Each instance of this struct holds a pseudo or real LIR instruction:
604 LIR generic;
608 bool isNop:1; // LIR is optimized away
627 /* Utility macros to traverse the LIR/MipsLIR list */
628 #define NEXT_LIR(lir) ((MipsLIR *) lir->generic.next)
629 #define PREV_LIR(lir) ((MipsLIR *) lir->generic.prev
    [all...]
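
The NEXT_LIR/PREV_LIR macros above work because the target-specific struct embeds a generic LIR as its first member (generic), so the generic next/prev links can be cast back to the wider type. A minimal sketch; fields beyond those shown in the listing are illustrative:

    struct LIR {
        LIR* next;
        LIR* prev;
        int offset;
    };

    struct MipsLIR {
        LIR generic;        // must be first so LIR* and MipsLIR* alias cleanly
        int operands[4];
        bool isNop;
    };

    #define NEXT_LIR(lir) ((MipsLIR *) (lir)->generic.next)
    #define PREV_LIR(lir) ((MipsLIR *) (lir)->generic.prev)

    int main() {
        MipsLIR a = {}, b = {};
        a.generic.next = &b.generic;
        b.generic.prev = &a.generic;
        MipsLIR* n = NEXT_LIR(&a);   // &b
        MipsLIR* p = PREV_LIR(&b);   // &a
        return (n == &b && p == &a) ? 0 : 1;
    }
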
  /dalvik/vm/compiler/codegen/arm/
CodegenCommon.cpp 35 static void setMemRefType(ArmLIR *lir, bool isLoad, int memType)
39 assert(EncodingMap[lir->opcode].flags & (IS_LOAD | IS_STORE));
41 maskPtr = &lir->useMask;
43 maskPtr = &lir->defMask;
61 assert(!(EncodingMap[lir->opcode].flags & IS_STORE));
75 static void annotateDalvikRegAccess(ArmLIR *lir, int regId, bool isLoad)
77 setMemRefType(lir, isLoad, kDalvikReg);
83 lir->aliasInfo = regId;
84 if (DOUBLEREG(lir->operands[0])) {
85 lir->aliasInfo |= 0x80000000
    [all...]
ArchUtility.cpp 76 static void buildInsnString(const char *fmt, ArmLIR *lir, char* buf,
96 operand = lir->operands[nc-'0'];
199 (int) baseAddr + lir->generic.offset + 4 +
201 lir->generic.target);
204 int offset_1 = lir->operands[0];
205 int offset_2 = NEXT_LIR(lir)->operands[0];
207 ((((intptr_t) baseAddr + lir->generic.offset + 4) &
219 decodeRegList(lir->opcode, operand, tbuf);
241 void dvmDumpResourceMask(LIR *lir, u8 mask, const char *prefix
    [all...]
Assemble.cpp 941 ArmLIR *lir; local
    [all...]
Codegen.h 62 extern void dvmCompilerSetupResourceMasks(ArmLIR *lir);
ArmLIR.h 110 struct LIR *defStart; // Starting inst in last def sequence
111 struct LIR *defEnd; // Ending inst in last def sequence
    [all...]
  /art/compiler/dex/quick/mips/
assemble_mips.cc 77 /* NOTE: must be kept in sync with enum MipsOpcode from LIR.h */
460 void MipsMir2Lir::ConvertShortToLongBranch(LIR* lir) {
463 int opcode = lir->opcode;
464 int dalvik_offset = lir->dalvik_offset;
482 LIR* hop_target = NULL;
485 LIR* hop_branch = RawLIR(dalvik_offset, opcode, lir->operands[0],
486 lir->operands[1], 0, 0, 0, hop_target);
487 InsertLIRBefore(lir, hop_branch)
516 LIR *lir; local
    [all...]
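
ConvertShortToLongBranch() above splices a short "hop" branch in front of a branch whose target turned out to be out of range, using InsertLIRBefore(). A minimal sketch of just the insert-before splice on the doubly linked LIR list; the struct is a simplified stand-in, not ART's real definition:

    #include <cassert>

    struct LIR {
        LIR* next = nullptr;
        LIR* prev = nullptr;
        int opcode = 0;
    };

    static void InsertLIRBefore(LIR* current, LIR* new_lir) {
        // Splice new_lir between current->prev and current.
        new_lir->prev = current->prev;
        new_lir->next = current;
        if (current->prev != nullptr) {
            current->prev->next = new_lir;
        }
        current->prev = new_lir;
    }

    int main() {
        LIR branch;           // the branch that turned out to be out of range
        LIR hop;              // the short "hop" branch inserted in front of it
        InsertLIRBefore(&branch, &hop);
        assert(branch.prev == &hop && hop.next == &branch);
        return 0;
    }
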
target_mips.cc 123 void MipsMir2Lir::SetupTargetResourceMasks(LIR* lir) {
127 uint64_t flags = MipsMir2Lir::EncodingMap[lir->opcode].flags;
130 lir->def_mask |= ENCODE_MIPS_REG_SP;
134 lir->use_mask |= ENCODE_MIPS_REG_SP;
138 lir->def_mask |= ENCODE_MIPS_REG_LR;
155 std::string MipsMir2Lir::BuildInsnString(const char *fmt, LIR *lir, unsigned char* base_addr) {
172 operand = lir->operands[nc-'0'];
205 sprintf(tbuf, "0x%08x (L%p)", reinterpret_cast<uintptr_t>(base_addr) + lir->offset + 4
    [all...]
codegen_mips.h 33 LIR* LoadBaseDisp(int rBase, int displacement, int r_dest, OpSize size, int s_reg);
34 LIR* LoadBaseDispWide(int rBase, int displacement, int r_dest_lo, int r_dest_hi,
36 LIR* LoadBaseIndexed(int rBase, int r_index, int r_dest, int scale, OpSize size);
37 LIR* LoadBaseIndexedDisp(int rBase, int r_index, int scale, int displacement,
39 LIR* LoadConstantNoClobber(int r_dest, int value);
40 LIR* LoadConstantWide(int r_dest_lo, int r_dest_hi, int64_t value);
41 LIR* StoreBaseDisp(int rBase, int displacement, int r_src, OpSize size);
42 LIR* StoreBaseDispWide(int rBase, int displacement, int r_src_lo, int r_src_hi);
43 LIR* StoreBaseIndexed(int rBase, int r_index, int r_src, int scale, OpSize size);
44 LIR* StoreBaseIndexedDisp(int rBase, int r_index, int scale, int displacement
    [all...]
  /art/compiler/dex/quick/arm/
assemble_arm.cc 78 /* NOTE: must be kept in sync with enum ArmOpcode from LIR.h */
1006 LIR* lir; local
    [all...]
target_arm.cc 121 void ArmMir2Lir::SetupTargetResourceMasks(LIR* lir) {
125 uint64_t flags = ArmMir2Lir::EncodingMap[lir->opcode].flags;
126 int opcode = lir->opcode;
129 lir->def_mask |= ENCODE_ARM_REG_SP;
133 lir->use_mask |= ENCODE_ARM_REG_SP;
137 lir->def_mask |= ENCODE_ARM_REG_LIST(lir->operands[0]);
141 lir->def_mask |= ENCODE_ARM_REG_LIST(lir->operands[1])
    [all...]
codegen_arm.h 32 LIR* LoadBaseDisp(int rBase, int displacement, int r_dest, OpSize size, int s_reg);
33 LIR* LoadBaseDispWide(int rBase, int displacement, int r_dest_lo, int r_dest_hi,
35 LIR* LoadBaseIndexed(int rBase, int r_index, int r_dest, int scale, OpSize size);
36 LIR* LoadBaseIndexedDisp(int rBase, int r_index, int scale, int displacement,
38 LIR* LoadConstantNoClobber(int r_dest, int value);
39 LIR* LoadConstantWide(int r_dest_lo, int r_dest_hi, int64_t value);
40 LIR* StoreBaseDisp(int rBase, int displacement, int r_src, OpSize size);
41 LIR* StoreBaseDispWide(int rBase, int displacement, int r_src_lo, int r_src_hi);
42 LIR* StoreBaseIndexed(int rBase, int r_index, int r_src, int scale, OpSize size);
43 LIR* StoreBaseIndexedDisp(int rBase, int r_index, int scale, int displacement
    [all...]
  /art/compiler/dex/quick/
mir_to_lir-inl.h 41 inline LIR* Mir2Lir::RawLIR(int dalvik_offset, int opcode, int op0,
42 int op1, int op2, int op3, int op4, LIR* target) {
43 LIR* insn = static_cast<LIR*>(arena_->Alloc(sizeof(LIR), ArenaAllocator::kAllocLIR));
65 inline LIR* Mir2Lir::NewLIR0(int opcode) {
70 LIR* insn = RawLIR(current_dalvik_offset_, opcode);
75 inline LIR* Mir2Lir::NewLIR1(int opcode, int dest) {
80 LIR* insn = RawLIR(current_dalvik_offset_, opcode, dest);
85 inline LIR* Mir2Lir::NewLIR2(int opcode, int dest, int src1)
    [all...]
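
mir_to_lir-inl.h above shows the RawLIR/NewLIRn pattern: one low-level constructor fills every operand slot, and thin wrappers supply the unused operands. A minimal sketch with plain new in place of ART's arena allocator and a simplified LIR struct (the real NewLIRn are Mir2Lir members that read current_dalvik_offset_):

    struct LIR {
        int dalvik_offset;
        int opcode;
        int operands[5];
        LIR* target;
        LIR* next;
        LIR* prev;
    };

    static LIR* RawLIR(int dalvik_offset, int opcode, int op0 = 0, int op1 = 0,
                       int op2 = 0, int op3 = 0, int op4 = 0, LIR* target = nullptr) {
        LIR* insn = new LIR();                 // ART uses an arena allocator here
        insn->dalvik_offset = dalvik_offset;
        insn->opcode = opcode;
        insn->operands[0] = op0;
        insn->operands[1] = op1;
        insn->operands[2] = op2;
        insn->operands[3] = op3;
        insn->operands[4] = op4;
        insn->target = target;
        insn->next = insn->prev = nullptr;
        return insn;
    }

    static LIR* NewLIR0(int current_dalvik_offset, int opcode) {
        return RawLIR(current_dalvik_offset, opcode);
    }

    static LIR* NewLIR1(int current_dalvik_offset, int opcode, int dest) {
        return RawLIR(current_dalvik_offset, opcode, dest);
    }

    static LIR* NewLIR2(int current_dalvik_offset, int opcode, int dest, int src1) {
        return RawLIR(current_dalvik_offset, opcode, dest, src1);
    }

    int main() {
        LIR* insn = NewLIR2(/*dalvik_offset=*/0, /*opcode=*/1, /*dest=*/2, /*src1=*/3);
        delete insn;
        return 0;
    }
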
codegen_util.cc 47 void Mir2Lir::MarkSafepointPC(LIR* inst) {
49 LIR* safepoint_pc = NewLIR0(kPseudoSafepointPC);
59 void Mir2Lir::NopLIR(LIR* lir) {
60 lir->flags.is_nop = true;
63 void Mir2Lir::SetMemRefType(LIR* lir, bool is_load, int mem_type) {
66 DCHECK(GetTargetInstFlags(lir->opcode) & (IS_LOAD | IS_STORE));
68 mask_ptr = &lir->use_mask;
70 mask_ptr = &lir->def_mask
696 LIR* lir; local
    [all...]
mir_to_lir.h 111 struct LIR {
114 LIR* next;
115 LIR* prev;
116 LIR* target;
120 bool is_nop:1; // LIR is optimized away.
138 // Utility macros to traverse the LIR list.
139 #define NEXT_LIR(lir) (lir->next)
140 #define PREV_LIR(lir) (lir->prev
    [all...]
  /dalvik/vm/compiler/
IntermediateRep.cpp 78 * Append an LIR instruction to the LIR list maintained by a compilation
81 void dvmCompilerAppendLIR(CompilationUnit *cUnit, LIR *lir)
85 cUnit->lastLIRInsn = cUnit->firstLIRInsn = lir;
86 lir->prev = lir->next = NULL;
88 cUnit->lastLIRInsn->next = lir;
89 lir->prev = cUnit->lastLIRInsn;
90 lir->next = NULL
    [all...]
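
dvmCompilerAppendLIR() above appends to the doubly linked LIR list kept on the CompilationUnit: the first instruction becomes both head and tail, and later ones are linked onto the tail. A minimal, self-contained sketch of that logic with simplified stand-in structs:

    #include <cassert>

    struct LIR {
        LIR* next = nullptr;
        LIR* prev = nullptr;
    };

    struct CompilationUnit {
        LIR* firstLIRInsn = nullptr;
        LIR* lastLIRInsn = nullptr;
    };

    static void dvmCompilerAppendLIR(CompilationUnit* cUnit, LIR* lir) {
        if (cUnit->firstLIRInsn == nullptr) {
            // Empty list: this instruction becomes both head and tail.
            cUnit->lastLIRInsn = cUnit->firstLIRInsn = lir;
            lir->prev = lir->next = nullptr;
        } else {
            // Non-empty list: link onto the current tail.
            cUnit->lastLIRInsn->next = lir;
            lir->prev = cUnit->lastLIRInsn;
            lir->next = nullptr;
            cUnit->lastLIRInsn = lir;
        }
    }

    int main() {
        CompilationUnit cu;
        LIR a, b;
        dvmCompilerAppendLIR(&cu, &a);
        dvmCompilerAppendLIR(&cu, &b);
        assert(cu.firstLIRInsn == &a && cu.lastLIRInsn == &b && a.next == &b);
        return 0;
    }
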
CompilerUtility.h 56 struct LIR;
71 void dvmDumpLIRInsn(struct LIR *lir, unsigned char *baseAddr);
72 void dvmDumpResourceMask(struct LIR *lir, u8 mask, const char *prefix);
CompilerIR.h 81 typedef struct LIR {
83 struct LIR *next;
84 struct LIR *prev;
85 struct LIR *target;
86 } LIR;
125 LIR *misPredBranchOver;
209 LIR *firstLIRInsn;
210 LIR *lastLIRInsn;
211 LIR *literalList; // Constants
212 LIR *classPointerList; // Relocatabl
    [all...]
  /art/compiler/dex/quick/x86/
assemble_x86.cc 1164 LIR *lir; local
    [all...]
target_x86.cc 135 void X86Mir2Lir::SetupTargetResourceMasks(LIR* lir) {
139 uint64_t flags = X86Mir2Lir::EncodingMap[lir->opcode].flags;
142 lir->use_mask |= ENCODE_X86_REG_SP;
146 lir->def_mask |= ENCODE_X86_REG_SP;
150 SetupRegMask(&lir->def_mask, rAX);
154 SetupRegMask(&lir->def_mask, rDX);
157 SetupRegMask(&lir->use_mask, rAX);
161 SetupRegMask(&lir->use_mask, rCX);
165 SetupRegMask(&lir->use_mask, rDX)
    [all...]
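
The SetupTargetResourceMasks() variants above (ARM, MIPS, x86) all translate per-opcode encoding flags into bits in the instruction's use/def resource masks. A minimal sketch of the idea; the flag values, mask bits, and GetEncodingFlags() helper are illustrative stand-ins, not the real EncodingMap constants:

    #include <cstdint>
    #include <cassert>

    struct LIR {
        int opcode = 0;
        uint64_t use_mask = 0;
        uint64_t def_mask = 0;
    };

    // Hypothetical per-opcode flags and resource bits.
    static const uint64_t REG_USE_SP = 1ULL << 0;
    static const uint64_t REG_DEF_SP = 1ULL << 1;
    static const uint64_t ENCODE_REG_SP = 1ULL << 40;

    static uint64_t GetEncodingFlags(int /*opcode*/) {
        // Stand-in for EncodingMap[opcode].flags.
        return REG_USE_SP | REG_DEF_SP;
    }

    static void SetupTargetResourceMasks(LIR* lir) {
        uint64_t flags = GetEncodingFlags(lir->opcode);
        if (flags & REG_USE_SP) {
            lir->use_mask |= ENCODE_REG_SP;  // instruction reads the stack pointer
        }
        if (flags & REG_DEF_SP) {
            lir->def_mask |= ENCODE_REG_SP;  // instruction writes the stack pointer
        }
    }

    int main() {
        LIR lir;
        SetupTargetResourceMasks(&lir);
        assert(lir.use_mask & ENCODE_REG_SP);
        assert(lir.def_mask & ENCODE_REG_SP);
        return 0;
    }
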
codegen_x86.h 33 LIR* LoadBaseDisp(int rBase, int displacement, int r_dest, OpSize size, int s_reg);
34 LIR* LoadBaseDispWide(int rBase, int displacement, int r_dest_lo, int r_dest_hi,
36 LIR* LoadBaseIndexed(int rBase, int r_index, int r_dest, int scale, OpSize size);
37 LIR* LoadBaseIndexedDisp(int rBase, int r_index, int scale, int displacement,
39 LIR* LoadConstantNoClobber(int r_dest, int value);
40 LIR* LoadConstantWide(int r_dest_lo, int r_dest_hi, int64_t value);
41 LIR* StoreBaseDisp(int rBase, int displacement, int r_src, OpSize size);
42 LIR* StoreBaseDispWide(int rBase, int displacement, int r_src_lo, int r_src_hi);
43 LIR* StoreBaseIndexed(int rBase, int r_index, int r_src, int scale, OpSize size);
44 LIR* StoreBaseIndexedDisp(int rBase, int r_index, int scale, int displacement
    [all...]

