/art/runtime/jit/
jit_code_cache_test.cc
    82  uint8_t* code_ptr = nullptr;  (local)
    85  code_ptr = code_cache->ReserveCode(Thread::Current(), kCodeArrSize);
    87  if (code_ptr != nullptr) {
    93  } while (code_ptr != nullptr || data_ptr != nullptr);
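
The test loops on ReserveCode until the cache refuses the request, so both the success path and the exhaustion path are exercised. A minimal sketch of the same reserve-until-nullptr pattern, with an invented FakeCodeCache standing in for the real JitCodeCache (only ReserveCode's nullptr-on-full contract is taken from the snippet; the capacity and kCodeArrSize values below are made up):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Invented stand-in for JitCodeCache; only the exhaustion behaviour
    // matters here. Storage is preallocated so returned pointers stay valid.
    class FakeCodeCache {
     public:
      explicit FakeCodeCache(size_t capacity) : storage_(capacity), used_(0) {}

      // Returns nullptr once the cache cannot satisfy the request, which is
      // what the do/while loop in the test waits for.
      uint8_t* ReserveCode(size_t size) {
        if (used_ + size > storage_.size()) return nullptr;
        uint8_t* ptr = storage_.data() + used_;
        used_ += size;
        return ptr;
      }

     private:
      std::vector<uint8_t> storage_;
      size_t used_;
    };

    int main() {
      constexpr size_t kCodeArrSize = 4 * 1024;  // same constant name as the test
      FakeCodeCache cache(64 * 1024);
      int reservations = 0;
      uint8_t* code_ptr = nullptr;
      do {
        code_ptr = cache.ReserveCode(kCodeArrSize);
        if (code_ptr != nullptr) ++reservations;
      } while (code_ptr != nullptr);  // loop until the cache is exhausted
      return reservations == 16 ? 0 : 1;  // 64 KiB / 4 KiB = 16 successful reserves
    }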

/art/runtime/
exception_test.cc
    97  const uint8_t* code_ptr = &fake_header_code_and_maps_[gc_map_offset];  (local)
   101  method_f_->SetEntryPointFromQuickCompiledCode(code_ptr);
   105  method_g_->SetEntryPointFromQuickCompiledCode(code_ptr);

/art/compiler/
common_compiler_test.cc
    96  const void* code_ptr = &(*chunk)[code_offset];  (local)
    97  MakeExecutable(code_ptr, code->size());
    98  const void* method_code = CompiledMethod::CodePointer(code_ptr,
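
MakeExecutable here flips a buffer of freshly emitted code into an executable mapping. A sketch of one common POSIX way to do that, assuming nothing about ART's actual implementation: widen the range to page boundaries, mprotect it, and flush the instruction cache:

    #include <sys/mman.h>
    #include <unistd.h>
    #include <cstddef>
    #include <cstdint>

    // Hedged sketch of a MakeExecutable-style helper (the real ART function
    // may use a dedicated mapping). mprotect() works on whole pages, so the
    // requested range is widened to page boundaries first.
    bool MakeExecutableSketch(const void* code_ptr, size_t size) {
      const uintptr_t page_size = static_cast<uintptr_t>(sysconf(_SC_PAGESIZE));
      const uintptr_t begin = reinterpret_cast<uintptr_t>(code_ptr);
      const uintptr_t page_begin = begin & ~(page_size - 1);
      const size_t length = (begin - page_begin) + size;
      if (mprotect(reinterpret_cast<void*>(page_begin), length,
                   PROT_READ | PROT_WRITE | PROT_EXEC) != 0) {
        return false;
      }
      // On split I/D-cache architectures the new instructions must be made
      // visible to the instruction fetcher before they are executed.
      __builtin___clear_cache(reinterpret_cast<char*>(begin),
                              reinterpret_cast<char*>(begin + size));
      return true;
    }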

/art/compiler/jit/
jit_compiler.cc
   189  auto* code_ptr = reserve_begin;  (local)
   190  OatQuickMethodHeader* method_header = reinterpret_cast<OatQuickMethodHeader*>(code_ptr) - 1;
   197  std::copy(quick_code->data(), quick_code->data() + code_size, code_ptr);
   201  code_ptr - mapping_table, code_ptr - vmap_table, code_ptr - gc_map, frame_size_in_bytes,
   204  return code_ptr;
   244  auto* code_ptr = WriteMethodHeaderAndCode(  (local)
   248  __builtin___clear_cache(reinterpret_cast<char*>(code_ptr),
   249  reinterpret_cast<char*>(code_ptr + quick_code->size()))
   [all...]
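
The pattern in jit_compiler.cc: reserve one region, place a method header immediately before the code it describes (line 190 steps one header back from code_ptr; line 201 stores backward offsets from code_ptr), copy the compiled code in, and flush the icache over the written range. A simplified sketch with an invented header type (OatQuickMethodHeader's real layout is not reproduced, and reserve_begin is assumed suitably aligned):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Invented stand-in for OatQuickMethodHeader: the real struct carries
    // more fields; the point is that it lives directly *before* the code.
    struct MethodHeader {
      uint32_t code_size;
    };

    // Writes [header][code] into reserve_begin and returns a pointer to the
    // first code byte, mimicking the WriteMethodHeaderAndCode shape above.
    uint8_t* WriteHeaderAndCode(uint8_t* reserve_begin,
                                const std::vector<uint8_t>& quick_code) {
      uint8_t* code_ptr = reserve_begin + sizeof(MethodHeader);
      MethodHeader* header = reinterpret_cast<MethodHeader*>(code_ptr) - 1;
      header->code_size = static_cast<uint32_t>(quick_code.size());
      std::copy(quick_code.begin(), quick_code.end(), code_ptr);
      // Same flush as lines 248-249: make the written range executable-visible.
      __builtin___clear_cache(
          reinterpret_cast<char*>(code_ptr),
          reinterpret_cast<char*>(code_ptr + quick_code.size()));
      return code_ptr;
    }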

/external/v8/src/base/platform/
platform-macos.cc
   137  char* code_ptr = getsectdatafromheader_64(  (local)
   144  char* code_ptr = getsectdatafromheader(header, SEG_TEXT, SECT_TEXT, &size);  (local)
   146  if (code_ptr == NULL) continue;
   148  const uintptr_t start = reinterpret_cast<uintptr_t>(code_ptr) + slide;
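
The V8 code walks every Mach-O image loaded into the process, asks for its __TEXT,__text section, and offsets the result by the image's ASLR slide (line 148). A hedged sketch of the same dyld API usage, showing only the 64-bit branch from line 137 and assuming a 64-bit process:

    #include <mach-o/dyld.h>
    #include <mach-o/getsect.h>
    #include <mach-o/loader.h>
    #include <cstdint>
    #include <cstdio>

    // Enumerate the __TEXT,__text range of every loaded image (macOS only).
    // getsectdatafromheader_64 returns an unslid pointer, so the image's
    // virtual-address slide must be added to get the runtime address.
    void DumpTextSections() {
      for (uint32_t i = 0; i < _dyld_image_count(); ++i) {
        const mach_header_64* header =
            reinterpret_cast<const mach_header_64*>(_dyld_get_image_header(i));
        if (header == nullptr) continue;
        uint64_t size = 0;
        char* code_ptr =
            getsectdatafromheader_64(header, SEG_TEXT, SECT_TEXT, &size);
        if (code_ptr == nullptr) continue;
        const uintptr_t start = reinterpret_cast<uintptr_t>(code_ptr) +
                                _dyld_get_image_vmaddr_slide(i);
        std::printf("%s __TEXT,__text: [%#lx, %#lx)\n", _dyld_get_image_name(i),
                    static_cast<unsigned long>(start),
                    static_cast<unsigned long>(start + size));
      }
    }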

/art/compiler/dex/
mir_graph.cc
   169  int MIRGraph::ParseInsn(const uint16_t* code_ptr, MIR::DecodedInstruction* decoded_instruction) {
   170  const Instruction* inst = Instruction::At(code_ptr);
   456  int width, int flags, const uint16_t* code_ptr,
   506  } else if (code_ptr < code_end) {
   597  const uint16_t* code_ptr, const uint16_t* code_end,
   649  if (code_ptr < code_end) {
   712  const uint16_t* code_ptr = current_code_item_->insns_;  (local)
   762  while (code_ptr < code_end) {
   766  int width = ParseInsn(code_ptr, &insn->dalvikInsn);
   789  if ((width == 1) && ((current_offset_ & 0x1) == 0x1) && ((code_end - code_ptr) > 1))
   [all...]
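
MIRGraph (and the optimizing builder below) walk a Dalvik code item the same way: code_ptr starts at insns_, each decoded instruction reports its own width in 16-bit code units, and the loop advances until code_end. A generic sketch of that variable-width walk; the width table here is a toy (real decoding goes through Instruction::At / SizeInCodeUnits), though the goto widths listed do match the dex 10t/20t/30t formats:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Toy width lookup: a real decoder derives the size in 16-bit code units
    // from the opcode's format. Only a few opcodes are sketched here.
    static size_t WidthInCodeUnits(uint16_t insn) {
      switch (insn & 0xff) {
        case 0x00: return 1;  // nop
        case 0x28: return 1;  // goto    (10t)
        case 0x29: return 2;  // goto/16 (20t)
        case 0x2a: return 3;  // goto/32 (30t)
        default:   return 1;  // placeholder, not a full dex format table
      }
    }

    void WalkCodeItem(const uint16_t* insns, size_t insns_size_in_code_units) {
      const uint16_t* code_ptr = insns;
      const uint16_t* code_end = insns + insns_size_in_code_units;
      while (code_ptr < code_end) {
        const size_t width = WidthInCodeUnits(*code_ptr);
        std::printf("offset %zu: opcode 0x%02x, width %zu\n",
                    static_cast<size_t>(code_ptr - insns),
                    *code_ptr & 0xff, width);
        code_ptr += width;  // variable-width advance, as in the loop at line 762
      }
    }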

/art/compiler/optimizing/
builder.cc
   265  const uint16_t* code_ptr = code_item.insns_;  (local)
   267  code_start_ = code_ptr;
   285  if (!ComputeBranchTargets(code_ptr, code_end, &number_of_branches)) {
   317  while (code_ptr < code_end) {
   320  const Instruction& instruction = *Instruction::At(code_ptr);
   325  code_ptr += instruction.SizeInCodeUnits();
   355  bool HGraphBuilder::ComputeBranchTargets(const uint16_t* code_ptr,
   358  branch_targets_.SetSize(code_end - code_ptr);
   368  while (code_ptr < code_end) {
   369  const Instruction& instruction = *Instruction::At(code_ptr);
   [all...]
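
HGraphBuilder sizes branch_targets_ to one slot per code unit (line 358) and fills it in a first pass before the second pass builds basic blocks. A sketch of the same two-pass idea over a toy instruction set, where opcode 0x28 is "branch" followed by a rel16 offset in the next code unit; this is not the real dex goto encoding, which packs the offset into the same unit:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr uint16_t kBranchOp = 0x0028;  // toy "branch" opcode, sketch only

    // First pass: mark every code unit that some branch can jump to, so the
    // block builder knows where a basic block must start.
    std::vector<bool> ComputeBranchTargets(const uint16_t* code_ptr,
                                           const uint16_t* code_end) {
      std::vector<bool> is_target(static_cast<size_t>(code_end - code_ptr), false);
      const uint16_t* cursor = code_ptr;
      while (cursor < code_end) {
        if (*cursor == kBranchOp && (cursor + 1) < code_end) {
          const int16_t offset = static_cast<int16_t>(cursor[1]);
          const ptrdiff_t target = (cursor - code_ptr) + offset;
          if (target >= 0 && target < (code_end - code_ptr)) {
            is_target[static_cast<size_t>(target)] = true;
          }
          cursor += 2;
        } else {
          cursor += 1;  // everything else is a width-1 straight-line insn
        }
      }
      return is_target;
    }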

/external/pcre/dist/sljit/
sljitNativeARM_32.c
   266  static sljit_uw patch_pc_relative_loads(sljit_uw *last_pc_patch, sljit_uw *code_ptr, sljit_uw* const_pool, sljit_uw cpool_size)
   274  SLJIT_ASSERT(const_pool - code_ptr <= CONST_POOL_ALIGNMENT);
   281  while (last_pc_patch < code_ptr) {
   390  static SLJIT_INLINE sljit_si detect_jump_type(struct sljit_jump *jump, sljit_uw *code_ptr, sljit_uw *code)
   399  code_ptr--;
   402  diff = ((sljit_sw)jump->u.target - (sljit_sw)(code_ptr + 2));
   405  diff = ((sljit_sw)(code + jump->u.label->size) - (sljit_sw)(code_ptr + 2));
   414  *code_ptr = (BL - CONDITIONAL) | (*(code_ptr + 1) & COND_MASK);
   421  *code_ptr = (B - CONDITIONAL) | (*code_ptr & COND_MASK)
   558  sljit_uw *code_ptr;  (local)
   [all...]
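
detect_jump_type computes the displacement relative to code_ptr + 2 because the A32 PC reads as the current instruction plus 8 bytes (two words), then rewrites a generic jump into a single B/BL when the target fits the 24-bit signed word immediate. A sketch of just that range check and encoding; the opcode constants below come from the ARM architecture manual, not from sljit's B/BL/CONDITIONAL macros:

    #include <cstdint>
    #include <optional>

    // A32 B/BL hold a signed 24-bit word offset, applied relative to the
    // instruction address + 8. Returns the encoded instruction, or nullopt
    // when the target is misaligned or out of range (caller keeps the long
    // form in that case, as sljit does).
    std::optional<uint32_t> EncodeArmBranch(uintptr_t insn_addr, uintptr_t target,
                                            bool link, uint32_t cond_bits) {
      const intptr_t diff =
          static_cast<intptr_t>(target) - static_cast<intptr_t>(insn_addr + 8);
      if (diff & 0x3) return std::nullopt;               // must be word aligned
      const intptr_t imm = diff / 4;
      if (imm < -(1 << 23) || imm >= (1 << 23)) return std::nullopt;
      const uint32_t opcode = link ? 0x0B000000u : 0x0A000000u;  // BL / B
      return cond_bits | opcode | (static_cast<uint32_t>(imm) & 0x00FFFFFFu);
    }

For an unconditional branch, cond_bits is the AL condition, e.g. EncodeArmBranch(pc, target, /*link=*/false, 0xE0000000u).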

sljitNativeARM_64.c
   155  static SLJIT_INLINE sljit_si detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
   171  diff = (sljit_sw)target_addr - (sljit_sw)(code_ptr + 4);
   176  code_ptr[-5] ^= (jump->flags & IS_CBZ) ? (0x1 << 24) : 0x1;
   191  code_ptr[-5] -= (2 << 5);
   192  code_ptr[-2] = code_ptr[0];
   197  code_ptr[-5] -= (1 << 5);
   199  code_ptr[-1] = code_ptr[0];
   211  sljit_ins *code_ptr;  (local)
   [all...]

sljitNativeARM_T2_32.c
   224  static SLJIT_INLINE sljit_si detect_jump_type(struct sljit_jump *jump, sljit_uh *code_ptr, sljit_uh *code)
   235  diff = ((sljit_sw)jump->u.target - (sljit_sw)(code_ptr + 2)) >> 1;
   239  diff = ((sljit_sw)(code + jump->u.label->size) - (sljit_sw)(code_ptr + 2)) >> 1;
   346  sljit_uh *code_ptr;  (local)
   363  code_ptr = code;
   373  *code_ptr = *buf_ptr++;
   379  label->addr = ((sljit_uw)code_ptr) | 0x1;
   380  label->size = code_ptr - code;
   384  jump->addr = (sljit_uw)code_ptr - ((jump->flags & IS_COND) ? 10 : 8);
   385  code_ptr -= detect_jump_type(jump, code_ptr, code)
   [all...]
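
Every sljit backend in this list shares the same generate_code shape: a second pass copies buffered instruction units into the final buffer while code_ptr tracks the write position; when a label's offset comes up, its final address is recorded from code_ptr, and detect_jump_type may move code_ptr backwards to shrink a far jump. The Thumb-2 variant additionally sets bit 0 of label addresses (line 379) to mark Thumb mode. A structural sketch, heavily simplified (sljit interleaves this with per-chunk buffers, and the shrink step is only noted in a comment):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct Label {
      size_t offset_in_units;  // position in the buffered stream
      uintptr_t addr;          // resolved final address, filled in below
    };

    // Second pass of a two-pass assembler: copy buffered units to their
    // final home and resolve each label to the address its unit landed at.
    // `thumb` mirrors the ARM_T2 backend tagging addresses with bit 0.
    void GenerateCode(const std::vector<uint16_t>& buffered,
                      std::vector<Label>& labels,  // sorted by offset_in_units
                      uint16_t* code, bool thumb) {
      uint16_t* code_ptr = code;
      size_t next_label = 0;
      for (size_t i = 0; i < buffered.size(); ++i) {
        if (next_label < labels.size() &&
            labels[next_label].offset_in_units == i) {
          const uintptr_t addr = reinterpret_cast<uintptr_t>(code_ptr);
          labels[next_label].addr = thumb ? (addr | 0x1) : addr;
          ++next_label;
        }
        *code_ptr++ = buffered[i];  // *code_ptr = *buf_ptr++;
        // A real backend would call detect_jump_type() here and possibly
        // move code_ptr backwards to replace a long jump with a short one.
      }
    }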

sljitNativeMIPS_common.c
   221  static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
   230  return code_ptr;
   233  return code_ptr;
   349  return code_ptr;
   353  static __attribute__ ((noinline)) void sljit_cache_flush(void* code, void* code_ptr)
   355  SLJIT_CACHE_FLUSH(code, code_ptr);
   363  sljit_ins *code_ptr;  (local)
   381  code_ptr = code;
   390  *code_ptr = *buf_ptr++;
   397  label->addr = (sljit_uw)code_ptr;
   [all...]

sljitNativePPC_common.c
   248  static SLJIT_INLINE sljit_si detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
   274  diff = ((sljit_sw)target_addr - (sljit_sw)(code_ptr)) & ~0x3l;
   321  sljit_ins *code_ptr;  (local)
   346  code_ptr = code;
   355  *code_ptr = *buf_ptr++;
   362  label->addr = (sljit_uw)code_ptr;
   363  label->size = code_ptr - code;
   368  jump->addr = (sljit_uw)(code_ptr - 3);
   370  jump->addr = (sljit_uw)(code_ptr - 6);
   372  if (detect_jump_type(jump, code_ptr, code))
   [all...]

sljitNativeSPARC_common.c
   198  static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
   206  return code_ptr;
   268  return code_ptr;
   275  sljit_ins *code_ptr;  (local)
   293  code_ptr = code;
   302  *code_ptr = *buf_ptr++;
   309  label->addr = (sljit_uw)code_ptr;
   310  label->size = code_ptr - code;
   315  jump->addr = (sljit_uw)(code_ptr - 3);
   317  jump->addr = (sljit_uw)(code_ptr - 6)
   [all...]

sljitNativeX86_common.c
   386  static sljit_ub* generate_far_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, sljit_si type);
   389  static sljit_ub* generate_fixed_jump(sljit_ub *code_ptr, sljit_sw addr, sljit_si type);
   392  static sljit_ub* generate_near_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, sljit_ub *code, sljit_si type)
   405  return generate_far_jump_code(jump, code_ptr, type);
   410  *code_ptr++ = JMP_i8;
   412  *code_ptr++ = JMP_i32;
   417  *code_ptr++ = CALL_i32;
   421  *code_ptr++ = get_jump_code(type) - 0x10;
   425  *code_ptr++ = GROUP_0F;
   426  *code_ptr++ = get_jump_code(type)
   449  sljit_ub *code_ptr;  (local)
   [all...]
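
generate_near_jump_code picks the shortest x86 encoding: JMP_i8 (rel8) when the displacement fits a signed byte, JMP_i32 (rel32) otherwise; for conditional jumps, `get_jump_code(type) - 0x10` converts the two-byte 0x0F 0x8x near form to the one-byte 0x7x short form. A sketch of the unconditional case, with opcodes 0xEB and 0xE9 per the Intel manual; it assumes the target is reachable within ±2 GiB (sljit's generate_far_jump_code handles the rest):

    #include <climits>
    #include <cstdint>

    // Emit an unconditional x86 jump at code_ptr targeting `target`,
    // choosing rel8 (EB xx) when the displacement fits a signed byte and
    // rel32 (E9 xx xx xx xx) otherwise. Returns the new write position.
    uint8_t* EmitJump(uint8_t* code_ptr, uintptr_t target) {
      // Displacements are relative to the end of the jump instruction.
      const intptr_t short_diff = static_cast<intptr_t>(target) -
                                  reinterpret_cast<intptr_t>(code_ptr + 2);
      if (short_diff >= SCHAR_MIN && short_diff <= SCHAR_MAX) {
        *code_ptr++ = 0xEB;  // JMP rel8
        *code_ptr++ = static_cast<uint8_t>(short_diff);
        return code_ptr;
      }
      const intptr_t long_diff = static_cast<intptr_t>(target) -
                                 reinterpret_cast<intptr_t>(code_ptr + 5);
      *code_ptr++ = 0xE9;  // JMP rel32
      const int32_t imm = static_cast<int32_t>(long_diff);
      for (int i = 0; i < 4; ++i) {  // little-endian imm32
        *code_ptr++ = static_cast<uint8_t>(imm >> (8 * i));
      }
      return code_ptr;
    }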

sljitNativeTILEGX_64.c
   903  static SLJIT_INLINE sljit_ins * detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code)
   911  return code_ptr;
   968  return code_ptr;
   994  return code_ptr;
  1001  sljit_ins *code_ptr;  (local)
  1019  code_ptr = code;
  1028  *code_ptr = *buf_ptr++;
  1035  label->addr = (sljit_uw) code_ptr;
  1036  label->size = code_ptr - code;
  1042  jump->addr = (sljit_uw)(code_ptr - 4)
   [all...]