Lines Matching refs:Code
2 // Use of this source code is governed by a BSD-style license that can be
80 // frame code that computes the caller state to access the top
188 Code* interpreter_entry_trampoline =
190 Code* interpreter_bytecode_advance =
192 Code* interpreter_bytecode_dispatch =
382 Code* StackFrame::GetSafepointData(Isolate* isolate,
389 entry->safepoint_entry = entry->code->GetSafepointEntry(inner_pointer);
393 entry->code->GetSafepointEntry(inner_pointer)));
396 // Fill in the results and return the code.
397 Code* code = entry->code;
399 *stack_slots = code->stack_slots();
400 return code;
410 Address* constant_pool_address, Code* holder) {
414 Object* code = holder;
415 v->VisitPointer(&code);
416 if (code != holder) {
417 holder = reinterpret_cast<Code*>(code);
463 // Look up the code object to figure out the type of the stack frame.
464 Code* code_obj =
468 case Code::BUILTIN:
474 // TODO(bmeurer): We treat frames for BUILTIN Code objects as
481 case Code::FUNCTION:
483 case Code::OPTIMIZED_FUNCTION:
485 case Code::WASM_FUNCTION:
487 case Code::WASM_TO_JS_FUNCTION:
489 case Code::JS_TO_WASM_FUNCTION:
491 case Code::WASM_INTERPRETER_ENTRY:
549 Code* EntryFrame::unchecked_code() const {
572 Code* EntryConstructFrame::unchecked_code() const {
582 Code* ExitFrame::unchecked_code() const {
583 return reinterpret_cast<Code*>(code_slot());
701 Code* code = NULL;
703 accumulator->PrintFunction(function, receiver, &code);
741 AbstractCode* code = AbstractCode::cast(LookupCode());
742 int code_offset = static_cast<int>(pc() - code->instruction_start());
743 return code->SourcePosition(code_offset);
794 Code* code = StackFrame::GetSafepointData(
892 IteratePc(v, pc_address(), constant_pool_address(), code);
907 Code* StubFrame::unchecked_code() const {
959 Code* JavaScriptFrame::unchecked_code() const {
960 return function()->code();
994 Code* code = LookupCode();
995 int offset = static_cast<int>(pc() - code->instruction_start());
996 AbstractCode* abstract_code = AbstractCode::cast(code);
1028 AbstractCode* code,
1036 int source_pos = code->SourcePosition(code_offset);
1071 Code* code = frame->unchecked_code();
1072 code_offset = static_cast<int>(frame->pc() - code->instruction_start());
1096 AbstractCode* code,
1105 int source_pos = code->SourcePosition(code_offset);
1129 Code* code = frame->unchecked_code();
1130 code_offset = static_cast<int>(frame->pc() - code->instruction_start());
1150 bool CannotDeoptFromAsmCode(Code* code, JSFunction* function) {
1151 return code->is_turbofanned() && function->shared()->asm_function();
1167 Code::cast(abstract_code)->kind() != Code::OPTIMIZED_FUNCTION ||
1168 CannotDeoptFromAsmCode(Code::cast(abstract_code), function) ||
1250 Isolate* isolate, Handle<WasmInstanceObject> instance, Handle<Code> code,
1254 code_(code),
1258 FixedArray* deopt_data = code()->deoptimization_data();
1267 return AbstractCode::cast(*code())->SourcePosition(code_offset());
1350 Code* code = LookupCode();
1351 if (code->kind() == Code::BUILTIN ||
1352 CannotDeoptFromAsmCode(code, function())) {
1427   Code* code = shared_info->code();
1429 DeoptimizationOutputData::cast(code->deoptimization_data());
1433 abstract_code = AbstractCode::cast(code);
1460 // We cannot perform exception prediction on optimized code. Instead, we need
1461 // to use FrameSummary to find the corresponding code offset in unoptimized
1462 // code to perform prediction there.
1464 Code* code = LookupCode();
1465 HandlerTable* table = HandlerTable::cast(code->handler_table());
1466 int pc_offset = static_cast<int>(pc() - code->entry());
1467 if (stack_slots) *stack_slots = code->stack_slots();
1477 Code* code = opt_function->code();
1479 // The code object may have been replaced by lazy deoptimization. Fall
1481 // code object.
1482 if (!code->contains(pc())) {
1483 code = isolate()->inner_pointer_to_code_cache()->
1486 DCHECK(code != NULL);
1487 DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
1489 SafepointEntry safepoint_entry = code->GetSafepointEntry(pc());
1492 return DeoptimizationInputData::cast(code->deoptimization_data());
1498 Code* code = LookupCode();
1499 if (code->kind() == Code::BUILTIN) {
1517 Code* code = LookupCode();
1518 if (code->kind() == Code::BUILTIN ||
1519 CannotDeoptFromAsmCode(code, function())) {
1571 AbstractCode* code = AbstractCode::cast(GetBytecodeArray());
1573 return code->SourcePosition(code_offset);
1665 Code* ArgumentsAdaptorFrame::unchecked_code() const {
1684 Code* InternalFrame::unchecked_code() const {
1686 Object* code = Memory::Object_at(fp() + offset);
1687 DCHECK(code != NULL);
1688 return reinterpret_cast<Code*>(code);
1719 Code* WasmCompiledFrame::unchecked_code() const {
1753 Handle<Code> code(LookupCode(), isolate());
1754 int offset = static_cast<int>(pc() - code->instruction_start());
1757 isolate(), instance, code, offset, at_to_number_conversion());
1765 Code* code = callee_pc ? isolate()->FindCodeObject(callee_pc) : nullptr;
1766 if (!code || code->kind() != Code::WASM_TO_JS_FUNCTION) return false;
1767 int offset = static_cast<int>(callee_pc - code->instruction_start());
1768 int pos = AbstractCode::cast(code)->SourcePosition(offset);
1776 Code* code = LookupCode();
1777 HandlerTable* table = HandlerTable::cast(code->handler_table());
1778 int pc_offset = static_cast<int>(pc() - code->entry());
1779 *stack_slots = code->stack_slots();
1810 Code* WasmInterpreterEntryFrame::unchecked_code() const {
1837 Code* code) {
1838 if (FLAG_max_stack_trace_source_length != 0 && code != NULL) {
1861 Code* code = NULL;
1863 accumulator->PrintFunction(function, receiver, &code);
1865 // Get scope information for nicer output, if possible. If code is NULL, or
1878 if (code != NULL && code->kind() == Code::FUNCTION &&
1879 pc >= code->instruction_start() && pc < code->instruction_end()) {
1880 int offset = static_cast<int>(pc - code->instruction_start());
1881 int source_pos = AbstractCode::cast(code)->SourcePosition(offset);
1922 PrintFunctionSource(accumulator, shared, code);
1990 PrintFunctionSource(accumulator, shared, code);
2070 Code* StubFailureTrampolineFrame::unchecked_code() const {
2071 Code* trampoline;
2120 static bool GcSafeCodeContains(HeapObject* code, Address addr) {
2121 Map* map = GcSafeMapOfCodeSpaceObject(code);
2122 DCHECK(map == code->GetHeap()->code_map());
2123 Address start = code->address();
2124 Address end = code->address() + code->SizeFromMap(map);
2130 Code* InnerPointerToCodeCache::GcSafeCastToCode(HeapObject* object,
2132 Code* code = reinterpret_cast<Code*>(object);
2133 DCHECK(code != NULL && GcSafeCodeContains(code, inner_pointer));
2134 return code;
2138 Code* InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(
2190 DCHECK(entry->code == GcSafeFindCodeForInnerPointer(inner_pointer));
2192 // Because this code may be interrupted by a profiling signal that
2193 // also queries the cache, we cannot update inner_pointer before the code
2195 // the code has been computed.
2196 entry->code = GcSafeFindCodeForInnerPointer(inner_pointer);