Lines Matching refs:Code
2 // Use of this source code is governed by a BSD-style license that can be
9 // code. gcc is not happy when attempting to inline too deeply.
142 TYPE_CHECKER(Code, CODE_TYPE)
618 CAST_ACCESSOR(Code)
1045 int chars_; // Caches the number of characters when computing the hash code.
4019 // it to be used directly by generated code.
4056 // it to be used directly by generated code.
4418 return reinterpret_cast<Code*>(this)->CodeSize();
4640 Code::Flags Code::flags() {
4719 // Code caches are always fixed arrays. The empty fixed array is used as a
4720 // sentinel for an absent code cache.
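A minimal sketch of the sentinel convention noted above, assuming a FixedArray* code_cache value and the heap's canonical empty fixed array (hypothetical helper, not the actual cache lookup):

// Sketch: an absent code cache is represented by the shared empty FixedArray
// rather than by a null pointer, so presence is a single pointer comparison
// (assumed accessors, illustrative only).
bool HasCodeCacheEntries(FixedArray* code_cache, Heap* heap) {
  return code_cache != heap->empty_fixed_array();
}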
4851 void Code::set_flags(Code::Flags flags) {
4852 STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
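The assertion above guards the width of the kind bit field; a small sketch of the encode/decode pattern it protects (assuming KindField is the usual BitField<Kind, shift, size> declared on Code and is reachable here):

// Sketch: every Kind enumerator must fit in KindField (i.e. be <= kMax) so
// that encode()/decode() round-trip losslessly.
uint32_t bits = Code::KindField::encode(Code::STUB);
DCHECK_EQ(Code::STUB, Code::KindField::decode(bits));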
4857 Code::Kind Code::kind() {
4861 bool Code::IsCodeStubOrIC() {
4874 ExtraICState Code::extra_ic_state() {
4882 void Code::set_raw_kind_specific_flags1(int value) {
4887 void Code::set_raw_kind_specific_flags2(int value) {
4892 inline bool Code::is_crankshafted() {
4898 inline bool Code::is_hydrogen_stub() {
4902 inline bool Code::is_interpreter_trampoline_builtin() {
4909 inline bool Code::has_unwinding_info() const {
4913 inline void Code::set_has_unwinding_info(bool state) {
4919 inline void Code::set_is_crankshafted(bool value) {
4926 inline bool Code::is_turbofanned() {
4932 inline void Code::set_is_turbofanned(bool value) {
4939 inline bool Code::can_have_weak_objects() {
4946 inline void Code::set_can_have_weak_objects(bool value) {
4953 inline bool Code::is_construct_stub() {
4959 inline void Code::set_is_construct_stub(bool value) {
4966 inline bool Code::is_promise_rejection() {
4972 inline void Code::set_is_promise_rejection(bool value) {
4979 inline bool Code::is_exception_caught() {
4985 inline void Code::set_is_exception_caught(bool value) {
4992 bool Code::has_deoptimization_support() {
4999 void Code::set_has_deoptimization_support(bool value) {
5007 bool Code::has_debug_break_slots() {
5014 void Code::set_has_debug_break_slots(bool value) {
5022 bool Code::has_reloc_info_for_serialization() {
5029 void Code::set_has_reloc_info_for_serialization(bool value) {
5037 int Code::allow_osr_at_loop_nesting_level() {
5044 void Code::set_allow_osr_at_loop_nesting_level(int level) {
5053 int Code::profiler_ticks() {
5060 void Code::set_profiler_ticks(int ticks) {
5068 int Code::builtin_index() { return READ_INT_FIELD(this, kBuiltinIndexOffset); }
5070 void Code::set_builtin_index(int index) {
5075 unsigned Code::stack_slots() {
5082 void Code::set_stack_slots(unsigned slots) {
5091 unsigned Code::safepoint_table_offset() {
5098 void Code::set_safepoint_table_offset(unsigned offset) {
5108 unsigned Code::back_edge_table_offset() {
5115 void Code::set_back_edge_table_offset(unsigned offset) {
5125 bool Code::back_edges_patched_for_osr() {
5131 uint16_t Code::to_boolean_state() { return extra_ic_state(); }
5134 bool Code::marked_for_deoptimization() {
5141 void Code::set_marked_for_deoptimization(bool flag) {
5150 bool Code::is_inline_cache_stub() {
5160 bool Code::is_debug_stub() {
5172 bool Code::is_handler() { return kind() == HANDLER; }
5173 bool Code::is_stub() { return kind() == STUB; }
5174 bool Code::is_binary_op_stub() { return kind() == BINARY_OP_IC; }
5175 bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
5176 bool Code::is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
5177 bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
5178 bool Code::is_wasm_code() { return kind() == WASM_FUNCTION; }
5180 Address Code::constant_pool() {
5191 Code::Flags Code::ComputeFlags(Kind kind, ExtraICState extra_ic_state,
5200 Code::Flags Code::ComputeHandlerFlags(Kind handler_kind,
5202 return ComputeFlags(Code::HANDLER, handler_kind, holder);
5206 Code::Kind Code::ExtractKindFromFlags(Flags flags) {
5211 ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
5216 CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
5220 Code::Flags Code::RemoveHolderFromFlags(Flags flags) {
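A hedged round-trip sketch for the flag helpers listed above (kNoExtraICState and kCacheOnReceiver are the usual default values assumed here):

// Sketch: ComputeFlags packs kind, extra IC state and cache holder into a
// single Flags word; the Extract* helpers recover each field.
Code::Flags flags =
    Code::ComputeFlags(Code::STUB, kNoExtraICState, kCacheOnReceiver);
DCHECK_EQ(Code::STUB, Code::ExtractKindFromFlags(flags));
DCHECK_EQ(kNoExtraICState, Code::ExtractExtraICStateFromFlags(flags));
DCHECK_EQ(kCacheOnReceiver, Code::ExtractCacheHolderFromFlags(flags));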
5226 Code* Code::GetCodeFromTargetAddress(Address address) {
5227 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5230 // Code::cast. Code::cast does not work when the object's map is
5232 Code* result = reinterpret_cast<Code*>(code);
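The subtraction above relies on the call target pointing at the first instruction, which sits Code::kHeaderSize bytes past the start of the Code object; a small sketch of that invariant (assuming a valid Code* code):

// Sketch: instruction_start() is the address just past the Code header, so a
// call target maps back to its Code object by a fixed subtraction.
Address target = code->instruction_start();
DCHECK_EQ(code, Code::GetCodeFromTargetAddress(target));
DCHECK_EQ(target, code->address() + Code::kHeaderSize);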
5237 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5239 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
5243 bool Code::CanContainWeakObjects() {
5248 bool Code::IsWeakObject(Object* object) {
5253 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5332 static_cast<AbstractCode::Kind>(Code::FUNCTION));
5339 Code* AbstractCode::GetCode() { return Code::cast(this); }
5624 ACCESSORS(Module, code, Object, kCodeOffset)
5632 bool Module::evaluated() const { return code()->IsModuleInfo(); }
5638 JSFunction::cast(code())->shared()->scope_info()->ModuleDescriptorInfo());
5641 bool Module::instantiated() const { return !code()->IsSharedFunctionInfo(); }
5644 if (evaluated()) return ModuleInfo::cast(code());
5646 ? JSFunction::cast(code())->shared()->scope_info()
5647 : SharedFunctionInfo::cast(code())->scope_info();
5814 Code* code = shared()->code();
5815 bool has = code->kind() == Code::FUNCTION;
5816 DCHECK(!has || code->has_debug_break_slots());
5830 Code* DebugInfo::DebugCode() {
5832 return shared()->code();
5841 ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
5961 return AbstractCode::cast(code());
6052 DCHECK(code()->kind() == Code::BUILTIN || code()->kind() == Code::STUB);
6068 Code* SharedFunctionInfo::code() const {
6069 return Code::cast(READ_FIELD(this, kCodeOffset));
6073 void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
6074 DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
6084 void SharedFunctionInfo::ReplaceCode(Code* value) {
6086 // enqueued as a code flushing candidate and we remove it now.
6087 if (code()->gc_metadata() != NULL) {
6092 DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
6094 Code::VerifyRecompiledCode(code(), value);
6101 return code()->is_interpreter_trampoline_builtin();
6105 return code()->kind() == Code::FUNCTION;
6128 DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
6129 DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
6130 DCHECK(code() != builtins->builtin(Builtins::kCompileBaseline));
6131 return code() != builtins->builtin(Builtins::kCompileLazy);
6152 if (HasBaselineCode()) return code()->has_debug_break_slots();
6310 Code* code = this->code();
6311 return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
6354 return code()->kind() == Code::OPTIMIZED_FUNCTION;
6358 return code()->is_interpreter_trampoline_builtin();
6362 return code() ==
6367 return code() == GetIsolate()->builtins()->builtin(
6373 return code() == GetIsolate()->builtins()->builtin(
6379 return code() == GetIsolate()->builtins()->builtin(
6409 return AbstractCode::cast(code());
6413 Code* JSFunction::code() {
6414 return Code::cast(
6415 Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
6419 void JSFunction::set_code(Code* value) {
6430 void JSFunction::set_code_no_write_barrier(Code* value) {
6437 void JSFunction::ReplaceCode(Code* code) {
6439 bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
6442 shared()->EvictFromOptimizedCodeMap(this->code(),
6443 "Replacing with another optimized code");
6446 set_code(code);
6547 return code() != builtins->builtin(Builtins::kCompileLazy) &&
6548 code() != builtins->builtin(Builtins::kCompileBaseline) &&
6549 code() != builtins->builtin(Builtins::kCompileOptimized) &&
6550 code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
6651 INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
6652 INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
6653 INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
6655 ACCESSORS_CHECKED2(Code, name, type, offset, true, \
6665 void Code::WipeOutHeader() {
6670 // Do not wipe out major/minor keys on a code stub or IC
6679 Object* Code::type_feedback_info() {
6685 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
6693 uint32_t Code::stub_key() {
6700 void Code::set_stub_key(uint32_t key) {
6706 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6707 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
6710 byte* Code::instruction_start() {
6715 byte* Code::instruction_end() {
6719 int Code::GetUnwindingInfoSizeOffset() const {
6724 int Code::unwinding_info_size() const {
6730 void Code::set_unwinding_info_size(int value) {
6735 byte* Code::unwinding_info_start() {
6740 byte* Code::unwinding_info_end() {
6745 int Code::body_size() {
6753 int Code::SizeIncludingMetadata() {
6762 ByteArray* Code::unchecked_relocation_info() {
6767 byte* Code::relocation_start() {
6772 int Code::relocation_size() {
6777 byte* Code::entry() {
6782 bool Code::contains(byte* inner_pointer) {
6787 int Code::ExecutableSize() {
6788 // Check that the assumptions about the layout of the code object hold.
6790 Code::kHeaderSize);
6791 return instruction_size() + Code::kHeaderSize;
6795 int Code::CodeSize() { return SizeFor(body_size()); }
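A hedged sketch of the layout arithmetic behind contains() and ExecutableSize() (hypothetical free functions, not V8 API):

// Sketch: an inner pointer belongs to the code object when it falls inside
// the half-open instruction range; the executable size is the instructions
// plus the fixed header that precedes them.
bool ContainsInnerPointer(byte* instruction_start, int instruction_size,
                          byte* inner_pointer) {
  return instruction_start <= inner_pointer &&
         inner_pointer < instruction_start + instruction_size;
}
int ExecutableSizeFor(int instruction_size) {
  return instruction_size + Code::kHeaderSize;
}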
7172 // Fast case: has hash code already been computed?
7175 // Slow case: compute hash code and set it. Has to be a string.
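A generic sketch of the fast/slow pattern described in these two comments (hypothetical helper, not the V8 hashing code): return the cached hash when present, otherwise compute it once and store it.

// Sketch: compute-once hashing with a cached field; 0 is reserved here as
// the "not yet computed" marker purely for illustration.
uint32_t GetOrComputeHash(uint32_t* cached_hash, const char* chars, int length) {
  if (*cached_hash != 0) return *cached_hash;  // Fast case: already computed.
  uint32_t hash = 0;                           // Slow case: compute and cache.
  for (int i = 0; i < length; i++) {
    hash = hash * 31 + static_cast<uint8_t>(chars[i]);
  }
  if (hash == 0) hash = 1;  // Keep 0 as the "absent" marker.
  *cached_hash = hash;
  return hash;
}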
7353 // code
8027 // shared between two code objects. This can only happen when
8028 // the debugger made a shallow copy of the code object (see Heap::CopyCode).