// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/builtins/builtins-lazy-gen.h"

#include "src/builtins/builtins-utils-gen.h"
#include "src/builtins/builtins.h"
#include "src/feedback-vector.h"
#include "src/globals.h"
#include "src/objects/shared-function-info.h"

namespace v8 {
namespace internal {

// Tail-calls `code` as a JS invocation of `function`, forwarding the current
// builtin's actual argument count, context and new.target unchanged. Since
// this emits a tail call, control does not return to the caller's code path.
void LazyBuiltinsAssembler::GenerateTailCallToJSCode(
    TNode<Code> code, TNode<JSFunction> function) {
  TNode<Int32T> argc =
      UncheckedCast<Int32T>(Parameter(Descriptor::kActualArgumentsCount));
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Object> new_target = CAST(Parameter(Descriptor::kNewTarget));

  TailCallJSCode(code, context, function, new_target, argc);
}

// Calls the runtime function `function_id` (which must return a Code object;
// the CAST below relies on that) and then tail-calls the returned code with
// the current JS arguments.
void LazyBuiltinsAssembler::GenerateTailCallToReturnedCode(
    Runtime::FunctionId function_id, TNode<JSFunction> function) {
  TNode<Context> context = CAST(Parameter(Descriptor::kContext));
  TNode<Code> code = CAST(CallRuntime(function_id, context, function));
  GenerateTailCallToJSCode(code, function);
}

// If `marker` equals `expected_marker`, tail-calls into the runtime function
// `function_id` (never returning on that path); otherwise execution falls
// through to the code emitted after this call.
void LazyBuiltinsAssembler::TailCallRuntimeIfMarkerEquals(
    TNode<Smi> marker, OptimizationMarker expected_marker,
    Runtime::FunctionId function_id, TNode<JSFunction> function) {
  Label no_match(this);
  GotoIfNot(SmiEqual(marker, SmiConstant(expected_marker)), &no_match);
  // Tail call never returns; only the no_match path continues below.
  GenerateTailCallToReturnedCode(function_id, function);
  BIND(&no_match);
}

// Inspects the feedback vector's optimized-code slot:
//  - a Smi is an optimization marker and dispatches to the matching runtime
//    entry (or falls through for kNone / kInOptimizationQueue);
//  - a weak reference to a Code object is installed on `function` and
//    tail-called, unless it is marked for deoptimization, in which case the
//    slot is evicted via the runtime.
// Falls through (returns to the emitted continuation) when no optimized code
// or actionable marker is present, including when the weak ref is cleared.
void LazyBuiltinsAssembler::MaybeTailCallOptimizedCodeSlot(
    TNode<JSFunction> function, TNode<FeedbackVector> feedback_vector) {
  Label fallthrough(this);

  TNode<MaybeObject> maybe_optimized_code_entry = LoadMaybeWeakObjectField(
      feedback_vector, FeedbackVector::kOptimizedCodeOffset);

  // Check if the code entry is a Smi. If yes, we interpret it as an
  // optimisation marker. Otherwise, interpret it as a weak reference to a code
  // object.
  Label optimized_code_slot_is_smi(this), optimized_code_slot_is_weak_ref(this);
  Branch(TaggedIsSmi(maybe_optimized_code_entry), &optimized_code_slot_is_smi,
         &optimized_code_slot_is_weak_ref);

  BIND(&optimized_code_slot_is_smi);
  {
    // Optimized code slot is a Smi optimization marker.
    TNode<Smi> marker = CAST(maybe_optimized_code_entry);

    // Fall through if no optimization trigger.
    GotoIf(SmiEqual(marker, SmiConstant(OptimizationMarker::kNone)),
           &fallthrough);

    // Each helper below tail-calls out when the marker matches, otherwise
    // falls through to the next check.
    // TODO(ishell): introduce Runtime::kHandleOptimizationMarker and check
    // all these marker values there.
    TailCallRuntimeIfMarkerEquals(marker,
                                  OptimizationMarker::kLogFirstExecution,
                                  Runtime::kFunctionFirstExecution, function);
    TailCallRuntimeIfMarkerEquals(marker, OptimizationMarker::kCompileOptimized,
                                  Runtime::kCompileOptimized_NotConcurrent,
                                  function);
    TailCallRuntimeIfMarkerEquals(
        marker, OptimizationMarker::kCompileOptimizedConcurrent,
        Runtime::kCompileOptimized_Concurrent, function);

    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
    // that an interrupt will eventually update the slot with optimized code.
    CSA_ASSERT(this,
               SmiEqual(marker,
                        SmiConstant(OptimizationMarker::kInOptimizationQueue)));
    Goto(&fallthrough);
  }

  BIND(&optimized_code_slot_is_weak_ref);
  {
    // Optimized code slot is a weak reference; a cleared reference jumps to
    // fallthrough.
    TNode<Code> optimized_code =
        CAST(ToWeakHeapObject(maybe_optimized_code_entry, &fallthrough));

    // Check if the optimized code is marked for deopt. If it is, call the
    // runtime to clear it.
    Label found_deoptimized_code(this);
    TNode<CodeDataContainer> code_data_container =
        CAST(LoadObjectField(optimized_code, Code::kCodeDataContainerOffset));

    TNode<Int32T> code_kind_specific_flags = LoadObjectField<Int32T>(
        code_data_container, CodeDataContainer::kKindSpecificFlagsOffset);
    GotoIf(IsSetWord32<Code::MarkedForDeoptimizationField>(
               code_kind_specific_flags),
           &found_deoptimized_code);

    // Optimized code is good, get it into the closure and link the closure into
    // the optimized functions list, then tail call the optimized code.
    StoreObjectField(function, JSFunction::kCodeOffset, optimized_code);
    GenerateTailCallToJSCode(optimized_code, function);

    // Optimized code slot contains deoptimized code, evict it and re-enter the
    // closure's code.
    BIND(&found_deoptimized_code);
    GenerateTailCallToReturnedCode(Runtime::kEvictOptimizedCodeSlot, function);
  }

  // Fall-through if the optimized code cell is clear and there is no
  // optimization marker.
  BIND(&fallthrough);
}

// Lazy-compilation entry point for `function`: prefers optimized code from
// the feedback vector, then any non-CompileLazy code already on the
// SharedFunctionInfo, and only as a last resort calls into the runtime to
// compile.
void LazyBuiltinsAssembler::CompileLazy(TNode<JSFunction> function) {
  // First lookup code, maybe we don't need to compile!
  Label compile_function(this, Label::kDeferred);

  // Compile function if we don't have a valid feedback vector.
  TNode<FeedbackVector> feedback_vector =
      LoadFeedbackVector(function, &compile_function);

  // Is there an optimization marker or optimized code in the feedback vector?
  // If so, this tail-calls out and does not return here.
  MaybeTailCallOptimizedCodeSlot(function, feedback_vector);

  // We found no optimized code. Infer the code object needed for the SFI.
  TNode<SharedFunctionInfo> shared =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
  // If code entry points to anything other than CompileLazy, install that,
  // otherwise call runtime to compile the function.
  TNode<Code> code = GetSharedFunctionInfoCode(shared, &compile_function);

  // GetSharedFunctionInfoCode jumps to compile_function for CompileLazy, so
  // the code reaching here must not be the CompileLazy builtin itself.
  CSA_ASSERT(
      this,
      WordNotEqual(code, HeapConstant(BUILTIN_CODE(isolate(), CompileLazy))));

  // Install the SFI's code entry.
  StoreObjectField(function, JSFunction::kCodeOffset, code);
  GenerateTailCallToJSCode(code, function);

  BIND(&compile_function);
  { GenerateTailCallToReturnedCode(Runtime::kCompileLazy, function); }
}

// Builtin stub: dispatches to the CompileLazy logic above for the call target.
TF_BUILTIN(CompileLazy, LazyBuiltinsAssembler) {
  TNode<JSFunction> function = CAST(Parameter(Descriptor::kTarget));

  CompileLazy(function);
}

// Builtin installed on functions whose code was deoptimized: resets the
// function's code slot back to CompileLazy and immediately tail-calls it,
// re-entering the lazy-compilation path.
TF_BUILTIN(CompileLazyDeoptimizedCode, LazyBuiltinsAssembler) {
  TNode<JSFunction> function = CAST(Parameter(Descriptor::kTarget));

  // Set the code slot inside the JSFunction to CompileLazy.
  TNode<Code> code = HeapConstant(BUILTIN_CODE(isolate(), CompileLazy));
  StoreObjectField(function, JSFunction::kCodeOffset, code);
  GenerateTailCallToJSCode(code, function);
}

// Lazy deserialization design doc: http://goo.gl/dxkYDZ.
TF_BUILTIN(DeserializeLazy, LazyBuiltinsAssembler) {
  Label deserialize_in_runtime(this, Label::kDeferred);

  TNode<JSFunction> function = CAST(Parameter(Descriptor::kTarget));

  // Load the builtin id for lazy deserialization from SharedFunctionInfo.
  TNode<SharedFunctionInfo> shared =
      CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));

  // The CAST assumes the function data is a Smi builtin id on this path —
  // the only functions dispatched through DeserializeLazy.
  TNode<Smi> sfi_data =
      CAST(LoadObjectField(shared, SharedFunctionInfo::kFunctionDataOffset));

  // The builtin may already have been deserialized. If that is the case, it is
  // stored in the builtins table, and we can copy to correct code object to
  // both the shared function info and function without calling into runtime.
  //
  // Otherwise, we need to call into runtime to deserialize.

  TNode<Code> code = LoadBuiltin(sfi_data);

  // Check if the loaded code object has already been deserialized. This is
  // the case iff it does not equal DeserializeLazy.
  GotoIf(
      WordEqual(code, HeapConstant(BUILTIN_CODE(isolate(), DeserializeLazy))),
      &deserialize_in_runtime);

  // If we've reached this spot, the target builtin has been deserialized and
  // we simply need to copy it over to the target function.
  StoreObjectField(function, JSFunction::kCodeOffset, code);

  // All copying is done. Jump to the deserialized code object.
  GenerateTailCallToJSCode(code, function);

  BIND(&deserialize_in_runtime);
  { GenerateTailCallToReturnedCode(Runtime::kDeserializeLazy, function); }
}

}  // namespace internal
}  // namespace v8