v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
builtins-lazy-gen.cc
Go to the documentation of this file.
1// Copyright 2018 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
13
14namespace v8 {
15namespace internal {
16
18
// NOTE(review): Doxygen-dump artifact — source line 19 (the signature line,
// per the reference index: `void GenerateTailCallToJSCode(TNode<Code> code,
// TNode<JSFunction> function)`) is missing from this extraction.
// Tail-calls `code` as a JS invocation, forwarding the incoming actual
// argument count, context, and new.target unchanged.
 20 TNode<Code> code, TNode<JSFunction> function) {
 21 auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
 22 auto context = Parameter<Context>(Descriptor::kContext);
 23 auto new_target = Parameter<Object>(Descriptor::kNewTarget);
 24#ifdef V8_JS_LINKAGE_INCLUDES_DISPATCH_HANDLE
// With dispatch handles in the JS linkage, forward the caller's handle.
 25 auto dispatch_handle =
 26 UncheckedParameter<JSDispatchHandleT>(Descriptor::kDispatchHandle);
 27#else
// Otherwise pass an explicitly invalid handle constant.
 28 auto dispatch_handle = InvalidDispatchHandleConstant();
 29#endif
 30 // TODO(40931165): Check that dispatch_handle-argcount == code-argcount.
 31 TailCallJSCode(code, context, function, new_target, argc, dispatch_handle);
 32}
 33
// NOTE(review): Doxygen-dump artifact — source line 34 (the signature line,
// per the reference index: `void GenerateTailCallToReturnedCode(
// Runtime::FunctionId function_id, TNode<JSFunction> function)`) is missing.
// Calls the given runtime function, which is expected to return a Code
// object, then tail-calls that code for `function`.
 35 Runtime::FunctionId function_id, TNode<JSFunction> function) {
 36 auto context = Parameter<Context>(Descriptor::kContext);
 37 TNode<Code> code = CAST(CallRuntime(function_id, context, function));
 38 GenerateTailCallToJSCode(code, function);
 39}
40
41#ifndef V8_ENABLE_LEAPTIERING
 42
// NOTE(review): Doxygen-dump artifact — source lines 43 (signature), 53,
// 57-58, 64, and 77 are missing from this extraction; comments below hedge
// where the elided lines would determine behavior.
// Checks the feedback vector for a tiering request or cached optimized code
// and tail-calls into the runtime or the optimized code when found;
// otherwise falls through to the caller.
 44 TNode<JSFunction> function, TNode<FeedbackVector> feedback_vector) {
 45 Label fallthrough(this), may_have_optimized_code(this),
 46 maybe_needs_logging(this);
 47
 48 TNode<Uint16T> flags =
 49 LoadObjectField<Uint16T>(feedback_vector, FeedbackVector::kFlagsOffset);
 50
 51 // Fall through if no optimization trigger or optimized code.
 52 constexpr uint32_t kFlagMask =
// (missing line 53 — presumably
// `FeedbackVector::FlagMaskForNeedsProcessingCheckFrom(`, per the
// reference index; verify against the original source.)
 54 CodeKind::INTERPRETED_FUNCTION);
 55 GotoIfNot(IsSetWord32(flags, kFlagMask), &fallthrough);
 56
// (missing lines 57-58 — presumably a GotoIfNot on
// `kFlagsTieringStateIsAnyRequested` guarding the CompileOptimized call
// below; verify against the original source.)
 59 &maybe_needs_logging);
 60 GenerateTailCallToReturnedCode(Runtime::kCompileOptimized, function);
 61
 62 BIND(&maybe_needs_logging);
 63 {
// (missing line 64 — presumably a GotoIfNot on `kFlagsLogNextExecution`;
// verify against the original source.)
 65 &may_have_optimized_code);
 66 GenerateTailCallToReturnedCode(Runtime::kFunctionLogNextExecution,
 67 function);
 68 }
 69
 70 BIND(&may_have_optimized_code);
 71 {
 72 Label heal_optimized_code_slot(this);
 73 TNode<MaybeObject> maybe_optimized_code_entry = LoadMaybeWeakObjectField(
 74 feedback_vector, FeedbackVector::kMaybeOptimizedCodeOffset);
 75
 76 // Optimized code slot is a weak reference to Code object.
// (missing line 77 — presumably the declaration of `code_wrapper` from
// `GetHeapObjectAssumeWeak(`; the weak ref bails to the heal label when
// cleared.)
 78 maybe_optimized_code_entry, &heal_optimized_code_slot));
 79 TNode<Code> optimized_code =
 80 LoadCodePointerFromObject(code_wrapper, CodeWrapper::kCodeOffset);
 81
 82 // Check if the optimized code is marked for deopt. If it is, call the
 83 // runtime to clear it.
 84 GotoIf(IsMarkedForDeoptimization(optimized_code),
 85 &heal_optimized_code_slot);
 86
 87 // Optimized code is good, get it into the closure and link the closure into
 88 // the optimized functions list, then tail call the optimized code.
 89 StoreCodePointerField(function, JSFunction::kCodeOffset, optimized_code);
 90 Comment("MaybeTailCallOptimizedCodeSlot:: GenerateTailCallToJSCode");
 91 GenerateTailCallToJSCode(optimized_code, function);
 92
 93 // Optimized code slot contains deoptimized code, or the code is cleared
 94 // and tiering state hasn't yet been updated. Evict the code, update the
 95 // state and re-enter the closure's code.
 96 BIND(&heal_optimized_code_slot);
 97 GenerateTailCallToReturnedCode(Runtime::kHealOptimizedCodeSlot, function);
 98 }
 99
 100 // Fall-through if the optimized code cell is clear and the tiering state is
 101 // kNone.
 102 BIND(&fallthrough);
 103}
 104
 105#endif // !V8_ENABLE_LEAPTIERING
106
// NOTE(review): Doxygen-dump artifact — source lines 107 (signature, per the
// reference index: `void CompileLazy(TNode<JSFunction> function)`), 114, and
// 125 are missing from this extraction.
// Resolves the code to run for a not-yet-compiled function: uses the SFI's
// existing code (bytecode trampoline or baseline code) when available,
// checks the feedback vector for optimized code (non-leaptiering), and only
// falls back to Runtime::kCompileLazy when actual compilation is needed.
 108 // First lookup code, maybe we don't need to compile!
 109 Label compile_function(this, Label::kDeferred);
 110
 111 // Check the code object for the SFI. If SFI's code entry points to
 112 // CompileLazy, then we need to lazy compile regardless of the function or
 113 // tiering state.
// (missing line 114 — presumably `TNode<SharedFunctionInfo> shared =`;
// verify against the original source.)
 115 CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
 116 TVARIABLE(Uint16T, sfi_data_type);
 117 TNode<Code> sfi_code =
 118 GetSharedFunctionInfoCode(shared, &sfi_data_type, &compile_function);
 119
 120 TNode<HeapObject> feedback_cell_value = LoadFeedbackCellValue(function);
 121
 122 // If feedback cell isn't initialized, compile function
 123 GotoIf(IsUndefined(feedback_cell_value), &compile_function);
 124
// (missing line 125 — presumably the start of a CSA_DCHECK that sfi_code is
// not the CompileLazy builtin, using TaggedNotEqual/HeapConstantNoHole per
// the reference index; verify against the original source.)
 126 isolate(), CompileLazy))));
 127 USE(sfi_code);
 128#ifndef V8_ENABLE_LEAPTIERING
 129 // In the leaptiering case, the code is installed below, through the
 130 // InstallSFICode runtime function.
 131 StoreCodePointerField(function, JSFunction::kCodeOffset, sfi_code);
 132#endif // V8_ENABLE_LEAPTIERING
 133
 134 Label maybe_use_sfi_code(this);
 135 // If there is no feedback, don't check for optimized code.
 136 GotoIf(HasInstanceType(feedback_cell_value, CLOSURE_FEEDBACK_CELL_ARRAY_TYPE),
 137 &maybe_use_sfi_code);
 138
 139 // If it isn't undefined or fixed array it must be a feedback vector.
 140 CSA_DCHECK(this, IsFeedbackVector(feedback_cell_value));
 141
 142#ifndef V8_ENABLE_LEAPTIERING
 143 // Is there a tiering state or optimized code in the feedback vector?
 144 MaybeTailCallOptimizedCodeSlot(function, CAST(feedback_cell_value));
 145#endif // !V8_ENABLE_LEAPTIERING
 146 Goto(&maybe_use_sfi_code);
 147
 148 // At this point we have a candidate InstructionStream object. It's *not* a
 149 // cached optimized InstructionStream object (we'd have tail-called it above).
 150 // A usual case would be the InterpreterEntryTrampoline to start executing
 151 // existing bytecode.
 152 BIND(&maybe_use_sfi_code);
 153#ifdef V8_ENABLE_LEAPTIERING
 154 // In the leaptiering case, we now simply install the code of the SFI on the
 155 // function's dispatch table entry and call it. Installing the code is
 156 // necessary as the dispatch table entry may still contain the CompileLazy
 157 // builtin at this point (we can only update dispatch table code from C++).
 158 GenerateTailCallToReturnedCode(Runtime::kInstallSFICode, function);
 159#else
 160 Label tailcall_code(this), baseline(this);
 161 TVARIABLE(Code, code);
 162
 163 // Check if we have baseline code.
 164 GotoIf(InstanceTypeEqual(sfi_data_type.value(), CODE_TYPE), &baseline);
 165
 166 code = sfi_code;
 167 Goto(&tailcall_code);
 168
 169 BIND(&baseline);
 170 // Ensure we have a feedback vector.
 171 code = Select<Code>(
 172 IsFeedbackVector(feedback_cell_value), [=]() { return sfi_code; },
 173 [=, this]() {
 174 return CAST(CallRuntime(Runtime::kInstallBaselineCode,
 175 Parameter<Context>(Descriptor::kContext),
 176 function));
 177 });
 178 Goto(&tailcall_code);
 179
 180 BIND(&tailcall_code);
 181 GenerateTailCallToJSCode(code.value(), function);
 182#endif // V8_ENABLE_LEAPTIERING
 183
// Deferred slow path: actually compile via the runtime, then tail-call the
// returned code.
 184 BIND(&compile_function);
 185 GenerateTailCallToReturnedCode(Runtime::kCompileLazy, function);
 186}
187
// NOTE(review): Doxygen-dump artifact — source line 188 (presumably
// `TF_BUILTIN(CompileLazy, LazyBuiltinsAssembler) {`) is missing from this
// extraction. The builtin simply delegates to the CompileLazy helper above.
 189 auto function = Parameter<JSFunction>(Descriptor::kTarget);
 190
 191 CompileLazy(function);
 192}
193
 194#ifdef V8_ENABLE_LEAPTIERING
 195
// NOTE(review): Doxygen-dump artifact — source lines 207 and 218 are missing
// from this extraction; comments below hedge where they matter.
// Common trampoline body for the tiering builtins: reads the JS calling
// convention parameters, invokes `Impl` (a runtime call that performs the
// actual tiering and must uninstall this tiering builtin), then re-enters
// the function through the code currently in its dispatch table entry.
 196template <typename Function>
 197void LazyBuiltinsAssembler::TieringBuiltinImpl(const Function& Impl) {
 198 auto function = Parameter<JSFunction>(Descriptor::kTarget);
 199 auto context = Parameter<Context>(Descriptor::kContext);
 200 auto argc = UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
 201 auto new_target = Parameter<Object>(Descriptor::kNewTarget);
 202
 203#ifdef V8_JS_LINKAGE_INCLUDES_DISPATCH_HANDLE
 204 auto dispatch_handle =
 205 UncheckedParameter<JSDispatchHandleT>(Descriptor::kDispatchHandle);
 206#else
// (missing line 207 — content unknown from this dump; verify against the
// original source.)
 208 auto dispatch_handle = LoadObjectField<JSDispatchHandleT>(
 209 function, JSFunction::kDispatchHandleOffset);
 210#endif
 211
 212 // Apply the actual tiering. This function must uninstall the tiering builtin.
 213 Impl(context, function);
 214
 215 // The dispatch handle of the function shouldn't change.
 216 CSA_DCHECK(this,
 217 Word32Equal(dispatch_handle,
// (missing line 218 — presumably `LoadObjectField<JSDispatchHandleT>(`
// re-reading the handle for the comparison; verify against the original
// source.)
 219 function, JSFunction::kDispatchHandleOffset)));
 220
 221 // Load the code directly from the dispatch table to guarantee the signature
 222 // of the code matches with the number of arguments passed when calling into
 223 // this trampoline.
 224 // TODO(saelo): consider removing the {code} parameter from TailCallJSCode
 225 // entirely and only passing the dispatch_handle.
 226 TNode<Code> code = LoadCodeObjectFromJSDispatchTable(dispatch_handle);
 227 TailCallJSCode(code, context, function, new_target, argc, dispatch_handle);
 228}
229
230TF_BUILTIN(FunctionLogNextExecution, LazyBuiltinsAssembler) {
231 TieringBuiltinImpl([&](TNode<Context> context, TNode<JSFunction> function) {
232 CallRuntime(Runtime::kFunctionLogNextExecution, context, function);
233 });
234}
235
236TF_BUILTIN(StartMaglevOptimizeJob, LazyBuiltinsAssembler) {
237 TieringBuiltinImpl([&](TNode<Context> context, TNode<JSFunction> function) {
238 CallRuntime(Runtime::kStartMaglevOptimizeJob, context, function);
239 });
240}
241
242TF_BUILTIN(StartTurbofanOptimizeJob, LazyBuiltinsAssembler) {
243 TieringBuiltinImpl([&](TNode<Context> context, TNode<JSFunction> function) {
244 CallRuntime(Runtime::kStartTurbofanOptimizeJob, context, function);
245 });
246}
247
248TF_BUILTIN(OptimizeMaglevEager, LazyBuiltinsAssembler) {
249 TieringBuiltinImpl([&](TNode<Context> context, TNode<JSFunction> function) {
250 CallRuntime(Runtime::kOptimizeMaglevEager, context, function);
251 });
252}
253
254TF_BUILTIN(OptimizeTurbofanEager, LazyBuiltinsAssembler) {
255 TieringBuiltinImpl([&](TNode<Context> context, TNode<JSFunction> function) {
256 CallRuntime(Runtime::kOptimizeTurbofanEager, context, function);
257 });
258}
259
260TF_BUILTIN(MarkLazyDeoptimized, LazyBuiltinsAssembler) {
261 TieringBuiltinImpl([&](TNode<Context> context, TNode<JSFunction> function) {
262 CallRuntime(Runtime::kMarkLazyDeoptimized, context, function,
263 /* reoptimize */ SmiConstant(false));
264 });
265}
266
267TF_BUILTIN(MarkReoptimizeLazyDeoptimized, LazyBuiltinsAssembler) {
268 TieringBuiltinImpl([&](TNode<Context> context, TNode<JSFunction> function) {
269 CallRuntime(Runtime::kMarkLazyDeoptimized, context, function,
270 /* reoptimize */ SmiConstant(true));
271 });
272}
273
274#else
275
276TF_BUILTIN(CompileLazyDeoptimizedCode, LazyBuiltinsAssembler) {
277 auto function = Parameter<JSFunction>(Descriptor::kTarget);
278
279 TNode<Code> code = HeapConstantNoHole(BUILTIN_CODE(isolate(), CompileLazy));
280 // Set the code slot inside the JSFunction to CompileLazy.
281 StoreCodePointerField(function, JSFunction::kCodeOffset, code);
282 GenerateTailCallToJSCode(code, function);
283}
284
285#endif // V8_ENABLE_LEAPTIERING
286
288
289} // namespace internal
290} // namespace v8
#define BIND(label)
#define TVARIABLE(...)
#define CSA_DCHECK(csa,...)
#define TF_BUILTIN(Name, AssemblerBase)
#define BUILTIN_CODE(isolate, name)
Definition builtins.h:45
TNode< MaybeObject > LoadMaybeWeakObjectField(TNode< HeapObject > object, int offset)
TNode< JSDispatchHandleT > InvalidDispatchHandleConstant()
TNode< BoolT > InstanceTypeEqual(TNode< Int32T > instance_type, int type)
void StoreCodePointerField(TNode< HeapObject > object, int offset, TNode< Code > value)
TNode< Code > LoadCodePointerFromObject(TNode< HeapObject > object, int offset)
TNode< T > LoadObjectField(TNode< HeapObject > object, int offset)
TNode< BoolT > TaggedNotEqual(TNode< AnyTaggedT > a, TNode< AnyTaggedT > b)
TNode< Code > GetSharedFunctionInfoCode(TNode< SharedFunctionInfo > shared_info, TVariable< Uint16T > *data_type_out=nullptr, Label *if_compile_lazy=nullptr)
TNode< BoolT > IsSetWord32(TNode< Word32T > word32)
TNode< HeapObject > LoadFeedbackCellValue(TNode< JSFunction > closure)
TNode< BoolT > HasInstanceType(TNode< HeapObject > object, InstanceType type)
TNode< T > Select(TNode< BoolT > condition, const NodeGenerator< T > &true_body, const NodeGenerator< T > &false_body, BranchHint branch_hint=BranchHint::kNone)
TNode< BoolT > IsMarkedForDeoptimization(TNode< Code > code)
TNode< HeapObject > GetHeapObjectAssumeWeak(TNode< MaybeObject > value)
static constexpr uint32_t kFlagsTieringStateIsAnyRequested
static constexpr uint32_t kFlagsLogNextExecution
static constexpr uint32_t FlagMaskForNeedsProcessingCheckFrom(CodeKind code_kind)
void GenerateTailCallToJSCode(TNode< Code > code, TNode< JSFunction > function)
void GenerateTailCallToReturnedCode(Runtime::FunctionId function_id, TNode< JSFunction > function)
void CompileLazy(TNode< JSFunction > function)
void MaybeTailCallOptimizedCodeSlot(TNode< JSFunction > function, TNode< FeedbackVector > feedback_vector)
void Comment(MessageWithSourceLocation message, Args &&... args)
void GotoIfNot(TNode< IntegralT > condition, Label *false_label, GotoHint goto_hint=GotoHint::kNone)
void GotoIf(TNode< IntegralT > condition, Label *true_label, GotoHint goto_hint=GotoHint::kNone)
void TailCallJSCode(TNode< Code > code, TNode< Context > context, TNode< JSFunction > function, TNode< Object > new_target, TNode< Int32T > arg_count, TNode< JSDispatchHandleT > dispatch_handle)
TNode< Type > HeapConstantNoHole(Handle< Type > object)
TNode< BoolT > Word32Equal(TNode< Word32T > left, TNode< Word32T > right)
TNode< T > CallRuntime(Runtime::FunctionId function, TNode< Object > context, TArgs... args)
TNode< T > Parameter(int value, const SourceLocation &loc=SourceLocation::Current())
#define CAST(x)
#define V8_ENABLE_SANDBOX_BOOL
Definition globals.h:160
DirectHandle< Object > new_target
Definition execution.cc:75
Isolate * isolate
#define CHECK(condition)
Definition logging.h:124
#define USE(...)
Definition macros.h:293