v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
js-function-inl.h
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_JS_FUNCTION_INL_H_
#define V8_OBJECTS_JS_FUNCTION_INL_H_

#include "src/objects/js-function.h"
// Include the non-inl header before the rest of the headers.

#include <optional>

// Include other inline headers *after* including js-function.h, such that e.g.
// the definition of JSFunction is available (and this comment prevents
// clang-format from merging that include into the following ones).
#include "src/debug/debug.h"
#include "src/ic/ic.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
namespace v8::internal {

#include "torque-generated/src/objects/js-function-tq-inl.inc"

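// Constructor boilerplate for the Torque-generated parts of these classes.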
TQ_OBJECT_CONSTRUCTORS_IMPL(JSFunctionOrBoundFunctionOrWrappedFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSBoundFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSWrappedFunction)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSFunction)

ACCESSORS(JSFunction, raw_feedback_cell, Tagged<FeedbackCell>,
          kFeedbackCellOffset)
RELEASE_ACQUIRE_ACCESSORS(JSFunction, raw_feedback_cell, Tagged<FeedbackCell>,
                          kFeedbackCellOffset)

DEF_GETTER(JSFunction, feedback_vector, Tagged<FeedbackVector>) {
  DCHECK(has_feedback_vector(cage_base));
  return Cast<FeedbackVector>(raw_feedback_cell(cage_base)->value(cage_base));
}

Tagged<ClosureFeedbackCellArray> JSFunction::closure_feedback_cell_array()
    const {
  DCHECK(has_closure_feedback_cell_array());
  return Cast<ClosureFeedbackCellArray>(raw_feedback_cell()->value());
}

bool JSFunction::ChecksTieringState(IsolateForSandbox isolate) {
  return code(isolate)->checks_tiering_state();
}

void JSFunction::CompleteInobjectSlackTrackingIfActive() {
  if (!has_prototype_slot()) return;
  if (has_initial_map() && initial_map()->IsInobjectSlackTrackingInProgress()) {
    MapUpdater::CompleteInobjectSlackTracking(GetIsolate(), initial_map());
  }
}

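// Returns the unit of execution for the function's active tier: the shared
// bytecode array while it still runs in Ignition, otherwise the installed
// Code object.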
template <typename IsolateT>
Tagged<AbstractCode> JSFunction::abstract_code(IsolateT* isolate) {
  if (ActiveTierIsIgnition(isolate)) {
    return Cast<AbstractCode>(shared()->GetBytecodeArray(isolate));
  } else {
    return Cast<AbstractCode>(code(isolate, kAcquireLoad));
  }
}

int JSFunction::length() { return shared()->length(); }

void JSFunction::UpdateOptimizedCode(Isolate* isolate, Tagged<Code> code,
                                     WriteBarrierMode mode) {
  DisallowGarbageCollection no_gc;
  DCHECK(code->is_optimized_code());
#ifdef V8_ENABLE_LEAPTIERING
  if (code->is_context_specialized()) {
    // We can only set context-specialized code for single-closure cells.
    if (raw_feedback_cell()->map() !=
        ReadOnlyRoots(isolate).one_closure_cell_map()) {
      return;
    }
  }
  // Required for being able to deoptimize this code.
  code->set_js_dispatch_handle(dispatch_handle());
#endif  // V8_ENABLE_LEAPTIERING
  UpdateCodeImpl(code, mode, false);
}

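// Common tail for all code updates. With leaptiering, the new Code is
// installed in this function's JSDispatchTable entry (adopting the feedback
// cell's dispatch handle first if none has been allocated yet); without it,
// the Code pointer is written directly into the kCodeOffset field.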
void JSFunction::UpdateCodeImpl(Tagged<Code> value, WriteBarrierMode mode,
                                bool keep_tiering_request) {
  DisallowGarbageCollection no_gc;

#ifdef V8_ENABLE_LEAPTIERING
  JSDispatchHandle handle = dispatch_handle();
  if (handle == kNullJSDispatchHandle) {
    handle = raw_feedback_cell()->dispatch_handle();
    DCHECK_NE(handle, kNullJSDispatchHandle);
    set_dispatch_handle(handle, mode);
  }
  if (keep_tiering_request) {
    UpdateDispatchEntryKeepTieringRequest(value, mode);
  } else {
    UpdateDispatchEntry(value, mode);
  }

  if (V8_UNLIKELY(v8_flags.log_function_events)) {
    IsolateGroup::current()->js_dispatch_table()->SetTieringRequest(
        dispatch_handle(), TieringBuiltin::kFunctionLogNextExecution,
        GetIsolate());
  }
#else
  WriteCodePointerField(kCodeOffset, value);
  CONDITIONAL_CODE_POINTER_WRITE_BARRIER(*this, kCodeOffset, value, mode);

  if (V8_UNLIKELY(v8_flags.log_function_events && has_feedback_vector())) {
    feedback_vector()->set_log_next_execution(true);
  }
#endif  // V8_ENABLE_LEAPTIERING
}

void JSFunction::UpdateCode(Tagged<Code> code, WriteBarrierMode mode) {
  // Optimized code must go through UpdateOptimizedCode, which sets a
  // back-reference in the code object to the dispatch handle for
  // deoptimization.
  CHECK(!code->is_optimized_code());
  UpdateCodeImpl(code, mode, false);
}

void JSFunction::UpdateCodeKeepTieringRequests(Tagged<Code> code,
                                               WriteBarrierMode mode) {
  CHECK(!code->is_optimized_code());
  UpdateCodeImpl(code, mode, true);
}

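// With leaptiering, the currently installed Code is held in the
// IsolateGroup's JSDispatchTable and looked up through this function's
// dispatch handle; otherwise it is read from the code pointer field.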
Tagged<Code> JSFunction::code(IsolateForSandbox isolate) const {
#ifdef V8_ENABLE_LEAPTIERING
  return IsolateGroup::current()->js_dispatch_table()->GetCode(
      dispatch_handle());
#else
  return ReadCodePointerField(kCodeOffset, isolate);
#endif
}

Tagged<Code> JSFunction::code(IsolateForSandbox isolate,
                              AcquireLoadTag tag) const {
#ifdef V8_ENABLE_LEAPTIERING
  return IsolateGroup::current()->js_dispatch_table()->GetCode(
      dispatch_handle(tag));
#else
  return ReadCodePointerField(kCodeOffset, isolate);
#endif
}

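// Unlike code(), raw_code() tolerates a function whose dispatch entry has not
// been allocated yet and returns Smi::zero() in that case.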
Tagged<Object> JSFunction::raw_code(IsolateForSandbox isolate) const {
#if V8_ENABLE_LEAPTIERING
  JSDispatchHandle handle = dispatch_handle();
  if (handle == kNullJSDispatchHandle) return Smi::zero();
  return IsolateGroup::current()->js_dispatch_table()->GetCode(handle);
#elif V8_ENABLE_SANDBOX
  return RawIndirectPointerField(kCodeOffset, kCodeIndirectPointerTag)
      .Relaxed_Load(isolate);
#else
  return RELAXED_READ_FIELD(*this, JSFunction::kCodeOffset);
#endif  // V8_ENABLE_SANDBOX
}

Tagged<Object> JSFunction::raw_code(IsolateForSandbox isolate,
                                    AcquireLoadTag tag) const {
#if V8_ENABLE_LEAPTIERING
  JSDispatchHandle handle = dispatch_handle(tag);
  if (handle == kNullJSDispatchHandle) return Smi::zero();
  return IsolateGroup::current()->js_dispatch_table()->GetCode(handle);
#elif V8_ENABLE_SANDBOX
  return RawIndirectPointerField(kCodeOffset, kCodeIndirectPointerTag)
      .Acquire_Load(isolate);
#else
  return ACQUIRE_READ_FIELD(*this, JSFunction::kCodeOffset);
#endif  // V8_ENABLE_SANDBOX
}

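// Accessors for the dispatch handle, a 32-bit handle into the IsolateGroup's
// JSDispatchTable, stored as a raw field at kDispatchHandleOffset.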
#ifdef V8_ENABLE_LEAPTIERING
// static
JSDispatchHandle JSFunction::AllocateDispatchHandle(Handle<JSFunction> function,
                                                    Isolate* isolate,
                                                    uint16_t parameter_count,
                                                    Tagged<Code> code,
                                                    WriteBarrierMode mode) {
  DCHECK_EQ(function->raw_feedback_cell()->dispatch_handle(),
            kNullJSDispatchHandle);
  return AllocateAndInstallJSDispatchHandle(
      function, kDispatchHandleOffset, isolate, parameter_count, code, mode);
}

void JSFunction::clear_dispatch_handle() {
  WriteField<JSDispatchHandle::underlying_type>(kDispatchHandleOffset,
                                                kNullJSDispatchHandle.value());
}
void JSFunction::set_dispatch_handle(JSDispatchHandle handle,
                                     WriteBarrierMode mode) {
  Relaxed_WriteField<JSDispatchHandle::underlying_type>(kDispatchHandleOffset,
                                                        handle.value());
  CONDITIONAL_JS_DISPATCH_HANDLE_WRITE_BARRIER(*this, handle, mode);
}
void JSFunction::UpdateDispatchEntry(Tagged<Code> new_code,
                                     WriteBarrierMode mode) {
  JSDispatchHandle handle = dispatch_handle();
  IsolateGroup::current()->js_dispatch_table()->SetCodeNoWriteBarrier(handle,
                                                                      new_code);
  CONDITIONAL_JS_DISPATCH_HANDLE_WRITE_BARRIER(*this, handle, mode);
}
void JSFunction::UpdateDispatchEntryKeepTieringRequest(Tagged<Code> new_code,
                                                       WriteBarrierMode mode) {
  JSDispatchHandle handle = dispatch_handle();
  IsolateGroup::current()
      ->js_dispatch_table()
      ->SetCodeKeepTieringRequestNoWriteBarrier(handle, new_code);
  CONDITIONAL_JS_DISPATCH_HANDLE_WRITE_BARRIER(*this, handle, mode);
}
JSDispatchHandle JSFunction::dispatch_handle() const {
  return JSDispatchHandle(Relaxed_ReadField<JSDispatchHandle::underlying_type>(
      kDispatchHandleOffset));
}

JSDispatchHandle JSFunction::dispatch_handle(AcquireLoadTag tag) const {
  return JSDispatchHandle(Acquire_ReadField<JSDispatchHandle::underlying_type>(
      kDispatchHandleOffset));
}
#endif  // V8_ENABLE_LEAPTIERING

RELEASE_ACQUIRE_ACCESSORS(JSFunction, context, Tagged<Context>, kContextOffset)

Address JSFunction::instruction_start(IsolateForSandbox isolate) const {
  return code(isolate)->instruction_start();
}

// TODO(ishell): Why relaxed read but release store?
DEF_GETTER(JSFunction, shared, Tagged<SharedFunctionInfo>) {
  return shared(cage_base, kRelaxedLoad);
}

DEF_RELAXED_GETTER(JSFunction, shared, Tagged<SharedFunctionInfo>) {
  return TaggedField<SharedFunctionInfo,
                     kSharedFunctionInfoOffset>::Relaxed_Load(cage_base, *this);
}

void JSFunction::set_shared(Tagged<SharedFunctionInfo> value,
                            WriteBarrierMode mode) {
  // Release semantics to support acquire read in NeedsResetDueToFlushedBytecode
  RELEASE_WRITE_FIELD(*this, kSharedFunctionInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(*this, kSharedFunctionInfoOffset, value, mode);
}

bool JSFunction::tiering_in_progress() const {
#ifdef V8_ENABLE_LEAPTIERING
  if (!has_feedback_vector()) return false;
  return feedback_vector()->tiering_in_progress();
#else
  return IsInProgress(tiering_state());
#endif
}

bool JSFunction::IsTieringRequestedOrInProgress() const {
#ifdef V8_ENABLE_LEAPTIERING
  if (!has_feedback_vector()) return false;
  return tiering_in_progress() ||
         IsolateGroup::current()->js_dispatch_table()->IsTieringRequested(
             dispatch_handle());
#else
  return tiering_state() != TieringState::kNone;
#endif
}

bool JSFunction::IsLoggingRequested(Isolate* isolate) const {
#ifdef V8_ENABLE_LEAPTIERING
  return IsolateGroup::current()->js_dispatch_table()->IsTieringRequested(
      dispatch_handle(), TieringBuiltin::kFunctionLogNextExecution, isolate);
#else
  return feedback_vector()->log_next_execution();
#endif
}

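// With leaptiering, a pending tiering request is encoded by pointing the
// dispatch entry's entrypoint at one of the tiering builtins, so the request
// kind is recovered by comparing against each builtin's embedded entrypoint.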
bool JSFunction::IsMaglevRequested(Isolate* isolate) const {
#ifdef V8_ENABLE_LEAPTIERING
  JSDispatchTable* jdt = IsolateGroup::current()->js_dispatch_table();
  Address entrypoint = jdt->GetEntrypoint(dispatch_handle());
  const EmbeddedData& embedded_data = EmbeddedData::FromBlob(isolate);
#define CASE(name, ...)                                                        \
  if (entrypoint == embedded_data.InstructionStartOf(Builtin::k##name)) {      \
    DCHECK(jdt->IsTieringRequested(dispatch_handle(), TieringBuiltin::k##name, \
                                   isolate));                                  \
    return TieringBuiltin::k##name !=                                          \
           TieringBuiltin::kFunctionLogNextExecution;                          \
  }
  BUILTIN_LIST_BASE_TIERING_MAGLEV(CASE)
#undef CASE
  return {};
#else
  return IsRequestMaglev(tiering_state());
#endif
}

bool JSFunction::IsTurbofanRequested(Isolate* isolate) const {
#ifdef V8_ENABLE_LEAPTIERING
  JSDispatchTable* jdt = IsolateGroup::current()->js_dispatch_table();
  Address entrypoint = jdt->GetEntrypoint(dispatch_handle());
  const EmbeddedData& embedded_data = EmbeddedData::FromBlob(isolate);
#define CASE(name, ...)                                                        \
  if (entrypoint == embedded_data.InstructionStartOf(Builtin::k##name)) {      \
    DCHECK(jdt->IsTieringRequested(dispatch_handle(), TieringBuiltin::k##name, \
                                   isolate));                                  \
    return TieringBuiltin::k##name !=                                          \
           TieringBuiltin::kFunctionLogNextExecution;                          \
  }
  BUILTIN_LIST_BASE_TIERING_TURBOFAN(CASE)
#undef CASE
  return {};
#else
  return IsRequestTurbofan(tiering_state());
#endif
}

bool JSFunction::IsOptimizationRequested(Isolate* isolate) const {
  return IsMaglevRequested(isolate) || IsTurbofanRequested(isolate);
}

std::optional<CodeKind> JSFunction::GetRequestedOptimizationIfAny(
    Isolate* isolate, ConcurrencyMode mode) const {
#ifdef V8_ENABLE_LEAPTIERING
  JSDispatchTable* jdt = IsolateGroup::current()->js_dispatch_table();
  Address entrypoint = jdt->GetEntrypoint(dispatch_handle());
  const EmbeddedData& embedded_data = EmbeddedData::FromBlob(isolate);
  auto builtin = ([&]() -> std::optional<TieringBuiltin> {
#define CASE(name, ...)                                                        \
  if (entrypoint == embedded_data.InstructionStartOf(Builtin::k##name)) {      \
    DCHECK(jdt->IsTieringRequested(dispatch_handle(), TieringBuiltin::k##name, \
                                   isolate));                                  \
    return TieringBuiltin::k##name;                                            \
  }
    BUILTIN_LIST_BASE_TIERING(CASE)
#undef CASE
    DCHECK(!jdt->IsTieringRequested(dispatch_handle()));
    return {};
  })();
  if (V8_LIKELY(!builtin)) return {};
  switch (*builtin) {
    case TieringBuiltin::kOptimizeMaglevEager:
      if (mode == ConcurrencyMode::kSynchronous) return CodeKind::MAGLEV;
      break;
    case TieringBuiltin::kStartMaglevOptimizeJob:
      if (mode == ConcurrencyMode::kConcurrent) return CodeKind::MAGLEV;
      break;
    case TieringBuiltin::kOptimizeTurbofanEager:
      if (mode == ConcurrencyMode::kSynchronous) return CodeKind::TURBOFAN_JS;
      break;
    case TieringBuiltin::kStartTurbofanOptimizeJob:
      if (mode == ConcurrencyMode::kConcurrent) return CodeKind::TURBOFAN_JS;
      break;
    case TieringBuiltin::kMarkLazyDeoptimized:
    case TieringBuiltin::kMarkReoptimizeLazyDeoptimized:
    case TieringBuiltin::kFunctionLogNextExecution:
      break;
  }
#else
  switch (mode) {
    case ConcurrencyMode::kConcurrent:
      if (IsRequestTurbofan_Concurrent(tiering_state())) {
        return CodeKind::TURBOFAN_JS;
      }
      if (IsRequestMaglev_Concurrent(tiering_state())) {
        return CodeKind::MAGLEV;
      }
      break;
    case ConcurrencyMode::kSynchronous:
      if (IsRequestTurbofan_Synchronous(tiering_state())) {
        return CodeKind::TURBOFAN_JS;
      }
      if (IsRequestMaglev_Synchronous(tiering_state())) {
        return CodeKind::MAGLEV;
      }
      break;
  }
#endif  // !V8_ENABLE_LEAPTIERING
  return {};
}

void JSFunction::ResetTieringRequests() {
#ifdef V8_ENABLE_LEAPTIERING
  IsolateGroup::current()->js_dispatch_table()->ResetTieringRequest(
      dispatch_handle());
#else
  if (has_feedback_vector() && !tiering_in_progress()) {
    feedback_vector()->reset_tiering_state();
  }
#endif  // V8_ENABLE_LEAPTIERING
}

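// Tracks whether a tiering job is currently running for this function (or,
// for a non-none osr_offset, an OSR job); the flag is stored on the feedback
// vector in both configurations.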
void JSFunction::SetTieringInProgress(bool in_progress,
                                      BytecodeOffset osr_offset) {
  if (!has_feedback_vector()) return;
  if (osr_offset.IsNone()) {
#ifdef V8_ENABLE_LEAPTIERING
    bool was_in_progress = tiering_in_progress();
    feedback_vector()->set_tiering_in_progress(in_progress);
    if (!in_progress && was_in_progress) {
      SetInterruptBudget(GetIsolate(), BudgetModification::kRaise);
    }
#else
    if (in_progress) {
      feedback_vector()->set_tiering_state(TieringState::kInProgress);
    } else if (tiering_in_progress()) {
      feedback_vector()->reset_tiering_state();
      SetInterruptBudget(GetIsolate(), BudgetModification::kRaise);
    }
#endif  // V8_ENABLE_LEAPTIERING
  } else {
    feedback_vector()->set_osr_tiering_in_progress(in_progress);
  }
}

#ifndef V8_ENABLE_LEAPTIERING

TieringState JSFunction::tiering_state() const {
  if (!has_feedback_vector()) return TieringState::kNone;
  return feedback_vector()->tiering_state();
}

void JSFunction::set_tiering_state(IsolateForSandbox isolate,
                                   TieringState state) {
  DCHECK(has_feedback_vector());
  DCHECK(IsNone(state) || ChecksTieringState(isolate));
  feedback_vector()->set_tiering_state(state);
}

#endif  // !V8_ENABLE_LEAPTIERING

bool JSFunction::osr_tiering_in_progress() {
  DCHECK(has_feedback_vector());
  return feedback_vector()->osr_tiering_in_progress();
}

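// The feedback cell's value tells the two states apart: a full FeedbackVector
// once one has been allocated, or only a ClosureFeedbackCellArray beforehand.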
DEF_GETTER(JSFunction, has_feedback_vector, bool) {
  return shared(cage_base)->is_compiled() &&
         IsFeedbackVector(raw_feedback_cell(cage_base)->value(cage_base),
                          cage_base);
}

bool JSFunction::has_closure_feedback_cell_array() const {
  return shared()->is_compiled() &&
         IsClosureFeedbackCellArray(raw_feedback_cell()->value());
}

Tagged<Context> JSFunction::context() {
  return TaggedField<Context, kContextOffset>::load(*this);
}

DEF_RELAXED_GETTER(JSFunction, context, Tagged<Context>) {
  return TaggedField<Context, kContextOffset>::Relaxed_Load(cage_base, *this);
}

bool JSFunction::has_context() const {
  return IsContext(TaggedField<HeapObject, kContextOffset>::load(*this));
}

Tagged<JSGlobalProxy> JSFunction::global_proxy() {
  return context()->global_proxy();
}

Tagged<NativeContext> JSFunction::native_context() {
  return context()->native_context();
}

RELEASE_ACQUIRE_ACCESSORS_CHECKED(JSFunction, prototype_or_initial_map,
                                  Tagged<UnionOf<JSPrototype, Map, Hole>>,
                                  kPrototypeOrInitialMapOffset,
                                  map()->has_prototype_slot())

DEF_GETTER(JSFunction, has_prototype_slot, bool) {
  return map(cage_base)->has_prototype_slot();
}

DEF_GETTER(JSFunction, initial_map, Tagged<Map>) {
  return Cast<Map>(prototype_or_initial_map(cage_base, kAcquireLoad));
}

DEF_GETTER(JSFunction, has_initial_map, bool) {
  DCHECK(has_prototype_slot(cage_base));
  return IsMap(prototype_or_initial_map(cage_base, kAcquireLoad), cage_base);
}

DEF_GETTER(JSFunction, has_instance_prototype, bool) {
  DCHECK(has_prototype_slot(cage_base));
  return has_initial_map(cage_base) ||
         !IsTheHole(prototype_or_initial_map(cage_base, kAcquireLoad));
}

DEF_GETTER(JSFunction, has_prototype, bool) {
  DCHECK(has_prototype_slot(cage_base));
  return map(cage_base)->has_non_instance_prototype() ||
         has_instance_prototype(cage_base);
}

DEF_GETTER(JSFunction, has_prototype_property, bool) {
  return (has_prototype_slot(cage_base) && IsConstructor(*this, cage_base)) ||
         IsGeneratorFunction(shared(cage_base)->kind());
}

DEF_GETTER(JSFunction, PrototypeRequiresRuntimeLookup, bool) {
  return !has_prototype_property(cage_base) ||
         map(cage_base)->has_non_instance_prototype();
}

DEF_GETTER(JSFunction, instance_prototype, Tagged<JSPrototype>) {
  DCHECK(has_instance_prototype(cage_base));
  if (has_initial_map(cage_base)) {
    return initial_map(cage_base)->prototype(cage_base);
  }
  // When there is no initial map and the prototype is a JSReceiver, the
  // initial map field is used for the prototype field.
  return Cast<JSPrototype>(prototype_or_initial_map(cage_base, kAcquireLoad));
}

DEF_GETTER(JSFunction, prototype, Tagged<Object>) {
  DCHECK(has_prototype(cage_base));
  // If the function's prototype property has been set to a non-JSReceiver
  // value, that value is stored in the constructor field of the map.
  Tagged<Map> map = this->map(cage_base);
  if (map->has_non_instance_prototype()) {
    return map->GetNonInstancePrototype(cage_base);
  }
  return instance_prototype(cage_base);
}

bool JSFunction::is_compiled(IsolateForSandbox isolate) const {
  return code(isolate, kAcquireLoad)->builtin_id() != Builtin::kCompileLazy &&
         shared()->is_compiled();
}

bool JSFunction::NeedsResetDueToFlushedBytecode(Isolate* isolate) {
  // Do a raw read for shared and code fields here since this function may be
  // called on a concurrent thread. JSFunction itself should be fully
  // initialized here but the SharedFunctionInfo, Code objects may not be
  // initialized. We read using acquire loads to defend against that.
  // TODO(v8) the branches for !IsSharedFunctionInfo() and !IsCode() are
  // probably dead code by now. Investigate removing them or replacing them
  // with CHECKs.
  Tagged<Object> maybe_shared =
      ACQUIRE_READ_FIELD(*this, kSharedFunctionInfoOffset);
  if (!IsSharedFunctionInfo(maybe_shared)) return false;

  Tagged<Object> maybe_code = raw_code(isolate, kAcquireLoad);
  if (!IsCode(maybe_code)) return false;
  Tagged<Code> code = Cast<Code>(maybe_code);

  Tagged<SharedFunctionInfo> shared = Cast<SharedFunctionInfo>(maybe_shared);
  return !shared->is_compiled() &&
         (code->builtin_id() != Builtin::kCompileLazy ||
          // With leaptiering we can have CompileLazy as the code object but
          // still an optimization trampoline installed.
          (V8_ENABLE_LEAPTIERING_BOOL && IsTieringRequestedOrInProgress()));
}

bool JSFunction::NeedsResetDueToFlushedBaselineCode(IsolateForSandbox isolate) {
  return code(isolate)->kind() == CodeKind::BASELINE &&
         !shared()->HasBaselineCode();
}

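// Rolls the function back to a lazily-compilable state if its bytecode or
// baseline code has been flushed.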
void JSFunction::ResetIfCodeFlushed(
    Isolate* isolate,
    std::optional<std::function<void(Tagged<HeapObject> object, ObjectSlot slot,
                                     Tagged<HeapObject> target)>>
        gc_notify_updated_slot) {
  const bool kBytecodeCanFlush =
      v8_flags.flush_bytecode || v8_flags.stress_snapshot;
  const bool kBaselineCodeCanFlush =
      v8_flags.flush_baseline_code || v8_flags.stress_snapshot;
  if (!kBytecodeCanFlush && !kBaselineCodeCanFlush) return;

  DCHECK_IMPLIES(NeedsResetDueToFlushedBytecode(isolate), kBytecodeCanFlush);
  if (kBytecodeCanFlush && NeedsResetDueToFlushedBytecode(isolate)) {
    // Bytecode was flushed and function is now uncompiled, reset JSFunction
    // by setting code to CompileLazy and clearing the feedback vector.
    UpdateCode(*BUILTIN_CODE(isolate, CompileLazy));
    raw_feedback_cell()->reset_feedback_vector(gc_notify_updated_slot);
    return;
  }

  DCHECK_IMPLIES(NeedsResetDueToFlushedBaselineCode(isolate),
                 kBaselineCodeCanFlush);
  if (kBaselineCodeCanFlush && NeedsResetDueToFlushedBaselineCode(isolate)) {
    // Flush baseline code from the closure if required
    UpdateCode(*BUILTIN_CODE(isolate, InterpreterEntryTrampoline));
  }
}

}  // namespace v8::internal

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_JS_FUNCTION_INL_H_