for (int i = 1; i < entries_per_slot; i++) {

if (!feedback.GetHeapObjectIfStrong(&heap_object)) return false;
if (IsString(heap_object)) {
  DCHECK(IsInternalizedString(heap_object));

if (!IsSymbol(heap_object)) return false;

return symbol != roots.uninitialized_symbol() &&
       symbol != roots.mega_dom_symbol() &&
       symbol != roots.megamorphic_symbol();
int data = get(index);

int data = get(index);

template <typename IsolateT>

auto* factory = isolate->factory();

return factory->empty_feedback_metadata();

for (int j = 1; j < entry_size; j++) {

metadata->SetKind(slot, kind);
return "LoadProperty";
return "LoadGlobalInsideTypeof";
return "LoadGlobalNotInsideTypeof";
return "SetNamedSloppy";
return "SetNamedStrict";
return "DefineNamedOwn";
return "DefineKeyedOwn";
return "StoreGlobalSloppy";
return "StoreGlobalStrict";
return "StoreKeyedSloppy";
return "StoreKeyedStrict";
return "StoreInArrayLiteral";
return "DefineKeyedOwnPropertyInLiteral";
return "CloneObject";
return metadata()->GetKind(slot);

return metadata(tag)->GetKind(slot);

int length = shared->feedback_metadata()->create_closure_slot_count();

return isolate->factory()->empty_closure_feedback_cell_array();

#ifdef V8_ENABLE_LEAPTIERING

shared->feedback_metadata()->GetCreateClosureParameterCount(i);

FeedbackCell::AllocateAndInstallJSDispatchHandle(

std::optional<DisallowGarbageCollection> no_gc;
Factory* factory = isolate->factory();

const int slot_count = feedback_metadata->slot_count();

shared, closure_feedback_cell_array, parent_feedback_cell);

DCHECK_EQ(vector->shared_function_info(), *shared);
DCHECK_EQ(vector->invocation_count(), 0);
#ifndef V8_ENABLE_LEAPTIERING
DCHECK_EQ(vector->tiering_state(), TieringState::kNone);
DCHECK(!vector->maybe_has_maglev_code());
DCHECK(!vector->maybe_has_turbofan_code());
DCHECK(vector->maybe_optimized_code().IsCleared());

*uninitialized_sentinel);
for (int i = 0; i < slot_count;) {

for (int j = 1; j < entry_size; j++) {

if (!isolate->is_best_effort_code_coverage()) {

isolate->factory()->NewSharedFunctionInfoForBuiltin(
    isolate->factory()->empty_string(), Builtin::kIllegal, 0, kDontAdapt);

shared->set_raw_outer_scope_info_or_feedback_metadata(*metadata);

isolate->factory()->NewNoClosuresCell();

parent_cell, &is_compiled_scope);

DCHECK(!isolate->is_best_effort_code_coverage());
if (!vector->shared_function_info()->IsSubjectToDebugging()) return;

isolate->factory()->feedback_vectors_for_profiling_tools());

isolate->SetFeedbackVectorsForProfilingTools(*list);
#ifdef V8_ENABLE_LEAPTIERING

void FeedbackVector::set_tiering_in_progress(bool in_progress) {
  set_flags(TieringInProgressBit::update(flags(), in_progress));

int32_t state = flags();

state = MaybeHasTurbofanCodeBit::update(state, false);

if (!v8_flags.stress_concurrent_inlining_attach_code &&

state = MaybeHasTurbofanCodeBit::update(state, false);

set_maybe_optimized_code(MakeWeak(code->wrapper()));
state = TieringStateBits::update(state, TieringState::kNone);
if (code->is_maglevved()) {
  DCHECK(!MaybeHasTurbofanCodeBit::decode(state));
  state = MaybeHasMaglevCodeBit::update(state, true);

DCHECK(code->is_turbofanned());
state = MaybeHasTurbofanCodeBit::update(state, true);
state = MaybeHasMaglevCodeBit::update(state, false);

if (code->marked_for_deoptimization()) {

int32_t new_flags = flags();
new_flags = TieringStateBits::update(new_flags, state);
set_flags(new_flags);
#ifdef V8_ENABLE_LEAPTIERING
    TieringInProgressBit::encode(false) |

    TieringStateBits::encode(TieringState::kNone) |
    LogNextExecutionBit::encode(false) |
    MaybeHasMaglevCodeBit::encode(false) |
    MaybeHasTurbofanCodeBit::encode(false) |

    OsrTieringInProgressBit::encode(false) |
    MaybeHasMaglevOsrCodeBit::encode(false) |
    MaybeHasTurbofanOsrCodeBit::encode(false));
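// The flag updates above all follow the same bit-field pattern: each boolean
// or small enum owns a fixed bit range inside one 32-bit flags word, and
// encode/decode/update shift and mask that range. Below is a minimal,
// self-contained sketch of that pattern; the BitField template and the field
// layout here are illustrative stand-ins, not V8's actual declarations.
#include <cassert>
#include <cstdint>

// Minimal stand-in for a bit-field helper: a field of `size` bits at `shift`.
template <typename T, int shift, int size, typename U = uint32_t>
struct BitField {
  static constexpr U kMask = ((U{1} << size) - 1) << shift;
  static constexpr U encode(T value) { return static_cast<U>(value) << shift; }
  static constexpr T decode(U word) {
    return static_cast<T>((word & kMask) >> shift);
  }
  static constexpr U update(U word, T value) {
    return (word & ~kMask) | encode(value);
  }
};

// Hypothetical layout: two flag bits and a 3-bit tiering state in one word.
enum class TieringState : uint32_t { kNone = 0, kRequested = 1, kInProgress = 2 };
using TieringInProgressBit = BitField<bool, 0, 1>;
using MaybeHasTurbofanCodeBit = BitField<bool, 1, 1>;
using TieringStateBits = BitField<TieringState, 2, 3>;

int main() {
  uint32_t flags = TieringInProgressBit::encode(false) |
                   MaybeHasTurbofanCodeBit::encode(false) |
                   TieringStateBits::encode(TieringState::kNone);
  // Updating one field leaves the other fields untouched.
  flags = TieringStateBits::update(flags, TieringState::kInProgress);
  flags = MaybeHasTurbofanCodeBit::update(flags, true);
  assert(TieringStateBits::decode(flags) == TieringState::kInProgress);
  assert(MaybeHasTurbofanCodeBit::decode(flags));
  assert(!TieringInProgressBit::decode(flags));
  return 0;
}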
if (V8_UNLIKELY(current && current.value()->kind() > code->kind())) {

return OsrTieringInProgressBit::decode(flags());

set_flags(OsrTieringInProgressBit::update(flags(), osr_in_progress));

if (!shared_function_info()->HasFeedbackMetadata()) return false;

bool feedback_updated = false;

if (obj != uninitialized_sentinel) {

feedback_updated |= nexus.Clear(behavior);

return feedback_updated;
#ifdef V8_TRACE_FEEDBACK_UPDATES

void FeedbackVector::TraceFeedbackChange(Isolate* isolate,

                                         const char* reason) {
  int slot_count = vector->metadata()->slot_count();

os << "[Feedback slots in ";

os << "[Feedback slot " << slot.ToInt() << "/" << slot_count << " ("

ShortPrint(vector->shared_function_info(), os);

os << " updated to ";
vector->FeedbackSlotPrint(os, slot);

os << reason << "]" << std::endl;
vector->Set(start_slot, feedback, mode);
vector->Set(start_slot.WithOffset(1), feedback_extra, mode_extra);

return std::make_pair(feedback, feedback_extra);

: vector_handle_(vector),

: vector_handle_(vector),

config()->isolate()->factory()->NewWeakFixedArray(length);

bool feedback_updated = false;

feedback_updated = true;

feedback_updated = true;

feedback_updated = true;

return feedback_updated;

bool update_required =
    feedback.first != sentinel || feedback.second != maybe_extra;
if (update_required) {

return update_required;
DCHECK(feedback.IsWeakOrCleared());

if (feedback.IsWeakOrCleared()) {

if (feedback.GetHeapObjectIfStrong(&heap_object)) {
  if (IsWeakFixedArray(heap_object)) {

if (IsName(heap_object)) {

reinterpret_cast<void*>(feedback.ptr()),
reinterpret_cast<void*>(extra.ptr()),
reinterpret_cast<void*>(vector_ptr),
reinterpret_cast<void*>(static_cast<intptr_t>(slot_.ToInt())),
reinterpret_cast<void*>(static_cast<intptr_t>(kind())),

reinterpret_cast<void*>(
} else if (feedback.IsWeakOrCleared()) {
  if (feedback.GetHeapObjectIfWeak(&heap_object)) {
    if (IsFeedbackCell(heap_object)) {

CHECK(IsJSFunction(heap_object) || IsJSBoundFunction(heap_object));

} else if (feedback.GetHeapObjectIfStrong(&heap_object) &&
           IsAllocationSite(heap_object)) {

static_cast<uint32_t>(feedback.ToSmi().value())) == 1) {

} else if (feedback.IsWeakOrCleared()) {

if (feedback.IsWeakOrCleared()) {

DCHECK(IsWeakFixedArray(feedback.GetHeapObjectAssumeStrong()));
bool shouldStressLexicalIC(int script_context_index, int context_slot_index) {
  return (script_context_index + context_slot_index) % 100 == 0;

int context_slot_index,

shouldStressLexicalIC(script_context_index, context_slot_index)) {

if (!ContextIndexBits::is_valid(script_context_index) ||
    !SlotIndexBits::is_valid(context_slot_index) ||
    !ImmutabilityBit::is_valid(immutable)) {

int config = ContextIndexBits::encode(script_context_index) |
             SlotIndexBits::encode(context_slot_index) |
             ImmutabilityBit::encode(immutable);
auto GetHandler = [=]() {
  if (IsSmi(*handler_handle)) {
    return *handler_handle;

if (feedback.is_null() || feedback.is_identical_to(source_map) ||
    Cast<Map>(*feedback)->is_deprecated()) {

raw_array->set(0, MakeWeak(*feedback));

raw_array->set(2, MakeWeak(*source_map));
raw_array->set(3, GetHandler());

const int kMaxElements = v8_flags.max_valid_polymorphic_map_count *

cached_map->is_deprecated())

if (i == kMaxElements) {

for (int j = 0; j < array->length(); ++j) {
  new_array->set(j, array->get(j));

array->set(i + 1, GetHandler());
uint32_t value = static_cast<uint32_t>(Smi::ToInt(call_count));

uint32_t value = static_cast<uint32_t>(Smi::ToInt(call_count));

uint32_t value = static_cast<uint32_t>(Smi::ToInt(call_count));

if (invocation_count == 0.0) {

return static_cast<float>(call_count / invocation_count);
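// The last two fragments compute a call frequency as the ratio of a slot's
// call count to the vector's invocation count, guarding against a zero
// invocation count. A small standalone sketch of that computation follows;
// the inputs and the zero-case return value are illustrative assumptions.
#include <cstdint>
#include <iostream>

// Hypothetical stand-in: call_count comes from a feedback slot, and
// invocation_count from the owning feedback vector.
float ComputeCallFrequency(uint32_t call_count, double invocation_count) {
  if (invocation_count == 0.0) {
    return 0.0f;  // Avoid dividing by zero when the function never ran.
  }
  return static_cast<float>(call_count / invocation_count);
}

int main() {
  std::cout << ComputeCallFrequency(42, 100.0) << "\n";  // 0.42
  std::cout << ComputeCallFrequency(7, 0.0) << "\n";     // 0
  return 0;
}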
if (name.is_null()) {

array->set(0, MakeWeak(*receiver_map));
array->set(1, *handler);

int receiver_count = static_cast<int>(maps_and_handlers.size());

for (int current = 0; current < receiver_count; ++current) {

array->set(current * 2, MakeWeak(*map));

array->set(current * 2 + 1, *handler);
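// The monomorphic and polymorphic configuration fragments above store
// feedback as a flat array of (weak map, handler) pairs: entry 2*i holds a
// weakly referenced receiver map and entry 2*i + 1 its handler. Below is a
// minimal sketch of that interleaved layout using plain pointers instead of
// V8's tagged weak references; all names here are illustrative stand-ins.
#include <cassert>
#include <cstddef>
#include <utility>
#include <vector>

// Illustrative stand-ins for a receiver map and its IC handler.
struct Map { const char* name; };
struct Handler { int id; };

// One slot of the flat feedback array; in V8 maps and handlers share a single
// WeakFixedArray, modeled here as a slot that can hold either kind of value.
struct Entry {
  const Map* map = nullptr;          // populated for even slots
  const Handler* handler = nullptr;  // populated for odd slots
};

// Builds the flat [map0, handler0, map1, handler1, ...] layout used above.
std::vector<Entry> BuildPolymorphicArray(
    const std::vector<std::pair<const Map*, const Handler*>>& maps_and_handlers) {
  std::vector<Entry> array(maps_and_handlers.size() * 2);
  for (size_t current = 0; current < maps_and_handlers.size(); ++current) {
    array[current * 2].map = maps_and_handlers[current].first;
    array[current * 2 + 1].handler = maps_and_handlers[current].second;
  }
  return array;
}

// Scans even slots for `map` and returns the handler stored next to it.
const Handler* FindHandlerForMap(const std::vector<Entry>& array,
                                 const Map* map) {
  for (size_t i = 0; i + 1 < array.size(); i += 2) {
    if (array[i].map == map) return array[i + 1].handler;
  }
  return nullptr;
}

int main() {
  Map a{"MapA"}, b{"MapB"};
  Handler ha{1}, hb{2};
  auto array = BuildPolymorphicArray({{&a, &ha}, {&b, &hb}});
  assert(FindHandlerForMap(array, &a) == &ha);
  assert(FindHandlerForMap(array, &b) == &hb);
  return 0;
}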
if (name.is_null()) {

maps->push_back(config()->NewHandle(it.map()));

if (map_handler && !(map_handler(map).ToHandle(&map))) {

if (it.map() == *map && !it.handler().IsCleared()) {
  return config()->NewHandle(it.handler());

return Cast<Name>(feedback.GetHeapObjectAssumeStrong());

return Cast<Name>(extra.GetHeapObjectAssumeStrong());

for (auto [_, handler] : maps_and_handlers) {
bool BuiltinHasKeyedAccessStoreMode(Builtin builtin) {

case Builtin::kKeyedStoreIC_SloppyArguments_InBounds:
case Builtin::kKeyedStoreIC_SloppyArguments_NoTransitionGrowAndHandleCOW:
case Builtin::kKeyedStoreIC_SloppyArguments_NoTransitionIgnoreTypedArrayOOB:
case Builtin::kKeyedStoreIC_SloppyArguments_NoTransitionHandleCOW:
case Builtin::kStoreFastElementIC_InBounds:
case Builtin::kStoreFastElementIC_NoTransitionGrowAndHandleCOW:
case Builtin::kStoreFastElementIC_NoTransitionIgnoreTypedArrayOOB:
case Builtin::kStoreFastElementIC_NoTransitionHandleCOW:
case Builtin::kElementsTransitionAndStore_InBounds:
case Builtin::kElementsTransitionAndStore_NoTransitionGrowAndHandleCOW:
case Builtin::kElementsTransitionAndStore_NoTransitionIgnoreTypedArrayOOB:
case Builtin::kElementsTransitionAndStore_NoTransitionHandleCOW:
DCHECK(BuiltinHasKeyedAccessStoreMode(builtin));

case Builtin::kKeyedStoreIC_SloppyArguments_InBounds:
case Builtin::kStoreFastElementIC_InBounds:
case Builtin::kElementsTransitionAndStore_InBounds:

case Builtin::kKeyedStoreIC_SloppyArguments_NoTransitionGrowAndHandleCOW:
case Builtin::kStoreFastElementIC_NoTransitionGrowAndHandleCOW:
case Builtin::kElementsTransitionAndStore_NoTransitionGrowAndHandleCOW:

case Builtin::kKeyedStoreIC_SloppyArguments_NoTransitionIgnoreTypedArrayOOB:
case Builtin::kStoreFastElementIC_NoTransitionIgnoreTypedArrayOOB:
case Builtin::kElementsTransitionAndStore_NoTransitionIgnoreTypedArrayOOB:

case Builtin::kKeyedStoreIC_SloppyArguments_NoTransitionHandleCOW:
case Builtin::kStoreFastElementIC_NoTransitionHandleCOW:
case Builtin::kElementsTransitionAndStore_NoTransitionHandleCOW:
for (auto [_, maybe_code_handler] : maps_and_handlers) {

if (IsStoreHandler(*maybe_code_handler.object())) {

if (IsSmi(data_handler->smi_handler())) {

builtin_handler = code->builtin_id();

} else if (IsSmi(*maybe_code_handler.object())) {

builtin_handler = code->builtin_id();

if (!BuiltinHasKeyedAccessStoreMode(builtin_handler)) continue;

mode = KeyedAccessStoreModeForBuiltin(builtin_handler);
if (feedback.GetHeapObjectIfWeak(&heap_object)) {

if ((feedback.GetHeapObjectIfStrong(&heap_object) &&
     IsWeakFixedArray(heap_object)) ||
    is_named_feedback) {

if (is_named_feedback) {

} else if (feedback.GetHeapObjectIfWeak(&heap_object)) {

while (index_ < length) {