v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
maglev-early-lowering-reducer-inl.h
Go to the documentation of this file.
1// Copyright 2024 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_COMPILER_TURBOSHAFT_MAGLEV_EARLY_LOWERING_REDUCER_INL_H_
6#define V8_COMPILER_TURBOSHAFT_MAGLEV_EARLY_LOWERING_REDUCER_INL_H_
7
8#include <optional>
9
18
20
22
// NOTE(review): This listing is a Doxygen "source browser" rendering, not the
// raw header: every code line carries its original line number fused onto the
// text, and original lines whose content was entirely hyperlinked were dropped
// by the extraction (visible as gaps in the embedded numbering, e.g. lines 50,
// 72, 80, 99, 109, 133, ...). Restore the missing lines from the upstream V8
// file before attempting to compile; comments below flag each visible gap.
23template <class Next>
24class MaglevEarlyLoweringReducer : public Next {
25 // This Reducer provides some helpers that are used during
26 // MaglevGraphBuildingPhase to lower some Maglev operators. Depending on what
27 // we decide going forward (regarding SimplifiedLowering for instance), we
28 // could introduce new Simplified or JS operations instead of using these
29 // helpers to lower, and turn the helpers into regular REDUCE methods in the
30 // new simplified lowering or in MachineLoweringReducer.
31
32 public:
33 TURBOSHAFT_REDUCER_BOILERPLATE(MaglevEarlyLowering)
34
  // Deoptimizes (kWrongInstanceType) unless `input`'s instance type equals
  // `first_instance_type` (when first == last) or lies within
  // [first_instance_type, last_instance_type]; optionally deopts on Smi first.
35 void CheckInstanceType(V<Object> input, V<FrameState> frame_state,
36 const FeedbackSource& feedback,
37 InstanceType first_instance_type,
38 InstanceType last_instance_type, bool check_smi) {
39 if (check_smi) {
40 __ DeoptimizeIf(__ IsSmi(input), frame_state,
41 DeoptimizeReason::kWrongInstanceType, feedback);
42 }
43
44 V<i::Map> map = __ LoadMapField(input);
45
46 if (first_instance_type == last_instance_type) {
47#if V8_STATIC_ROOTS_BOOL
48 if (InstanceTypeChecker::UniqueMapOfInstanceType(first_instance_type)) {
49 std::optional<RootIndex> expected_index =
   // NOTE(review): original line 50 (the initializer of `expected_index`,
   // presumably the UniqueMapOfInstanceType(first_instance_type) call) was
   // dropped by the extraction — confirm against the upstream file.
51 CHECK(expected_index.has_value());
52 Handle<HeapObject> expected_map =
53 Cast<HeapObject>(isolate_->root_handle(expected_index.value()));
54 __ DeoptimizeIfNot(__ TaggedEqual(map, __ HeapConstant(expected_map)),
55 frame_state, DeoptimizeReason::kWrongInstanceType,
56 feedback);
57 return;
58 }
59#endif // V8_STATIC_ROOTS_BOOL
60 V<Word32> instance_type = __ LoadInstanceTypeField(map);
61 __ DeoptimizeIfNot(__ Word32Equal(instance_type, first_instance_type),
62 frame_state, DeoptimizeReason::kWrongInstanceType,
63 feedback);
64 } else {
65 __ DeoptimizeIfNot(CheckInstanceTypeIsInRange(map, first_instance_type,
66 last_instance_type),
67 frame_state, DeoptimizeReason::kWrongInstanceType,
68 feedback);
69 }
70 }
71
  // NOTE(review): the signature line (original line 72) was dropped; per the
  // symbol index appended below this is
  //   V<InternalizedString> CheckedInternalizedString(V<Object> object,
  //       V<FrameState> frame_state, bool check_smi,
  //       const FeedbackSource& feedback)
  // Returns the internalized form of `object`, deoptimizing (kSmi/kWrongMap)
  // when it is not an internalized or thin string; thin strings are unwrapped.
  // Original lines 80 (Label<InternalizedString> done, presumably), 93 and 99
  // are also missing from this extract.
73 V<Object> object, V<FrameState> frame_state, bool check_smi,
74 const FeedbackSource& feedback) {
75 if (check_smi) {
76 __ DeoptimizeIf(__ IsSmi(object), frame_state, DeoptimizeReason::kSmi,
77 feedback);
78 }
79
81 V<Map> map = __ LoadMapField(object);
82 V<Word32> instance_type = __ LoadInstanceTypeField(map);
83
84 // Go to the slow path if this is a non-string, or a non-internalised
85 // string.
86 static_assert((kStringTag | kInternalizedTag) == 0);
87 IF (UNLIKELY(__ Word32BitwiseAnd(
88 instance_type, kIsNotStringMask | kIsNotInternalizedMask))) {
89 // Deopt if this isn't a string.
90 __ DeoptimizeIf(__ Word32BitwiseAnd(instance_type, kIsNotStringMask),
91 frame_state, DeoptimizeReason::kWrongMap, feedback);
92 // Deopt if this isn't a thin string.
94 __ DeoptimizeIfNot(__ Word32BitwiseAnd(instance_type, kThinStringTagBit),
95 frame_state, DeoptimizeReason::kWrongMap, feedback);
96 // Load internalized string from thin string.
97 V<InternalizedString> intern_string =
98 __ template LoadField<InternalizedString>(
100 GOTO(done, intern_string);
101 } ELSE {
102 GOTO(done, V<InternalizedString>::Cast(object));
103 }
104
105 BIND(done, result);
106 return result;
107 }
108
  // CheckValueEqualsString (signature on original line 109 dropped; see the
  // symbol index below): deopts unless `object` is a string whose contents
  // equal the expected internalized string `value`. The fast path is a direct
  // TaggedEqual against the expected constant; the slow path does a full
  // StringEqual and deopts (kNotAString / kWrongValue) on mismatch.
110 V<FrameState> frame_state,
111 const FeedbackSource& feedback) {
112 IF_NOT (LIKELY(__ TaggedEqual(object, __ HeapConstant(value.object())))) {
113 __ DeoptimizeIfNot(__ ObjectIsString(object), frame_state,
114 DeoptimizeReason::kNotAString, feedback);
115 V<Boolean> is_same_string_bool =
116 __ StringEqual(V<String>::Cast(object),
117 __ template HeapConstant<String>(value.object()));
118 __ DeoptimizeIf(
119 __ RootEqual(is_same_string_bool, RootIndex::kFalseValue, isolate_),
120 frame_state, DeoptimizeReason::kWrongValue, feedback);
121 }
122 }
123
  // Loads the side-data table of a script context and returns the entry for
  // slot `index`. NOTE(review): the offset arguments (original lines 127 and
  // 129-130) were dropped by the extraction.
124 V<Object> LoadScriptContextSideData(V<Context> script_context, int index) {
125 V<FixedArray> side_table = __ template LoadTaggedField<FixedArray>(
126 script_context,
128 return __ LoadTaggedField(side_table,
131 }
132
  // NOTE(review): the signature (original line 133) was dropped. The body
  // returns `side_data` itself when it is a Smi, otherwise reads the property
  // details from a ContextSidePropertyCell; presumably this is
  // LoadScriptContextPropertyFromSideData — confirm against upstream.
134 ScopedVar<Object> property(this, side_data);
135 IF_NOT (__ IsSmi(side_data)) {
136 property = __ LoadTaggedField(
137 side_data, ContextSidePropertyCell::kPropertyDetailsRawOffset);
138 }
139 return property;
140 }
141
  // LoadHeapNumberFromScriptContext (name per the symbol index below; the
  // signature's first line, original 142, was dropped): re-boxes a mutable
  // HeapInt32/HeapNumber script-context slot into a freshly allocated
  // HeapNumber so callers never observe in-place mutation.
  // NOTE(review): original line 146 (defining `property` from `data`) and the
  // Smi-marker constants on lines 152/161 are missing from this extract.
143 int index,
144 V<HeapNumber> heap_number) {
145 V<Object> data = __ LoadScriptContextSideData(script_context, index);
147 ScopedVar<HeapNumber> result(this, heap_number);
148 Label<> done(this);
149 if (v8_flags.script_context_mutable_heap_int32) {
150 IF (__ TaggedEqual(
151 property,
153 result = __ AllocateHeapNumberWithValue(
154 __ ChangeInt32ToFloat64(__ LoadHeapInt32Value(heap_number)),
155 isolate_->factory());
156 GOTO(done);
157 }
158 }
159 IF (__ TaggedEqual(
160 property,
162 result = __ AllocateHeapNumberWithValue(
163 __ LoadHeapNumberValue(heap_number), isolate_->factory());
164 }
165 GOTO(done);
166 BIND(done);
167 return result;
168 }
169
  // StoreScriptContextSlowPath (name per the symbol index below; signature
  // start on original line 170 dropped): deopt-checked store of `new_value`
  // into a script context slot, dispatching on the side-data property kind:
  // undefined side data and const slots deopt (kWrongValue); smi, mutable
  // int32 and mutable heap-number slots each verify the new value's
  // representation before storing. Jumps to `done` when the store was
  // performed here; falls through otherwise so the caller emits the store.
  // NOTE(review): original lines 180 (defining `property`), 197 (the
  // MutableInt32 marker constant) and 209 (deopt reason argument) are missing.
171 V<Object> old_value, V<Object> new_value,
172 V<Object> side_data,
173 V<FrameState> frame_state,
174 const FeedbackSource& feedback,
175 Label<>& done) {
176 // Check if Undefined.
177 __ DeoptimizeIf(
178 __ RootEqual(side_data, RootIndex::kUndefinedValue, isolate_),
179 frame_state, DeoptimizeReason::kWrongValue, feedback);
181 // Check for const case.
182 __ DeoptimizeIf(
183 __ TaggedEqual(property,
184 __ SmiConstant(ContextSidePropertyCell::Const())),
185 frame_state, DeoptimizeReason::kWrongValue, feedback);
186 if (v8_flags.script_context_mutable_heap_number) {
187 // Check for smi case.
188 IF (__ TaggedEqual(
189 property, __ SmiConstant(ContextSidePropertyCell::SmiMarker()))) {
190 __ DeoptimizeIfNot(__ IsSmi(new_value), frame_state,
191 DeoptimizeReason::kWrongValue, feedback);
192 } ELSE {
193 if (v8_flags.script_context_mutable_heap_int32) {
194 // Check for mutable heap int32 case.
195 IF (__ TaggedEqual(
196 property,
198 ScopedVar<Word32> number_value(this);
199 IF (__ IsSmi(new_value)) {
200 number_value = __ UntagSmi(V<Smi>::Cast(new_value));
201 } ELSE {
202 V<i::Map> map = __ LoadMapField(new_value);
203 __ DeoptimizeIfNot(
204 __ TaggedEqual(map,
205 __ HeapConstant(factory_->heap_number_map())),
206 frame_state, DeoptimizeReason::kWrongValue, feedback);
207 number_value = __ ChangeFloat64ToInt32OrDeopt(
208 __ LoadHeapNumberValue(V<HeapNumber>::Cast(new_value)),
210 feedback);
211 }
212 __ StoreField(old_value, AccessBuilder::ForHeapInt32Value(),
213 number_value);
214 GOTO(done);
215 }
216 }
217 // It must be a mutable heap number case.
218 ScopedVar<Float64> number_value(this);
219 IF (__ IsSmi(new_value)) {
220 number_value =
221 __ ChangeInt32ToFloat64(__ UntagSmi(V<Smi>::Cast(new_value)));
222 } ELSE {
223 V<i::Map> map = __ LoadMapField(new_value);
224 __ DeoptimizeIfNot(
225 __ TaggedEqual(map, __ HeapConstant(factory_->heap_number_map())),
226 frame_state, DeoptimizeReason::kWrongValue, feedback);
227 number_value = __ LoadHeapNumberValue(V<HeapNumber>::Cast(new_value));
228 }
229 __ StoreField(old_value, AccessBuilder::ForHeapNumberValue(),
230 number_value);
231 GOTO(done);
232 }
233 }
234 }
235
  // CheckConstructResult (name per the symbol index below; signature start on
  // original line 236 dropped): implements the [[Construct]] result
  // selection — returns `construct_result` when it is an object in the ECMA
  // sense, otherwise the implicit receiver.
237 V<Object> implicit_receiver) {
238 // If the result is an object (in the ECMA sense), we should get rid
239 // of the receiver and use the result; see ECMA-262 version 5.1
240 // section 13.2.2-7 on page 74.
241 Label<Object> done(this);
242
243 GOTO_IF(
244 __ RootEqual(construct_result, RootIndex::kUndefinedValue, isolate_),
245 done, implicit_receiver);
246
247 // If the result is a smi, it is *not* an object in the ECMA sense.
248 GOTO_IF(__ IsSmi(construct_result), done, implicit_receiver);
249
250 // Check if the type of the result is not an object in the ECMA sense.
251 GOTO_IF(JSAnyIsNotPrimitive(V<HeapObject>::Cast(construct_result)), done,
252 construct_result);
253
254 // Throw away the result of the constructor invocation and use the
255 // implicit receiver as the result.
256 GOTO(done, implicit_receiver);
257
258 BIND(done, result);
259 return result;
260 }
261
  // CheckDerivedConstructResult (name per the symbol index below; signature
  // lines 262/264 dropped): calls the ThrowConstructorReturnedNonObject
  // runtime function (and marks the path unreachable) when a derived
  // constructor's result is not an object in the ECMA sense.
  // NOTE(review): original line 275 — the GOTO_IF condition, presumably
  // JSAnyIsNotPrimitive on the result — is missing from this extract.
263 V<FrameState> frame_state,
265 LazyDeoptOnThrow lazy_deopt_on_throw) {
266 // The result of a derived construct should be an object (in the ECMA
267 // sense).
268 Label<> do_throw(this);
269 Label<> end(this);
270
271 // If the result is a smi, it is *not* an object in the ECMA sense.
272 GOTO_IF(UNLIKELY(__ IsSmi(construct_result)), do_throw);
273
274 // Check if the type of the result is not an object in the ECMA sense.
276 end);
277 GOTO(do_throw);
278
279 BIND(do_throw);
280 {
281 __ CallRuntime_ThrowConstructorReturnedNonObject(
282 isolate_, frame_state, native_context, lazy_deopt_on_throw);
283 // ThrowConstructorReturnedNonObject should not return.
284 __ Unreachable();
285 }
286
287 BIND(end);
288 }
289
  // UpdateJSArrayLength (name per the symbol index below; signature start on
  // original line 290 dropped): returns the array length as a Smi, growing it
  // to index+1 (stored back into JSArray::kLengthOffset without a write
  // barrier — Smi stores need none) when `index` >= current length.
291 V<Word32> index) {
292 Label<Smi> done(this);
293 IF (__ Uint32LessThan(index, length_raw)) {
294 GOTO(done, __ TagSmi(length_raw));
295 } ELSE {
296 V<Word32> new_length_raw =
297 __ Word32Add(index, 1); // This cannot overflow.
298 V<Smi> new_length_tagged = __ TagSmi(new_length_raw);
299 __ Store(object, new_length_tagged, StoreOp::Kind::TaggedBase(),
301 WriteBarrierKind::kNoWriteBarrier, JSArray::kLengthOffset);
302 GOTO(done, new_length_tagged);
303 }
304
305 BIND(done, length_tagged);
306 return length_tagged;
307 }
308
  // TransitionMultipleElementsKind (name per the symbol index below; signature
  // start on original line 309 dropped): wraps TransitionElementsKind and
  // returns the resulting map (target map when a transition fired, otherwise
  // the original map via the fall-through GOTO).
310 V<Object> object, V<Map> map,
311 const ZoneVector<compiler::MapRef>& transition_sources,
312 const MapRef transition_target) {
313 Label<Map> end(this);
314
315 TransitionElementsKind(object, map, transition_sources, transition_target,
316 end);
317 GOTO(end, map);
318 BIND(end, result);
319 return result;
320 }
321
  // TransitionElementsKind (signature start on original line 322 dropped):
  // for each source map, if `map` matches, transitions `object` to the target
  // map — via a simple map store for simple transitions, otherwise through the
  // TransitionElementsKind runtime call — and jumps to `end` with target_map.
323 V<Object> object, V<Map> map,
324 const ZoneVector<compiler::MapRef>& transition_sources,
325 const MapRef transition_target, Label<Map>& end) {
326 // Turboshaft's TransitionElementsKind operation loads the map everytime, so
327 // we don't call it to have a single map load (in practice,
328 // LateLoadElimination should probably eliminate the subsequent map loads,
329 // but let's not risk it).
330 V<Map> target_map = __ HeapConstant(transition_target.object());
331
332 for (const compiler::MapRef transition_source : transition_sources) {
333 bool is_simple = IsSimpleMapChangeTransition(
334 transition_source.elements_kind(), transition_target.elements_kind());
335 IF (__ TaggedEqual(map, __ HeapConstant(transition_source.object()))) {
336 if (is_simple) {
337 __ StoreField(object, AccessBuilder::ForMap(), target_map);
338 } else {
339 __ CallRuntime_TransitionElementsKind(
340 isolate_, __ NoContextConstant(), V<HeapObject>::Cast(object),
341 target_map);
342 }
343 GOTO(end, target_map);
344 }
345 }
346 }
347
  // NOTE(review): signature (original line 348) dropped — this is the
  // JSAnyIsNotPrimitive predicate used by CheckConstructResult above. With
  // static roots it compares the map's compressed address against a bound
  // whose constant operand (original line 355) is also missing; otherwise it
  // compares instance types against FIRST_JS_RECEIVER_TYPE.
349 V<Map> map = __ LoadMapField(heap_object);
350 if constexpr (V8_STATIC_ROOTS_BOOL) {
351 // All primitive object's maps are allocated at the start of the read only
352 // heap. Thus JS_RECEIVER's must have maps with larger (compressed)
353 // addresses.
354 return __ Uint32LessThanOrEqual(
356 __ TruncateWordPtrToWord32(__ BitcastTaggedToWordPtr(map)));
357 } else {
358 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
359 return __ Uint32LessThanOrEqual(FIRST_JS_RECEIVER_TYPE,
360 __ LoadInstanceTypeField(map));
361 }
362 }
363
  // HasInPrototypeChain (name per the symbol index below; signature lines
  // 364/366 dropped): walks `object`'s prototype chain comparing against the
  // expected prototype, returning true_value/false_value; proxies and maps
  // with named interceptors or access checks fall back to the
  // HasInPrototypeChain runtime call.
  // NOTE(review): original lines 382-383 (special-receiver range check before
  // `call_runtime`) and 405 (the prototype field accessor argument) are
  // missing from this extract.
365 V<FrameState> frame_state,
367 LazyDeoptOnThrow lazy_deopt_on_throw) {
368 Label<Boolean> done(this);
369
370 V<Boolean> true_bool = __ HeapConstant(factory_->true_value());
371 V<Boolean> false_bool = __ HeapConstant(factory_->false_value());
372 V<HeapObject> target_proto = __ HeapConstant(prototype.object());
373
374 GOTO_IF(__ IsSmi(object), done, false_bool);
375
376 LoopLabel<Map> loop(this);
377 GOTO(loop, __ LoadMapField(object));
378
379 BIND_LOOP(loop, map) {
380 Label<> object_is_direct(this);
381
384 Label<> call_runtime(this);
385 V<Word32> instance_type = __ LoadInstanceTypeField(map);
386
387 GOTO_IF(__ Word32Equal(instance_type, JS_PROXY_TYPE), call_runtime);
388
389 V<Word32> bitfield =
390 __ template LoadField<Word32>(map, AccessBuilder::ForMapBitField());
391 int mask = Map::Bits1::HasNamedInterceptorBit::kMask |
392 Map::Bits1::IsAccessCheckNeededBit::kMask;
393 GOTO_IF_NOT(__ Word32BitwiseAnd(bitfield, mask), object_is_direct);
394 GOTO(call_runtime);
395
396 BIND(call_runtime);
397 GOTO(done, __ CallRuntime_HasInPrototypeChain(
398 isolate_, frame_state, native_context,
399 lazy_deopt_on_throw, object, target_proto));
400 }
401 GOTO(object_is_direct);
402
403 BIND(object_is_direct);
404 V<HeapObject> proto = __ template LoadField<HeapObject>(
406 GOTO_IF(__ RootEqual(proto, RootIndex::kNullValue, isolate_), done,
407 false_bool);
408 GOTO_IF(__ TaggedEqual(proto, target_proto), done, true_bool);
409
410 GOTO(loop, __ LoadMapField(proto));
411 }
412
413 BIND(done, result);
414 return result;
415 }
416
  // MigrateMapIfNeeded (name per the symbol index below; signature start on
  // original line 417 dropped): when `map` is deprecated, calls the
  // TryMigrateInstance runtime function, deopts (kInstanceMigrationFailed) if
  // migration failed (runtime returned a Smi), and returns the reloaded map;
  // otherwise returns `map` unchanged.
418 V<FrameState> frame_state,
419 const FeedbackSource& feedback) {
420 ScopedVar<Map> result(this, map);
421
422 V<Word32> bitfield3 =
423 __ template LoadField<Word32>(map, AccessBuilder::ForMapBitField3());
424 IF (UNLIKELY(__ Word32BitwiseAnd(bitfield3,
425 Map::Bits3::IsDeprecatedBit::kMask))) {
426 V<Object> object_or_smi = __ CallRuntime_TryMigrateInstance(
427 isolate_, __ NoContextConstant(), object);
428 __ DeoptimizeIf(__ ObjectIsSmi(object_or_smi), frame_state,
429 DeoptimizeReason::kInstanceMigrationFailed, feedback);
430 // Reload the map since TryMigrateInstance might have changed it.
431 result = __ LoadMapField(V<HeapObject>::Cast(object_or_smi));
432 }
433
434 return result;
435 }
436
  // ExtendPropertiesBackingStore (name per the symbol index below; signature
  // start on original line 437 dropped): allocates a PropertyArray grown by
  // JSObject::kFieldsAdded slots, copies the old slots, fills the new slots
  // with undefined, preserves the identity hash in the length-and-hash field,
  // installs the new array on `object`, and returns it.
  // NOTE(review): several argument lines were dropped by the extraction
  // (original 444-445, 454, 461, 472, 475, 481, 483, 490, 497) — mostly the
  // AccessBuilder accessors and shift/mask constants.
438 V<PropertyArray> old_property_array, V<JSObject> object, int old_length,
439 V<FrameState> frame_state, const FeedbackSource& feedback) {
440 // Allocate new PropertyArray.
441 int new_length = old_length + JSObject::kFieldsAdded;
442 Uninitialized<PropertyArray> new_property_array =
443 __ template Allocate<PropertyArray>(
446 __ InitializeField(new_property_array, AccessBuilder::ForMap(),
447 __ HeapConstant(factory_->property_array_map()));
448
449 // Copy existing properties over.
450 for (int i = 0; i < old_length; i++) {
451 V<Object> old_value = __ template LoadField<Object>(
452 old_property_array, AccessBuilder::ForPropertyArraySlot(i));
453 __ InitializeField(new_property_array,
455 }
456
457 // Initialize new properties to undefined.
458 V<Undefined> undefined = __ HeapConstant(factory_->undefined_value());
459 for (int i = 0; i < JSObject::kFieldsAdded; ++i) {
460 __ InitializeField(new_property_array,
462 undefined);
463 }
464
465 // Read the hash.
466 ScopedVar<Word32> hash(this);
467 if (old_length == 0) {
468 // The object might still have a hash, stored in properties_or_hash. If
469 // properties_or_hash is a SMI, then it's the hash. It can also be an
470 // empty PropertyArray.
471 V<Object> hash_obj = __ template LoadField<Object>(
473 IF (__ IsSmi(hash_obj)) {
474 hash = __ Word32ShiftLeft(__ UntagSmi(V<Smi>::Cast(hash_obj)),
476 } ELSE {
477 hash = __ Word32Constant(PropertyArray::kNoHashSentinel);
478 }
479 } else {
480 V<Smi> hash_smi = __ template LoadField<Smi>(
482 hash = __ Word32BitwiseAnd(__ UntagSmi(hash_smi),
484 }
485
486 // Add the new length and write the length-and-hash field.
487 static_assert(PropertyArray::LengthField::kShift == 0);
488 V<Word32> length_and_hash = __ Word32BitwiseOr(hash, new_length);
489 __ InitializeField(new_property_array,
491 __ TagSmi(length_and_hash));
492
493 V<PropertyArray> initialized_new_property_array =
494 __ FinishInitialization(std::move(new_property_array));
495
496 // Replace the old property array in {object}.
498 initialized_new_property_array);
499
500 return initialized_new_property_array;
501 }
502
  // GeneratorStore (name per the symbol index below; signature start on
  // original line 503 dropped): spills `parameters_and_registers` into the
  // generator's parameters-and-registers FixedArray and records the
  // continuation (suspend_id), input-or-debug-pos (bytecode_offset) and
  // context on the JSGeneratorObject.
  // NOTE(review): the Store kind/representation argument lines (original
  // 511-513, 516-517, 520-521, 525-526) were dropped by the extraction.
504 base::SmallVector<OpIndex, 32> parameters_and_registers,
505 int suspend_id, int bytecode_offset) {
506 V<FixedArray> array = __ template LoadTaggedField<FixedArray>(
507 generator, JSGeneratorObject::kParametersAndRegistersOffset);
508 for (int i = 0; static_cast<size_t>(i) < parameters_and_registers.size();
509 i++) {
510 __ Store(array, parameters_and_registers[i], StoreOp::Kind::TaggedBase(),
514 }
515 __ Store(generator, __ SmiConstant(Smi::FromInt(suspend_id)),
518 JSGeneratorObject::kContinuationOffset);
519 __ Store(generator, __ SmiConstant(Smi::FromInt(bytecode_offset)),
522 JSGeneratorObject::kInputOrDebugPosOffset);
523
524 __ Store(generator, context, StoreOp::Kind::TaggedBase(),
527 JSGeneratorObject::kContextOffset);
528 }
529
530 private:
  // CheckInstanceTypeIsInRange (name per the symbol index below; signature
  // start on original line 531 dropped): branch-free range check — in
  // unsigned arithmetic, (type - first) <= (last - first) holds exactly when
  // type is in [first, last], so one comparison covers the whole range.
532 InstanceType first_instance_type,
533 InstanceType last_instance_type) {
534 V<Word32> instance_type = __ LoadInstanceTypeField(map);
535
536 if (first_instance_type == 0) {
537 return __ Uint32LessThanOrEqual(instance_type, last_instance_type);
538 } else {
539 return __ Uint32LessThanOrEqual(
540 __ Word32Sub(instance_type, first_instance_type),
541 last_instance_type - first_instance_type);
542 }
543 }
544
  // NOTE(review): original lines 545-548 — the data members referenced
  // throughout (isolate_, factory_, and presumably the broker) — were dropped
  // by the extraction; restore them from the upstream file.
549};
550
552
553} // namespace v8::internal::compiler::turboshaft
554
555#endif // V8_COMPILER_TURBOSHAFT_MAGLEV_EARLY_LOWERING_REDUCER_INL_H_
// NOTE(review): Everything below is Doxygen cross-reference/tooltip residue
// appended by the HTML-to-text extraction: empty macro stubs for the
// assembler DSL (IF/ELSE/GOTO/BIND/...), stray member and function
// declarations, and "Definition file:line" location strings. It is NOT part
// of the original maglev-early-lowering-reducer-inl.h and is not valid C++;
// do not compile or edit it as code. The declarations are useful only as an
// index for recovering the method names whose signature lines were dropped
// from the listing above.
#define BIND(label)
#define ELSE
#define GOTO(label,...)
#define IF_NOT(...)
#define UNLIKELY(...)
#define LIKELY(...)
#define BIND_LOOP(loop_label,...)
#define GOTO_IF_NOT(cond, label,...)
#define IF(...)
#define GOTO_IF(cond, label,...)
union v8::internal::@341::BuiltinMetadata::KindSpecificData data
static constexpr U kMask
Definition bit-field.h:41
static constexpr int kShift
Definition bit-field.h:39
size_t size() const
static Tagged< Smi > MutableHeapNumber()
static V8_INLINE constexpr int OffsetOfElementAt(int index)
Definition contexts.h:512
v8::internal::Factory * factory()
Definition isolate.h:1527
LocalIsolate * AsLocalIsolate()
Definition isolate.h:2188
Handle< Object > root_handle(RootIndex index)
Definition isolate.h:1269
static const int kFieldsAdded
Definition js-objects.h:954
v8::internal::LocalFactory * factory()
static constexpr int SizeFor(int length)
static const int kNoHashSentinel
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
static FieldAccess ForMap(WriteBarrierKind write_barrier=kMapWriteBarrier)
static FieldAccess ForJSObjectPropertiesOrHash()
static FieldAccess ForPropertyArraySlot(int index)
static FieldAccess ForPropertyArrayLengthAndHash()
IndirectHandle< Map > object() const
ElementsKind elements_kind() const
void TransitionElementsKind(V< Object > object, V< Map > map, const ZoneVector< compiler::MapRef > &transition_sources, const MapRef transition_target, Label< Map > &end)
V< Smi > UpdateJSArrayLength(V< Word32 > length_raw, V< JSArray > object, V< Word32 > index)
void CheckValueEqualsString(V< Object > object, InternalizedStringRef value, V< FrameState > frame_state, const FeedbackSource &feedback)
V< Boolean > HasInPrototypeChain(V< Object > object, HeapObjectRef prototype, V< FrameState > frame_state, V< NativeContext > native_context, LazyDeoptOnThrow lazy_deopt_on_throw)
void CheckInstanceType(V< Object > input, V< FrameState > frame_state, const FeedbackSource &feedback, InstanceType first_instance_type, InstanceType last_instance_type, bool check_smi)
void CheckDerivedConstructResult(V< Object > construct_result, V< FrameState > frame_state, V< NativeContext > native_context, LazyDeoptOnThrow lazy_deopt_on_throw)
V< Word32 > CheckInstanceTypeIsInRange(V< Map > map, InstanceType first_instance_type, InstanceType last_instance_type)
V< Object > LoadScriptContextSideData(V< Context > script_context, int index)
V< InternalizedString > CheckedInternalizedString(V< Object > object, V< FrameState > frame_state, bool check_smi, const FeedbackSource &feedback)
void StoreScriptContextSlowPath(V< Context > script_context, V< Object > old_value, V< Object > new_value, V< Object > side_data, V< FrameState > frame_state, const FeedbackSource &feedback, Label<> &done)
V< Object > CheckConstructResult(V< Object > construct_result, V< Object > implicit_receiver)
V< Map > MigrateMapIfNeeded(V< HeapObject > object, V< Map > map, V< FrameState > frame_state, const FeedbackSource &feedback)
V< Object > LoadHeapNumberFromScriptContext(V< Context > script_context, int index, V< HeapNumber > heap_number)
V< PropertyArray > ExtendPropertiesBackingStore(V< PropertyArray > old_property_array, V< JSObject > object, int old_length, V< FrameState > frame_state, const FeedbackSource &feedback)
V< Map > TransitionMultipleElementsKind(V< Object > object, V< Map > map, const ZoneVector< compiler::MapRef > &transition_sources, const MapRef transition_target)
void GeneratorStore(V< Context > context, V< JSGeneratorObject > generator, base::SmallVector< OpIndex, 32 > parameters_and_registers, int suspend_id, int bytecode_offset)
static constexpr MemoryRepresentation AnyTagged()
static constexpr MemoryRepresentation TaggedSigned()
#define TURBOSHAFT_REDUCER_BOILERPLATE(Name)
Definition assembler.h:823
int end
Isolate * isolate
JSHeapBroker * broker
const std::string property
ZoneVector< RpoNumber > & result
uint32_t const mask
constexpr unsigned CountPopulation(T value)
Definition bits.h:26
V8_INLINE constexpr std::optional< RootIndex > UniqueMapOfInstanceType(InstanceType type)
static const Operator * IntPtrConstant(CommonOperatorBuilder *common, intptr_t value)
const uint32_t kThinStringTagBit
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:665
const uint32_t kStringTag
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
V8_EXPORT_PRIVATE FlagValues v8_flags
const uint32_t kInternalizedTag
const uint32_t kIsNotInternalizedMask
const uint32_t kIsNotStringMask
!IsContextMap !IsContextMap native_context
Definition map-inl.h:877
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
#define CHECK(condition)
Definition logging.h:124
#define V8_STATIC_ROOTS_BOOL
Definition v8config.h:1001