v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
wasm-lowering-reducer.h
// Copyright 2023 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_TURBOSHAFT_WASM_LOWERING_REDUCER_H_
#define V8_COMPILER_TURBOSHAFT_WASM_LOWERING_REDUCER_H_

#if !V8_ENABLE_WEBASSEMBLY
#error This header should only be included if WebAssembly is enabled.
#endif  // !V8_ENABLE_WEBASSEMBLY

#include "src/compiler/access-builder.h"
#include "src/compiler/turboshaft/assembler.h"
#include "src/compiler/turboshaft/builtin-call-descriptors.h"
#include "src/compiler/turboshaft/index.h"
#include "src/compiler/turboshaft/operations.h"
#include "src/compiler/turboshaft/phase.h"
#include "src/compiler/turboshaft/wasm-assembler-helpers.h"
#include "src/compiler/wasm-graph-assembler.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-objects.h"
#include "src/wasm/wasm-subtyping.h"

namespace v8::internal::compiler::turboshaft {

#include "src/compiler/turboshaft/define-assembler-macros.inc"
template <class Next>
class WasmLoweringReducer : public Next {
 public:
  TURBOSHAFT_REDUCER_BOILERPLATE(WasmLowering)

  V<Any> REDUCE(GlobalGet)(V<WasmTrustedInstanceData> instance,
                           const wasm::WasmGlobal* global) {
    return LowerGlobalSetOrGet(instance, OpIndex::Invalid(), global,
                               GlobalMode::kLoad);
  }

  OpIndex REDUCE(GlobalSet)(V<WasmTrustedInstanceData> instance, V<Any> value,
                            const wasm::WasmGlobal* global) {
    return LowerGlobalSetOrGet(instance, value, global, GlobalMode::kStore);
  }
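
  // Both operations defer to the private LowerGlobalSetOrGet helper below.
  // As a rough sketch of the simplest case (a non-imported numeric global),
  // a `global.get` turns into a single raw aligned load off the instance's
  // globals area:
  //
  //   OpIndex base = LOAD_IMMUTABLE_INSTANCE_FIELD(
  //       instance, GlobalsStart, MemoryRepresentation::UintPtr());
  //   return __ Load(base, LoadOp::Kind::RawAligned(),
  //                  RepresentationFor(global->type, true), global->offset);
  //
  // Reference-typed and imported mutable globals instead go through tagged
  // buffers, with one extra indirection in the imported case.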

  OpIndex REDUCE(RootConstant)(RootIndex index) {
    OpIndex roots = __ LoadRootRegister();
    // We load the value as a pointer here and not as a TaggedPointer because
    // it is stored uncompressed in the IsolateData, and a load of a
    // TaggedPointer loads compressed pointers.
#if V8_TARGET_BIG_ENDIAN
    // On big endian a full pointer load is needed as otherwise the wrong half
    // of the 64 bit address is loaded.
    return __ BitcastWordPtrToTagged(__ Load(
        roots, LoadOp::Kind::RawAligned().Immutable(),
        MemoryRepresentation::UintPtr(),
        IsolateData::root_slot_offset(index)));
#else
    // On little endian a tagged load is enough and saves the bitcast.
    return __ Load(roots, LoadOp::Kind::RawAligned().Immutable(),
                   MemoryRepresentation::TaggedPointer(),
                   IsolateData::root_slot_offset(index));
#endif
  }

  V<Word32> REDUCE(IsRootConstant)(OpIndex object, RootIndex index) {
#if V8_STATIC_ROOTS_BOOL
    if (RootsTable::IsReadOnly(index)) {
      V<Object> root = V<Object>::Cast(__ UintPtrConstant(
          StaticReadOnlyRootsPointerTable[static_cast<size_t>(index)]));
      return __ TaggedEqual(object, root);
    }
#endif
    return __ TaggedEqual(object, __ RootConstant(index));
  }

  OpIndex REDUCE(Null)(wasm::ValueType type) {
    RootIndex index =
        type.use_wasm_null() ? RootIndex::kWasmNull : RootIndex::kNullValue;
    return ReduceRootConstant(index);
  }

  V<Word32> REDUCE(IsNull)(OpIndex object, wasm::ValueType type) {
    RootIndex index =
        type.use_wasm_null() ? RootIndex::kWasmNull : RootIndex::kNullValue;
    return ReduceIsRootConstant(object, index);
  }
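
  // Wasm-GC types use the dedicated WasmNull sentinel, while types that can
  // alias JavaScript values (e.g. externref) use the JS null value;
  // ValueType::use_wasm_null() encodes that distinction, so `ref.null` and
  // `ref.is_null` lower against the root matching the static type.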

  V<Object> REDUCE(AssertNotNull)(V<Object> object, wasm::ValueType type,
                                  TrapId trap_id) {
    if (trap_id == TrapId::kTrapNullDereference) {
      // Skip the check altogether if null checks are turned off.
      if (!v8_flags.experimental_wasm_skip_null_checks) {
        // Use an explicit null check if
        // (1) we cannot use trap handler or
        // (2) the object might be a Smi or
        // (3) the object might be a JS object.
        if (null_check_strategy_ == NullCheckStrategy::kExplicit ||
            wasm::IsSubtypeOf(wasm::kWasmI31Ref.AsNonNull(), type, module_) ||
            !type.use_wasm_null()) {
          __ TrapIf(__ IsNull(object, type), trap_id);
        } else {
          // Otherwise, load the word after the map word.
          static_assert(WasmStruct::kHeaderSize > kTaggedSize);
          static_assert(WasmArray::kHeaderSize > kTaggedSize);
          static_assert(WasmInternalFunction::kHeaderSize > kTaggedSize);
          __ Load(object, LoadOp::Kind::TrapOnNull().Immutable(),
                  MemoryRepresentation::Int32(), kTaggedSize);
        }
      }
    } else {
      __ TrapIf(__ IsNull(object, type), trap_id);
    }
    return object;
  }
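
  // The TrapOnNull branch is the implicit null check: the WasmNull object is
  // backed by an inaccessible memory region, so the Int32 load at offset
  // kTaggedSize (the word right after the map) faults for null, and the trap
  // handler turns the fault into kTrapNullDereference. The static_asserts
  // guarantee the probed offset is in-bounds for every non-null wasm object
  // (struct, array, or internal function), so the probe is a no-op otherwise.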

  V<Map> REDUCE(RttCanon)(V<FixedArray> rtts,
                          wasm::ModuleTypeIndex type_index) {
    int map_offset =
        OFFSET_OF_DATA_START(FixedArray) + type_index.index * kTaggedSize;
    return __ Load(rtts, LoadOp::Kind::TaggedBase().Immutable(),
                   MemoryRepresentation::AnyTagged(), map_offset);
  }
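
  // `rtts` is the module's array of canonical runtime-type maps, so rtt.canon
  // is a single immutable indexed load. For example, with kTaggedSize == 4,
  // type index 3 reads the Map at OFFSET_OF_DATA_START(FixedArray) + 12.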

  V<Word32> REDUCE(WasmTypeCheck)(V<Object> object, OptionalV<Map> rtt,
                                  WasmTypeCheckConfig config) {
    if (rtt.has_value()) {
      return ReduceWasmTypeCheckRtt(object, rtt, config);
    } else {
      return ReduceWasmTypeCheckAbstract(object, config);
    }
  }

  V<Object> REDUCE(WasmTypeCast)(V<Object> object, OptionalV<Map> rtt,
                                 WasmTypeCheckConfig config) {
    if (rtt.has_value()) {
      return ReduceWasmTypeCastRtt(object, rtt, config);
    } else {
      return ReduceWasmTypeCastAbstract(object, config);
    }
  }
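
  // Dispatch note: casts and checks against a concrete user-defined type
  // carry an rtt (the type's canonical Map) and compare maps, falling back
  // to a supertype walk; abstract targets (eq, i31, struct, array, string,
  // the none types) need no rtt and test instance types instead. A check
  // materializes a Word32 boolean, while a cast returns the object and traps
  // with kTrapIllegalCast on failure.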

  V<Object> REDUCE(AnyConvertExtern)(V<Object> object) {
    Label<Object> end_label(&Asm());
    Label<> null_label(&Asm());
    Label<> smi_label(&Asm());
    Label<> int_to_smi_label(&Asm());
    Label<> heap_number_label(&Asm());

    constexpr int32_t kInt31MaxValue = 0x3fffffff;
    constexpr int32_t kInt31MinValue = -kInt31MaxValue - 1;

    GOTO_IF(__ IsNull(object, wasm::kWasmExternRef), null_label);
    GOTO_IF(__ IsSmi(object), smi_label);
    GOTO_IF(__ HasInstanceType(object, HEAP_NUMBER_TYPE), heap_number_label);
    // For anything else, just pass through the value.
    GOTO(end_label, object);

    BIND(null_label);
    GOTO(end_label, __ Null(wasm::kWasmAnyRef));

    // Canonicalize SMI.
    BIND(smi_label);
    if constexpr (SmiValuesAre31Bits()) {
      GOTO(end_label, object);
    } else {
      Label<> convert_to_heap_number_label(&Asm());
      V<Word32> int_value = __ UntagSmi(V<Smi>::Cast(object));

      // Convert to heap number if the int32 does not fit into an i31ref.
      GOTO_IF(__ Int32LessThan(__ Word32Constant(kInt31MaxValue), int_value),
              convert_to_heap_number_label);
      GOTO_IF(__ Int32LessThan(int_value, __ Word32Constant(kInt31MinValue)),
              convert_to_heap_number_label);
      GOTO(end_label, object);

      BIND(convert_to_heap_number_label);
      V<Object> heap_number = __ template WasmCallBuiltinThroughJumptable<
          BuiltinCallDescriptor::WasmInt32ToHeapNumber>({int_value});
      GOTO(end_label, heap_number);
    }

    // Convert HeapNumber to SMI if possible.
    BIND(heap_number_label);
    V<Float64> float_value =
        __ LoadHeapNumberValue(V<HeapNumber>::Cast(object));
    // Check range of float value.
    GOTO_IF(__ Float64LessThan(float_value, __ Float64Constant(kInt31MinValue)),
            end_label, object);
    GOTO_IF(__ Float64LessThan(__ Float64Constant(kInt31MaxValue), float_value),
            end_label, object);
    // Check if value is -0.
    V<Word32> is_minus_zero;
    if constexpr (Is64()) {
      V<Word64> minus_zero = __ Word64Constant(kMinusZeroBits);
      V<Word64> float_bits = __ BitcastFloat64ToWord64(float_value);
      is_minus_zero = __ Word64Equal(float_bits, minus_zero);
    } else {
      Label<Word32> done(&Asm());

      V<Word32> value_lo = __ Float64ExtractLowWord32(float_value);
      GOTO_IF_NOT(__ Word32Equal(value_lo, __ Word32Constant(kMinusZeroLoBits)),
                  done, __ Word32Constant(0));
      V<Word32> value_hi = __ Float64ExtractHighWord32(float_value);
      GOTO(done, __ Word32Equal(value_hi, __ Word32Constant(kMinusZeroHiBits)));
      BIND(done, phi_is_minus_zero);
      is_minus_zero = phi_is_minus_zero;
    }
    GOTO_IF(is_minus_zero, end_label, object);
    // Check if value is integral.
    V<Word32> int_value =
        __ TruncateFloat64ToInt32OverflowUndefined(float_value);
    GOTO_IF(__ Float64Equal(float_value, __ ChangeInt32ToFloat64(int_value)),
            int_to_smi_label);
    GOTO(end_label, object);

    BIND(int_to_smi_label);
    GOTO(end_label, __ TagSmi(int_value));

    BIND(end_label, result);
    return result;
  }
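
  // Worked example: an externref holding the HeapNumber 5.0 is within
  // [kInt31MinValue, kInt31MaxValue], is not -0, and is integral
  // (5.0 == ChangeInt32ToFloat64(5)), so it canonicalizes to the Smi 5.
  // 5.5 and -0.0 fail those tests and flow through unchanged, and JS null
  // becomes the wasm null sentinel.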

  V<Object> REDUCE(ExternConvertAny)(V<Object> object) {
    Label<Object> end(&Asm());
    GOTO_IF_NOT(__ IsNull(object, wasm::kWasmAnyRef), end, object);
    GOTO(end, __ Null(wasm::kWasmExternRef));
    BIND(end, result);
    return result;
  }

  V<Object> REDUCE(WasmTypeAnnotation)(V<Object> value, wasm::ValueType type) {
    // Remove type annotation operations as they are not needed any more.
    return value;
  }

  V<Any> REDUCE(StructGet)(V<WasmStructNullable> object,
                           const wasm::StructType* type,
                           wasm::ModuleTypeIndex type_index, int field_index,
                           bool is_signed, CheckForNull null_check) {
    auto [explicit_null_check, implicit_null_check] =
        null_checks_for_struct_op(null_check, field_index);

    if (explicit_null_check) {
      __ TrapIf(__ IsNull(object, wasm::kWasmAnyRef),
                TrapId::kTrapNullDereference);
    }

    LoadOp::Kind load_kind = implicit_null_check ? LoadOp::Kind::TrapOnNull()
                                                 : LoadOp::Kind::TaggedBase();
    if (!type->mutability(field_index)) {
      load_kind = load_kind.Immutable();
    }
    MemoryRepresentation repr =
        RepresentationFor(type->field(field_index), is_signed);

    return __ Load(object, load_kind, repr, field_offset(type, field_index));
  }

  V<None> REDUCE(StructSet)(V<WasmStructNullable> object, V<Any> value,
                            const wasm::StructType* type,
                            wasm::ModuleTypeIndex type_index, int field_index,
                            CheckForNull null_check) {
    auto [explicit_null_check, implicit_null_check] =
        null_checks_for_struct_op(null_check, field_index);

    if (explicit_null_check) {
      __ TrapIf(__ IsNull(object, wasm::kWasmAnyRef),
                TrapId::kTrapNullDereference);
    }

    StoreOp::Kind store_kind = implicit_null_check
                                   ? StoreOp::Kind::TrapOnNull()
                                   : StoreOp::Kind::TaggedBase();
    MemoryRepresentation repr =
        RepresentationFor(type->field(field_index), true);

    __ Store(object, value, store_kind, repr,
             type->field(field_index).is_reference() ? kFullWriteBarrier
                                                     : kNoWriteBarrier,
             field_offset(type, field_index));

    return OpIndex::Invalid();
  }
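
  // Field addressing goes through field_offset() below:
  // WasmStruct::kHeaderSize plus the type's precomputed per-field offset.
  // For (struct (field i64) (field i8)), field 0 sits at kHeaderSize + 0 and
  // field 1 at kHeaderSize + 8; packed i8/i16 fields additionally pick a
  // signed or unsigned representation based on struct.get_s vs struct.get_u.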

  V<Any> REDUCE(ArrayGet)(V<WasmArrayNullable> array, V<Word32> index,
                          const wasm::ArrayType* array_type, bool is_signed) {
    bool is_mutable = array_type->mutability();
    LoadOp::Kind load_kind = is_mutable
                                 ? LoadOp::Kind::TaggedBase()
                                 : LoadOp::Kind::TaggedBase().Immutable();
    return __ Load(array, __ ChangeInt32ToIntPtr(index), load_kind,
                   RepresentationFor(array_type->element_type(), is_signed),
                   WasmArray::kHeaderSize,
                   array_type->element_type().value_kind_size_log2());
  }

  V<None> REDUCE(ArraySet)(V<WasmArrayNullable> array, V<Word32> index,
                           V<Any> value, wasm::ValueType element_type) {
    __ Store(array, __ ChangeInt32ToIntPtr(index), value,
             LoadOp::Kind::TaggedBase(), RepresentationFor(element_type, true),
             element_type.is_reference() ? kFullWriteBarrier : kNoWriteBarrier,
             WasmArray::kHeaderSize, element_type.value_kind_size_log2());
    return {};
  }
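
  // The indexed Load/Store form computes the element address as
  //   array + kHeaderSize + (index << value_kind_size_log2()),
  // so element 3 of an (array i16) lives at kHeaderSize + (3 << 1) =
  // kHeaderSize + 6. Array bounds checks are emitted separately and are not
  // part of this lowering.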

  V<Word32> REDUCE(ArrayLength)(V<WasmArrayNullable> array,
                                CheckForNull null_check) {
    bool explicit_null_check =
        null_check == kWithNullCheck &&
        null_check_strategy_ == NullCheckStrategy::kExplicit;
    bool implicit_null_check =
        null_check == kWithNullCheck &&
        null_check_strategy_ == NullCheckStrategy::kTrapHandler;

    if (explicit_null_check) {
      __ TrapIf(__ IsNull(array, wasm::kWasmAnyRef),
                TrapId::kTrapNullDereference);
    }

    LoadOp::Kind load_kind = implicit_null_check
                                 ? LoadOp::Kind::TrapOnNull().Immutable()
                                 : LoadOp::Kind::TaggedBase().Immutable();

    return __ Load(array, load_kind, RepresentationFor(wasm::kWasmI32, true),
                   WasmArray::kLengthOffset);
  }

  V<WasmArray> REDUCE(WasmAllocateArray)(V<Map> rtt, V<Word32> length,
                                         const wasm::ArrayType* array_type) {
    __ TrapIfNot(
        __ Uint32LessThanOrEqual(
            length, __ Word32Constant(WasmArray::MaxLength(array_type))),
        TrapId::kTrapArrayTooLarge);
    wasm::ValueType element_type = array_type->element_type();

    // RoundUp(length * value_size, kObjectAlignment) =
    //   RoundDown(length * value_size + kObjectAlignment - 1,
    //             kObjectAlignment);
    V<Word32> padded_length = __ Word32BitwiseAnd(
        __ Word32Add(__ Word32Mul(length, __ Word32Constant(
                                              element_type.value_kind_size())),
                     __ Word32Constant(int32_t{kObjectAlignment - 1})),
        __ Word32Constant(int32_t{-kObjectAlignment}));
    Uninitialized<WasmArray> a = __ template Allocate<WasmArray>(
        __ ChangeUint32ToUintPtr(__ Word32Add(
            padded_length, __ Word32Constant(WasmArray::kHeaderSize))),
        AllocationType::kYoung);

    // TODO(14108): The map and empty fixed array initialization should be an
    // immutable store.
    __ InitializeField(a, AccessBuilder::ForMap(compiler::kNoWriteBarrier),
                       rtt);
    __ InitializeField(a, AccessBuilder::ForJSObjectPropertiesOrHash(),
                       LOAD_ROOT(EmptyFixedArray));
    __ InitializeField(a, AccessBuilder::ForWasmArrayLength(), length);

    // Note: Only the array header initialization is finished here, the elements
    // still need to be initialized by other code.
    V<WasmArray> array = __ FinishInitialization(std::move(a));
    return array;
  }
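
  // Size arithmetic, concretely: an (array i16) of length 5 has a 5 * 2 = 10
  // byte payload; taking kObjectAlignment == 8 as an example, the padded
  // length is (10 + 7) & -8 = 16, and the allocation size is then
  // 16 + WasmArray::kHeaderSize. The mask works because -kObjectAlignment
  // has all bits set except the low alignment bits.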

  V<WasmStruct> REDUCE(WasmAllocateStruct)(
      V<Map> rtt, const wasm::StructType* struct_type) {
    int size = WasmStruct::Size(struct_type);
    Uninitialized<WasmStruct> s =
        __ template Allocate<WasmStruct>(size, AllocationType::kYoung);
    __ InitializeField(s, AccessBuilder::ForMap(compiler::kNoWriteBarrier),
                       rtt);
    __ InitializeField(s, AccessBuilder::ForJSObjectPropertiesOrHash(),
                       LOAD_ROOT(EmptyFixedArray));
    // Note: Struct initialization isn't finished here, the user defined fields
    // still need to be initialized by other operations.
    V<WasmStruct> struct_value = __ FinishInitialization(std::move(s));
    return struct_value;
  }

  V<WasmFuncRef> REDUCE(WasmRefFunc)(V<WasmTrustedInstanceData> wasm_instance,
                                     uint32_t function_index) {
    V<FixedArray> func_refs = LOAD_IMMUTABLE_INSTANCE_FIELD(
        wasm_instance, FuncRefs, MemoryRepresentation::TaggedPointer());
    V<Object> maybe_func_ref =
        __ LoadFixedArrayElement(func_refs, function_index);

    Label<WasmFuncRef> done(&Asm());
    IF (UNLIKELY(__ IsSmi(maybe_func_ref))) {
      bool extract_shared_data =
          !shared_ && module_->function_is_shared(function_index);

      V<WasmFuncRef> from_builtin = __ template WasmCallBuiltinThroughJumptable<
          BuiltinCallDescriptor::WasmRefFunc>(
          {__ Word32Constant(function_index),
           __ Word32Constant(extract_shared_data ? 1 : 0)});

      GOTO(done, from_builtin);
    } ELSE {
      GOTO(done, V<WasmFuncRef>::Cast(maybe_func_ref));
    }

    BIND(done, result_value);
    return result_value;
  }

  V<String> REDUCE(StringAsWtf16)(V<String> string) {
    Label<String> done(&Asm());
    V<Word32> instance_type = __ LoadInstanceTypeField(__ LoadMapField(string));
    V<Word32> string_representation = __ Word32BitwiseAnd(
        instance_type, __ Word32Constant(kStringRepresentationMask));
    GOTO_IF(__ Word32Equal(string_representation, kSeqStringTag), done, string);

    GOTO(done, __ template WasmCallBuiltinThroughJumptable<
                   BuiltinCallDescriptor::WasmStringAsWtf16>({string}));
    BIND(done, result);
    return result;
  }

  OpIndex REDUCE(StringPrepareForGetCodeUnit)(V<Object> original_string) {
    LoopLabel<Object /*string*/, Word32 /*instance type*/, Word32 /*offset*/>
        dispatch(&Asm());
    Label<Object /*string*/, Word32 /*instance type*/, Word32 /*offset*/>
        direct_string(&Asm());

    // These values will be used to replace the original node's projections.
    // The first, "string", is either a SeqString or Tagged<Smi>(0) (in case of
    // external string). Notably this makes it GC-safe: if that string moves,
    // this pointer will be updated accordingly. The second, "offset", has full
    // register width so that it can be used to store external pointers: for
    // external strings, we add up the character backing store's base address
    // and any slice offset. The third, "character width", is a shift width,
    // i.e. it is 0 for one-byte strings, 1 for two-byte strings,
    // kCharWidthBailoutSentinel for uncached external strings (for which
    // "string"/"offset" are invalid and unusable).
    Label<Object /*string*/, WordPtr /*offset*/, Word32 /*character width*/>
        done(&Asm());

    V<Word32> original_type =
        __ LoadInstanceTypeField(__ LoadMapField(original_string));
    GOTO(dispatch, original_string, original_type, __ Word32Constant(0));

    BIND_LOOP(dispatch, string, instance_type, offset) {
      Label<> thin_string(&Asm());
      Label<> cons_string(&Asm());

      static_assert(kIsIndirectStringTag == 1);
      static constexpr int kIsDirectStringTag = 0;
      GOTO_IF(__ Word32Equal(
                  __ Word32BitwiseAnd(instance_type, kIsIndirectStringMask),
                  kIsDirectStringTag),
              direct_string, string, instance_type, offset);

      // Handle indirect strings.
      V<Word32> string_representation =
          __ Word32BitwiseAnd(instance_type, kStringRepresentationMask);
      GOTO_IF(__ Word32Equal(string_representation, kThinStringTag),
              thin_string);
      GOTO_IF(__ Word32Equal(string_representation, kConsStringTag),
              cons_string);

      // Sliced string.
      V<Word32> new_offset = __ Word32Add(
          offset, __ UntagSmi(__ template LoadField<Smi>(
                      string, AccessBuilder::ForSlicedStringOffset())));
      V<Object> parent = __ template LoadField<Object>(
          string, AccessBuilder::ForSlicedStringParent());
      V<Word32> parent_type = __ LoadInstanceTypeField(__ LoadMapField(parent));
      GOTO(dispatch, parent, parent_type, new_offset);

      // Thin string.
      BIND(thin_string);
      V<Object> actual = __ template LoadField<Object>(
          string, AccessBuilder::ForThinStringActual());
      V<Word32> actual_type = __ LoadInstanceTypeField(__ LoadMapField(actual));
      // ThinStrings always reference (internalized) direct strings.
      GOTO(direct_string, actual, actual_type, offset);

      // Flat cons string. (Non-flat cons strings are ruled out by
      // string.as_wtf16.)
      BIND(cons_string);
      V<Object> first = __ template LoadField<Object>(
          string, AccessBuilder::ForConsStringFirst());
      V<Word32> first_type = __ LoadInstanceTypeField(__ LoadMapField(first));
      GOTO(dispatch, first, first_type, offset);
    }
    {
      BIND(direct_string, string, instance_type, offset);

      V<Word32> is_onebyte =
          __ Word32BitwiseAnd(instance_type, kStringEncodingMask);
      // Char width shift is 1 - (is_onebyte).
      static_assert(kStringEncodingMask == 1 << 3);
      V<Word32> charwidth_shift =
          __ Word32Sub(1, __ Word32ShiftRightLogical(is_onebyte, 3));

      Label<> external(&Asm());
      V<Word32> string_representation =
          __ Word32BitwiseAnd(instance_type, kStringRepresentationMask);
      GOTO_IF(__ Word32Equal(string_representation, kExternalStringTag),
              external);

      // Sequential string.
      DCHECK_EQ(AccessBuilder::ForSeqOneByteStringCharacter().header_size,
                AccessBuilder::ForSeqTwoByteStringCharacter().header_size);
      const int chars_start_offset =
          AccessBuilder::ForSeqOneByteStringCharacter().header_size;
      V<Word32> final_offset =
          __ Word32Add(chars_start_offset - kHeapObjectTag,
                       __ Word32ShiftLeft(offset, charwidth_shift));
      GOTO(done, string, __ ChangeInt32ToIntPtr(final_offset), charwidth_shift);

      // External string.
      BIND(external);
      GOTO_IF(__ Word32BitwiseAnd(instance_type, kUncachedExternalStringMask),
              done, string, /*offset*/ 0, kCharWidthBailoutSentinel);
      FieldAccess field_access = AccessBuilder::ForExternalStringResourceData();
      V<WordPtr> resource = __ LoadExternalPointerFromObject(
          string, field_access.offset, field_access.external_pointer_tag);
      V<Word32> shifted_offset = __ Word32ShiftLeft(offset, charwidth_shift);
      V<WordPtr> final_offset_external =
          __ WordPtrAdd(resource, __ ChangeInt32ToIntPtr(shifted_offset));
      GOTO(done, __ SmiConstant(Smi::FromInt(0)), final_offset_external,
           charwidth_shift);
    }
    {
      BIND(done, base, final_offset, charwidth_shift);
      return __ Tuple({base, final_offset, charwidth_shift});
    }
  }
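
  // The tuple is the flattened view that subsequent per-code-unit loads
  // consume: (GC-safe base or Smi 0, byte offset or absolute external
  // address, log2 of the character width). For instance, a two-byte sliced
  // string with slice offset 4 over a sequential parent resolves to the
  // parent as base with final_offset = chars_start_offset - kHeapObjectTag +
  // (4 << 1).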

 private:
  enum class GlobalMode { kLoad, kStore };

  static constexpr MemoryRepresentation kMaybeSandboxedPointer =
      V8_ENABLE_SANDBOX_BOOL ? MemoryRepresentation::SandboxedPointer()
                             : MemoryRepresentation::UintPtr();

  MemoryRepresentation RepresentationFor(wasm::ValueType type, bool is_signed) {
    switch (type.kind()) {
      case wasm::kI8:
        return is_signed ? MemoryRepresentation::Int8()
                         : MemoryRepresentation::Uint8();
      case wasm::kI16:
        return is_signed ? MemoryRepresentation::Int16()
                         : MemoryRepresentation::Uint16();
      case wasm::kI32:
        return is_signed ? MemoryRepresentation::Int32()
                         : MemoryRepresentation::Uint32();
      case wasm::kI64:
        return is_signed ? MemoryRepresentation::Int64()
                         : MemoryRepresentation::Uint64();
      case wasm::kF16:
        return MemoryRepresentation::Float16();
      case wasm::kF32:
        return MemoryRepresentation::Float32();
      case wasm::kF64:
        return MemoryRepresentation::Float64();
      case wasm::kS128:
        return MemoryRepresentation::Simd128();
      case wasm::kRef:
      case wasm::kRefNull:
        return MemoryRepresentation::AnyTagged();
      case wasm::kVoid:
      case wasm::kTop:
      case wasm::kBottom:
        UNREACHABLE();
    }
  }

  V<Word32> ReduceWasmTypeCheckAbstract(V<Object> object,
                                        WasmTypeCheckConfig config) {
    const bool object_can_be_null = config.from.is_nullable();
    const bool null_succeeds = config.to.is_nullable();
    const bool object_can_be_i31 =
        wasm::IsSubtypeOf(wasm::kWasmI31Ref.AsNonNull(), config.from,
                          module_) ||
        config.from.heap_representation() == wasm::HeapType::kExtern;

    V<Word32> result;
    Label<Word32> end_label(&Asm());

    wasm::HeapType::Representation to_rep = config.to.heap_representation();
    do {
      // The none-types only perform a null check. They need no control flow.
      if (to_rep == wasm::HeapType::kNone ||
          to_rep == wasm::HeapType::kNoExtern ||
          to_rep == wasm::HeapType::kNoFunc ||
          to_rep == wasm::HeapType::kNoExn) {
        result = __ IsNull(object, config.from);
        break;
      }
      // Null checks performed by any other type check need control flow. We can
      // skip the null check if null fails, because it's covered by the Smi
      // check or instance type check we'll do later.
      if (object_can_be_null && null_succeeds) {
        const int kResult = 1;
        GOTO_IF(UNLIKELY(__ IsNull(object, wasm::kWasmAnyRef)), end_label,
                __ Word32Constant(kResult));
      }
      // i31 is special in that the Smi check is the last thing to do.
      if (to_rep == wasm::HeapType::kI31) {
        // If earlier optimization passes reached the limit of possible graph
        // transformations, we could DCHECK(object_can_be_i31) here.
        result = object_can_be_i31 ? __ IsSmi(object) : __ Word32Constant(0);
        break;
      }
      if (to_rep == wasm::HeapType::kEq) {
        if (object_can_be_i31) {
          GOTO_IF(UNLIKELY(__ IsSmi(object)), end_label, __ Word32Constant(1));
        }
        result = IsDataRefMap(__ LoadMapField(object));
        break;
      }
      // array, struct, string: i31 fails.
      if (object_can_be_i31) {
        GOTO_IF(UNLIKELY(__ IsSmi(object)), end_label, __ Word32Constant(0));
      }
      if (to_rep == wasm::HeapType::kArray) {
        result = __ HasInstanceType(object, WASM_ARRAY_TYPE);
        break;
      }
      if (to_rep == wasm::HeapType::kStruct) {
        result = __ HasInstanceType(object, WASM_STRUCT_TYPE);
        break;
      }
      if (to_rep == wasm::HeapType::kString ||
          to_rep == wasm::HeapType::kExternString) {
        V<Word32> instance_type =
            __ LoadInstanceTypeField(__ LoadMapField(object));
        result = __ Uint32LessThan(instance_type,
                                   __ Word32Constant(FIRST_NONSTRING_TYPE));
        break;
      }
      UNREACHABLE();
    } while (false);

    DCHECK(__ generating_unreachable_operations() || result.valid());
    GOTO(end_label, result);
    BIND(end_label, final_result);
    return final_result;
  }
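
  // The do { ... } while (false) merely provides a structured break target:
  // each abstract target type selects one cheap test (null check, Smi check,
  // instance-type check, or the map range check in IsDataRefMap) and breaks
  // out with `result`, while the GOTO_IFs jump to end_label directly. The
  // string case relies on all string instance types being allocated below
  // FIRST_NONSTRING_TYPE, so a single unsigned compare covers them all.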

  V<Object> ReduceWasmTypeCastAbstract(V<Object> object,
                                       WasmTypeCheckConfig config) {
    const bool object_can_be_null = config.from.is_nullable();
    const bool null_succeeds = config.to.is_nullable();
    const bool object_can_be_i31 =
        wasm::IsSubtypeOf(wasm::kWasmI31Ref.AsNonNull(), config.from,
                          module_) ||
        config.from.heap_representation() == wasm::HeapType::kExtern;

    Label<> end_label(&Asm());

    wasm::HeapType::Representation to_rep = config.to.heap_representation();

    do {
      // The none-types only perform a null check.
      if (to_rep == wasm::HeapType::kNone ||
          to_rep == wasm::HeapType::kNoExtern ||
          to_rep == wasm::HeapType::kNoFunc ||
          to_rep == wasm::HeapType::kNoExn) {
        __ TrapIfNot(__ IsNull(object, config.from), TrapId::kTrapIllegalCast);
        break;
      }
      // Null checks performed by any other type cast can be skipped if null
      // fails, because it's covered by the Smi check
      // or instance type check we'll do later.
      if (object_can_be_null && null_succeeds &&
          !v8_flags.experimental_wasm_skip_null_checks) {
        GOTO_IF(UNLIKELY(__ IsNull(object, config.from)), end_label);
      }
      if (to_rep == wasm::HeapType::kI31) {
        // If earlier optimization passes reached the limit of possible graph
        // transformations, we could DCHECK(object_can_be_i31) here.
        V<Word32> success =
            object_can_be_i31 ? __ IsSmi(object) : __ Word32Constant(0);
        __ TrapIfNot(success, TrapId::kTrapIllegalCast);
        break;
      }
      if (to_rep == wasm::HeapType::kEq) {
        if (object_can_be_i31) {
          GOTO_IF(UNLIKELY(__ IsSmi(object)), end_label);
        }
        __ TrapIfNot(IsDataRefMap(__ LoadMapField(object)),
                     TrapId::kTrapIllegalCast);
        break;
      }
      // array, struct, string: i31 fails.
      if (object_can_be_i31) {
        __ TrapIf(__ IsSmi(object), TrapId::kTrapIllegalCast);
      }
      if (to_rep == wasm::HeapType::kArray) {
        __ TrapIfNot(__ HasInstanceType(object, WASM_ARRAY_TYPE),
                     TrapId::kTrapIllegalCast);
        break;
      }
      if (to_rep == wasm::HeapType::kStruct) {
        __ TrapIfNot(__ HasInstanceType(object, WASM_STRUCT_TYPE),
                     TrapId::kTrapIllegalCast);
        break;
      }
      if (to_rep == wasm::HeapType::kString ||
          to_rep == wasm::HeapType::kExternString) {
        V<Word32> instance_type =
            __ LoadInstanceTypeField(__ LoadMapField(object));
        __ TrapIfNot(__ Uint32LessThan(instance_type,
                                       __ Word32Constant(FIRST_NONSTRING_TYPE)),
                     TrapId::kTrapIllegalCast);
        break;
      }
      UNREACHABLE();
    } while (false);

    GOTO(end_label);
    BIND(end_label);
    return object;
  }

  V<Object> ReduceWasmTypeCastRtt(V<Object> object, OptionalV<Map> rtt,
                                  WasmTypeCheckConfig config) {
    DCHECK(rtt.has_value());
    int rtt_depth = wasm::GetSubtypingDepth(module_, config.to.ref_index());
    bool object_can_be_null = config.from.is_nullable();
    bool object_can_be_i31 =
        wasm::IsSubtypeOf(wasm::kWasmI31Ref.AsNonNull(), config.from, module_);

    Label<> end_label(&Asm());
    bool is_cast_from_any = config.from.is_reference_to(wasm::HeapType::kAny);

    // If we are casting from any and null results in check failure, then the
    // {IsDataRefMap} check below subsumes the null check. Otherwise, perform
    // an explicit null check now.
    if (object_can_be_null && (!is_cast_from_any || config.to.is_nullable())) {
      V<Word32> is_null = __ IsNull(object, wasm::kWasmAnyRef);
      if (config.to.is_nullable()) {
        GOTO_IF(UNLIKELY(is_null), end_label);
      } else if (!v8_flags.experimental_wasm_skip_null_checks) {
        __ TrapIf(is_null, TrapId::kTrapIllegalCast);
      }
    }

    if (object_can_be_i31) {
      __ TrapIf(__ IsSmi(object), TrapId::kTrapIllegalCast);
    }

    V<Map> map = __ LoadMapField(object);

    DCHECK_IMPLIES(module_->type(config.to.ref_index()).is_final,
                   config.exactness == kExactMatchOnly);

    if (config.exactness == kExactMatchOnly) {
      __ TrapIfNot(__ TaggedEqual(map, rtt.value()), TrapId::kTrapIllegalCast);
      GOTO(end_label);
    } else {
      // First, check if types happen to be equal. This has been shown to give
      // large speedups.
      GOTO_IF(LIKELY(__ TaggedEqual(map, rtt.value())), end_label);

      // Check if map instance type identifies a wasm object.
      if (is_cast_from_any) {
        V<Word32> is_wasm_obj = IsDataRefMap(map);
        __ TrapIfNot(is_wasm_obj, TrapId::kTrapIllegalCast);
      }

      V<Object> type_info = LoadWasmTypeInfo(map);
      DCHECK_GE(rtt_depth, 0);
      // If the depth of the rtt is known to be less than the minimum supertype
      // array length, we can access the supertype without bounds-checking the
      // supertype array.
      if (static_cast<uint32_t>(rtt_depth) >=
          wasm::kMinimumSupertypeArraySize) {
        V<Word32> supertypes_length = __ UntagSmi(
            __ Load(type_info, LoadOp::Kind::TaggedBase().Immutable(),
                    MemoryRepresentation::TaggedSigned(),
                    WasmTypeInfo::kSupertypesLengthOffset));
        __ TrapIfNot(__ Uint32LessThan(rtt_depth, supertypes_length),
                     TrapId::kTrapIllegalCast);
      }

      V<Object> maybe_match =
          __ Load(type_info, LoadOp::Kind::TaggedBase().Immutable(),
                  MemoryRepresentation::AnyTagged(),
                  WasmTypeInfo::kSupertypesOffset + kTaggedSize * rtt_depth);

      __ TrapIfNot(__ TaggedEqual(maybe_match, rtt.value()),
                   TrapId::kTrapIllegalCast);
      GOTO(end_label);
    }

    BIND(end_label);
    return object;
  }
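
  // Subtype walk, by example: each wasm Map points at a WasmTypeInfo whose
  // supertypes array caches the complete supertype chain, so a cast to a
  // type at depth 2 loads the entry at kSupertypesOffset + 2 * kTaggedSize
  // and compares it against the target rtt in constant time. The length
  // check is only needed when rtt_depth can exceed
  // wasm::kMinimumSupertypeArraySize, the length every supertypes array is
  // padded to at minimum.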

  V<Word32> ReduceWasmTypeCheckRtt(V<Object> object, OptionalV<Map> rtt,
                                   WasmTypeCheckConfig config) {
    DCHECK(rtt.has_value());
    int rtt_depth = wasm::GetSubtypingDepth(module_, config.to.ref_index());
    bool object_can_be_null = config.from.is_nullable();
    bool object_can_be_i31 =
        wasm::IsSubtypeOf(wasm::kWasmI31Ref.AsNonNull(), config.from, module_);
    bool is_cast_from_any = config.from.is_reference_to(wasm::HeapType::kAny);

    Label<Word32> end_label(&Asm());

    // If we are casting from any and null results in check failure, then the
    // {IsDataRefMap} check below subsumes the null check. Otherwise, perform
    // an explicit null check now.
    if (object_can_be_null && (!is_cast_from_any || config.to.is_nullable())) {
      const int kResult = config.to.is_nullable() ? 1 : 0;
      GOTO_IF(UNLIKELY(__ IsNull(object, wasm::kWasmAnyRef)), end_label,
              __ Word32Constant(kResult));
    }

    if (object_can_be_i31) {
      GOTO_IF(__ IsSmi(object), end_label, __ Word32Constant(0));
    }

    V<Map> map = __ LoadMapField(object);

    DCHECK_IMPLIES(module_->type(config.to.ref_index()).is_final,
                   config.exactness == kExactMatchOnly);

    if (config.exactness == kExactMatchOnly) {
      GOTO(end_label, __ TaggedEqual(map, rtt.value()));
    } else {
      // First, check if types happen to be equal. This has been shown to give
      // large speedups.
      GOTO_IF(LIKELY(__ TaggedEqual(map, rtt.value())), end_label,
              __ Word32Constant(1));

      // Check if map instance type identifies a wasm object.
      if (is_cast_from_any) {
        V<Word32> is_wasm_obj = IsDataRefMap(map);
        GOTO_IF_NOT(LIKELY(is_wasm_obj), end_label, __ Word32Constant(0));
      }

      V<Object> type_info = LoadWasmTypeInfo(map);
      DCHECK_GE(rtt_depth, 0);
      // If the depth of the rtt is known to be less than the minimum supertype
      // array length, we can access the supertype without bounds-checking the
      // supertype array.
      if (static_cast<uint32_t>(rtt_depth) >=
          wasm::kMinimumSupertypeArraySize) {
        V<Word32> supertypes_length = __ UntagSmi(
            __ Load(type_info, LoadOp::Kind::TaggedBase().Immutable(),
                    MemoryRepresentation::TaggedSigned(),
                    WasmTypeInfo::kSupertypesLengthOffset));
        GOTO_IF_NOT(LIKELY(__ Uint32LessThan(rtt_depth, supertypes_length)),
                    end_label, __ Word32Constant(0));
      }

      V<Object> maybe_match =
          __ Load(type_info, LoadOp::Kind::TaggedBase().Immutable(),
                  MemoryRepresentation::AnyTagged(),
                  WasmTypeInfo::kSupertypesOffset + kTaggedSize * rtt_depth);

      GOTO(end_label, __ TaggedEqual(maybe_match, rtt.value()));
    }

    BIND(end_label, result);
    return result;
  }

  OpIndex LowerGlobalSetOrGet(V<WasmTrustedInstanceData> instance, V<Any> value,
                              const wasm::WasmGlobal* global, GlobalMode mode) {
    bool is_mutable = global->mutability;
    DCHECK_IMPLIES(!is_mutable, mode == GlobalMode::kLoad);
    if (is_mutable && global->imported) {
      V<FixedAddressArray> imported_mutable_globals =
          LOAD_IMMUTABLE_INSTANCE_FIELD(instance, ImportedMutableGlobals,
                                        MemoryRepresentation::TaggedPointer());

      if (global->type.is_reference()) {
        V<FixedArray> buffers = LOAD_IMMUTABLE_INSTANCE_FIELD(
            instance, ImportedMutableGlobalsBuffers,
            MemoryRepresentation::TaggedPointer());
        int offset_in_buffers = FixedArray::OffsetOfElementAt(global->offset);
        V<HeapObject> base =
            __ Load(buffers, LoadOp::Kind::TaggedBase(),
                    MemoryRepresentation::AnyTagged(), offset_in_buffers);
        V<Word32> index = __ Load(imported_mutable_globals, OpIndex::Invalid(),
                                  LoadOp::Kind::TaggedBase(),
                                  MemoryRepresentation::Int32(),
                                  FixedAddressArray::OffsetOfElementAt(
                                      global->index));
        V<WordPtr> index_ptr = __ ChangeInt32ToIntPtr(index);
        if (mode == GlobalMode::kLoad) {
          return __ Load(base, index_ptr, LoadOp::Kind::TaggedBase(),
                         MemoryRepresentation::AnyTagged(),
                         OFFSET_OF_DATA_START(FixedArray), kTaggedSizeLog2);
        } else {
          __ Store(base, index_ptr, value, StoreOp::Kind::TaggedBase(),
                   MemoryRepresentation::AnyTagged(), kFullWriteBarrier,
                   OFFSET_OF_DATA_START(FixedArray), kTaggedSizeLog2);
          return OpIndex::Invalid();
        }
      } else {
        // Global is imported mutable but not a reference.
        OpIndex base = __ Load(imported_mutable_globals, OpIndex::Invalid(),
                               LoadOp::Kind::TaggedBase(),
                               kMaybeSandboxedPointer,
                               FixedAddressArray::OffsetOfElementAt(
                                   global->index));
        if (mode == GlobalMode::kLoad) {
          return __ Load(base, LoadOp::Kind::RawAligned(),
                         RepresentationFor(global->type, true), 0);
        } else {
          __ Store(base, value, StoreOp::Kind::RawAligned(),
                   RepresentationFor(global->type, true),
                   kNoWriteBarrier);
          return OpIndex::Invalid();
        }
      }
    } else if (global->type.is_reference()) {
      V<FixedArray> base = LOAD_IMMUTABLE_INSTANCE_FIELD(
          instance, TaggedGlobalsBuffer, MemoryRepresentation::TaggedPointer());
      int offset =
          OFFSET_OF_DATA_START(FixedArray) + global->offset * kTaggedSize;
      if (mode == GlobalMode::kLoad) {
        LoadOp::Kind load_kind = is_mutable
                                     ? LoadOp::Kind::TaggedBase()
                                     : LoadOp::Kind::TaggedBase().Immutable();
        return __ Load(base, load_kind, MemoryRepresentation::AnyTagged(),
                       offset);
      } else {
        __ Store(base, value, StoreOp::Kind::TaggedBase(),
                 MemoryRepresentation::AnyTagged(), kFullWriteBarrier,
                 offset);
        return OpIndex::Invalid();
      }
    } else {
      OpIndex base = LOAD_IMMUTABLE_INSTANCE_FIELD(
          instance, GlobalsStart, MemoryRepresentation::UintPtr());
      if (mode == GlobalMode::kLoad) {
        LoadOp::Kind load_kind = is_mutable
                                     ? LoadOp::Kind::RawAligned()
                                     : LoadOp::Kind::RawAligned().Immutable();
        return __ Load(base, load_kind, RepresentationFor(global->type, true),
                       global->offset);
      } else {
        __ Store(base, value, StoreOp::Kind::RawAligned(),
                 RepresentationFor(global->type, true),
                 kNoWriteBarrier, global->offset);
        return OpIndex::Invalid();
      }
    }
  }

  V<Word32> IsDataRefMap(V<Map> map) {
    V<Word32> instance_type = __ LoadInstanceTypeField(map);
    // We're going to test a range of WasmObject instance types with a single
    // unsigned comparison.
    V<Word32> comparison_value =
        __ Word32Sub(instance_type, FIRST_WASM_OBJECT_TYPE);
    return __ Uint32LessThanOrEqual(
        comparison_value, LAST_WASM_OBJECT_TYPE - FIRST_WASM_OBJECT_TYPE);
  }
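
  // Classic unsigned range check: FIRST <= x <= LAST is equivalent to the
  // single unsigned comparison (x - FIRST) <= (LAST - FIRST), since any x
  // below FIRST wraps around to a huge unsigned value (e.g. x = FIRST - 1
  // becomes 0xffffffff) and anything above LAST stays too large.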

  V<Object> LoadWasmTypeInfo(V<Map> map) {
    int offset = Map::kConstructorOrBackPointerOrNativeContextOffset;
    return __ Load(map, LoadOp::Kind::TaggedBase().Immutable(),
                   MemoryRepresentation::TaggedPointer(), offset);
  }

  std::pair<bool, bool> null_checks_for_struct_op(CheckForNull null_check,
                                                  int field_index) {
    bool explicit_null_check =
        null_check == kWithNullCheck &&
        (null_check_strategy_ == NullCheckStrategy::kExplicit ||
         field_index > wasm::kMaxStructFieldIndexForImplicitNullCheck);
    bool implicit_null_check =
        null_check == kWithNullCheck && !explicit_null_check;
    return {explicit_null_check, implicit_null_check};
  }

  int field_offset(const wasm::StructType* type, int field_index) {
    return WasmStruct::kHeaderSize + type->field_offset(field_index);
  }

  const wasm::WasmModule* module_ = __ data()->wasm_module();
  const bool shared_ = __ data()->wasm_shared();
  const NullCheckStrategy null_check_strategy_ =
      trap_handler::IsTrapHandlerEnabled() && V8_STATIC_ROOTS_BOOL
          ? NullCheckStrategy::kTrapHandler
          : NullCheckStrategy::kExplicit;
};

#include "src/compiler/turboshaft/undef-assembler-macros.inc"

}  // namespace v8::internal::compiler::turboshaft

#endif  // V8_COMPILER_TURBOSHAFT_WASM_LOWERING_REDUCER_H_