v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
function-body-decoder-impl.h
Go to the documentation of this file.
1// Copyright 2017 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_WASM_FUNCTION_BODY_DECODER_IMPL_H_
6#define V8_WASM_FUNCTION_BODY_DECODER_IMPL_H_
7
8#if !V8_ENABLE_WEBASSEMBLY
9#error This header should only be included if WebAssembly is enabled.
10#endif // !V8_ENABLE_WEBASSEMBLY
11
12// Do only include this header for implementing new Interface of the
13// WasmFullDecoder.
14
15#include <inttypes.h>
16
17#include <optional>
18
19#include "src/base/bounds.h"
21#include "src/base/strings.h"
22#include "src/base/vector.h"
23#include "src/strings/unicode.h"
25#include "src/wasm/decoder.h"
27#include "src/wasm/value-type.h"
33
34namespace v8::internal::wasm {
35
36struct WasmGlobal;
37struct WasmTag;
38
// Emit a decoder trace line iff --trace-wasm-decoder is set. The do/while
// wrapper makes the macro behave like a single statement.
#define TRACE(...) \
  do { \
    if (v8_flags.trace_wasm_decoder) PrintF(__VA_ARGS__); \
  } while (false)

// printf-style format for one traced instruction: pc offset, then opcode name.
#define TRACE_INST_FORMAT " @%-8d #%-30s|"
45
// Return the evaluation of {condition} if {ValidationTag::validate} is true,
// DCHECK that it is true and always return true otherwise.
// Note that this needs to be a macro, because the "likely" annotation does not
// survive inlining.
#ifdef DEBUG
#define VALIDATE(condition) \
  (ValidationTag::validate ? V8_LIKELY(condition) \
                           : ValidateAssumeTrue(condition, #condition))

// Debug-only helper for the non-validating branch of VALIDATE: DCHECKs
// {condition} (with the stringified expression as message) and always
// reports success, since validation is assumed to have happened already.
V8_INLINE bool ValidateAssumeTrue(bool condition, const char* message) {
  DCHECK_WITH_MSG(condition, message);
  return true;
}
#else
#define VALIDATE(condition) (!ValidationTag::validate || V8_LIKELY(condition))
#endif
62
// Abort decoding of the current opcode (returning 0 consumed bytes) unless the
// experimental feature {feat} is enabled; otherwise record it as detected.
// Expects {opcode} in scope and is only valid for modules of wasm (not
// asm.js) origin.
#define CHECK_PROTOTYPE_OPCODE(feat) \
  DCHECK(this->module_->origin == kWasmOrigin); \
  if (!VALIDATE(this->enabled_.has_##feat())) { \
    this->DecodeError( \
        "Invalid opcode 0x%02x (enable with --experimental-wasm-" #feat ")", \
        opcode); \
    return 0; \
  } \
  this->detected_->add_##feat()
72
// Maps a plain (non-atomic) memory load opcode to its LoadType.
// {opcode} must lie in [kExprI32LoadMem, kExprI64LoadMem32U]; this is only
// DCHECKed, so callers are responsible for the range.
static constexpr LoadType GetLoadType(WasmOpcode opcode) {
  // Hard-code the list of load types. The opcodes are highly unlikely to
  // ever change, and we have some checks here to guard against that.
  static_assert(sizeof(LoadType) == sizeof(uint8_t), "LoadType is compact");
  constexpr uint8_t kMinOpcode = kExprI32LoadMem;
  constexpr uint8_t kMaxOpcode = kExprI64LoadMem32U;
  // Table order must match the opcode numbering; the static_assert below
  // guards the table size against opcode changes.
  constexpr LoadType kLoadTypes[] = {
      LoadType::kI32Load, LoadType::kI64Load, LoadType::kF32Load,
      LoadType::kF64Load, LoadType::kI32Load8S, LoadType::kI32Load8U,
      LoadType::kI32Load16S, LoadType::kI32Load16U, LoadType::kI64Load8S,
      LoadType::kI64Load8U, LoadType::kI64Load16S, LoadType::kI64Load16U,
      LoadType::kI64Load32S, LoadType::kI64Load32U};
  static_assert(arraysize(kLoadTypes) == kMaxOpcode - kMinOpcode + 1);
  DCHECK_LE(kMinOpcode, opcode);
  DCHECK_GE(kMaxOpcode, opcode);
  return kLoadTypes[opcode - kMinOpcode];
}
90
// Maps a plain (non-atomic) memory store opcode to its StoreType.
// {opcode} must lie in [kExprI32StoreMem, kExprI64StoreMem32]; this is only
// DCHECKed, so callers are responsible for the range.
static constexpr StoreType GetStoreType(WasmOpcode opcode) {
  // Hard-code the list of store types. The opcodes are highly unlikely to
  // ever change, and we have some checks here to guard against that.
  static_assert(sizeof(StoreType) == sizeof(uint8_t), "StoreType is compact");
  constexpr uint8_t kMinOpcode = kExprI32StoreMem;
  constexpr uint8_t kMaxOpcode = kExprI64StoreMem32;
  // Table order must match the opcode numbering; the static_assert below
  // guards the table size against opcode changes.
  constexpr StoreType kStoreTypes[] = {
      StoreType::kI32Store, StoreType::kI64Store, StoreType::kF32Store,
      StoreType::kF64Store, StoreType::kI32Store8, StoreType::kI32Store16,
      StoreType::kI64Store8, StoreType::kI64Store16, StoreType::kI64Store32};
  static_assert(arraysize(kStoreTypes) == kMaxOpcode - kMinOpcode + 1);
  DCHECK_LE(kMinOpcode, opcode);
  DCHECK_GE(kMaxOpcode, opcode);
  return kStoreTypes[opcode - kMinOpcode];
}
106
// X-macro list of (atomic opcode, machine representation) pairs covering
// wait/notify, loads, RMW ops (add/sub/and/or/xor/exchange) and
// compare-exchange, in all supported widths. Stores live in
// ATOMIC_STORE_OP_LIST below.
#define ATOMIC_OP_LIST(V) \
  V(AtomicNotify, Uint32) \
  V(I32AtomicWait, Uint32) \
  V(I64AtomicWait, Uint64) \
  V(I32AtomicLoad, Uint32) \
  V(I64AtomicLoad, Uint64) \
  V(I32AtomicLoad8U, Uint8) \
  V(I32AtomicLoad16U, Uint16) \
  V(I64AtomicLoad8U, Uint8) \
  V(I64AtomicLoad16U, Uint16) \
  V(I64AtomicLoad32U, Uint32) \
  V(I32AtomicAdd, Uint32) \
  V(I32AtomicAdd8U, Uint8) \
  V(I32AtomicAdd16U, Uint16) \
  V(I64AtomicAdd, Uint64) \
  V(I64AtomicAdd8U, Uint8) \
  V(I64AtomicAdd16U, Uint16) \
  V(I64AtomicAdd32U, Uint32) \
  V(I32AtomicSub, Uint32) \
  V(I64AtomicSub, Uint64) \
  V(I32AtomicSub8U, Uint8) \
  V(I32AtomicSub16U, Uint16) \
  V(I64AtomicSub8U, Uint8) \
  V(I64AtomicSub16U, Uint16) \
  V(I64AtomicSub32U, Uint32) \
  V(I32AtomicAnd, Uint32) \
  V(I64AtomicAnd, Uint64) \
  V(I32AtomicAnd8U, Uint8) \
  V(I32AtomicAnd16U, Uint16) \
  V(I64AtomicAnd8U, Uint8) \
  V(I64AtomicAnd16U, Uint16) \
  V(I64AtomicAnd32U, Uint32) \
  V(I32AtomicOr, Uint32) \
  V(I64AtomicOr, Uint64) \
  V(I32AtomicOr8U, Uint8) \
  V(I32AtomicOr16U, Uint16) \
  V(I64AtomicOr8U, Uint8) \
  V(I64AtomicOr16U, Uint16) \
  V(I64AtomicOr32U, Uint32) \
  V(I32AtomicXor, Uint32) \
  V(I64AtomicXor, Uint64) \
  V(I32AtomicXor8U, Uint8) \
  V(I32AtomicXor16U, Uint16) \
  V(I64AtomicXor8U, Uint8) \
  V(I64AtomicXor16U, Uint16) \
  V(I64AtomicXor32U, Uint32) \
  V(I32AtomicExchange, Uint32) \
  V(I64AtomicExchange, Uint64) \
  V(I32AtomicExchange8U, Uint8) \
  V(I32AtomicExchange16U, Uint16) \
  V(I64AtomicExchange8U, Uint8) \
  V(I64AtomicExchange16U, Uint16) \
  V(I64AtomicExchange32U, Uint32) \
  V(I32AtomicCompareExchange, Uint32) \
  V(I64AtomicCompareExchange, Uint64) \
  V(I32AtomicCompareExchange8U, Uint8) \
  V(I32AtomicCompareExchange16U, Uint16) \
  V(I64AtomicCompareExchange8U, Uint8) \
  V(I64AtomicCompareExchange16U, Uint16) \
  V(I64AtomicCompareExchange32U, Uint32)
167
// X-macro list of (atomic store opcode, machine representation) pairs;
// companion to ATOMIC_OP_LIST above.
#define ATOMIC_STORE_OP_LIST(V) \
  V(I32AtomicStore, Uint32) \
  V(I64AtomicStore, Uint64) \
  V(I32AtomicStore8U, Uint8) \
  V(I32AtomicStore16U, Uint16) \
  V(I64AtomicStore8U, Uint8) \
  V(I64AtomicStore16U, Uint16) \
  V(I64AtomicStore32U, Uint32)
176
177// Decoder error with explicit PC and optional format arguments.
178// Depending on the validation tag and the number of arguments, this forwards to
179// a V8_NOINLINE and V8_PRESERVE_MOST method of the decoder.
180template <typename ValidationTag, typename... Args>
181V8_INLINE void DecodeError(Decoder* decoder, const uint8_t* pc, const char* str,
182 Args&&... args) {
183 // Decode errors can only happen if we are validating; the compiler should
184 // know this e.g. from the VALIDATE macro, but this assumption tells it again
185 // that this path is impossible.
186 V8_ASSUME(ValidationTag::validate);
187 if constexpr (sizeof...(Args) == 0) {
188 decoder->error(pc, str);
189 } else {
190 decoder->errorf(pc, str, std::forward<Args>(args)...);
191 }
192}
193
194// Decoder error without explicit PC and with optional format arguments.
195// Depending on the validation tag and the number of arguments, this forwards to
196// a V8_NOINLINE and V8_PRESERVE_MOST method of the decoder.
197template <typename ValidationTag, typename... Args>
198V8_INLINE void DecodeError(Decoder* decoder, const char* str, Args&&... args) {
199 // Decode errors can only happen if we are validating; the compiler should
200 // know this e.g. from the VALIDATE macro, but this assumption tells it again
201 // that this path is impossible.
202 V8_ASSUME(ValidationTag::validate);
203 if constexpr (sizeof...(Args) == 0) {
204 decoder->error(str);
205 } else {
206 decoder->errorf(str, std::forward<Args>(args)...);
207 }
208}
209
210namespace value_type_reader {
211
212template <typename ValidationTag>
213std::pair<HeapType, uint32_t> read_heap_type(Decoder* decoder,
214 const uint8_t* pc,
215 WasmEnabledFeatures enabled) {
216 auto [heap_index, length] =
217 decoder->read_i33v<ValidationTag>(pc, "heap type");
219 uint32_t type_index;
220 if (heap_index < 0) {
221 int64_t min_1_byte_leb128 = -64;
222 if (!VALIDATE(heap_index >= min_1_byte_leb128)) {
223 DecodeError<ValidationTag>(decoder, pc, "Unknown heap type %" PRId64,
224 heap_index);
225 return {kWasmBottom, length};
226 }
227 uint8_t uint_7_mask = 0x7F;
228 uint8_t code = static_cast<ValueTypeCode>(heap_index) & uint_7_mask;
229 bool is_shared = false;
230 if (code == kSharedFlagCode) {
231 if (!VALIDATE(enabled.has_shared())) {
233 decoder, pc,
234 "invalid heap type 0x%hhx, enable with --experimental-wasm-shared",
236 return {kWasmBottom, length};
237 }
238 code = decoder->read_u8<ValidationTag>(pc + length, "heap type");
239 length++;
240 is_shared = true;
241 }
242 switch (code) {
243 case kEqRefCode:
244 case kI31RefCode:
245 case kStructRefCode:
246 case kArrayRefCode:
247 case kAnyRefCode:
248 case kNoneCode:
249 case kNoExternCode:
250 case kNoFuncCode:
251 case kExternRefCode:
252 case kFuncRefCode:
253 return {HeapType::from_code(code, is_shared), length};
254 case kNoExnCode:
255 case kExnRefCode:
256 if (!VALIDATE(enabled.has_exnref())) {
258 decoder, pc,
259 "invalid heap type '%s', enable with --experimental-wasm-exnref",
260 HeapType::from_code(code, is_shared).name().c_str());
261 return {kWasmBottom, 0};
262 }
263 return {HeapType::from_code(code, is_shared), length};
264 case kStringRefCode:
268 if (!VALIDATE(enabled.has_stringref())) {
270 decoder, pc,
271 "invalid heap type '%s', enable with "
272 "--experimental-wasm-stringref",
273 HeapType::from_code(code, is_shared).name().c_str());
274 return {kWasmBottom, 0};
275 }
276 return {HeapType::from_code(code, is_shared), length};
277 case kNoContCode:
278 case kContRefCode:
279 if (!VALIDATE(enabled.has_wasmfx())) {
281 decoder, pc,
282 "invalid heap type '%s', enable with "
283 "--experimental-wasm-wasmfx",
284 HeapType::from_code(code, is_shared).name().c_str());
285 return {kWasmBottom, 0};
286 }
287 return {HeapType::from_code(code, is_shared), length};
288 case kExactCode: {
289 if (!VALIDATE(enabled.has_custom_descriptors())) {
291 "invalid heap type 'exact', enable with "
292 "--experimental-wasm-custom-descriptors");
293 return {kWasmBottom, 0};
294 }
295 auto [nested_index, index_length] =
296 decoder->read_u32v<ValidationTag>(pc + 1, "type index");
297 type_index = nested_index;
298 exactness = Exactness::kExact;
299 length += index_length;
300 break;
301 }
302 default:
303 DecodeError<ValidationTag>(decoder, pc, "Unknown heap type %" PRId64,
304 heap_index);
305 return {kWasmBottom, length};
306 }
307 } else {
308 type_index = static_cast<uint32_t>(heap_index);
309 }
310 if (!VALIDATE(type_index < kV8MaxWasmTypes)) {
312 decoder, pc,
313 "Type index %u is greater than the maximum number %zu "
314 "of type definitions supported by V8",
315 type_index, kV8MaxWasmTypes);
316 return {kWasmBottom, length};
317 }
318 // We don't have a module yet, so we can only fill in default values:
319 bool kDefaultShared = false;
320 RefTypeKind kDefaultKind = RefTypeKind::kOther;
321 return {HeapType::Index(ModuleTypeIndex{type_index}, kDefaultShared,
322 kDefaultKind, exactness),
323 length};
324}
325
326// Read a value type starting at address {pc} using {decoder}.
327// No bytes are consumed.
328// Returns the read value type and the number of bytes read (a.k.a. length).
329template <typename ValidationTag>
330std::pair<ValueType, uint32_t> read_value_type(Decoder* decoder,
331 const uint8_t* pc,
332 WasmEnabledFeatures enabled) {
333 uint8_t val = decoder->read_u8<ValidationTag>(pc, "value type opcode");
334 if (!VALIDATE(decoder->ok())) {
335 return {kWasmBottom, 0};
336 }
337 ValueTypeCode code = static_cast<ValueTypeCode>(val);
338 switch (code) {
339 case kEqRefCode:
340 case kI31RefCode:
341 case kStructRefCode:
342 case kArrayRefCode:
343 case kAnyRefCode:
344 case kNoneCode:
345 case kNoExternCode:
346 case kNoFuncCode:
347 case kExternRefCode:
348 case kFuncRefCode:
349 return {ValueType::RefNull(HeapType::from_code(code, false)), 1};
350 case kNoExnCode:
351 case kExnRefCode:
352 if (!VALIDATE(enabled.has_exnref())) {
354 decoder, pc,
355 "invalid value type '%s', enable with --experimental-wasm-exnref",
356 HeapType::from_code(code, false).name().c_str());
357 return {kWasmBottom, 0};
358 }
359 return {code == kExnRefCode ? kWasmExnRef : kWasmNullExnRef, 1};
360 case kStringRefCode:
363 case kStringViewIterCode: {
364 if (!VALIDATE(enabled.has_stringref())) {
366 decoder, pc,
367 "invalid value type '%sref', enable with "
368 "--experimental-wasm-stringref",
369 HeapType::from_code(code, false).name().c_str());
370 return {kWasmBottom, 0};
371 }
372 // String views are not nullable, so interpret the shorthand accordingly.
373 ValueType type = code == kStringRefCode
375 : ValueType::Ref(HeapType::from_code(code, false));
376 return {type, 1};
377 }
378 case kContRefCode:
379 case kNoContCode:
380 if (!VALIDATE(enabled.has_wasmfx())) {
382 decoder, pc,
383 "invalid value type '%s', enable with --experimental-wasm-wasmfx",
384 HeapType::from_code(code, false).name().c_str());
385 return {kWasmBottom, 0};
386 }
387 return {code == kContRefCode ? kWasmContRef : kWasmNullContRef, 1};
388 case kI32Code:
389 return {kWasmI32, 1};
390 case kI64Code:
391 return {kWasmI64, 1};
392 case kF32Code:
393 return {kWasmF32, 1};
394 case kF64Code:
395 return {kWasmF64, 1};
396 case kRefCode:
397 case kRefNullCode: {
398 Nullability nullability = code == kRefNullCode ? kNullable : kNonNullable;
399 auto [heap_type, length] =
400 read_heap_type<ValidationTag>(decoder, pc + 1, enabled);
401 if (!VALIDATE(!heap_type.is_string_view() ||
402 nullability == kNonNullable)) {
404 "nullable string views don't exist");
405 return {kWasmBottom, 0};
406 }
407 ValueType type = heap_type.is_bottom()
409 : ValueType::RefMaybeNull(heap_type, nullability);
410 return {type, length + 1};
411 }
412 case kS128Code: {
414 if (v8_flags.correctness_fuzzer_suppressions) {
415 FATAL("Aborting on missing Wasm SIMD support");
416 }
417 DecodeError<ValidationTag>(decoder, pc, "Wasm SIMD unsupported");
418 return {kWasmBottom, 0};
419 }
420 return {kWasmS128, 1};
421 }
422 // Although these codes are included in ValueTypeCode, they technically
423 // do not correspond to value types and are only used in specific
424 // contexts. The caller of this function is responsible for handling them.
425 case kVoidCode:
426 case kI8Code:
427 case kI16Code:
428 case kF16Code:
429 case kExactCode:
430 // Fall through to the error reporting below.
431 break;
432 }
433 // Anything that doesn't match an enumeration value is an invalid type code.
434 if constexpr (!ValidationTag::validate) UNREACHABLE();
435 DecodeError<ValidationTag>(decoder, pc, "invalid value type 0x%x", code);
436 return {kWasmBottom, 0};
437}
438
439template <typename ValidationTag>
440bool ValidateHeapType(Decoder* decoder, const uint8_t* pc,
441 const WasmModule* module, HeapType type) {
442 if (!VALIDATE(!type.is_bottom())) return false;
443 if (!type.is_index()) return true;
444 // A {nullptr} module is accepted if we are not validating anyway (e.g. for
445 // opcode length computation).
446 if (!ValidationTag::validate && module == nullptr) return true;
447 if (!VALIDATE(type.ref_index().index < module->types.size())) {
448 DecodeError<ValidationTag>(decoder, pc, "Type index %u is out of bounds",
449 type.ref_index().index);
450 return false;
451 }
452 return true;
453}
454
// Checks that {type} is not bottom and, for object references, delegates the
// type-index bounds check to ValidateHeapType. Reports a decode error and
// returns false on failure.
template <typename ValidationTag>
bool ValidateValueType(Decoder* decoder, const uint8_t* pc,
                       const WasmModule* module, ValueType type) {
  if (!VALIDATE(!type.is_bottom())) return false;
  // Non-reference types carry no type index, so nothing further to check.
  if (V8_LIKELY(!type.is_object_reference())) return true;
  return ValidateHeapType<ValidationTag>(decoder, pc, module, type.heap_type());
}
462
463// Updates {unfinished_type} in-place using information from {module}.
464static void Populate(HeapType* unfinished_type, const WasmModule* module) {
465 if (!unfinished_type->has_index()) return;
466 DCHECK(module->has_type(unfinished_type->ref_index()));
467 const TypeDefinition& type_def = module->type(unfinished_type->ref_index());
468 unfinished_type->Populate(type_def.is_shared,
469 static_cast<RefTypeKind>(type_def.kind));
470}
471
472// Updates {unfinished_type} in-place using information from {module}.
473static void Populate(ValueType* unfinished_type, const WasmModule* module) {
474 if (!unfinished_type->has_index()) return;
475 DCHECK(module->has_type(unfinished_type->ref_index()));
476 const TypeDefinition& type_def = module->type(unfinished_type->ref_index());
477 unfinished_type->Populate(type_def.is_shared,
478 static_cast<RefTypeKind>(type_def.kind));
479}
480
481} // namespace value_type_reader
482
484
485// Helpers for decoding different kinds of immediates which follow bytecodes.
487 int32_t value;
488 uint32_t length;
489
490 template <typename ValidationTag>
491 ImmI32Immediate(Decoder* decoder, const uint8_t* pc, ValidationTag = {}) {
492 std::tie(value, length) = decoder->read_i32v<ValidationTag>(pc, "immi32");
493 }
494};
495
497 int64_t value;
498 uint32_t length;
499
500 template <typename ValidationTag>
501 ImmI64Immediate(Decoder* decoder, const uint8_t* pc, ValidationTag = {}) {
502 std::tie(value, length) = decoder->read_i64v<ValidationTag>(pc, "immi64");
503 }
504};
505
507 float value;
508 uint32_t length = 4;
509
510 template <typename ValidationTag>
511 ImmF32Immediate(Decoder* decoder, const uint8_t* pc, ValidationTag = {}) {
512 // We can't use base::bit_cast here because calling any helper function
513 // that returns a float would potentially flip NaN bits per C++ semantics,
514 // so we have to inline the memcpy call directly.
515 uint32_t tmp = decoder->read_u32<ValidationTag>(pc, "immf32");
516 memcpy(&value, &tmp, sizeof(value));
517 }
518};
519
521 double value;
522 uint32_t length = 8;
523
524 template <typename ValidationTag>
525 ImmF64Immediate(Decoder* decoder, const uint8_t* pc, ValidationTag = {}) {
526 // Avoid base::bit_cast because it might not preserve the signalling bit
527 // of a NaN.
528 uint64_t tmp = decoder->read_u64<ValidationTag>(pc, "immf64");
529 memcpy(&value, &tmp, sizeof(value));
530 }
531};
532
534 enum Values {
536 RES_IS_NULL = 1 << 1,
537 };
538
539 bool src_is_null = false;
540 bool res_is_null = false;
541
542 BrOnCastFlags() = default;
543 explicit BrOnCastFlags(uint8_t value)
544 : src_is_null((value & BrOnCastFlags::SRC_IS_NULL) != 0),
545 res_is_null((value & BrOnCastFlags::RES_IS_NULL) != 0) {
547 }
548};
549
552 uint8_t raw_value = 0;
553 uint32_t length = 1;
554
555 template <typename ValidationTag>
556 BrOnCastImmediate(Decoder* decoder, const uint8_t* pc, ValidationTag = {}) {
557 raw_value = decoder->read_u8<ValidationTag>(pc, "br_on_cast flags");
559 decoder->errorf(pc, "invalid br_on_cast flags %u", raw_value);
560 return;
561 }
562 flags = BrOnCastFlags(raw_value);
563 }
564};
565
566// Parent class for all Immediates which read a u32v index value in their
567// constructor.
569 uint32_t index;
570 uint32_t length;
571
572 template <typename ValidationTag>
573 IndexImmediate(Decoder* decoder, const uint8_t* pc, const char* name,
574 ValidationTag = {}) {
575 std::tie(index, length) = decoder->read_u32v<ValidationTag>(pc, name);
576 }
577};
578
580 const WasmMemory* memory = nullptr;
581
582 template <typename ValidationTag>
583 MemoryIndexImmediate(Decoder* decoder, const uint8_t* pc,
584 ValidationTag validate = {})
585 : IndexImmediate(decoder, pc, "memory index", validate) {}
586};
587
589 const WasmTable* table = nullptr;
590
591 template <typename ValidationTag>
592 TableIndexImmediate(Decoder* decoder, const uint8_t* pc,
593 ValidationTag validate = {})
594 : IndexImmediate(decoder, pc, "table index", validate) {}
595};
596
598 const WasmTag* tag = nullptr;
599
600 template <typename ValidationTag>
601 TagIndexImmediate(Decoder* decoder, const uint8_t* pc,
602 ValidationTag validate = {})
603 : IndexImmediate(decoder, pc, "tag index", validate) {}
604};
605
607 const WasmGlobal* global = nullptr;
608
609 template <typename ValidationTag>
610 GlobalIndexImmediate(Decoder* decoder, const uint8_t* pc,
611 ValidationTag validate = {})
612 : IndexImmediate(decoder, pc, "global index", validate) {}
613};
614
617 uint32_t length;
618
619 template <typename ValidationTag>
620 TypeIndexImmediate(Decoder* decoder, const uint8_t* pc, const char* name,
621 ValidationTag = {}) {
622 uint32_t raw_index;
623 std::tie(raw_index, length) = decoder->read_u32v<ValidationTag>(pc, name);
624 index = ModuleTypeIndex{raw_index};
625 }
626};
627
629 const FunctionSig* sig = nullptr;
630 bool shared = false;
631
632 template <typename ValidationTag>
633 SigIndexImmediate(Decoder* decoder, const uint8_t* pc,
634 ValidationTag validate = {})
635 : TypeIndexImmediate(decoder, pc, "signature index", validate) {}
636
638 return HeapType::Index(index, shared, RefTypeKind::kFunction);
639 }
640};
641
643 const ContType* cont_type = nullptr;
644 bool shared = false;
645
646 template <typename ValidationTag>
647 ContIndexImmediate(Decoder* decoder, const uint8_t* pc,
648 ValidationTag validate = {})
649 : TypeIndexImmediate(decoder, pc, "cont index", validate) {}
650
652 return HeapType::Index(index, shared, RefTypeKind::kCont);
653 }
654};
655
657 const StructType* struct_type = nullptr;
658 bool shared = false;
659
660 template <typename ValidationTag>
661 StructIndexImmediate(Decoder* decoder, const uint8_t* pc,
662 ValidationTag validate = {})
663 : TypeIndexImmediate(decoder, pc, "struct index", validate) {}
664
666 return HeapType::Index(index, shared, RefTypeKind::kStruct);
667 }
668};
669
671 const ArrayType* array_type = nullptr;
672 bool shared = false;
673
674 template <typename ValidationTag>
675 ArrayIndexImmediate(Decoder* decoder, const uint8_t* pc,
676 ValidationTag validate = {})
677 : TypeIndexImmediate(decoder, pc, "array index", validate) {}
678
680 return HeapType::Index(index, shared, RefTypeKind::kArray);
681 }
682};
683
685 const FunctionSig* sig = nullptr;
686
687 template <typename ValidationTag>
688 CallFunctionImmediate(Decoder* decoder, const uint8_t* pc,
689 ValidationTag validate = {})
690 : IndexImmediate(decoder, pc, "function index", validate) {}
691};
692
694 uint32_t length;
696
697 template <typename ValidationTag>
699 const uint8_t* pc, ValidationTag = {}) {
700 uint8_t num_types;
701 std::tie(num_types, length) =
702 decoder->read_u32v<ValidationTag>(pc, "number of select types");
703 if (!VALIDATE(num_types == 1)) {
705 decoder, pc,
706 "Invalid number of types. Select accepts exactly one type");
707 return;
708 }
709 uint32_t type_length;
710 std::tie(type, type_length) =
712 enabled);
713 length += type_length;
714 }
715};
716
718 uint32_t length = 1;
719 // After decoding, either {sig_index} is set XOR {sig} points to
720 // {single_return_sig_storage}.
723 // Internal field, potentially pointed to by {sig}. Do not access directly.
725
726 // Do not copy or move, as {sig} might point to {single_return_sig_storage} so
727 // this cannot trivially be copied. If needed, define those operators later.
732
733 template <typename ValidationTag>
735 const uint8_t* pc, ValidationTag = {}) {
736 int64_t block_type;
737 std::tie(block_type, length) =
738 decoder->read_i33v<ValidationTag>(pc, "block type");
739 if (block_type < 0) {
740 // All valid negative types are 1 byte in length, so we check against the
741 // minimum 1-byte LEB128 value.
742 constexpr int64_t min_1_byte_leb128 = -64;
743 if (!VALIDATE(block_type >= min_1_byte_leb128)) {
744 DecodeError<ValidationTag>(decoder, pc, "invalid block type %" PRId64,
745 block_type);
746 return;
747 }
748 if (static_cast<ValueTypeCode>(block_type & 0x7F) != kVoidCode) {
750 std::tie(single_return_sig_storage[0], length) =
752 enabled);
753 }
754 } else {
755 sig = FunctionSig{0, 0, nullptr};
756 sig_index = ModuleTypeIndex{static_cast<uint32_t>(block_type)};
757 }
758 }
759
760 uint32_t in_arity() const {
761 return static_cast<uint32_t>(sig.parameter_count());
762 }
763 uint32_t out_arity() const {
764 return static_cast<uint32_t>(sig.return_count());
765 }
766 ValueType in_type(uint32_t index) const { return sig.GetParam(index); }
767 ValueType out_type(uint32_t index) const { return sig.GetReturn(index); }
768};
769
771 uint32_t depth;
772 uint32_t length;
773
774 template <typename ValidationTag>
775 BranchDepthImmediate(Decoder* decoder, const uint8_t* pc,
776 ValidationTag = {}) {
777 std::tie(depth, length) =
778 decoder->read_u32v<ValidationTag>(pc, "branch depth");
779 }
780};
781
785 uint32_t length;
786
787 template <typename ValidationTag>
788 FieldImmediate(Decoder* decoder, const uint8_t* pc,
789 ValidationTag validate = {})
790 : struct_imm(decoder, pc, validate),
791 field_imm(decoder, pc + struct_imm.length, "field index", validate),
792 length(struct_imm.length + field_imm.length) {}
793};
794
798 uint32_t length;
799 const FunctionSig* sig = nullptr;
800
801 template <typename ValidationTag>
802 CallIndirectImmediate(Decoder* decoder, const uint8_t* pc,
803 ValidationTag validate = {})
804 : sig_imm(decoder, pc, validate),
805 table_imm(decoder, pc + sig_imm.length, validate),
806 length(sig_imm.length + table_imm.length) {}
807};
808
810 uint32_t table_count;
811 const uint8_t* start;
812 const uint8_t* table;
813
814 template <typename ValidationTag>
815 BranchTableImmediate(Decoder* decoder, const uint8_t* pc,
816 ValidationTag = {}) {
817 start = pc;
818 uint32_t len;
819 std::tie(table_count, len) =
820 decoder->read_u32v<ValidationTag>(pc, "table count");
821 table = pc + len;
822 }
823};
824
826
827// A helper to iterate over a branch table.
828template <typename ValidationTag>
830 public:
831 uint32_t cur_index() const { return index_; }
832 bool has_next() const {
833 return VALIDATE(decoder_->ok()) && index_ <= table_count_;
834 }
835 uint32_t next() {
836 DCHECK(has_next());
837 index_++;
838 auto [result, length] =
839 decoder_->read_u32v<ValidationTag>(pc_, "branch table entry");
840 pc_ += length;
841 return result;
842 }
843 // length, including the length of the {BranchTableImmediate}, but not the
844 // opcode. This consumes the table entries, so it is invalid to call next()
845 // before or after this method.
846 uint32_t length() {
847 while (has_next()) next();
848 return static_cast<uint32_t>(pc_ - start_);
849 }
850 const uint8_t* pc() const { return pc_; }
851
853 : decoder_(decoder),
854 start_(imm.start),
855 pc_(imm.table),
856 table_count_(imm.table_count) {}
857
858 private:
860 const uint8_t* const start_;
861 const uint8_t* pc_;
862 uint32_t index_ = 0; // the current index.
863 const uint32_t table_count_; // the count of entries, not including default.
864};
865
866struct CatchCase {
868 // The union contains a TagIndexImmediate iff kind == kCatch or kind ==
869 // kCatchRef.
875};
876
877// A helper to iterate over a try table.
878template <typename ValidationTag>
880 public:
881 uint32_t cur_index() const { return index_; }
882 bool has_next() const {
883 return VALIDATE(decoder_->ok()) && index_ < table_count_;
884 }
885
887 uint8_t kind =
888 static_cast<CatchKind>(decoder_->read_u8<ValidationTag>(pc_));
889 pc_ += 1;
890 CatchCase::MaybeTagIndex maybe_tag{0};
891 if (kind == kCatch || kind == kCatchRef) {
892 maybe_tag.tag_imm = TagIndexImmediate(decoder_, pc_, ValidationTag{});
893 pc_ += maybe_tag.tag_imm.length;
894 }
895 BranchDepthImmediate br_imm(decoder_, pc_, ValidationTag{});
896 pc_ += br_imm.length;
897 index_++;
898 return CatchCase{static_cast<CatchKind>(kind), maybe_tag, br_imm};
899 }
900
901 // length, including the length of the {TryTableImmediate}, but not the
902 // opcode. This consumes the table entries, so it is invalid to call next()
903 // before or after this method.
904 uint32_t length() {
905 while (has_next()) next();
906 return static_cast<uint32_t>(pc_ - start_);
907 }
908 const uint8_t* pc() const { return pc_; }
909
911 : decoder_(decoder),
912 start_(imm.start),
913 pc_(imm.table),
914 table_count_(imm.table_count) {}
915
916 private:
918 const uint8_t* const start_;
919 const uint8_t* pc_;
920 uint32_t index_ = 0; // the current index.
921 const uint32_t table_count_; // the count of entries, not including default.
922};
923
925
927 SwitchKind kind; // Regular handler or a switch site.
928 TagIndexImmediate tag; // Tag defining this handler site.
933};
934
935// A helper to iterate over a handler table.
936template <typename ValidationTag>
938 public:
939 uint32_t cur_index() const { return index_; }
940 bool has_next() const {
941 return VALIDATE(decoder_->ok()) && index_ < table_count_;
942 }
943
945 uint8_t kind =
946 static_cast<CatchKind>(decoder_->read_u8<ValidationTag>(pc_));
947 pc_ += 1;
948 TagIndexImmediate tag = TagIndexImmediate(decoder_, pc_, ValidationTag{});
949 pc_ += tag.length;
950
951 HandlerCase::MaybeHandlerDepth maybe_depth{0};
952
953 if (kind == kOnSuspend) {
954 maybe_depth.br = BranchDepthImmediate(decoder_, pc_, ValidationTag{});
955 pc_ += maybe_depth.br.length;
956 }
957 index_++;
958
959 return HandlerCase{static_cast<SwitchKind>(kind), tag, maybe_depth};
960 }
961
962 // length, including the length of the {EffectHandlerTableImmediate}, but not
963 // the opcode. This consumes the table entries, so it is invalid to call
964 // next() before or after this method.
965 uint32_t length() {
966 while (has_next()) next();
967 return static_cast<uint32_t>(pc_ - start_);
968 }
969 const uint8_t* pc() const { return pc_; }
970
973 : decoder_(decoder),
974 start_(imm.start),
975 pc_(imm.table),
976 table_count_(imm.table_count) {}
977
978 private:
980 const uint8_t* const start_;
981 const uint8_t* pc_;
982 uint32_t index_ = 0; // the current index.
983 const uint32_t table_count_;
984};
985
987 uint32_t alignment;
988 uint32_t mem_index;
989 uint64_t offset;
990 const WasmMemory* memory = nullptr;
991
992 uint32_t length;
993
994 template <typename ValidationTag>
995 V8_INLINE MemoryAccessImmediate(Decoder* decoder, const uint8_t* pc,
996 uint32_t max_alignment, ValidationTag = {}) {
997 // Check for the fast path (two single-byte LEBs, mem index 0).
998 const bool two_bytes = !ValidationTag::validate || decoder->end() - pc >= 2;
999 const bool use_fast_path = two_bytes && !(pc[0] & 0xc0) && !(pc[1] & 0x80);
1000 if (V8_LIKELY(use_fast_path)) {
1001 alignment = pc[0];
1002 mem_index = 0;
1003 offset = pc[1];
1004 length = 2;
1005 } else {
1006 ConstructSlow<ValidationTag>(decoder, pc, max_alignment);
1007 }
1008 if (!VALIDATE(alignment <= max_alignment)) {
1010 decoder, pc,
1011 "invalid alignment; expected maximum alignment is %u, "
1012 "actual alignment is %u",
1013 max_alignment, alignment);
1014 }
1015 }
1016
1017 private:
1018 template <typename ValidationTag>
1020 const uint8_t* pc,
1021 uint32_t max_alignment) {
1022 uint32_t alignment_length;
1023 std::tie(alignment, alignment_length) =
1024 decoder->read_u32v<ValidationTag>(pc, "alignment");
1025 length = alignment_length;
1026 if (alignment & 0x40) {
1027 alignment &= ~0x40;
1028 uint32_t mem_index_length;
1029 std::tie(mem_index, mem_index_length) =
1030 decoder->read_u32v<ValidationTag>(pc + length, "memory index");
1031 length += mem_index_length;
1032 } else {
1033 mem_index = 0;
1034 }
1035 uint32_t offset_length;
1036 std::tie(offset, offset_length) =
1037 decoder->read_u64v<ValidationTag>(pc + length, "offset");
1038 length += offset_length;
1039 }
1040};
1041
1042// Immediate for SIMD lane operations.
1044 uint8_t lane;
1045 uint32_t length = 1;
1046
1047 template <typename ValidationTag>
1048 SimdLaneImmediate(Decoder* decoder, const uint8_t* pc, ValidationTag = {}) {
1049 lane = decoder->read_u8<ValidationTag>(pc, "lane");
1050 }
1051};
1052
1053// Immediate for SIMD S8x16 shuffle operations.
1055 uint8_t value[kSimd128Size] = {0};
1056
1057 template <typename ValidationTag>
1058 Simd128Immediate(Decoder* decoder, const uint8_t* pc, ValidationTag = {}) {
1059 for (uint32_t i = 0; i < kSimd128Size; ++i) {
1060 value[i] = decoder->read_u8<ValidationTag>(pc + i, "value");
1061 }
1062 }
1063};
1064
1068 uint32_t length;
1069
1070 template <typename ValidationTag>
1071 MemoryInitImmediate(Decoder* decoder, const uint8_t* pc,
1072 ValidationTag validate = {})
1073 : data_segment(decoder, pc, "data segment index", validate),
1074 memory(decoder, pc + data_segment.length, validate),
1075 length(data_segment.length + memory.length) {}
1076};
1077
1081 uint32_t length;
1082
1083 template <typename ValidationTag>
1084 MemoryCopyImmediate(Decoder* decoder, const uint8_t* pc,
1085 ValidationTag validate = {})
1086 : memory_dst(decoder, pc, validate),
1087 memory_src(decoder, pc + memory_dst.length, validate),
1088 length(memory_src.length + memory_dst.length) {}
1089};
1090
1094 uint32_t length;
1095
1096 template <typename ValidationTag>
1097 TableInitImmediate(Decoder* decoder, const uint8_t* pc,
1098 ValidationTag validate = {})
1099 : element_segment(decoder, pc, "element segment index", validate),
1100 table(decoder, pc + element_segment.length, validate),
1101 length(element_segment.length + table.length) {}
1102};
1103
1107 uint32_t length;
1108
1109 template <typename ValidationTag>
1110 TableCopyImmediate(Decoder* decoder, const uint8_t* pc,
1111 ValidationTag validate = {})
1112 : table_dst(decoder, pc, validate),
1113 table_src(decoder, pc + table_dst.length, validate),
1114 length(table_src.length + table_dst.length) {}
1115};
1116
1118 uint32_t length;
1120
1121 template <typename ValidationTag>
1123 const uint8_t* pc, ValidationTag = {}) {
1124 std::tie(type, length) =
1126 }
1127};
1128
1130 uint32_t index;
1131 uint32_t length;
1132
1133 template <typename ValidationTag>
1134 StringConstImmediate(Decoder* decoder, const uint8_t* pc,
1135 ValidationTag = {}) {
1136 std::tie(index, length) =
1137 decoder->read_u32v<ValidationTag>(pc, "stringref literal index");
1138 }
1139};
1140
1141template <bool validate>
1143 static_assert(validate == false);
1144 explicit PcForErrors(const uint8_t* /* pc */) {}
1145
1146 const uint8_t* pc() const { return nullptr; }
1147};
1148
1149template <>
1150struct PcForErrors<true> {
1151 const uint8_t* pc_for_errors = nullptr;
1152
1153 explicit PcForErrors(const uint8_t* pc) : pc_for_errors(pc) {}
1154
1155 const uint8_t* pc() const { return pc_for_errors; }
1156};
1157
1158// An entry on the value stack.
1159template <typename ValidationTag>
1160struct ValueBase : public PcForErrors<ValidationTag::validate> {
1162
1163 ValueBase(const uint8_t* pc, ValueType type)
1164 : PcForErrors<ValidationTag::validate>(pc), type(type) {}
1165};
1166
1167template <typename Value>
1168struct Merge {
1169 uint32_t arity = 0;
1170 union { // Either multiple values or a single value.
1173 } vals = {nullptr}; // Initialize {array} with {nullptr}.
1174
1175 // Tracks whether this merge was ever reached. Uses precise reachability, like
1176 // Reachability::kReachable.
1178
1179 explicit Merge(bool reached = false) : reached(reached) {}
1180
1181 Value& operator[](uint32_t i) {
1182 DCHECK_GT(arity, i);
1183 return arity == 1 ? vals.first : vals.array[i];
1184 }
1185};
1186
1197
1198enum Reachability : uint8_t {
1199 // reachable code.
1201 // reachable code in unreachable block (implies normal validation).
1203 // code unreachable in its own block (implies polymorphic validation).
1206
1207// An entry on the control stack (i.e. if, block, loop, or try).
1208template <typename Value, typename ValidationTag>
1209struct ControlBase : public PcForErrors<ValidationTag::validate> {
1212
1213 // For try-table.
1215
1216 uint32_t stack_depth = 0; // Stack height at the beginning of the construct.
1217 uint32_t init_stack_depth = 0; // Height of "locals initialization" stack
1218 // at the beginning of the construct.
1219 int32_t previous_catch = -1; // Depth of the innermost catch containing this
1220 // 'try'.
1221
1222 // Values merged into the start or end of this control construct.
1225
1226 bool might_throw = false;
1227
1229
1231 uint32_t init_stack_depth, const uint8_t* pc,
1233 : PcForErrors<ValidationTag::validate>(pc),
1234 kind(kind),
1239
1240 // Check whether the current block is reachable.
1241 bool reachable() const { return reachability == kReachable; }
1242
1243 // Check whether the rest of the block is unreachable.
1244 // Note that this is different from {!reachable()}, as there is also the
1245 // "indirect unreachable state", for which both {reachable()} and
1246 // {unreachable()} return false.
1247 bool unreachable() const { return reachability == kUnreachable; }
1248
1249 // Return the reachability of new control structs started in this block.
1253
1254 bool is_if() const { return is_onearmed_if() || is_if_else(); }
1255 bool is_onearmed_if() const { return kind == kControlIf; }
1256 bool is_if_else() const { return kind == kControlIfElse; }
1257 bool is_block() const { return kind == kControlBlock; }
1258 bool is_loop() const { return kind == kControlLoop; }
1259 bool is_incomplete_try() const { return kind == kControlTry; }
1260 bool is_try_catch() const { return kind == kControlTryCatch; }
1261 bool is_try_catchall() const { return kind == kControlTryCatchAll; }
1262 bool is_try() const {
1264 }
1265 bool is_try_table() { return kind == kControlTryTable; }
1266
1268 return is_loop() ? &this->start_merge : &this->end_merge;
1269 }
1270};
1271
1272// This is the list of callback functions that an interface for the
1273// WasmFullDecoder should implement.
1274// F(Name, args...)
1275#define INTERFACE_FUNCTIONS(F) \
1276 INTERFACE_META_FUNCTIONS(F) \
1277 INTERFACE_CONSTANT_FUNCTIONS(F) \
1278 INTERFACE_NON_CONSTANT_FUNCTIONS(F)
1279
1280#define INTERFACE_META_FUNCTIONS(F) \
1281 F(TraceInstruction, uint32_t value) \
1282 F(StartFunction) \
1283 F(StartFunctionBody, Control* block) \
1284 F(FinishFunction) \
1285 F(OnFirstError) \
1286 F(NextInstruction, WasmOpcode)
1287
1288#define INTERFACE_CONSTANT_FUNCTIONS(F) /* force 80 columns */ \
1289 F(I32Const, Value* result, int32_t value) \
1290 F(I64Const, Value* result, int64_t value) \
1291 F(F32Const, Value* result, float value) \
1292 F(F64Const, Value* result, double value) \
1293 F(S128Const, const Simd128Immediate& imm, Value* result) \
1294 F(GlobalGet, Value* result, const GlobalIndexImmediate& imm) \
1295 F(DoReturn, uint32_t drop_values) \
1296 F(UnOp, WasmOpcode opcode, const Value& value, Value* result) \
1297 F(BinOp, WasmOpcode opcode, const Value& lhs, const Value& rhs, \
1298 Value* result) \
1299 F(RefNull, ValueType type, Value* result) \
1300 F(RefFunc, uint32_t function_index, Value* result) \
1301 F(StructNew, const StructIndexImmediate& imm, const Value& descriptor, \
1302 const Value args[], Value* result) \
1303 F(StructNewDefault, const StructIndexImmediate& imm, \
1304 const Value& descriptor, Value* result) \
1305 F(ArrayNew, const ArrayIndexImmediate& imm, const Value& length, \
1306 const Value& initial_value, Value* result) \
1307 F(ArrayNewDefault, const ArrayIndexImmediate& imm, const Value& length, \
1308 Value* result) \
1309 F(ArrayNewFixed, const ArrayIndexImmediate& imm, \
1310 const IndexImmediate& length_imm, const Value elements[], Value* result) \
1311 F(ArrayNewSegment, const ArrayIndexImmediate& array_imm, \
1312 const IndexImmediate& data_segment, const Value& offset, \
1313 const Value& length, Value* result) \
1314 F(RefI31, const Value& input, Value* result) \
1315 F(StringConst, const StringConstImmediate& imm, Value* result)
1316
1317#define INTERFACE_NON_CONSTANT_FUNCTIONS(F) /* force 80 columns */ \
1318 /* Control: */ \
1319 F(Block, Control* block) \
1320 F(Loop, Control* block) \
1321 F(Try, Control* block) \
1322 F(TryTable, Control* block) \
1323 F(CatchCase, Control* block, const CatchCase& catch_case, \
1324 base::Vector<Value> caught_values) \
1325 F(If, const Value& cond, Control* if_block) \
1326 F(FallThruTo, Control* c) \
1327 F(PopControl, Control* block) \
1328 /* Instructions: */ \
1329 F(RefAsNonNull, const Value& arg, Value* result) \
1330 F(Drop) \
1331 F(LocalGet, Value* result, const IndexImmediate& imm) \
1332 F(LocalSet, const Value& value, const IndexImmediate& imm) \
1333 F(LocalTee, const Value& value, Value* result, const IndexImmediate& imm) \
1334 F(GlobalSet, const Value& value, const GlobalIndexImmediate& imm) \
1335 F(TableGet, const Value& index, Value* result, const IndexImmediate& imm) \
1336 F(TableSet, const Value& index, const Value& value, \
1337 const IndexImmediate& imm) \
1338 F(Trap, TrapReason reason) \
1339 F(NopForTestingUnsupportedInLiftoff) \
1340 F(Forward, const Value& from, Value* to) \
1341 F(Select, const Value& cond, const Value& fval, const Value& tval, \
1342 Value* result) \
1343 F(BrOrRet, uint32_t depth) \
1344 F(BrIf, const Value& cond, uint32_t depth) \
1345 F(BrTable, const BranchTableImmediate& imm, const Value& key) \
1346 F(Else, Control* if_block) \
1347 F(LoadMem, LoadType type, const MemoryAccessImmediate& imm, \
1348 const Value& index, Value* result) \
1349 F(LoadTransform, LoadType type, LoadTransformationKind transform, \
1350 const MemoryAccessImmediate& imm, const Value& index, Value* result) \
1351 F(LoadLane, LoadType type, const Value& value, const Value& index, \
1352 const MemoryAccessImmediate& imm, const uint8_t laneidx, Value* result) \
1353 F(StoreMem, StoreType type, const MemoryAccessImmediate& imm, \
1354 const Value& index, const Value& value) \
1355 F(StoreLane, StoreType type, const MemoryAccessImmediate& imm, \
1356 const Value& index, const Value& value, const uint8_t laneidx) \
1357 F(CurrentMemoryPages, const MemoryIndexImmediate& imm, Value* result) \
1358 F(MemoryGrow, const MemoryIndexImmediate& imm, const Value& value, \
1359 Value* result) \
1360 F(CallDirect, const CallFunctionImmediate& imm, const Value args[], \
1361 Value returns[]) \
1362 F(CallIndirect, const Value& index, const CallIndirectImmediate& imm, \
1363 const Value args[], Value returns[]) \
1364 F(CallRef, const Value& func_ref, const FunctionSig* sig, \
1365 const Value args[], const Value returns[]) \
1366 F(ReturnCallRef, const Value& func_ref, const FunctionSig* sig, \
1367 const Value args[]) \
1368 F(ReturnCall, const CallFunctionImmediate& imm, const Value args[]) \
1369 F(ReturnCallIndirect, const Value& index, const CallIndirectImmediate& imm, \
1370 const Value args[]) \
1371 F(BrOnNull, const Value& ref_object, uint32_t depth, \
1372 bool pass_null_along_branch, Value* result_on_fallthrough) \
1373 F(BrOnNonNull, const Value& ref_object, Value* result, uint32_t depth, \
1374 bool drop_null_on_fallthrough) \
1375 F(SimdOp, WasmOpcode opcode, const Value args[], Value* result) \
1376 F(SimdLaneOp, WasmOpcode opcode, const SimdLaneImmediate& imm, \
1377 base::Vector<const Value> inputs, Value* result) \
1378 F(Simd8x16ShuffleOp, const Simd128Immediate& imm, const Value& input0, \
1379 const Value& input1, Value* result) \
1380 F(Throw, const TagIndexImmediate& imm, const Value args[]) \
1381 F(ThrowRef, Value* value) \
1382 F(Rethrow, Control* block) \
1383 F(CatchException, const TagIndexImmediate& imm, Control* block, \
1384 base::Vector<Value> caught_values) \
1385 F(Delegate, uint32_t depth, Control* block) \
1386 F(CatchAll, Control* block) \
1387 F(ContNew, const Value& func_ref, const ContIndexImmediate* imm, \
1388 Value* result) \
1389 F(Resume, const ContIndexImmediate* imm, base::Vector<HandlerCase> handlers, \
1390 const Value args[], const Value returns[]) \
1391 F(Suspend, const TagIndexImmediate& imm, const Value args[], \
1392 const Value returns[]) \
1393 F(AtomicOp, WasmOpcode opcode, const Value args[], const size_t argc, \
1394 const MemoryAccessImmediate& imm, Value* result) \
1395 F(AtomicFence) \
1396 F(MemoryInit, const MemoryInitImmediate& imm, const Value& dst, \
1397 const Value& src, const Value& size) \
1398 F(DataDrop, const IndexImmediate& imm) \
1399 F(MemoryCopy, const MemoryCopyImmediate& imm, const Value& dst, \
1400 const Value& src, const Value& size) \
1401 F(MemoryFill, const MemoryIndexImmediate& imm, const Value& dst, \
1402 const Value& value, const Value& size) \
1403 F(TableInit, const TableInitImmediate& imm, const Value& dst, \
1404 const Value& src, const Value& size) \
1405 F(ElemDrop, const IndexImmediate& imm) \
1406 F(TableCopy, const TableCopyImmediate& imm, const Value& dst, \
1407 const Value& src, const Value& size) \
1408 F(TableGrow, const IndexImmediate& imm, const Value& value, \
1409 const Value& delta, Value* result) \
1410 F(TableSize, const IndexImmediate& imm, Value* result) \
1411 F(TableFill, const IndexImmediate& imm, const Value& start, \
1412 const Value& value, const Value& count) \
1413 F(StructGet, const Value& struct_object, const FieldImmediate& field, \
1414 bool is_signed, Value* result) \
1415 F(StructSet, const Value& struct_object, const FieldImmediate& field, \
1416 const Value& field_value) \
1417 F(ArrayGet, const Value& array_obj, const ArrayIndexImmediate& imm, \
1418 const Value& index, bool is_signed, Value* result) \
1419 F(ArraySet, const Value& array_obj, const ArrayIndexImmediate& imm, \
1420 const Value& index, const Value& value) \
1421 F(ArrayLen, const Value& array_obj, Value* result) \
1422 F(ArrayCopy, const Value& dst, const Value& dst_index, const Value& src, \
1423 const Value& src_index, const ArrayIndexImmediate& src_imm, \
1424 const Value& length) \
1425 F(ArrayFill, const ArrayIndexImmediate& imm, const Value& array, \
1426 const Value& index, const Value& value, const Value& length) \
1427 F(ArrayInitSegment, const ArrayIndexImmediate& array_imm, \
1428 const IndexImmediate& segment_imm, const Value& array, \
1429 const Value& array_index, const Value& segment_offset, \
1430 const Value& length) \
1431 F(I31GetS, const Value& input, Value* result) \
1432 F(I31GetU, const Value& input, Value* result) \
1433 F(RefGetDesc, const Value& ref, Value* desc) \
1434 F(RefTest, HeapType target_type, const Value& obj, Value* result, \
1435 bool null_succeeds) \
1436 F(RefTestAbstract, const Value& obj, HeapType type, Value* result, \
1437 bool null_succeeds) \
1438 F(RefCast, const Value& obj, Value* result) \
1439 F(RefCastDesc, const Value& obj, const Value& desc, Value* result) \
1440 F(RefCastAbstract, const Value& obj, HeapType type, Value* result, \
1441 bool null_succeeds) \
1442 F(AssertNullTypecheck, const Value& obj, Value* result) \
1443 F(AssertNotNullTypecheck, const Value& obj, Value* result) \
1444 F(BrOnCast, HeapType target_type, const Value& obj, Value* result_on_branch, \
1445 uint32_t depth, bool null_succeeds) \
1446 F(BrOnCastFail, HeapType target_type, const Value& obj, \
1447 Value* result_on_fallthrough, uint32_t depth, bool null_succeeds) \
1448 F(BrOnCastDesc, HeapType target_type, const Value& obj, const Value& desc, \
1449 Value* result_on_branch, uint32_t depth, bool null_succeeds) \
1450 F(BrOnCastDescFail, HeapType target_type, const Value& obj, \
1451 const Value& desc, Value* result_on_fallthrough, uint32_t depth, \
1452 bool null_succeeds) \
1453 F(BrOnCastAbstract, const Value& obj, HeapType type, \
1454 Value* result_on_branch, uint32_t depth, bool null_succeeds) \
1455 F(BrOnCastFailAbstract, const Value& obj, HeapType type, \
1456 Value* result_on_fallthrough, uint32_t depth, bool null_succeeds) \
1457 F(StringNewWtf8, const MemoryIndexImmediate& memory, \
1458 const unibrow::Utf8Variant variant, const Value& offset, \
1459 const Value& size, Value* result) \
1460 F(StringNewWtf8Array, const unibrow::Utf8Variant variant, \
1461 const Value& array, const Value& start, const Value& end, Value* result) \
1462 F(StringNewWtf16, const MemoryIndexImmediate& memory, const Value& offset, \
1463 const Value& size, Value* result) \
1464 F(StringNewWtf16Array, const Value& array, const Value& start, \
1465 const Value& end, Value* result) \
1466 F(StringMeasureWtf8, const unibrow::Utf8Variant variant, const Value& str, \
1467 Value* result) \
1468 F(StringMeasureWtf16, const Value& str, Value* result) \
1469 F(StringEncodeWtf8, const MemoryIndexImmediate& memory, \
1470 const unibrow::Utf8Variant variant, const Value& str, \
1471 const Value& address, Value* result) \
1472 F(StringEncodeWtf8Array, const unibrow::Utf8Variant variant, \
1473 const Value& str, const Value& array, const Value& start, Value* result) \
1474 F(StringEncodeWtf16, const MemoryIndexImmediate& memory, const Value& str, \
1475 const Value& address, Value* result) \
1476 F(StringEncodeWtf16Array, const Value& str, const Value& array, \
1477 const Value& start, Value* result) \
1478 F(StringConcat, const Value& head, const Value& tail, Value* result) \
1479 F(StringEq, const Value& a, const Value& b, Value* result) \
1480 F(StringIsUSVSequence, const Value& str, Value* result) \
1481 F(StringAsWtf8, const Value& str, Value* result) \
1482 F(StringViewWtf8Advance, const Value& view, const Value& pos, \
1483 const Value& bytes, Value* result) \
1484 F(StringViewWtf8Encode, const MemoryIndexImmediate& memory, \
1485 const unibrow::Utf8Variant variant, const Value& view, const Value& addr, \
1486 const Value& pos, const Value& bytes, Value* next_pos, \
1487 Value* bytes_written) \
1488 F(StringViewWtf8Slice, const Value& view, const Value& start, \
1489 const Value& end, Value* result) \
1490 F(StringAsWtf16, const Value& str, Value* result) \
1491 F(StringViewWtf16GetCodeUnit, const Value& view, const Value& pos, \
1492 Value* result) \
1493 F(StringViewWtf16Encode, const MemoryIndexImmediate& memory, \
1494 const Value& view, const Value& addr, const Value& pos, \
1495 const Value& codeunits, Value* result) \
1496 F(StringViewWtf16Slice, const Value& view, const Value& start, \
1497 const Value& end, Value* result) \
1498 F(StringAsIter, const Value& str, Value* result) \
1499 F(StringViewIterNext, const Value& view, Value* result) \
1500 F(StringViewIterAdvance, const Value& view, const Value& codepoints, \
1501 Value* result) \
1502 F(StringViewIterRewind, const Value& view, const Value& codepoints, \
1503 Value* result) \
1504 F(StringViewIterSlice, const Value& view, const Value& codepoints, \
1505 Value* result) \
1506 F(StringCompare, const Value& lhs, const Value& rhs, Value* result) \
1507 F(StringFromCodePoint, const Value& code_point, Value* result) \
1508 F(StringHash, const Value& string, Value* result)
1509
1510// This is a global constant invalid instruction trace, to be pointed at by
1511// the current instruction trace pointer in the default case
// NOTE(review): presumably {current_inst_trace_} points here whenever no real
// trace entry applies to the code being decoded — confirm against the
// WasmDecoder constructor.
1512const std::pair<uint32_t, uint32_t> invalid_instruction_trace = {0, 0};
1513
1514// A fast vector implementation, without implicit bounds checks (see
1515// https://crbug.com/1358853).
1516template <typename T>
1518 public:
1519 FastZoneVector() = default;
// Pre-reserves capacity for {initial_size} elements in {zone}; the vector
// itself starts out empty ({Grow} only reallocates storage, it does not
// advance {end_}).
1520 explicit FastZoneVector(int initial_size, Zone* zone) {
1521 Grow(initial_size, zone);
1522 }
1523
1524#ifdef DEBUG
1525 ~FastZoneVector() {
1526 // Check that {Reset} was called on this vector.
1528 }
1529#endif
1530
1531 void Reset(Zone* zone) {
1532 if (begin_ == nullptr) return;
1533 if constexpr (!std::is_trivially_destructible_v<T>) {
1534 for (T* ptr = begin_; ptr != end_; ++ptr) {
1535 ptr->~T();
1536 }
1537 }
1539 begin_ = nullptr;
1540 end_ = nullptr;
1541 capacity_end_ = nullptr;
1542 }
1543
1544 T* begin() const { return begin_; }
1545 T* end() const { return end_; }
1546
1547 T& front() {
1548 DCHECK(!empty());
1549 return begin_[0];
1550 }
1551
1552 T& back() {
1553 DCHECK(!empty());
1554 return end_[-1];
1555 }
1556
1557 uint32_t size() const { return static_cast<uint32_t>(end_ - begin_); }
1558
1559 bool empty() const { return begin_ == end_; }
1560
1561 T& operator[](uint32_t index) {
1562 DCHECK_GE(size(), index);
1563 return begin_[index];
1564 }
1565
1566 void shrink_to(uint32_t new_size) {
1567 static_assert(std::is_trivially_destructible_v<T>);
1568 DCHECK_GE(size(), new_size);
1569 end_ = begin_ + new_size;
1570 }
1571
1572 void pop(uint32_t num = 1) {
1573 DCHECK_GE(size(), num);
1574 for (T* new_end = end_ - num; end_ != new_end;) {
1575 --end_;
1576 end_->~T();
1577 }
1578 }
1579
1580 void push(T value) {
1582 *end_ = std::move(value);
1583 ++end_;
1584 }
1585
1586 template <typename... Args>
1587 void emplace_back(Args&&... args) {
1589 new (end_) T{std::forward<Args>(args)...};
1590 ++end_;
1591 }
1592
1593 V8_INLINE void EnsureMoreCapacity(int slots_needed, Zone* zone) {
1594 if (V8_LIKELY(capacity_end_ - end_ >= slots_needed)) return;
1595 Grow(slots_needed, zone);
1596 }
1597
1598 private:
1599 V8_NOINLINE V8_PRESERVE_MOST void Grow(int slots_needed, Zone* zone) {
1600 size_t new_capacity = std::max(
1601 size_t{8}, base::bits::RoundUpToPowerOfTwo(size() + slots_needed));
1602 CHECK_GE(kMaxUInt32, new_capacity);
1603 DCHECK_LT(capacity_end_ - begin_, new_capacity);
1604 T* new_begin = zone->template AllocateArray<T>(new_capacity);
1605 if (begin_) {
1606 for (T *ptr = begin_, *new_ptr = new_begin; ptr != end_;
1607 ++ptr, ++new_ptr) {
1608 new (new_ptr) T{std::move(*ptr)};
1609 ptr->~T();
1610 }
1612 }
1613 end_ = new_begin + (end_ - begin_);
1614 begin_ = new_begin;
1615 capacity_end_ = new_begin + new_capacity;
1616 }
1617
1618 // The array is zone-allocated inside {EnsureMoreCapacity}.
1619 T* begin_ = nullptr;
1620 T* end_ = nullptr;
1621 T* capacity_end_ = nullptr;
1622};
1623
1624// Generic Wasm bytecode decoder with utilities for decoding immediates,
1625// lengths, etc.
1626template <typename ValidationTag, DecodingMode decoding_mode = kFunctionBody>
1627class WasmDecoder : public Decoder {
1628 public:
1630 WasmDetectedFeatures* detected, const FunctionSig* sig,
1631 bool is_shared, const uint8_t* start, const uint8_t* end,
1632 uint32_t buffer_offset = 0)
1634 zone_(zone),
1635 module_(module),
1636 enabled_(enabled),
1637 detected_(detected),
1638 sig_(sig),
1641 if (V8_UNLIKELY(module_ && !module_->inst_traces.empty())) {
1642 auto last_trace = module_->inst_traces.end() - 1;
1643 auto first_inst_trace =
1644 std::lower_bound(module_->inst_traces.begin(), last_trace,
1645 std::make_pair(buffer_offset, 0),
1646 [](const std::pair<uint32_t, uint32_t>& a,
1647 const std::pair<uint32_t, uint32_t>& b) {
1648 return a.first < b.first;
1649 });
1650 if (V8_UNLIKELY(first_inst_trace != last_trace)) {
1651 current_inst_trace_ = &*first_inst_trace;
1652 }
1653 }
1654 }
1655
1656 Zone* zone() const { return zone_; }
1657
1658 uint32_t num_locals() const { return num_locals_; }
1659
1661 return base::VectorOf(local_types_, num_locals_);
1662 }
1663 ValueType local_type(uint32_t index) const {
1664 DCHECK_GE(num_locals_, index);
1665 return local_types_[index];
1666 }
1667
1668 // Decodes local definitions in the current decoder.
1669 // The decoded locals will be appended to {this->local_types_}.
1670 // The decoder's pc is not advanced.
1671 // The total length of decoded locals is returned.
1672 uint32_t DecodeLocals(const uint8_t* pc) {
1673 DCHECK_NULL(local_types_);
1675
1676 // In a first step, count the number of locals and store the decoded
1677 // entries.
1678 num_locals_ = static_cast<uint32_t>(this->sig_->parameter_count());
1679
1680 // Decode local declarations, if any.
1681 auto [entries, entries_length] =
1682 read_u32v<ValidationTag>(pc, "local decls count");
1683
1684 if (!VALIDATE(ok())) {
1685 DecodeError(pc, "invalid local decls count");
1686 return 0;
1687 }
1688 TRACE("local decls count: %u\n", entries);
1689
1690 // Do an early validity check, to avoid allocating too much memory below.
1691 // Every entry needs at least two bytes (count plus type); if that many are
1692 // not available any more, flag that as an error.
1693 if (available_bytes() / 2 < entries) {
1694 DecodeError(pc, "local decls count bigger than remaining function size");
1695 return 0;
1696 }
1697
1698 struct DecodedLocalEntry {
1699 uint32_t count;
1701 };
1703 uint32_t total_length = entries_length;
1704 for (uint32_t entry = 0; entry < entries; ++entry) {
1705 if (!VALIDATE(more())) {
1706 DecodeError(end(),
1707 "expected more local decls but reached end of input");
1708 return 0;
1709 }
1710
1711 auto [count, count_length] =
1712 read_u32v<ValidationTag>(pc + total_length, "local count");
1713 if (!VALIDATE(ok())) {
1714 DecodeError(pc + total_length, "invalid local count");
1715 return 0;
1716 }
1719 DecodeError(pc + total_length, "local count too large");
1720 return 0;
1721 }
1722 total_length += count_length;
1723
1724 auto [type, type_length] =
1726 this, pc + total_length, enabled_);
1727 ValidateValueType(pc + total_length, type);
1728 if (!VALIDATE(ok())) return 0;
1729 if (module_) {
1731 } else {
1732 DCHECK(!ValidationTag::validate);
1733 }
1734 if (!VALIDATE(!is_shared_ || type.is_shared())) {
1735 DecodeError(pc + total_length, "local must have shared type");
1736 return 0;
1737 }
1738 total_length += type_length;
1739
1740 num_locals_ += count;
1741 decoded_locals[entry] = DecodedLocalEntry{count, type};
1742 }
1743 DCHECK(ok());
1744
1745 if (num_locals_ > 0) {
1746 // Now build the array of local types from the parsed entries.
1747 local_types_ = zone_->AllocateArray<ValueType>(num_locals_);
1748 ValueType* locals_ptr = local_types_;
1749
1750 if (sig_->parameter_count() > 0) {
1751 std::copy(sig_->parameters().begin(), sig_->parameters().end(),
1752 locals_ptr);
1753 locals_ptr += sig_->parameter_count();
1754 }
1755
1756 for (auto& entry : decoded_locals) {
1757 std::fill_n(locals_ptr, entry.count, entry.type);
1758 locals_ptr += entry.count;
1759 }
1760 DCHECK_EQ(locals_ptr, local_types_ + num_locals_);
1761 }
1762 return total_length;
1763 }
1764
1765 // Shorthand that forwards to the {DecodeError} functions above, passing our
1766 // {ValidationTag}.
1767 template <typename... Args>
1769 wasm::DecodeError<ValidationTag>(this, std::forward<Args>(args)...);
1770 }
1771
1772 // Returns a BitVector of length {locals_count + 1} representing the set of
1773 // variables that are assigned in the loop starting at {pc}. The additional
1774 // position at the end of the vector represents possible assignments to
1775 // the instance cache.
1777 const uint8_t* pc,
1778 uint32_t locals_count, Zone* zone,
1779 bool* loop_is_innermost = nullptr) {
1780 if (pc >= decoder->end()) return nullptr;
1781 if (*pc != kExprLoop) return nullptr;
1782 // The number of locals_count is augmented by 1 so that the 'locals_count'
1783 // index can be used to track the instance cache.
1784 BitVector* assigned = zone->New<BitVector>(locals_count + 1, zone);
1785 int depth = -1; // We will increment the depth to 0 when we decode the
1786 // starting 'loop' opcode.
1787 if (loop_is_innermost) *loop_is_innermost = true;
1788 // Iteratively process all AST nodes nested inside the loop.
1789 while (pc < decoder->end() && VALIDATE(decoder->ok())) {
1790 WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
1791 switch (opcode) {
1792 case kExprLoop:
1793 if (loop_is_innermost && depth >= 0) *loop_is_innermost = false;
1794 [[fallthrough]];
1795 case kExprIf:
1796 case kExprBlock:
1797 case kExprTry:
1798 case kExprTryTable:
1799 depth++;
1800 break;
1801 case kExprLocalSet:
1802 case kExprLocalTee: {
1803 IndexImmediate imm(decoder, pc + 1, "local index", validate);
1804 // Unverified code might have an out-of-bounds index.
1805 if (imm.index < locals_count) assigned->Add(imm.index);
1806 break;
1807 }
1808 case kExprMemoryGrow:
1809 case kExprCallFunction:
1810 case kExprCallIndirect:
1811 case kExprCallRef:
1812 // Add instance cache to the assigned set.
1813 assigned->Add(locals_count);
1814 break;
1815 case kExprEnd:
1816 depth--;
1817 break;
1818 default:
1819 break;
1820 }
1821 if (depth < 0) break;
1822 pc += OpcodeLength(decoder, pc);
1823 }
1824 return VALIDATE(decoder->ok()) ? assigned : nullptr;
1825 }
1826
1827 bool Validate(const uint8_t* pc, TagIndexImmediate& imm) {
1828 size_t num_tags = module_->tags.size();
1829 if (!VALIDATE(imm.index < num_tags)) {
1830 DecodeError(pc, "Invalid tag index: %u", imm.index);
1831 return false;
1832 }
1833 V8_ASSUME(imm.index < num_tags);
1834 imm.tag = &module_->tags[imm.index];
1835 return true;
1836 }
1837
1838 bool Validate(const uint8_t* pc, GlobalIndexImmediate& imm) {
1839 // We compare with the current size of the globals vector. This is important
1840 // if we are decoding a constant expression in the global section.
1841 size_t num_globals = module_->globals.size();
1842 if (!VALIDATE(imm.index < num_globals)) {
1843 DecodeError(pc, "Invalid global index: %u", imm.index);
1844 return false;
1845 }
1846 V8_ASSUME(imm.index < num_globals);
1847 imm.global = &module_->globals[imm.index];
1848 if (!VALIDATE(!is_shared_ || imm.global->shared)) {
1849 DecodeError(pc, "Cannot access non-shared global %d in a shared %s",
1850 imm.index,
1851 decoding_mode == kConstantExpression ? "constant expression"
1852 : "function");
1853 return false;
1854 }
1855
1856 if constexpr (decoding_mode == kConstantExpression) {
1857 if (!VALIDATE(!imm.global->mutability)) {
1858 this->DecodeError(pc,
1859 "mutable globals cannot be used in constant "
1860 "expressions");
1861 return false;
1862 }
1863 }
1864
1865 return true;
1866 }
1867
1868 bool Validate(const uint8_t* pc, SigIndexImmediate& imm) {
1869 if (!VALIDATE(module_->has_signature(imm.index))) {
1870 DecodeError(pc, "invalid signature index: %u", imm.index.index);
1871 return false;
1872 }
1873 imm.sig = module_->signature(imm.index);
1874 imm.shared = module_->type(imm.index).is_shared;
1875 return true;
1876 }
1877
1878 bool Validate(const uint8_t* pc, StructIndexImmediate& imm) {
1879 if (!VALIDATE(module_->has_struct(imm.index))) {
1880 DecodeError(pc, "invalid struct index: %u", imm.index.index);
1881 return false;
1882 }
1883 imm.struct_type = module_->struct_type(imm.index);
1884 imm.shared = module_->type(imm.index).is_shared;
1885 return true;
1886 }
1887
1888 bool Validate(const uint8_t* pc, FieldImmediate& imm) {
1889 if (!Validate(pc, imm.struct_imm)) return false;
1890 if (!VALIDATE(imm.field_imm.index <
1891 imm.struct_imm.struct_type->field_count())) {
1892 DecodeError(pc + imm.struct_imm.length, "invalid field index: %u",
1893 imm.field_imm.index);
1894 return false;
1895 }
1896 return true;
1897 }
1898
1899 bool Validate(const uint8_t* pc, ArrayIndexImmediate& imm) {
1900 if (!VALIDATE(module_->has_array(imm.index))) {
1901 DecodeError(pc, "invalid array index: %u", imm.index.index);
1902 return false;
1903 }
1904 imm.array_type = module_->array_type(imm.index);
1905 imm.shared = module_->type(imm.index).is_shared;
1906 return true;
1907 }
1908
1909 bool CanReturnCall(const FunctionSig* target_sig) {
1910 if (sig_->return_count() != target_sig->return_count()) return false;
1911 auto target_sig_it = target_sig->returns().begin();
1912 for (ValueType ret_type : sig_->returns()) {
1913 if (!IsSubtypeOf(*target_sig_it++, ret_type, this->module_)) return false;
1914 }
1915 return true;
1916 }
1917
1918 bool Validate(const uint8_t* pc, CallFunctionImmediate& imm) {
1919 size_t num_functions = module_->functions.size();
1920 if (!VALIDATE(imm.index < num_functions)) {
1921 DecodeError(pc, "function index #%u is out of bounds", imm.index);
1922 return false;
1923 }
1924 if (is_shared_ && !module_->function_is_shared(imm.index)) {
1925 DecodeError(pc, "cannot call non-shared function %u", imm.index);
1926 return false;
1927 }
1928 V8_ASSUME(imm.index < num_functions);
1929 imm.sig = module_->functions[imm.index].sig;
1930 return true;
1931 }
1932
1933 bool Validate(const uint8_t* pc, CallIndirectImmediate& imm) {
1934 if (!Validate(pc, imm.sig_imm)) return false;
1935 if (!Validate(pc + imm.sig_imm.length, imm.table_imm)) return false;
1936 ValueType table_type = imm.table_imm.table->type;
1937 if (!VALIDATE(table_type.ref_type_kind() == RefTypeKind::kFunction)) {
1939 pc, "call_indirect: immediate table #%u is not of a function type",
1940 imm.table_imm.index);
1941 return false;
1942 }
1943 // The type specified by the immediate does not need to have any static
1944 // relation (neither sub nor super) to the type of the table. The type
1945 // of the function will be checked at runtime.
1946
1947 imm.sig = module_->signature(imm.sig_imm.index);
1948 return true;
1949 }
1950
1951 bool Validate(const uint8_t* pc, BranchDepthImmediate& imm,
1952 size_t control_depth) {
1953 if (!VALIDATE(imm.depth < control_depth)) {
1954 DecodeError(pc, "invalid branch depth: %u", imm.depth);
1955 return false;
1956 }
1957 return true;
1958 }
1959
1960 bool Validate(const uint8_t* pc, BranchTableImmediate& imm) {
1962 DecodeError(pc, "invalid table count (> max br_table size): %u",
1963 imm.table_count);
1964 return false;
1965 }
1966 return checkAvailable(imm.table_count);
1967 }
1968
1969 bool Validate(const uint8_t* pc, WasmOpcode opcode, SimdLaneImmediate& imm) {
1970 uint8_t num_lanes = 0;
1971 switch (opcode) {
1972 case kExprF64x2ExtractLane:
1973 case kExprF64x2ReplaceLane:
1974 case kExprI64x2ExtractLane:
1975 case kExprI64x2ReplaceLane:
1976 case kExprS128Load64Lane:
1977 case kExprS128Store64Lane:
1978 num_lanes = 2;
1979 break;
1980 case kExprF32x4ExtractLane:
1981 case kExprF32x4ReplaceLane:
1982 case kExprI32x4ExtractLane:
1983 case kExprI32x4ReplaceLane:
1984 case kExprS128Load32Lane:
1985 case kExprS128Store32Lane:
1986 num_lanes = 4;
1987 break;
1988 case kExprF16x8ExtractLane:
1989 case kExprF16x8ReplaceLane:
1990 case kExprI16x8ExtractLaneS:
1991 case kExprI16x8ExtractLaneU:
1992 case kExprI16x8ReplaceLane:
1993 case kExprS128Load16Lane:
1994 case kExprS128Store16Lane:
1995 num_lanes = 8;
1996 break;
1997 case kExprI8x16ExtractLaneS:
1998 case kExprI8x16ExtractLaneU:
1999 case kExprI8x16ReplaceLane:
2000 case kExprS128Load8Lane:
2001 case kExprS128Store8Lane:
2002 num_lanes = 16;
2003 break;
2004 default:
2005 UNREACHABLE();
2006 break;
2007 }
2008 if (!VALIDATE(imm.lane >= 0 && imm.lane < num_lanes)) {
2009 DecodeError(pc, "invalid lane index");
2010 return false;
2011 } else {
2012 return true;
2013 }
2014 }
2015
2016 bool Validate(const uint8_t* pc, Simd128Immediate& imm) {
2017 uint8_t max_lane = 0;
2018 for (uint32_t i = 0; i < kSimd128Size; ++i) {
2019 max_lane = std::max(max_lane, imm.value[i]);
2020 }
2021 // Shuffle indices must be in [0..31] for a 16 lane shuffle.
2022 if (!VALIDATE(max_lane < 2 * kSimd128Size)) {
2023 DecodeError(pc, "invalid shuffle mask");
2024 return false;
2025 }
2026 return true;
2027 }
2028
2029 bool Validate(const uint8_t* pc, BlockTypeImmediate& imm) {
2030 if (imm.sig.all().begin() == nullptr) {
2031 // Then use {sig_index} to initialize the signature.
2032 if (!VALIDATE(module_->has_signature(imm.sig_index))) {
2033 DecodeError(pc, "block type index %u is not a signature definition",
2034 imm.sig_index);
2035 return false;
2036 }
2037 imm.sig = *module_->signature(imm.sig_index);
2038 } else {
2039 // Then it's an MVP immediate with 0 parameters and 0-1 returns.
2040 DCHECK_EQ(0, imm.sig.parameter_count());
2041 DCHECK_GE(1, imm.sig.return_count());
2042 if (imm.sig.return_count()) {
2043 if (!ValidateValueType(pc, imm.sig.GetReturn(0))) return false;
2044 DCHECK_EQ(imm.sig.all().begin(), imm.single_return_sig_storage);
2046 }
2047 }
2048 return true;
2049 }
2050
2051 bool Validate(const uint8_t* pc, MemoryIndexImmediate& imm) {
2052 size_t num_memories = module_->memories.size();
2053 if (imm.index > 0 || imm.length > 1) {
2054 this->detected_->add_multi_memory();
2055 if (v8_flags.wasm_jitless) {
2056 DecodeError(pc, "Multiple memories not supported in Wasm jitless mode");
2057 return false;
2058 }
2059 }
2060
2061 if (!VALIDATE(imm.index < num_memories)) {
2063 "memory index %u exceeds number of declared memories (%zu)",
2064 imm.index, num_memories);
2065 return false;
2066 }
2067
2068 V8_ASSUME(imm.index < num_memories);
2069 imm.memory = this->module_->memories.data() + imm.index;
2070
2071 return true;
2072 }
2073
2074 bool Validate(const uint8_t* pc, MemoryAccessImmediate& imm) {
2075 size_t num_memories = module_->memories.size();
2076 if (!VALIDATE(imm.mem_index < num_memories)) {
2078 "memory index %u exceeds number of declared memories (%zu)",
2079 imm.mem_index, num_memories);
2080 return false;
2081 }
2082 if (!VALIDATE(this->module_->memories[imm.mem_index].is_memory64() ||
2083 imm.offset <= kMaxUInt32)) {
2084 this->DecodeError(pc, "memory offset outside 32-bit range: %" PRIu64,
2085 imm.offset);
2086 return false;
2087 }
2088
2089 V8_ASSUME(imm.mem_index < num_memories);
2090 imm.memory = this->module_->memories.data() + imm.mem_index;
2091
2092 return true;
2093 }
2094
2095 bool Validate(const uint8_t* pc, MemoryInitImmediate& imm) {
2096 return ValidateDataSegment(pc, imm.data_segment) &&
2097 Validate(pc + imm.data_segment.length, imm.memory);
2098 }
2099
2100 bool Validate(const uint8_t* pc, MemoryCopyImmediate& imm) {
2101 return Validate(pc, imm.memory_src) &&
2102 Validate(pc + imm.memory_src.length, imm.memory_dst);
2103 }
2104
2105 bool Validate(const uint8_t* pc, TableInitImmediate& imm) {
2106 if (!ValidateElementSegment(pc, imm.element_segment)) return false;
2107 if (!Validate(pc + imm.element_segment.length, imm.table)) {
2108 return false;
2109 }
2110 ValueType elem_type =
2111 module_->elem_segments[imm.element_segment.index].type;
2112 if (!VALIDATE(IsSubtypeOf(elem_type, imm.table.table->type, module_))) {
2113 DecodeError(pc, "table %u is not a super-type of %s", imm.table.index,
2114 elem_type.name().c_str());
2115 return false;
2116 }
2117 return true;
2118 }
2119
2120 bool Validate(const uint8_t* pc, TableCopyImmediate& imm) {
2121 if (!Validate(pc, imm.table_src)) return false;
2122 if (!Validate(pc + imm.table_src.length, imm.table_dst)) return false;
2123 ValueType src_type = imm.table_src.table->type;
2124 if (!VALIDATE(IsSubtypeOf(src_type, imm.table_dst.table->type, module_))) {
2125 DecodeError(pc, "table %u is not a super-type of %s", imm.table_dst.index,
2126 src_type.name().c_str());
2127 return false;
2128 }
2129 return true;
2130 }
2131
2132 bool Validate(const uint8_t* pc, StringConstImmediate& imm) {
2133 if (!VALIDATE(imm.index < module_->stringref_literals.size())) {
2134 DecodeError(pc, "Invalid string literal index: %u", imm.index);
2135 return false;
2136 }
2137 return true;
2138 }
2139
2140 bool Validate(const uint8_t* pc, TableIndexImmediate& imm) {
2141 if (imm.index > 0 || imm.length > 1) {
2142 this->detected_->add_reftypes();
2143 }
2144 size_t num_tables = module_->tables.size();
2145 if (!VALIDATE(imm.index < num_tables)) {
2146 DecodeError(pc, "table index %u exceeds number of tables (%zu)",
2147 imm.index, num_tables);
2148 return false;
2149 }
2150 imm.table = this->module_->tables.data() + imm.index;
2151
2152 if (!VALIDATE(!is_shared_ || imm.table->shared)) {
2154 "cannot reference non-shared table %u from shared function",
2155 imm.index);
2156 return false;
2157 }
2158
2159 return true;
2160 }
2161
2162 // The following Validate* functions all validate an `IndexImmediate`, albeit
2163 // differently according to context.
2164 bool ValidateElementSegment(const uint8_t* pc, IndexImmediate& imm) {
2165 size_t num_elem_segments = module_->elem_segments.size();
2166 if (!VALIDATE(imm.index < num_elem_segments)) {
2167 DecodeError(pc, "invalid element segment index: %u", imm.index);
2168 return false;
2169 }
2170 V8_ASSUME(imm.index < num_elem_segments);
2171 if (!VALIDATE(!is_shared_ || module_->elem_segments[imm.index].shared)) {
2173 pc,
2174 "cannot reference non-shared element segment %u from shared function",
2175 imm.index);
2176 return false;
2177 }
2178 return true;
2179 }
2180
2181 bool ValidateLocal(const uint8_t* pc, IndexImmediate& imm) {
2182 if (!VALIDATE(imm.index < num_locals())) {
2183 DecodeError(pc, "invalid local index: %u", imm.index);
2184 return false;
2185 }
2186 return true;
2187 }
2188
2189 bool ValidateFunction(const uint8_t* pc, IndexImmediate& imm) {
2190 size_t num_functions = module_->functions.size();
2191 if (!VALIDATE(imm.index < num_functions)) {
2192 DecodeError(pc, "function index #%u is out of bounds", imm.index);
2193 return false;
2194 }
2195 V8_ASSUME(imm.index < num_functions);
2196 if (decoding_mode == kFunctionBody &&
2197 !VALIDATE(module_->functions[imm.index].declared)) {
2198 DecodeError(pc, "undeclared reference to function #%u", imm.index);
2199 return false;
2200 }
2201 return true;
2202 }
2203
2204 bool ValidateCont(const uint8_t* pc, ContIndexImmediate& imm) {
2205 if (!VALIDATE(module_->has_cont_type(imm.index))) {
2206 DecodeError(pc, "invalid cont index: %u", imm.index.index);
2207 return false;
2208 }
2209 imm.cont_type = module_->cont_type(imm.index);
2210 imm.shared = module_->type(imm.index).is_shared;
2211
2212 return true;
2213 }
2214
2215 bool ValidateDataSegment(const uint8_t* pc, IndexImmediate& imm) {
2216 if (!VALIDATE(imm.index < module_->num_declared_data_segments)) {
2217 DecodeError(pc, "invalid data segment index: %u", imm.index);
2218 return false;
2219 }
2220 // TODO(14616): Data segments aren't available during eager validation.
2221 // Discussion: github.com/WebAssembly/shared-everything-threads/issues/83
2222 if (!VALIDATE(!is_shared_ || module_->data_segments[imm.index].shared)) {
2224 pc, "cannot refer to non-shared segment %u from a shared function",
2225 imm.index);
2226 return false;
2227 }
2228 return true;
2229 }
2230
  // Validates the value type of a typed select (select t) immediate.
  // NOTE(review): one original source line between these statements is not
  // visible in this excerpt — confirm against upstream before relying on it.
  bool Validate(const uint8_t* pc, SelectTypeImmediate& imm) {
    if (!VALIDATE(ValidateValueType(pc, imm.type))) return false;
    return true;
  }
2236
  // Validates the heap type of a heap-type immediate (e.g. ref.null).
  // NOTE(review): one original source line between these statements is not
  // visible in this excerpt — confirm against upstream before relying on it.
  bool Validate(const uint8_t* pc, HeapTypeImmediate& imm) {
    if (!VALIDATE(ValidateHeapType(pc, imm.type))) return false;
    return true;
  }
2242
2243 bool ValidateValueType(const uint8_t* pc, ValueType type) {
2245 module_, type);
2246 }
2247
2248 bool ValidateHeapType(const uint8_t* pc, HeapType type) {
2250 type);
2251 }
2252
  // Returns the length of the opcode under {pc}.
  // More precisely: the byte length of the whole instruction starting at
  // {pc} — the opcode (including any prefix byte) plus all of its immediate
  // operands. Every decoded immediate is additionally reported to each of
  // the given {ImmediateObservers} via fold expressions over {ios}.
  // NOTE(review): several lines of this function (mostly DECLARE_OPCODE_CASE
  // macro case labels and a few immediate constructions) are missing from
  // this excerpt; hedged notes mark the spots — confirm against upstream.
  template <typename... ImmediateObservers>
  static uint32_t OpcodeLength(WasmDecoder* decoder, const uint8_t* pc,
                               ImmediateObservers&... ios) {
    WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
    switch (opcode) {
      /********** Control opcodes **********/
      case kExprUnreachable:
      case kExprNop:
      case kExprNopForTestingUnsupportedInLiftoff:
      case kExprElse:
      case kExprEnd:
      case kExprReturn:
        return 1;
      case kExprTry:
      case kExprIf:
      case kExprLoop:
      case kExprBlock: {
        // NOTE(review): the BlockTypeImmediate construction head is missing
        // from this excerpt.
            validate);
        (ios.BlockType(imm), ...);
        return 1 + imm.length;
      }
      case kExprRethrow:
      case kExprBr:
      case kExprBrIf:
      case kExprBrOnNull:
      case kExprBrOnNonNull:
      case kExprDelegate: {
        BranchDepthImmediate imm(decoder, pc + 1, validate);
        (ios.BranchDepth(imm), ...);
        return 1 + imm.length;
      }
      case kExprBrTable: {
        BranchTableImmediate imm(decoder, pc + 1, validate);
        (ios.BranchTable(imm), ...);
        // The length of br_table includes the variable-length target list.
        BranchTableIterator<ValidationTag> iterator(decoder, imm);
        return 1 + iterator.length();
      }
      case kExprTryTable: {
        BlockTypeImmediate block_type_imm(WasmEnabledFeatures::All(), decoder,
                                          pc + 1, validate);
        (ios.BlockType(block_type_imm), ...);
        TryTableImmediate try_table_imm(decoder, pc + 1 + block_type_imm.length,
                                        validate);
        (ios.TryTable(try_table_imm), ...);
        TryTableIterator<ValidationTag> iterator(decoder, try_table_imm);
        return 1 + block_type_imm.length + iterator.length();
      }
      case kExprThrow:
      case kExprCatch: {
        TagIndexImmediate imm(decoder, pc + 1, validate);
        (ios.TagIndex(imm), ...);
        return 1 + imm.length;
      }
      case kExprThrowRef:
        return 1;

      /********** Core stack switching ********/
      case kExprContNew: {
        ContIndexImmediate imm(decoder, pc + 1, validate);
        (ios.TypeIndex(imm), ...);
        return 1 + imm.length;
      }
      case kExprContBind: {
        ContIndexImmediate src(decoder, pc + 1, validate);
        (ios.TypeIndex(src), ...);
        ContIndexImmediate dst(decoder, pc + 1 + src.length, validate);
        (ios.TypeIndex(dst), ...);
        return 1 + src.length + dst.length;
      }
      case kExprSuspend: {
        TagIndexImmediate imm(decoder, pc + 1, validate);
        (ios.TagIndex(imm), ...);
        return 1 + imm.length;
      }
      case kExprResume: {
        ContIndexImmediate src(decoder, pc + 1, validate);
        (ios.TypeIndex(src), ...);
        EffectHandlerTableImmediate handler_table(decoder, pc + 1 + src.length,
                                                  validate);
        (ios.EffectHandlerTable(handler_table), ...);
        // NOTE(review): the iterator construction head is missing from this
        // excerpt.
                                                      handler_table);
        return 1 + src.length + iterator.length();
      }
      case kExprResumeThrow: {
        ContIndexImmediate src(decoder, pc + 1, validate);
        (ios.TypeIndex(src), ...);
        TagIndexImmediate event(decoder, pc + src.length + 1, validate);
        (ios.TagIndex(event), ...);
        EffectHandlerTableImmediate handler_table(
            decoder, pc + 1 + src.length + event.length, validate);
        (ios.EffectHandlerTable(handler_table), ...);
        // NOTE(review): the iterator construction head is missing from this
        // excerpt.
                                                      handler_table);
        return 1 + src.length + event.length + iterator.length();
      }
      case kExprSwitch: {
        ContIndexImmediate src(decoder, pc + 1, validate);
        (ios.TypeIndex(src), ...);
        TagIndexImmediate tag(decoder, pc + src.length + 1, validate);
        (ios.TagIndex(tag), ...);
        return 1 + src.length + tag.length;
      }

      /********** Misc opcodes **********/
      case kExprCallFunction:
      case kExprReturnCall: {
        CallFunctionImmediate imm(decoder, pc + 1, validate);
        (ios.FunctionIndex(imm), ...);
        return 1 + imm.length;
      }
      case kExprCallIndirect:
      case kExprReturnCallIndirect: {
        CallIndirectImmediate imm(decoder, pc + 1, validate);
        (ios.CallIndirect(imm), ...);
        return 1 + imm.length;
      }
      case kExprCallRef:
      case kExprReturnCallRef: {
        SigIndexImmediate imm(decoder, pc + 1, validate);
        (ios.TypeIndex(imm), ...);
        return 1 + imm.length;
      }
      case kExprDrop:
      case kExprSelect:
      case kExprCatchAll:
      case kExprRefEq:
        return 1;
      case kExprSelectWithType: {
        // NOTE(review): the SelectTypeImmediate construction head is missing
        // from this excerpt.
            validate);
        (ios.SelectType(imm), ...);
        return 1 + imm.length;
      }

      case kExprLocalGet:
      case kExprLocalSet:
      case kExprLocalTee: {
        IndexImmediate imm(decoder, pc + 1, "local index", validate);
        (ios.LocalIndex(imm), ...);
        return 1 + imm.length;
      }
      case kExprGlobalGet:
      case kExprGlobalSet: {
        GlobalIndexImmediate imm(decoder, pc + 1, validate);
        (ios.GlobalIndex(imm), ...);
        return 1 + imm.length;
      }
      case kExprTableGet:
      case kExprTableSet: {
        TableIndexImmediate imm(decoder, pc + 1, validate);
        (ios.TableIndex(imm), ...);
        return 1 + imm.length;
      }
      case kExprI32Const: {
        ImmI32Immediate imm(decoder, pc + 1, validate);
        (ios.I32Const(imm), ...);
        return 1 + imm.length;
      }
      case kExprI64Const: {
        ImmI64Immediate imm(decoder, pc + 1, validate);
        (ios.I64Const(imm), ...);
        return 1 + imm.length;
      }
      case kExprF32Const:
        // Fixed-width 4-byte immediate; only decode it if observers exist.
        if (sizeof...(ios) > 0) {
          ImmF32Immediate imm(decoder, pc + 1, validate);
          (ios.F32Const(imm), ...);
        }
        return 5;
      case kExprF64Const:
        // Fixed-width 8-byte immediate; only decode it if observers exist.
        if (sizeof...(ios) > 0) {
          ImmF64Immediate imm(decoder, pc + 1, validate);
          (ios.F64Const(imm), ...);
        }
        return 9;
      case kExprRefNull: {
        HeapTypeImmediate imm(WasmEnabledFeatures::All(), decoder, pc + 1,
                              validate);
        (ios.HeapType(imm), ...);
        return 1 + imm.length;
      }
      case kExprRefIsNull:
      case kExprRefAsNonNull:
        return 1;
      case kExprRefFunc: {
        IndexImmediate imm(decoder, pc + 1, "function index", validate);
        (ios.FunctionIndex(imm), ...);
        return 1 + imm.length;
      }

#define DECLARE_OPCODE_CASE(name, ...) case kExpr##name:
      // clang-format off
      /********** Simple and memory opcodes **********/
      // NOTE(review): the FOREACH_* macro case labels are missing from this
      // excerpt.
        return 1;
        MemoryAccessImmediate imm(decoder, pc + 1, UINT32_MAX,
                                  validate);
        (ios.MemoryAccess(imm), ...);
        return 1 + imm.length;
      }
      // clang-format on
      case kExprMemoryGrow:
      case kExprMemorySize: {
        MemoryIndexImmediate imm(decoder, pc + 1, validate);
        (ios.MemoryIndex(imm), ...);
        return 1 + imm.length;
      }

      /********** Prefixed opcodes **********/
      case kNumericPrefix: {
        // {length} counts the prefix byte plus the LEB-encoded sub-opcode.
        uint32_t length;
        std::tie(opcode, length) =
            decoder->read_prefixed_opcode<ValidationTag>(pc);
        switch (opcode) {
          case kExprI32SConvertSatF32:
          case kExprI32UConvertSatF32:
          case kExprI32SConvertSatF64:
          case kExprI32UConvertSatF64:
          case kExprI64SConvertSatF32:
          case kExprI64UConvertSatF32:
          case kExprI64SConvertSatF64:
          case kExprI64UConvertSatF64:
            return length;
          case kExprMemoryInit: {
            MemoryInitImmediate imm(decoder, pc + length, validate);
            (ios.MemoryInit(imm), ...);
            return length + imm.length;
          }
          case kExprDataDrop: {
            IndexImmediate imm(decoder, pc + length, "data segment index",
                               validate);
            (ios.DataSegmentIndex(imm), ...);
            return length + imm.length;
          }
          case kExprMemoryCopy: {
            MemoryCopyImmediate imm(decoder, pc + length, validate);
            (ios.MemoryCopy(imm), ...);
            return length + imm.length;
          }
          case kExprMemoryFill: {
            MemoryIndexImmediate imm(decoder, pc + length, validate);
            (ios.MemoryIndex(imm), ...);
            return length + imm.length;
          }
          case kExprTableInit: {
            TableInitImmediate imm(decoder, pc + length, validate);
            (ios.TableInit(imm), ...);
            return length + imm.length;
          }
          case kExprElemDrop: {
            IndexImmediate imm(decoder, pc + length, "element segment index",
                               validate);
            (ios.ElemSegmentIndex(imm), ...);
            return length + imm.length;
          }
          case kExprTableCopy: {
            TableCopyImmediate imm(decoder, pc + length, validate);
            (ios.TableCopy(imm), ...);
            return length + imm.length;
          }
          case kExprTableGrow:
          case kExprTableSize:
          case kExprTableFill: {
            TableIndexImmediate imm(decoder, pc + length, validate);
            (ios.TableIndex(imm), ...);
            return length + imm.length;
          }
          case kExprF32LoadMemF16:
          case kExprF32StoreMemF16: {
            MemoryAccessImmediate imm(decoder, pc + length, UINT32_MAX,
                                      validate);
            (ios.MemoryAccess(imm), ...);
            return length + imm.length;
          }
          default:
            // This path is only possible if we are validating.
            V8_ASSUME(ValidationTag::validate);
            decoder->DecodeError(pc, "invalid numeric opcode");
            return length;
        }
      }
      case kAsmJsPrefix: {
        uint32_t length;
        std::tie(opcode, length) =
            decoder->read_prefixed_opcode<ValidationTag>(pc);
        switch (opcode) {
          // NOTE(review): the asm.js-compat macro case labels are missing
          // from this excerpt.
            return length;
          default:
            // This path is only possible if we are validating.
            V8_ASSUME(ValidationTag::validate);
            decoder->DecodeError(pc, "invalid opcode");
            return length;
        }
      }
      case kSimdPrefix: {
        uint32_t length;
        std::tie(opcode, length) =
            decoder->read_prefixed_opcode<ValidationTag>(pc);
        switch (opcode) {
          // clang-format off
          // NOTE(review): several SIMD macro case labels and immediate
          // construction heads are missing from this excerpt.
            return length;
            if (sizeof...(ios) > 0) {
              SimdLaneImmediate lane_imm(decoder, pc + length, validate);
              (ios.SimdLane(lane_imm), ...);
            }
            return length + 1;
            MemoryAccessImmediate imm(decoder, pc + length, UINT32_MAX,
                                      validate);
            (ios.MemoryAccess(imm), ...);
            return length + imm.length;
          }
                                      decoder, pc + length, UINT32_MAX,
                                      validate);
            if (sizeof...(ios) > 0) {
              SimdLaneImmediate lane_imm(decoder,
                                         pc + length + imm.length, validate);
              (ios.MemoryAccess(imm), ...);
              (ios.SimdLane(lane_imm), ...);
            }
            // 1 more byte for lane index immediate.
            return length + imm.length + 1;
          }
          // clang-format on
          // Shuffles require a byte per lane, or 16 immediate bytes.
          case kExprS128Const:
          case kExprI8x16Shuffle:
            if (sizeof...(ios) > 0) {
              Simd128Immediate imm(decoder, pc + length, validate);
              (ios.S128Const(imm), ...);
            }
            return length + kSimd128Size;
          default:
            // This path is only possible if we are validating.
            V8_ASSUME(ValidationTag::validate);
            decoder->DecodeError(pc, "invalid SIMD opcode");
            return length;
        }
      }
      case kAtomicPrefix: {
        uint32_t length;
        std::tie(opcode, length) =
            decoder->read_prefixed_opcode<ValidationTag>(pc, "atomic_index");
        switch (opcode) {
          // NOTE(review): the atomic macro case labels are missing from this
          // excerpt.
            MemoryAccessImmediate imm(decoder, pc + length, UINT32_MAX,
                                      validate);
            (ios.MemoryAccess(imm), ...);
            return length + imm.length;
          }
            // One unused zero-byte.
            return length + 1;
          }
          default:
            // This path is only possible if we are validating.
            V8_ASSUME(ValidationTag::validate);
            decoder->DecodeError(pc, "invalid Atomics opcode");
            return length;
        }
      }
      case kGCPrefix: {
        uint32_t length;
        std::tie(opcode, length) =
            decoder->read_prefixed_opcode<ValidationTag>(pc, "gc_index");
        switch (opcode) {
          case kExprStructNew:
          case kExprStructNewDefault:
          case kExprRefGetDesc: {
            StructIndexImmediate imm(decoder, pc + length, validate);
            (ios.TypeIndex(imm), ...);
            return length + imm.length;
          }
          case kExprStructGet:
          case kExprStructGetS:
          case kExprStructGetU:
          case kExprStructSet: {
            FieldImmediate imm(decoder, pc + length, validate);
            (ios.Field(imm), ...);
            return length + imm.length;
          }
          case kExprArrayNew:
          case kExprArrayNewDefault:
          case kExprArrayGet:
          case kExprArrayGetS:
          case kExprArrayGetU:
          case kExprArraySet: {
            ArrayIndexImmediate imm(decoder, pc + length, validate);
            (ios.TypeIndex(imm), ...);
            return length + imm.length;
          }
          case kExprArrayNewFixed: {
            ArrayIndexImmediate array_imm(decoder, pc + length, validate);
            IndexImmediate length_imm(decoder, pc + length + array_imm.length,
                                      "array length", validate);
            (ios.TypeIndex(array_imm), ...);
            (ios.Length(length_imm), ...);
            return length + array_imm.length + length_imm.length;
          }
          case kExprArrayCopy: {
            ArrayIndexImmediate dst_imm(decoder, pc + length, validate);
            ArrayIndexImmediate src_imm(decoder, pc + length + dst_imm.length,
                                        validate);
            (ios.ArrayCopy(dst_imm, src_imm), ...);
            return length + dst_imm.length + src_imm.length;
          }
          case kExprArrayFill: {
            ArrayIndexImmediate imm(decoder, pc + length, validate);
            (ios.TypeIndex(imm), ...);
            return length + imm.length;
          }
          case kExprArrayNewData:
          case kExprArrayNewElem:
          case kExprArrayInitData:
          case kExprArrayInitElem: {
            ArrayIndexImmediate array_imm(decoder, pc + length, validate);
            IndexImmediate data_imm(decoder, pc + length + array_imm.length,
                                    "segment index", validate);
            (ios.TypeIndex(array_imm), ...);
            (ios.DataSegmentIndex(data_imm), ...);
            return length + array_imm.length + data_imm.length;
          }
          case kExprRefCast:
          case kExprRefCastNull:
          case kExprRefCastNop:
          case kExprRefCastDesc:
          case kExprRefCastDescNull:
          case kExprRefTest:
          case kExprRefTestNull: {
            // NOTE(review): the HeapTypeImmediate construction head is
            // missing from this excerpt.
                pc + length, validate);
            (ios.HeapType(imm), ...);
            return length + imm.length;
          }
          case kExprBrOnCast:
          case kExprBrOnCastFail:
          case kExprBrOnCastDesc:
          case kExprBrOnCastDescFail: {
            BrOnCastImmediate flags_imm(decoder, pc + length, validate);
            BranchDepthImmediate branch(decoder, pc + length + flags_imm.length,
                                        validate);
            HeapTypeImmediate source_imm(
                WasmEnabledFeatures::All(), decoder,
                pc + length + flags_imm.length + branch.length, validate);
            HeapTypeImmediate target_imm(WasmEnabledFeatures::All(), decoder,
                                         pc + length + flags_imm.length +
                                             branch.length + source_imm.length,
                                         validate);
            (ios.BrOnCastFlags(flags_imm), ...);
            (ios.BranchDepth(branch), ...);
            // This code has grown historically (while the GC proposal's design
            // evolved), but it's convenient: for the text format, we want to
            // pretend that we have two ValueTypes; whereas the mjsunit
            // module builder format cares only about the encapsulated
            // HeapTypes (and the raw flags value, see callback above).
            (ios.ValueType(ValueType::RefMaybeNull(
                 source_imm.type,
                 flags_imm.flags.src_is_null ? kNullable : kNonNullable)),
             ...);
            (ios.ValueType(ValueType::RefMaybeNull(
                 target_imm.type,
                 flags_imm.flags.res_is_null ? kNullable : kNonNullable)),
             ...);
            return length + flags_imm.length + branch.length +
                   source_imm.length + target_imm.length;
          }
          case kExprRefI31:
          case kExprI31GetS:
          case kExprI31GetU:
          case kExprAnyConvertExtern:
          case kExprExternConvertAny:
          case kExprArrayLen:
            return length;
          case kExprStringNewUtf8:
          case kExprStringNewUtf8Try:
          case kExprStringNewLossyUtf8:
          case kExprStringNewWtf8:
          case kExprStringEncodeUtf8:
          case kExprStringEncodeLossyUtf8:
          case kExprStringEncodeWtf8:
          case kExprStringViewWtf8EncodeUtf8:
          case kExprStringViewWtf8EncodeLossyUtf8:
          case kExprStringViewWtf8EncodeWtf8:
          case kExprStringNewWtf16:
          case kExprStringEncodeWtf16:
          case kExprStringViewWtf16Encode: {
            MemoryIndexImmediate imm(decoder, pc + length, validate);
            (ios.MemoryIndex(imm), ...);
            return length + imm.length;
          }
          case kExprStringConst: {
            StringConstImmediate imm(decoder, pc + length, validate);
            (ios.StringConst(imm), ...);
            return length + imm.length;
          }
          case kExprStringMeasureUtf8:
          case kExprStringMeasureWtf8:
          case kExprStringNewUtf8Array:
          case kExprStringNewUtf8ArrayTry:
          case kExprStringNewLossyUtf8Array:
          case kExprStringNewWtf8Array:
          case kExprStringEncodeUtf8Array:
          case kExprStringEncodeLossyUtf8Array:
          case kExprStringEncodeWtf8Array:
          case kExprStringMeasureWtf16:
          case kExprStringConcat:
          case kExprStringEq:
          case kExprStringIsUSVSequence:
          case kExprStringAsWtf8:
          case kExprStringViewWtf8Advance:
          case kExprStringViewWtf8Slice:
          case kExprStringAsWtf16:
          case kExprStringViewWtf16Length:
          case kExprStringViewWtf16GetCodeunit:
          case kExprStringViewWtf16Slice:
          case kExprStringAsIter:
          case kExprStringViewIterNext:
          case kExprStringViewIterAdvance:
          case kExprStringViewIterRewind:
          case kExprStringViewIterSlice:
          case kExprStringNewWtf16Array:
          case kExprStringEncodeWtf16Array:
          case kExprStringCompare:
          case kExprStringFromCodePoint:
          case kExprStringHash:
            return length;
          default:
            // This path is only possible if we are validating.
            V8_ASSUME(ValidationTag::validate);
            decoder->DecodeError(pc, "invalid gc opcode");
            return length;
        }
      }

      // clang-format off
      // Prefixed opcodes (already handled, included here for completeness of
      // switch)
      // NOTE(review): the prefix case labels are missing from this excerpt.
        UNREACHABLE();
      // clang-format on
#undef DECLARE_OPCODE_CASE
    }
    // Invalid modules will reach this point.
    if (ValidationTag::validate) {
      decoder->DecodeError(pc, "invalid opcode");
    }
    return 1;
  }
2817
2818 static constexpr ValidationTag validate = {};
2819
2820 Zone* const zone_;
2821
2822 ValueType* local_types_ = nullptr;
2823 uint32_t num_locals_ = 0;
2824
2830 const std::pair<uint32_t, uint32_t>* current_inst_trace_;
2831};
2832
// Forwards {name} unconditionally to the interface implementation.
// Only call this in contexts where {current_code_reachable_and_ok_} is known to
// hold.
#define CALL_INTERFACE(name, ...) \
  do { \
    DCHECK(!control_.empty()); \
    DCHECK(current_code_reachable_and_ok_); \
    DCHECK_EQ(current_code_reachable_and_ok_, \
              this->ok() && control_.back().reachable()); \
    interface_.name(this, ##__VA_ARGS__); \
  } while (false)
// Forwards {name} to the interface only if the current code is reachable and
// decoding has not failed so far.
#define CALL_INTERFACE_IF_OK_AND_REACHABLE(name, ...) \
  do { \
    DCHECK(!control_.empty()); \
    DCHECK_EQ(current_code_reachable_and_ok_, \
              this->ok() && control_.back().reachable()); \
    if (V8_LIKELY(current_code_reachable_and_ok_)) { \
      interface_.name(this, ##__VA_ARGS__); \
    } \
  } while (false)
// Forwards {name} to the interface if decoding is ok and the *parent* control
// frame (or the function scope itself) is reachable.
#define CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE(name, ...) \
  do { \
    DCHECK(!control_.empty()); \
    if (VALIDATE(this->ok()) && \
        (control_.size() == 1 || control_at(1)->reachable())) { \
      interface_.name(this, ##__VA_ARGS__); \
    } \
  } while (false)
2860
2861// An empty class used in place of a {base::SmallVector} for cases where the
2862// content is not needed afterwards.
2863// This is used for implementations which set {kUsesPoppedArgs} to {false}.
2865 public:
2866 // Construct from anything; {NoVector} is always empty.
2867 template <typename... Ts>
2868 explicit NoVector(Ts&&...) V8_NOEXCEPT {}
2869
2870 constexpr std::nullptr_t data() const { return nullptr; }
2871};
2872
2873template <typename ValidationTag, typename Interface,
2874 DecodingMode decoding_mode = kFunctionBody>
2875class WasmFullDecoder : public WasmDecoder<ValidationTag, decoding_mode> {
2876 using Value = typename Interface::Value;
2877 using Control = typename Interface::Control;
2880 std::conditional_t<Interface::kUsesPoppedArgs,
2883
2884 // All Value types should be trivially copyable for performance. We push, pop,
2885 // and store them in local variables.
2887
2888 public:
  // Constructs a full decoder for {body}. {interface_args} are perfectly
  // forwarded to the embedded {Interface}. The operand stack and control
  // stack start with a small zone-allocated initial capacity (16).
  template <typename... InterfaceArgs>
  WasmFullDecoder(Zone* zone, const WasmModule* module,
                  WasmEnabledFeatures enabled, WasmDetectedFeatures* detected,
                  const FunctionBody& body, InterfaceArgs&&... interface_args)
      : WasmDecoder<ValidationTag, decoding_mode>(
            zone, module, enabled, detected, body.sig, body.is_shared,
            body.start, body.end, body.offset),
        interface_(std::forward<InterfaceArgs>(interface_args)...),
        stack_(16, zone),
        control_(16, zone) {}
2899
2901 control_.Reset(this->zone_);
2902 stack_.Reset(this->zone_);
2903 locals_initializers_stack_.Reset(this->zone_);
2904 }
2905
  // Accessor for the embedded interface implementation.
  Interface& interface() { return interface_; }
2907
  // Decodes the whole function body: the locals declarations first, then the
  // instruction stream. Progress is reported to {interface_} (StartFunction /
  // FinishFunction); on any error the decoder ends in a failed state and
  // {TraceFailed()} logs the error.
  void Decode() {
    DCHECK(stack_.empty());
    DCHECK(control_.empty());
    DCHECK_LE(this->pc_, this->end_);
    DCHECK_EQ(this->num_locals(), 0);

    locals_offset_ = this->pc_offset();
    uint32_t locals_length = this->DecodeLocals(this->pc());
    if (!VALIDATE(this->ok())) return TraceFailed();
    this->consume_bytes(locals_length);
    // Count non-defaultable locals (they need init-tracking) and record
    // reference locals in the detected-features set.
    int non_defaultable = 0;
    uint32_t params_count =
        static_cast<uint32_t>(this->sig_->parameter_count());
    for (uint32_t index = params_count; index < this->num_locals(); index++) {
      if (!this->local_type(index).is_defaultable()) non_defaultable++;
      // We need this because reference locals are initialized with null, and
      // later we run a lowering step for null based on {detected_}.
      if (this->local_type(index).is_reference()) {
        this->detected_->add_reftypes();
      }
    }
    this->InitializeInitializedLocalsTracking(non_defaultable);

    // Cannot use CALL_INTERFACE_* macros because control is empty.
    interface().StartFunction(this);
    DecodeFunctionBody();
    // Decoding can fail even without validation, e.g. due to missing Liftoff
    // support.
    if (this->failed()) return TraceFailed();

    // A well-formed body pops all control frames; the final "end" closes the
    // implicit function block.
    if (!VALIDATE(control_.empty())) {
      if (control_.size() > 1) {
        this->DecodeError(control_.back().pc(),
                          "unterminated control structure");
      } else {
        this->DecodeError("function body must end with \"end\" opcode");
      }
      return TraceFailed();
    }
    // Cannot use CALL_INTERFACE_* macros because control is empty.
    interface().FinishFunction(this);
    if (this->failed()) return TraceFailed();

    DCHECK(stack_.empty());
    TRACE("wasm-decode ok\n\n");
  }
2954
2956 if (this->error_.offset()) {
2957 TRACE("wasm-error module+%-6d func+%d: %s\n\n", this->error_.offset(),
2958 this->GetBufferRelativeOffset(this->error_.offset()),
2959 this->error_.message().c_str());
2960 } else {
2961 TRACE("wasm-error: %s\n\n", this->error_.message().c_str());
2962 }
2963 }
2964
2965 const char* SafeOpcodeNameAt(const uint8_t* pc) {
2966 if (!pc) return "<null>";
2967 if (pc >= this->end_) return "<end>";
2968 WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
2969 if (!WasmOpcodes::IsPrefixOpcode(opcode)) {
2970 return WasmOpcodes::OpcodeName(static_cast<WasmOpcode>(opcode));
2971 }
2972 opcode = this->template read_prefixed_opcode<Decoder::FullValidationTag>(pc)
2973 .first;
2974 return WasmOpcodes::OpcodeName(opcode);
2975 }
2976
2978 int offset = static_cast<int>(this->pc_ - this->start_);
2979 DCHECK_EQ(this->pc_ - this->start_, offset); // overflows cannot happen
2980 return offset;
2981 }
2982
2983 uint32_t control_depth() const {
2984 return static_cast<uint32_t>(control_.size());
2985 }
2986
2987 Control* control_at(uint32_t depth) {
2988 DCHECK_GT(control_.size(), depth);
2989 return control_.end() - 1 - depth;
2990 }
2991
2992 uint32_t stack_size() const { return stack_.size(); }
2993
2994 Value* stack_value(uint32_t depth) const {
2995 DCHECK_LT(0, depth);
2996 DCHECK_GE(stack_.size(), depth);
2997 return stack_.end() - depth;
2998 }
2999
3000 int32_t current_catch() const { return current_catch_; }
3001
3003 return control_depth() - 1 - current_catch();
3004 }
3005
3006 uint32_t pc_relative_offset() const {
3007 return this->pc_offset() - locals_offset_;
3008 }
3009
3010 bool is_local_initialized(uint32_t local_index) {
3011 DCHECK_GT(this->num_locals_, local_index);
3012 if (!has_nondefaultable_locals_) return true;
3013 return initialized_locals_[local_index];
3014 }
3015
3016 void set_local_initialized(uint32_t local_index) {
3017 DCHECK_GT(this->num_locals_, local_index);
3018 if (!has_nondefaultable_locals_) return;
3019 // This implicitly covers defaultable locals too (which are always
3020 // initialized).
3021 if (is_local_initialized(local_index)) return;
3022 initialized_locals_[local_index] = true;
3023 locals_initializers_stack_.push(local_index);
3024 }
3025
3027 return static_cast<uint32_t>(locals_initializers_stack_.size());
3028 }
3029
3031 if (!has_nondefaultable_locals_) return;
3032 uint32_t previous_stack_height = c->init_stack_depth;
3033 while (locals_initializers_stack_.size() > previous_stack_height) {
3034 uint32_t local_index = locals_initializers_stack_.back();
3035 locals_initializers_stack_.pop();
3036 initialized_locals_[local_index] = false;
3037 }
3038 }
3039
3040 void InitializeInitializedLocalsTracking(int non_defaultable_locals) {
3041 has_nondefaultable_locals_ = non_defaultable_locals > 0;
3042 if (!has_nondefaultable_locals_) return;
3043 initialized_locals_ =
3044 this->zone_->template AllocateArray<bool>(this->num_locals_);
3045 // Parameters are always initialized.
3046 const size_t num_params = this->sig_->parameter_count();
3047 std::fill_n(initialized_locals_, num_params, true);
3048 // Locals are initialized if they are defaultable.
3049 for (size_t i = num_params; i < this->num_locals_; i++) {
3050 initialized_locals_[i] = this->local_types_[i].is_defaultable();
3051 }
3052 DCHECK(locals_initializers_stack_.empty());
3053 locals_initializers_stack_.EnsureMoreCapacity(non_defaultable_locals,
3054 this->zone_);
3055 }
3056
3058 TRACE("wasm-decode %p...%p (module+%u, %d bytes)\n", this->start(),
3059 this->end(), this->pc_offset(),
3060 static_cast<int>(this->end() - this->start()));
3061
3062 // Set up initial function block.
3063 {
3064 DCHECK(control_.empty());
3065 constexpr uint32_t kStackDepth = 0;
3066 constexpr uint32_t kInitStackDepth = 0;
3067 control_.EnsureMoreCapacity(1, this->zone_);
3068 control_.emplace_back(this->zone_, kControlBlock, kStackDepth,
3069 kInitStackDepth, this->pc_, kReachable);
3070 Control* c = &control_.back();
3071 if constexpr (decoding_mode == kFunctionBody) {
3072 InitMerge(&c->start_merge, 0, nullptr);
3073 InitMerge(&c->end_merge,
3074 static_cast<uint32_t>(this->sig_->return_count()),
3075 [this](uint32_t i) {
3076 return Value{this->pc_, this->sig_->GetReturn(i)};
3077 });
3078 } else {
3079 DCHECK_EQ(this->sig_->parameter_count(), 0);
3080 DCHECK_EQ(this->sig_->return_count(), 1);
3081 c->start_merge.arity = 0;
3082 c->end_merge.arity = 1;
3083 c->end_merge.vals.first = Value{this->pc_, this->sig_->GetReturn(0)};
3084 }
3085 CALL_INTERFACE_IF_OK_AND_REACHABLE(StartFunctionBody, c);
3086 }
3087
3088 if (V8_LIKELY(this->current_inst_trace_->first == 0)) {
3089 // Decode the function body.
3090 while (this->pc_ < this->end_) {
3091 // Most operations only grow the stack by at least one element (unary
3092 // and binary operations, local.get, constants, ...). Thus check that
3093 // there is enough space for those operations centrally, and avoid any
3094 // bounds checks in those operations.
3095 stack_.EnsureMoreCapacity(1, this->zone_);
3096 uint8_t first_byte = *this->pc_;
3097 WasmOpcode opcode = static_cast<WasmOpcode>(first_byte);
3098 CALL_INTERFACE_IF_OK_AND_REACHABLE(NextInstruction, opcode);
3099 int len;
3100 // Allowing two of the most common decoding functions to get inlined
3101 // appears to be the sweet spot.
3102 // Handling _all_ opcodes via a giant switch-statement has been tried
3103 // and found to be slower than calling through the handler table.
3104 if (opcode == kExprLocalGet) {
3105 len = WasmFullDecoder::DecodeLocalGet(this, opcode);
3106 } else if (opcode == kExprI32Const) {
3107 len = WasmFullDecoder::DecodeI32Const(this, opcode);
3108 } else {
3109 OpcodeHandler handler = GetOpcodeHandler(first_byte);
3110 len = (*handler)(this, opcode);
3111 }
3112 this->pc_ += len;
3113 }
3114
3115 } else {
3116 // Decode the function body.
3117 while (this->pc_ < this->end_) {
3118 DCHECK(this->current_inst_trace_->first == 0 ||
3119 this->current_inst_trace_->first >= this->pc_offset());
3120 if (V8_UNLIKELY(this->current_inst_trace_->first ==
3121 this->pc_offset())) {
3122 TRACE("Emit trace at 0x%x with ID[0x%x]\n", this->pc_offset(),
3123 this->current_inst_trace_->second);
3124 CALL_INTERFACE_IF_OK_AND_REACHABLE(TraceInstruction,
3125 this->current_inst_trace_->second);
3126 this->current_inst_trace_++;
3127 }
3128
3129 // Most operations only grow the stack by at least one element (unary
3130 // and binary operations, local.get, constants, ...). Thus check that
3131 // there is enough space for those operations centrally, and avoid any
3132 // bounds checks in those operations.
3133 stack_.EnsureMoreCapacity(1, this->zone_);
3134 uint8_t first_byte = *this->pc_;
3135 WasmOpcode opcode = static_cast<WasmOpcode>(first_byte);
3136 CALL_INTERFACE_IF_OK_AND_REACHABLE(NextInstruction, opcode);
3137 OpcodeHandler handler = GetOpcodeHandler(first_byte);
3138 int len = (*handler)(this, opcode);
3139 this->pc_ += len;
3140 }
3141 }
3142
3143 // Even without validation, compilation could fail because of bailouts,
3144 // e.g., unsupported operations in Liftoff or the decoder for Wasm-in-JS
3145 // inlining. In those cases, {pc_} did not necessarily advance until {end_}.
3146 if (this->pc_ != this->end_) {
3147 // `DecodeError` is only available when validating, hence this guard.
3148 if constexpr (ValidationTag::validate) {
3149 this->DecodeError("Beyond end of code");
3150 }
3151 }
3152 }
3153
3154 bool HasCatchAll(Control* block) const {
3155 DCHECK(block->is_try_table());
3156 return std::any_of(block->catch_cases.begin(), block->catch_cases.end(),
3157 [](const struct CatchCase& catch_case) {
3158 return catch_case.kind == kCatchAll ||
3159 catch_case.kind == kCatchAllRef;
3160 });
3161 }
3162
3163 private:
3164 uint32_t locals_offset_ = 0;
3165 Interface interface_;
3166
3167 // The value stack, stored as individual pointers for maximum performance.
3169
3170 // Indicates whether the local with the given index is currently initialized.
3171 // Entries for defaultable locals are meaningless; we have a byte for each
3172 // local because we expect that the effort required to densify this bit
3173 // vector would more than offset the memory savings.
3175 // Keeps track of initializing assignments to non-defaultable locals that
3176 // happened, so they can be discarded at the end of the current block.
3177 // Contains no duplicates, so the size of this stack is bounded (and pre-
3178 // allocated) to the number of non-defaultable locals in the function.
3180
3181 // Control stack (blocks, loops, ifs, ...).
3183
3184 // Controls whether code should be generated for the current block (basically
3185 // a cache for {ok() && control_.back().reachable()}).
3186 bool current_code_reachable_and_ok_ = true;
3187
3188 // Performance optimization: bail out of any functions dealing with non-
3189 // defaultable locals early when there are no such locals anyway.
3190 bool has_nondefaultable_locals_ = true;
3191
3192 // Depth of the current try block.
3193 int32_t current_catch_ = -1;
3194
3195 static Value UnreachableValue(const uint8_t* pc) {
3196 return Value{pc, kWasmBottom};
3197 }
3198
3200 Control* current = &control_.back();
3201 if (current->reachable()) {
3202 current->reachability = kSpecOnlyReachable;
3203 current_code_reachable_and_ok_ = false;
3204 }
3205 }
3206
3207 // Mark that the current try-catch block might throw.
3208 // We only generate catch handlers for blocks that might throw.
3210 if (!current_code_reachable_and_ok_ || current_catch() == -1) return;
3211 control_at(control_depth_of_current_catch())->might_throw = true;
3212 }
3213
3215 return table->is_table64() ? kWasmI64 : kWasmI32;
3216 }
3217
3219 return memory->is_memory64() ? kWasmI64 : kWasmI32;
3220 }
3221
3223 MakeMemoryAccessImmediate(uint32_t pc_offset, uint32_t max_alignment) {
3224 return MemoryAccessImmediate(this, this->pc_ + pc_offset, max_alignment,
3225 validate);
3226 }
3227
3228#ifdef DEBUG
3229 class TraceLine {
3230 public:
3231 explicit TraceLine(WasmFullDecoder* decoder) : decoder_(decoder) {
3232 WasmOpcode opcode = static_cast<WasmOpcode>(*decoder->pc());
3233 if (!WasmOpcodes::IsPrefixOpcode(opcode)) AppendOpcode(opcode);
3234 }
3235
3236 void AppendOpcode(WasmOpcode opcode) {
3237 DCHECK(!WasmOpcodes::IsPrefixOpcode(opcode));
3238 Append(TRACE_INST_FORMAT, decoder_->startrel(decoder_->pc_),
3239 WasmOpcodes::OpcodeName(opcode));
3240 }
3241
3242 ~TraceLine() {
3243 if (!v8_flags.trace_wasm_decoder) return;
3244 AppendStackState();
3245 PrintF("%.*s\n", len_, buffer_);
3246 }
3247
3248 // Appends a formatted string.
3249 PRINTF_FORMAT(2, 3)
3250 void Append(const char* format, ...) {
3251 if (!v8_flags.trace_wasm_decoder) return;
3252 va_list va_args;
3253 va_start(va_args, format);
3254 size_t remaining_len = kMaxLen - len_;
3255 base::Vector<char> remaining_msg_space(buffer_ + len_, remaining_len);
3256 int len = base::VSNPrintF(remaining_msg_space, format, va_args);
3257 va_end(va_args);
3258 len_ += len < 0 ? remaining_len : len;
3259 }
3260
3261 private:
3262 void AppendStackState() {
3263 DCHECK(v8_flags.trace_wasm_decoder);
3264 Append(" ");
3265 for (Control& c : decoder_->control_) {
3266 switch (c.kind) {
3267 case kControlIf:
3268 Append("I");
3269 break;
3270 case kControlBlock:
3271 Append("B");
3272 break;
3273 case kControlLoop:
3274 Append("L");
3275 break;
3276 case kControlTry:
3277 Append("T");
3278 break;
3279 case kControlTryTable:
3280 Append("T");
3281 break;
3282 case kControlIfElse:
3283 Append("E");
3284 break;
3285 case kControlTryCatch:
3286 Append("C");
3287 break;
3289 Append("A");
3290 break;
3291 }
3292 if (c.start_merge.arity) Append("%u-", c.start_merge.arity);
3293 Append("%u", c.end_merge.arity);
3294 if (!c.reachable()) Append("%c", c.unreachable() ? '*' : '#');
3295 }
3296 Append(" | ");
3297 for (uint32_t i = 0; i < decoder_->stack_.size(); ++i) {
3298 Value& val = decoder_->stack_[i];
3299 Append(" %c", val.type.short_name());
3300 }
3301 }
3302
3303 static constexpr int kMaxLen = 512;
3304
3305 char buffer_[kMaxLen];
3306 int len_ = 0;
3307 WasmFullDecoder* const decoder_;
3308 };
3309#else
3311 public:
3313
3315
3316 PRINTF_FORMAT(2, 3)
3317 void Append(const char* format, ...) {}
3318 };
3319#endif
3320
3321#define DECODE(name) \
3322 static int Decode##name(WasmFullDecoder* decoder, WasmOpcode opcode) { \
3323 TraceLine trace_msg(decoder); \
3324 return decoder->Decode##name##Impl(&trace_msg, opcode); \
3325 } \
3326 V8_INLINE int Decode##name##Impl(TraceLine* trace_msg, WasmOpcode opcode)
3327
  // nop: single-byte instruction with no effect.
  DECODE(Nop) { return 1; }
3329
  // Testing-only opcode, gated behind --enable-testing-opcode-in-wasm; used
  // to force bailouts (e.g. from Liftoff) in tests.
  DECODE(NopForTestingUnsupportedInLiftoff) {
    if (!VALIDATE(v8_flags.enable_testing_opcode_in_wasm)) {
      this->DecodeError("Invalid opcode 0x%x", opcode);
      return 0;
    }
    CALL_INTERFACE_IF_OK_AND_REACHABLE(NopForTestingUnsupportedInLiftoff);
    // Return {0} if we failed, to not advance the pc past the end.
    if (this->failed()) {
      DCHECK_EQ(this->pc_, this->end_);
      return 0;
    }
    return 1;
  }
3343
3344#define BUILD_SIMPLE_OPCODE(op, _, sig, ...) \
3345 DECODE(op) { return BuildSimpleOperator_##sig(kExpr##op); }
3347#undef BUILD_SIMPLE_OPCODE
3348
3349#define BUILD_SIMPLE_OPCODE(op, _, sig, ...) \
3350 DECODE(op) { \
3351 if constexpr (decoding_mode == kConstantExpression) { \
3352 this->detected_->add_extended_const(); \
3353 } \
3354 return BuildSimpleOperator_##sig(kExpr##op); \
3355 }
3357#undef BUILD_SIMPLE_OPCODE
3358
3360 BlockTypeImmediate imm(this->enabled_, this, this->pc_ + 1, validate);
3361 if (!this->Validate(this->pc_ + 1, imm)) return 0;
3362 Control* block = PushControl(kControlBlock, imm);
3364 return 1 + imm.length;
3365 }
3366
3367 DECODE(Rethrow) {
3368 CHECK_PROTOTYPE_OPCODE(legacy_eh);
3369 BranchDepthImmediate imm(this, this->pc_ + 1, validate);
3370 if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
3371 Control* c = control_at(imm.depth);
3372 if (!VALIDATE(c->is_try_catchall() || c->is_try_catch())) {
3373 this->error("rethrow not targeting catch or catch-all");
3374 return 0;
3375 }
3377 MarkMightThrow();
3378 EndControl();
3379 return 1 + imm.length;
3380 }
3381
3383 // This instruction is the same for legacy EH and exnref.
3384 // Count it as exnref if exnref is enabled so that we have an accurate eh
3385 // count for the deprecation plans.
3386 this->detected_->Add(this->enabled_.has_exnref()
3387 ? WasmDetectedFeature::exnref
3388 : WasmDetectedFeature::legacy_eh);
3389 TagIndexImmediate imm(this, this->pc_ + 1, validate);
3390 if (!this->Validate(this->pc_ + 1, imm)) return 0;
3391 if (imm.tag->sig->return_count() != 0) {
3392 this->DecodeError("tag signature %u has non-void return", imm.index);
3393 return 0;
3394 }
3395 PoppedArgVector args = PopArgs(imm.tag->ToFunctionSig());
3397 MarkMightThrow();
3398 EndControl();
3399 return 1 + imm.length;
3400 }
3401
  // try (legacy exception handling): opens a new try block and makes it the
  // innermost catch scope.
  DECODE(Try) {
    CHECK_PROTOTYPE_OPCODE(legacy_eh);
    BlockTypeImmediate imm(this->enabled_, this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    Control* try_block = PushControl(kControlTry, imm);
    // Remember the enclosing catch scope; it is restored when this try ends.
    try_block->previous_catch = current_catch_;
    current_catch_ = static_cast<int>(control_depth() - 1);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(Try, try_block);
    return 1 + imm.length;
  }
3412
  // catch (legacy exception handling): terminates the preceding try region
  // and begins a handler for exceptions with the given tag.
  DECODE(Catch) {
    CHECK_PROTOTYPE_OPCODE(legacy_eh);
    TagIndexImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    DCHECK(!control_.empty());
    Control* c = &control_.back();
    if (!VALIDATE(c->is_try())) {
      this->DecodeError("catch does not match a try");
      return 0;
    }
    if (!VALIDATE(!c->is_try_catchall())) {
      this->DecodeError("catch after catch-all for try");
      return 0;
    }
    FallThrough();
    c->kind = kControlTryCatch;
    // The handler starts from the block's base stack height; drop whatever
    // the try body left on the stack.
    stack_.shrink_to(c->stack_depth);
    c->reachability = control_at(1)->innerReachability();
    current_code_reachable_and_ok_ = VALIDATE(this->ok()) && c->reachable();
    RollbackLocalsInitialization(c);
    const WasmTagSig* sig = imm.tag->sig;

    // tags can have return values, so we have to check.
    if (sig->return_count() != 0) {
      this->DecodeError("tag signature %u has non-void return", imm.index);
      return 0;
    }

    // The tag's parameters become the values available inside the handler.
    stack_.EnsureMoreCapacity(static_cast<int>(sig->parameter_count()),
                              this->zone_);
    for (ValueType type : sig->parameters()) Push(type);
    base::Vector<Value> values(stack_.begin() + c->stack_depth,
                               sig->parameter_count());
    current_catch_ = c->previous_catch;  // Pop try scope.
    // If there is a throwing instruction in `c`, generate the header for a
    // catch block. Otherwise, the catch block is unreachable.
    if (c->might_throw) {
      CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE(CatchException, imm, c, values);
    } else {
      SetSucceedingCodeDynamicallyUnreachable();
    }
    return 1 + imm.length;
  }
3456
  // delegate (legacy exception handling): ends a catch-less try, forwarding
  // any exception it raises to an enclosing try block (or the caller).
  DECODE(Delegate) {
    CHECK_PROTOTYPE_OPCODE(legacy_eh);
    BranchDepthImmediate imm(this, this->pc_ + 1, validate);
    // -1 because the current try block is not included in the count.
    if (!this->Validate(this->pc_ + 1, imm, control_depth() - 1)) return 0;
    Control* c = &control_.back();
    if (!VALIDATE(c->is_incomplete_try())) {
      this->DecodeError("delegate does not match a try");
      return 0;
    }
    // +1 because the current try block is not included in the count.
    uint32_t target_depth = imm.depth + 1;
    // Skip targets that cannot handle the exception: non-try blocks and
    // trys that have already entered a catch/catch-all.
    while (target_depth < control_depth() - 1 &&
           (!control_at(target_depth)->is_try() ||
            control_at(target_depth)->is_try_catch() ||
            control_at(target_depth)->is_try_catchall())) {
      target_depth++;
    }
    FallThrough();
    if (c->might_throw) {
      CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE(Delegate, target_depth, c);
      // Delegate propagates the `might_throw` status to the delegated-to block.
      if (control_at(1)->reachable() && target_depth != control_depth() - 1) {
        control_at(target_depth)->might_throw = true;
      }
    }
    current_catch_ = c->previous_catch;
    EndControl();
    PopControl();
    return 1 + imm.length;
  }
3488
3489 DECODE(CatchAll) {
3490 CHECK_PROTOTYPE_OPCODE(legacy_eh);
3491 DCHECK(!control_.empty());
3492 Control* c = &control_.back();
3493 if (!VALIDATE(c->is_try())) {
3494 this->DecodeError("catch-all does not match a try");
3495 return 0;
3496 }
3497 if (!VALIDATE(!c->is_try_catchall())) {
3498 this->error("catch-all already present for try");
3499 return 0;
3500 }
3501 FallThrough();
3502 c->kind = kControlTryCatchAll;
3503 c->reachability = control_at(1)->innerReachability();
3504 current_code_reachable_and_ok_ = VALIDATE(this->ok()) && c->reachable();
3505 RollbackLocalsInitialization(c);
3506 current_catch_ = c->previous_catch; // Pop try scope.
3507 // If there is a throwing instruction in `c`, generate the header for a
3508 // catch block. Otherwise, the catch block is unreachable.
3509 if (c->might_throw) {
3511 } else {
3512 SetSucceedingCodeDynamicallyUnreachable();
3513 }
3514 stack_.shrink_to(c->stack_depth);
3515 return 1;
3516 }
3517
  // try_table (exnref proposal): a block carrying a list of catch clauses,
  // each of which branches to an enclosing label when a matching exception
  // is caught.
  DECODE(TryTable) {
    CHECK_PROTOTYPE_OPCODE(exnref);
    BlockTypeImmediate block_imm(this->enabled_, this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, block_imm)) return 0;
    Control* try_block = PushControl(kControlTryTable, block_imm);
    TryTableImmediate try_table_imm(this, this->pc_ + 1 + block_imm.length,
                                    validate);
    // Only a try_table with catch clauses becomes a catch scope.
    if (try_table_imm.table_count > 0) {
      try_block->previous_catch = current_catch_;
      current_catch_ = static_cast<int>(control_depth() - 1);
    }
    if (!this->Validate(this->pc_ + 2, try_table_imm)) return 0;
    TryTableIterator<ValidationTag> try_table_iterator(this, try_table_imm);
    try_block->catch_cases = this->zone_->template AllocateVector<CatchCase>(
        try_table_imm.table_count);
    int i = 0;
    while (try_table_iterator.has_next()) {
      CatchCase catch_case = try_table_iterator.next();
      if (!VALIDATE(catch_case.kind <= kLastCatchKind)) {
        this->DecodeError("invalid catch kind in try table");
        return 0;
      }
      // Only catch/catch_ref clauses carry a tag; validate it and reject
      // tags with results.
      if ((catch_case.kind == kCatch || catch_case.kind == kCatchRef)) {
        if (!this->Validate(this->pc_, catch_case.maybe_tag.tag_imm)) {
          return 0;
        }
        const WasmTagSig* sig = catch_case.maybe_tag.tag_imm.tag->sig;
        if (sig->return_count() != 0) {
          // tags can have return values, so we have to check.
          this->DecodeError("tag signature %u has non-void return",
                            catch_case.maybe_tag.tag_imm.index);
          return 0;
        }
      }
      // The branch depth is relative to the scope enclosing the try_table.
      catch_case.br_imm.depth += 1;
      if (!this->Validate(this->pc_, catch_case.br_imm, control_.size())) {
        return 0;
      }

      // Type-check the branch: temporarily push the values this clause would
      // provide, check them against the target, then drop them again.
      uint32_t stack_size = stack_.size();
      uint32_t push_count = 0;
      if (catch_case.kind == kCatch || catch_case.kind == kCatchRef) {
        const WasmTagSig* sig = catch_case.maybe_tag.tag_imm.tag->sig;
        stack_.EnsureMoreCapacity(static_cast<int>(sig->parameter_count()),
                                  this->zone_);
        for (ValueType type : sig->parameters()) Push(type);
        push_count += sig->parameter_count();
      }
      // The *_ref variants additionally pass the caught exception reference.
      if (catch_case.kind == kCatchRef || catch_case.kind == kCatchAllRef) {
        stack_.EnsureMoreCapacity(1, this->zone_);
        Push(ValueType::Ref(kWasmExnRef));
        push_count += 1;
      }
      Control* target = control_at(catch_case.br_imm.depth);
      if (!VALIDATE(push_count == target->br_merge()->arity)) {
        this->DecodeError(
            "catch kind generates %d operand%s, target block expects %d",
            push_count, push_count != 1 ? "s" : "", target->br_merge()->arity);
        return 0;
      }
      if (!VALIDATE(
              (TypeCheckBranch<PushBranchValues::kYes, RewriteStackTypes::kNo>(
                  target)))) {
        return 0;
      }
      stack_.shrink_to(stack_size);
      DCHECK_LT(i, try_table_imm.table_count);
      try_block->catch_cases[i] = catch_case;
      ++i;
    }
    CALL_INTERFACE_IF_OK_AND_REACHABLE(TryTable, try_block);
    return 1 + block_imm.length + try_table_iterator.length();
  }
3591
  // throw_ref (exnref proposal): rethrows the exception reference on top of
  // the stack; execution never falls through.
  DECODE(ThrowRef) {
    CHECK_PROTOTYPE_OPCODE(exnref);
    Value value = Pop(kWasmExnRef);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(ThrowRef, &value);
    MarkMightThrow();
    EndControl();
    return 1;
  }
3600
  // br_on_null: branches if the reference on top of the stack is null;
  // otherwise continues with the reference retyped as non-nullable.
  DECODE(BrOnNull) {
    this->detected_->add_typed_funcref();
    BranchDepthImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
    Value ref_object = Pop();
    Control* c = control_at(imm.depth);
    if (!VALIDATE(
            (TypeCheckBranch<PushBranchValues::kYes, RewriteStackTypes::kYes>(
                c)))) {
      return 0;
    }
    switch (ref_object.type.kind()) {
      case kBottom:
        // We are in a polymorphic stack. Leave the stack as it is.
        DCHECK(!current_code_reachable_and_ok_);
        [[fallthrough]];
      case kRef:
        // For a non-nullable value, we won't take the branch, and can leave
        // the stack as it is.
        Push(ref_object);
        break;
      case kRefNull: {
        Value* result = Push(ValueType::Ref(ref_object.type.heap_type()));
        // The result of br_on_null has the same value as the argument (but a
        // non-nullable type).
        if (V8_LIKELY(current_code_reachable_and_ok_)) {
          CALL_INTERFACE(BrOnNull, ref_object, imm.depth, false, result);
          c->br_merge()->reached = true;
        }
        break;
      }
      default:
        // Not a reference at all: type error.
        PopTypeError(0, ref_object, "object reference");
        return 0;
    }
    return 1 + imm.length;
  }
3638
  // br_on_non_null: branches (passing the reference, retyped non-nullable,
  // as a branch value) if the reference on top of the stack is non-null;
  // otherwise drops it and falls through.
  DECODE(BrOnNonNull) {
    this->detected_->add_typed_funcref();
    BranchDepthImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
    Value ref_object = Pop();
    if (!VALIDATE(ref_object.type.is_object_reference() ||
                  ref_object.type.is_bottom())) {
      PopTypeError(
          0, ref_object,
          "subtype of ((ref null any), (ref null extern) or (ref null func))");
      return 0;
    }
    // Typechecking the branch and creating the branch merges requires the
    // non-null value on the stack, so we push it temporarily.
    Value* value_on_branch = Push(ref_object.type.AsNonNull());
    Control* c = control_at(imm.depth);
    if (!VALIDATE(
            (TypeCheckBranch<PushBranchValues::kYes, RewriteStackTypes::kYes>(
                c)))) {
      return 0;
    }
    switch (ref_object.type.kind()) {
      case kBottom:
        // We are in unreachable code. Do nothing.
        DCHECK(!current_code_reachable_and_ok_);
        break;
      case kRef:
        // For a non-nullable value, we always take the branch.
        if (V8_LIKELY(current_code_reachable_and_ok_)) {
          CALL_INTERFACE(Forward, ref_object, value_on_branch);
          CALL_INTERFACE(BrOrRet, imm.depth);
          // We know that the following code is not reachable, but according
          // to the spec it technically is. Set it to spec-only reachable.
          SetSucceedingCodeDynamicallyUnreachable();
          c->br_merge()->reached = true;
        }
        break;
      case kRefNull: {
        // Nullable reference: the branch is genuinely conditional at runtime.
        if (V8_LIKELY(current_code_reachable_and_ok_)) {
          CALL_INTERFACE(BrOnNonNull, ref_object, value_on_branch, imm.depth,
                         true);
          c->br_merge()->reached = true;
        }
        break;
      }
      default:
        PopTypeError(0, ref_object, "object reference");
        return 0;
    }
    // Remove the temporarily pushed branch value from the fall-through stack.
    Drop(*value_on_branch);
    return 1 + imm.length;
  }
3691
3692 DECODE(Loop) {
3693 BlockTypeImmediate imm(this->enabled_, this, this->pc_ + 1, validate);
3694 if (!this->Validate(this->pc_ + 1, imm)) return 0;
3695 Control* block = PushControl(kControlLoop, imm);
3697 // Loops have a merge point at block entry, hence push the merge values
3698 // (Phis in case of TurboFan) after calling the interface.
3699 // TODO(clemensb): Can we skip this (and the related PushMergeValues in
3700 // PopControl) for Liftoff?
3701 PushMergeValues(block, &block->start_merge);
3702 return 1 + imm.length;
3703 }
3704
3706 BlockTypeImmediate imm(this->enabled_, this, this->pc_ + 1, validate);
3707 if (!this->Validate(this->pc_ + 1, imm)) return 0;
3708 Value cond = Pop(kWasmI32);
3709 Control* if_block = PushControl(kControlIf, imm);
3710 CALL_INTERFACE_IF_OK_AND_REACHABLE(If, cond, if_block);
3711 return 1 + imm.length;
3712 }
3713
3714 DECODE(Else) {
3715 DCHECK(!control_.empty());
3716 Control* c = &control_.back();
3717 if (!VALIDATE(c->is_if())) {
3718 this->DecodeError("else does not match an if");
3719 return 0;
3720 }
3721 if (!VALIDATE(c->is_onearmed_if())) {
3722 this->DecodeError("else already present for if");
3723 return 0;
3724 }
3725 if (!VALIDATE(TypeCheckFallThru())) return 0;
3726 c->kind = kControlIfElse;
3728 if (c->reachable()) c->end_merge.reached = true;
3729 RollbackLocalsInitialization(c);
3730 PushMergeValues(c, &c->start_merge);
3731 c->reachability = control_at(1)->innerReachability();
3732 current_code_reachable_and_ok_ = VALIDATE(this->ok()) && c->reachable();
3733 return 1;
3734 }
3735
3736 DECODE(End) {
3737 DCHECK(!control_.empty());
3738 if constexpr (decoding_mode == kFunctionBody) {
3739 Control* c = &control_.back();
3740 if (c->is_incomplete_try()) {
3741 // Catch-less try, fall through to the implicit catch-all.
3742 c->kind = kControlTryCatch;
3743 current_catch_ = c->previous_catch; // Pop try scope.
3744 }
3745 if (c->is_try_catch()) {
3746 // Emulate catch-all + re-throw.
3747 FallThrough();
3748 c->reachability = control_at(1)->innerReachability();
3749 current_code_reachable_and_ok_ = VALIDATE(this->ok()) && c->reachable();
3750 // Cache `c->might_throw` so we can access it safely after `c`'s
3751 // destructor is called in `PopContol()`.
3752 bool might_throw = c->might_throw;
3753 if (might_throw) {
3756 }
3757 EndControl();
3758 PopControl();
3759 // We must mark the parent catch block as `might_throw`, since this
3760 // conceptually rethrows. Note that we do this regardless of whether
3761 // the code at this point is reachable.
3762 if (might_throw && current_catch() != -1) {
3763 control_at(control_depth_of_current_catch())->might_throw = true;
3764 }
3765 return 1;
3766 }
3767 if (c->is_onearmed_if()) {
3768 if (!VALIDATE(TypeCheckOneArmedIf(c))) return 0;
3769 }
3770 if (c->is_try_table()) {
3771 // "Pop" the {current_catch_} index. We did not push it if the block has
3772 // no handler, so also skip it here in this case.
3773 if (c->catch_cases.size() > 0) {
3774 current_catch_ = c->previous_catch;
3775 }
3776 FallThrough();
3777 // Temporarily set the reachability for the catch handlers, and restore
3778 // it before we actually exit the try block.
3779 Reachability reachability_at_end = c->reachability;
3780 c->reachability = control_at(1)->innerReachability();
3781 current_code_reachable_and_ok_ = VALIDATE(this->ok()) && c->reachable();
3782 for (CatchCase& catch_case : c->catch_cases) {
3783 uint32_t stack_size = stack_.size();
3784 size_t push_count = 0;
3785 if (catch_case.kind == kCatch || catch_case.kind == kCatchRef) {
3786 const WasmTagSig* sig = catch_case.maybe_tag.tag_imm.tag->sig;
3787 stack_.EnsureMoreCapacity(static_cast<int>(sig->parameter_count()),
3788 this->zone_);
3789 for (ValueType type : sig->parameters()) Push(type);
3790 push_count = sig->parameter_count();
3791 }
3792 if (catch_case.kind == kCatchRef || catch_case.kind == kCatchAllRef) {
3793 stack_.EnsureMoreCapacity(1, this->zone_);
3794 Push(ValueType::Ref(kWasmExnRef));
3795 push_count += 1;
3796 }
3797 base::Vector<Value> values(
3798 stack_.begin() + stack_.size() - push_count, push_count);
3799 if (c->might_throw) {
3800 // Already type checked on block entry.
3802 values);
3803 if (current_code_reachable_and_ok_) {
3804 Control* target = control_at(catch_case.br_imm.depth);
3805 target->br_merge()->reached = true;
3806 }
3807 }
3808 stack_.shrink_to(stack_size);
3809 if (catch_case.kind == kCatchAll || catch_case.kind == kCatchAllRef) {
3810 break;
3811 }
3812 }
3813 c->reachability = reachability_at_end;
3814 // If there is no catch-all case, we must mark the parent catch block as
3815 // `might_throw`, since this conceptually rethrows. Note that we do this
3816 // regardless of whether the code at this point is reachable.
3817 if (c->might_throw && !HasCatchAll(c) && current_catch() != -1) {
3818 control_at(control_depth_of_current_catch())->might_throw = true;
3819 }
3820 EndControl();
3821 PopControl();
3822 return 1;
3823 }
3824 }
3825
3826 if (control_.size() == 1) {
3827 // We need to call this first because the interface might set
3828 // {this->end_}, making the next check pass.
3829 DoReturn<kStrictCounting, decoding_mode == kFunctionBody
3830 ? kFallthroughMerge
3831 : kInitExprMerge>();
3832 // If at the last (implicit) control, check we are at end.
3833 if (!VALIDATE(this->pc_ + 1 == this->end_)) {
3834 this->DecodeError(this->pc_ + 1, "trailing code after function end");
3835 return 0;
3836 }
3837 // The result of the block is the return value.
3838 trace_msg->Append("\n" TRACE_INST_FORMAT, startrel(this->pc_),
3839 "(implicit) return");
3840 control_.pop();
3841 return 1;
3842 }
3843
3844 if (!VALIDATE(TypeCheckFallThru())) return 0;
3845 PopControl();
3846 return 1;
3847 }
3848
  // select (untyped): picks one of two values of numeric/vector type based on
  // an i32 condition. Reference types must use select_with_type instead.
  DECODE(Select) {
    auto [tval, fval, cond] = Pop(kWasmBottom, kWasmBottom, kWasmI32);
    // Infer the result type from either operand; in a polymorphic stack both
    // may be bottom.
    ValueType result_type = tval.type;
    if (result_type == kWasmBottom) {
      result_type = fval.type;
    } else {
      ValidateStackValue(1, fval, result_type);
    }
    if (!VALIDATE(!result_type.is_reference())) {
      this->DecodeError(
          "select without type is only valid for value type inputs");
      return 0;
    }
    Value* result = Push(result_type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(Select, cond, fval, tval, result);
    return 1;
  }
3866
  // select (typed): like select, but with an explicit type immediate, which
  // also permits reference-typed operands.
  DECODE(SelectWithType) {
    this->detected_->add_reftypes();
    SelectTypeImmediate imm(this->enabled_, this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    auto [tval, fval, cond] = Pop(imm.type, imm.type, kWasmI32);
    Value* result = Push(imm.type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(Select, cond, fval, tval, result);
    return 1 + imm.length;
  }
3876
    // Body of the unconditional branch (`br`): validate the branch depth and
    // the types flowing to the target's merge, notify the interface, and mark
    // the following code unreachable.
    BranchDepthImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
    Control* c = control_at(imm.depth);
    if (!VALIDATE(
            (TypeCheckBranch<PushBranchValues::kNo, RewriteStackTypes::kNo>(
                c)))) {
      return 0;
    }
    if (V8_LIKELY(current_code_reachable_and_ok_)) {
      CALL_INTERFACE(BrOrRet, imm.depth);
      // Record that the target's merge point was actually reached.
      c->br_merge()->reached = true;
    }
    EndControl();
    return 1 + imm.length;
  }
3893
  // Decodes `br_if`: pops an i32 condition and conditionally branches to the
  // target at {imm.depth}. In contrast to `br`, code afterwards stays
  // reachable, and branch values stay on the stack (PushBranchValues::kYes).
  DECODE(BrIf) {
    BranchDepthImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm, control_.size())) return 0;
    Value cond = Pop(kWasmI32);
    Control* c = control_at(imm.depth);
    if (!VALIDATE(
            (TypeCheckBranch<PushBranchValues::kYes, RewriteStackTypes::kYes>(
                c)))) {
      return 0;
    }
    if (V8_LIKELY(current_code_reachable_and_ok_)) {
      CALL_INTERFACE(BrIf, cond, imm.depth);
      c->br_merge()->reached = true;
    }
    return 1 + imm.length;
  }
3910
  // Decodes `br_table`: pops an i32 key and branches to one of the targets in
  // the branch table (the last entry is the default). All targets must have
  // the same branch arity and must type-check against the current stack.
  DECODE(BrTable) {
    BranchTableImmediate imm(this, this->pc_ + 1, validate);
    BranchTableIterator<ValidationTag> iterator(this, imm);
    Value key = Pop(kWasmI32);
    if (!VALIDATE(this->ok())) return 0;
    if (!this->Validate(this->pc_ + 1, imm)) return 0;

    // Cache the branch targets during the iteration, so that we can set
    // all branch targets as reachable after the {CALL_INTERFACE} call.
    SmallZoneVector<bool, 32> br_targets(control_.size(), this->zone());
    std::uninitialized_fill(br_targets.begin(), br_targets.end(), false);

    // Arity of the first target; all other targets must match it.
    uint32_t arity = 0;

    while (iterator.has_next()) {
      const uint32_t index = iterator.cur_index();
      const uint8_t* pos = iterator.pc();
      const uint32_t target = iterator.next();
      if (!VALIDATE(target < control_depth())) {
        this->DecodeError(pos, "invalid branch depth: %u", target);
        return 0;
      }
      // Avoid redundant branch target checks.
      if (br_targets[target]) continue;
      br_targets[target] = true;

      if (ValidationTag::validate) {
        if (index == 0) {
          arity = control_at(target)->br_merge()->arity;
        } else if (!VALIDATE(control_at(target)->br_merge()->arity == arity)) {
          this->DecodeError(
              pos, "br_table: label arity inconsistent with previous arity %d",
              arity);
          return 0;
        }
        if (!VALIDATE(
                (TypeCheckBranch<PushBranchValues::kNo, RewriteStackTypes::kNo>(
                    control_at(target))))) {
          return 0;
        }
      }
    }

    if (V8_LIKELY(current_code_reachable_and_ok_)) {
      CALL_INTERFACE(BrTable, imm, key);

      // Mark every distinct target's merge as reached.
      for (uint32_t i = 0; i < control_depth(); ++i) {
        control_at(i)->br_merge()->reached |= br_targets[i];
      }
    }
    // `br_table` always branches; subsequent code is unreachable.
    EndControl();
    return 1 + iterator.length();
  }
3964
  // Decodes `return`; {DoReturn} performs the stack/type handling and reports
  // success or failure.
  DECODE(Return) {
    return DoReturn<kNonStrictCounting, kReturnMerge>() ? 1 : 0;
  }
3968
  // Decodes `unreachable`: emits an unconditional trap and marks subsequent
  // code unreachable.
  DECODE(Unreachable) {
    CALL_INTERFACE_IF_OK_AND_REACHABLE(Trap, TrapReason::kTrapUnreachable);
    EndControl();
    return 1;
  }
3974
3975 DECODE(I32Const) {
3976 ImmI32Immediate imm(this, this->pc_ + 1, validate);
3977 Value* value = Push(kWasmI32);
3978 CALL_INTERFACE_IF_OK_AND_REACHABLE(I32Const, value, imm.value);
3979 return 1 + imm.length;
3980 }
3981
3982 DECODE(I64Const) {
3983 ImmI64Immediate imm(this, this->pc_ + 1, validate);
3984 Value* value = Push(kWasmI64);
3985 CALL_INTERFACE_IF_OK_AND_REACHABLE(I64Const, value, imm.value);
3986 return 1 + imm.length;
3987 }
3988
3989 DECODE(F32Const) {
3990 ImmF32Immediate imm(this, this->pc_ + 1, validate);
3991 Value* value = Push(kWasmF32);
3992 CALL_INTERFACE_IF_OK_AND_REACHABLE(F32Const, value, imm.value);
3993 return 1 + imm.length;
3994 }
3995
3996 DECODE(F64Const) {
3997 ImmF64Immediate imm(this, this->pc_ + 1, validate);
3998 Value* value = Push(kWasmF64);
3999 CALL_INTERFACE_IF_OK_AND_REACHABLE(F64Const, value, imm.value);
4000 return 1 + imm.length;
4001 }
4002
  // Decodes `ref.null <heaptype>`: pushes a null reference of the given heap
  // type. With the stringref proposal enabled, string views must not be
  // nullable, so creating a null string view is rejected.
  DECODE(RefNull) {
    this->detected_->add_reftypes();
    HeapTypeImmediate imm(this->enabled_, this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    if (!VALIDATE(!this->enabled_.has_stringref() ||
                  !imm.type.is_string_view())) {
      this->DecodeError(this->pc_ + 1, "cannot create null string view");
      return 0;
    }
    ValueType type = ValueType::RefNull(imm.type).AsExactIfProposalEnabled();
    Value* value = Push(type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(RefNull, type, value);
    return 1 + imm.length;
  }
4017
  // Decodes `ref.is_null`: pops a reference and pushes an i32 result.
  // Nullable references need a runtime check; non-nullable references (and
  // bottom, i.e. unreachable code) are handled statically.
  DECODE(RefIsNull) {
    this->detected_->add_reftypes();
    Value value = Pop();
    Value* result = Push(kWasmI32);
    switch (value.type.kind()) {
      case kRefNull:
        CALL_INTERFACE_IF_OK_AND_REACHABLE(UnOp, kExprRefIsNull, value, result);
        return 1;
      case kBottom:
        // We are in unreachable code, the return value does not matter.
      case kRef:
        // For non-nullable references, the result is always false.
        return 1;
      default:
        // Only reachable during validation of invalid input.
        if constexpr (!ValidationTag::validate) UNREACHABLE();
        PopTypeError(0, value, "reference type");
        return 0;
    }
  }
4039
  // Decodes `ref.func <funcidx>`: pushes a non-nullable function reference
  // whose type is the referenced function's signature type.
  DECODE(RefFunc) {
    this->detected_->add_reftypes();
    IndexImmediate imm(this, this->pc_ + 1, "function index", validate);
    if (!this->ValidateFunction(this->pc_ + 1, imm)) return 0;
    ModuleTypeIndex index = this->module_->functions[imm.index].sig_index;
    const TypeDefinition& type_def = this->module_->type(index);
    Value* value =
        Push(ValueType::Ref(index, type_def.is_shared, RefTypeKind::kFunction)
                 .AsExactIfProposalEnabled());
    CALL_INTERFACE_IF_OK_AND_REACHABLE(RefFunc, imm.index, value);
    return 1 + imm.length;
  }
4052
  // Decodes `ref.as_non_null`: converts a nullable reference into a
  // non-nullable one (the interface implements the runtime null check);
  // already-non-nullable values pass through unchanged.
  DECODE(RefAsNonNull) {
    this->detected_->add_typed_funcref();
    Value value = Pop();
    switch (value.type.kind()) {
      case kBottom:
        // We are in unreachable code. Forward the bottom value.
      case kRef:
        // A non-nullable value can remain as-is.
        Push(value);
        return 1;
      case kRefNull: {
        Value* result = Push(ValueType::Ref(value.type.heap_type()));
        CALL_INTERFACE_IF_OK_AND_REACHABLE(RefAsNonNull, value, result);
        return 1;
      }
      default:
        // Only reachable during validation of invalid input.
        if constexpr (!ValidationTag::validate) UNREACHABLE();
        PopTypeError(0, value, "reference type");
        return 0;
    }
  }
4074
  // Decodes `local.get`: pushes the value of the local. Reading a
  // non-defaultable local that has not been assigned yet is a validation
  // error.
  V8_INLINE DECODE(LocalGet) {
    IndexImmediate imm(this, this->pc_ + 1, "local index", validate);
    if (!this->ValidateLocal(this->pc_ + 1, imm)) return 0;
    if (!VALIDATE(this->is_local_initialized(imm.index))) {
      this->DecodeError(this->pc_, "uninitialized non-defaultable local: %u",
                        imm.index);
      return 0;
    }
    Value* value = Push(this->local_type(imm.index));
    CALL_INTERFACE_IF_OK_AND_REACHABLE(LocalGet, value, imm);
    return 1 + imm.length;
  }
4087
  // Decodes `local.set`: pops a value of the local's type and marks the
  // local as initialized (relevant for non-defaultable locals).
  DECODE(LocalSet) {
    IndexImmediate imm(this, this->pc_ + 1, "local index", validate);
    if (!this->ValidateLocal(this->pc_ + 1, imm)) return 0;
    Value value = Pop(this->local_type(imm.index));
    CALL_INTERFACE_IF_OK_AND_REACHABLE(LocalSet, value, imm);
    this->set_local_initialized(imm.index);
    return 1 + imm.length;
  }
4096
  // Decodes `local.tee`: like `local.set`, but leaves (re-pushes) the value
  // on the stack.
  DECODE(LocalTee) {
    IndexImmediate imm(this, this->pc_ + 1, "local index", validate);
    if (!this->ValidateLocal(this->pc_ + 1, imm)) return 0;
    ValueType local_type = this->local_type(imm.index);
    Value value = Pop(local_type);
    Value* result = Push(local_type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(LocalTee, value, result, imm);
    this->set_local_initialized(imm.index);
    return 1 + imm.length;
  }
4107
  // Decodes `drop`: pops (and discards) the top stack value.
  DECODE(Drop) {
    Pop();
    return 1;
  }
4113
  // Decodes `global.get`: pushes the value of the referenced global.
  DECODE(GlobalGet) {
    GlobalIndexImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    Value* result = Push(imm.global->type);
    return 1 + imm.length;
  }
4121
  // Decodes `global.set`: pops a value of the global's type; assigning to an
  // immutable global is a validation error.
  DECODE(GlobalSet) {
    GlobalIndexImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    if (!VALIDATE(imm.global->mutability)) {
      this->DecodeError("immutable global #%u cannot be assigned", imm.index);
      return 0;
    }
    Value value = Pop(imm.global->type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(GlobalSet, value, imm);
    return 1 + imm.length;
  }
4133
  // Decodes `table.get`: pops an index (i32 or i64 depending on the table's
  // address type) and pushes the element of the table's element type.
  DECODE(TableGet) {
    this->detected_->add_reftypes();
    TableIndexImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    Value index = Pop(TableAddressType(imm.table));
    Value* result = Push(imm.table->type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(TableGet, index, result, imm);
    return 1 + imm.length;
  }
4143
  // Decodes `table.set`: pops an index and a value of the table's element
  // type, and stores the value into the table.
  DECODE(TableSet) {
    this->detected_->add_reftypes();
    TableIndexImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    ValueType table_address_type = TableAddressType(imm.table);
    auto [index, value] = Pop(table_address_type, imm.table->type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(TableSet, index, value, imm);
    return 1 + imm.length;
  }
4153
  // Dispatches all plain load opcodes to the shared {DecodeLoadMem} helper.
  DECODE(LoadMem) { return DecodeLoadMem(GetLoadType(opcode)); }
4155
  // Dispatches all plain store opcodes to the shared {DecodeStoreMem} helper.
  DECODE(StoreMem) { return DecodeStoreMem(GetStoreType(opcode)); }
4157
  // Decodes `memory.grow`: pops the number of pages to grow by and pushes the
  // previous size (both use the memory's address type, i32 or i64).
  DECODE(MemoryGrow) {
    // This opcode will not be emitted by the asm translator.
    DCHECK_EQ(kWasmOrigin, this->module_->origin);
    MemoryIndexImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    ValueType mem_type = MemoryAddressType(imm.memory);
    Value value = Pop(mem_type);
    Value* result = Push(mem_type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(MemoryGrow, imm, value, result);
    return 1 + imm.length;
  }
4169
  // Decodes `memory.size`: pushes the current size in pages, typed according
  // to the memory's address type.
  DECODE(MemorySize) {
    MemoryIndexImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    ValueType result_type = MemoryAddressType(imm.memory);
    Value* result = Push(result_type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(CurrentMemoryPages, imm, result);
    return 1 + imm.length;
  }
4178
  // Decodes a direct `call`: pops the arguments per the callee's signature,
  // pushes its returns, and records that this position may throw.
  DECODE(CallFunction) {
    CallFunctionImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    PoppedArgVector args = PopArgs(imm.sig);
    Value* returns = PushReturns(imm.sig);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(CallDirect, imm, args.data(), returns);
    MarkMightThrow();
    return 1 + imm.length;
  }
4188
  // Decodes `call_indirect`: pops the table index and the arguments, pushes
  // the returns, and records that this position may throw.
  DECODE(CallIndirect) {
    CallIndirectImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    Value index = Pop(TableAddressType(imm.table_imm.table));
    PoppedArgVector args = PopArgs(imm.sig);
    Value* returns = PushReturns(imm.sig);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(CallIndirect, index, imm, args.data(),
                                       returns);
    MarkMightThrow();
    if (!this->module_->type(imm.sig_imm.index).is_final) {
      // In this case we emit an rtt.canon as part of the indirect call.
      this->detected_->add_gc();
    }
    return 1 + imm.length;
  }
4204
  // Decodes `return_call` (tail call): the callee's returns must match this
  // function's returns ({CanReturnCall}); the tail call ends the current
  // code path.
  DECODE(ReturnCall) {
    this->detected_->add_return_call();
    CallFunctionImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    if (!VALIDATE(this->CanReturnCall(imm.sig))) {
      this->DecodeError("%s: %s", WasmOpcodes::OpcodeName(kExprReturnCall),
                        "tail call type error");
      return 0;
    }
    PoppedArgVector args = PopArgs(imm.sig);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(ReturnCall, imm, args.data());
    EndControl();
    return 1 + imm.length;
  }
4219
  // Decodes `return_call_indirect` (indirect tail call): combines the
  // signature check of `return_call` with the table indexing of
  // `call_indirect`; ends the current code path.
  DECODE(ReturnCallIndirect) {
    this->detected_->add_return_call();
    CallIndirectImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    if (!VALIDATE(this->CanReturnCall(imm.sig))) {
      this->DecodeError("%s: %s",
                        WasmOpcodes::OpcodeName(kExprReturnCallIndirect),
                        "tail call return types mismatch");
      return 0;
    }
    Value index = Pop(TableAddressType(imm.table_imm.table));
    PoppedArgVector args = PopArgs(imm.sig);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(ReturnCallIndirect, index, imm,
                                       args.data());
    EndControl();
    if (!this->module_->type(imm.sig_imm.index).is_final) {
      // In this case we emit an rtt.canon as part of the indirect call.
      this->detected_->add_gc();
    }
    return 1 + imm.length;
  }
4241
  // Decodes `call_ref`: pops a (nullable) typed function reference plus the
  // arguments of its signature, pushes the returns, and records that this
  // position may throw.
  DECODE(CallRef) {
    this->detected_->add_typed_funcref();
    SigIndexImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    Value func_ref = Pop(ValueType::RefNull(imm.heap_type()));
    PoppedArgVector args = PopArgs(imm.sig);
    Value* returns = PushReturns(imm.sig);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(CallRef, func_ref, imm.sig, args.data(),
                                       returns);
    MarkMightThrow();
    return 1 + imm.length;
  }
4254
  // Decodes `return_call_ref` (tail call through a function reference):
  // checks return-type compatibility and ends the current code path.
  DECODE(ReturnCallRef) {
    this->detected_->add_typed_funcref();
    this->detected_->add_return_call();
    SigIndexImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;
    if (!VALIDATE(this->CanReturnCall(imm.sig))) {
      this->DecodeError("%s: %s", WasmOpcodes::OpcodeName(kExprReturnCallRef),
                        "tail call return types mismatch");
      return 0;
    }
    Value func_ref = Pop(ValueType::RefNull(imm.heap_type()));
    PoppedArgVector args = PopArgs(imm.sig);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(ReturnCallRef, func_ref, imm.sig,
                                       args.data());
    EndControl();
    return 1 + imm.length;
  }
4272
  // Decodes `ref.eq`: pops two references that must be subtypes of eqref
  // (modulo sharedness), requires both operands to agree on sharedness, and
  // pushes an i32 comparison result. The checks are skipped in unreachable
  // code, where the popped values may have bottom type.
  DECODE(RefEq) {
    this->detected_->add_gc();
    Value rhs = Pop();
    Value lhs = Pop();
    if (!VALIDATE(
            IsSubtypeOf(lhs.type.AsNonShared(), kWasmEqRef, this->module_) ||
            control_.back().unreachable())) {
      this->DecodeError(this->pc_,
                        "ref.eq[0] expected either eqref or (ref null shared "
                        "eq), found %s of type %s",
                        SafeOpcodeNameAt(lhs.pc()), lhs.type.name().c_str());
      return 0;
    }
    if (!VALIDATE(
            IsSubtypeOf(rhs.type.AsNonShared(), kWasmEqRef, this->module_) ||
            control_.back().unreachable())) {
      this->DecodeError(this->pc_,
                        "ref.eq[1] expected either eqref or (ref null shared "
                        "eq), found %s of type %s",
                        SafeOpcodeNameAt(rhs.pc()), rhs.type.name().c_str());
      return 0;
    }
    if (!VALIDATE(lhs.type.is_shared() == rhs.type.is_shared() ||
                  control_.back().unreachable())) {
      this->DecodeError(this->pc_,
                        "ref.eq: sharedness of both operands must match");
      return 0;
    }
    Value* result = Push(kWasmI32);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(BinOp, kExprRefEq, lhs, rhs, result);
    return 1;
  }
4305
  // Decodes `cont.new` (WasmFX / typed continuations, prototype): pops a
  // function reference matching the continuation type's function type and
  // pushes a continuation reference. The interface call is still a TODO.
  DECODE(ContNew) {
    CHECK_PROTOTYPE_OPCODE(wasmfx);
    this->detected_->add_wasmfx();
    ContIndexImmediate imm(this, this->pc_ + 1, validate);
    if (!this->ValidateCont(this->pc_ + 1, imm)) return 0;

    // Pop a function type.
    // Value func_ref =
    Pop(ValueType::RefNull(imm.cont_type->contfun_typeindex(), imm.shared,
                           RefTypeKind::kFunction));

    // Push a continuation type.
    // Value* value =
    Push(ValueType::Ref(imm.heap_type()));
    // TODO(fgm): uncomment when implementing Cont.new
    // CALL_INTERFACE_IF_OK_AND_REACHABLE(ContNew, func_ref, imm.index,
    // value);
    return 1 + imm.length;
  }
4325
  // Decodes `resume` (WasmFX, prototype): pops a continuation reference and
  // validates the effect-handler table. Each `on suspend` handler pushes the
  // tag's parameters temporarily, type-checks the branch to its target label,
  // and restores the stack. Finally the continuation's function signature is
  // applied like a call (args popped, returns pushed). Interface call is TODO.
  DECODE(Resume) {
    CHECK_PROTOTYPE_OPCODE(wasmfx);
    this->detected_->add_wasmfx();

    ContIndexImmediate imm(this, this->pc_ + 1, validate);
    if (!this->ValidateCont(this->pc_ + 1, imm)) return 0;

    Pop(ValueType::RefNull(imm.heap_type()));

    EffectHandlerTableImmediate handler_table_imm(
        this, this->pc_ + 1 + imm.length, validate);

    if (!this->Validate(this->pc_ + imm.length + 1, handler_table_imm))
      return 0;
        this, handler_table_imm);
    base::Vector<HandlerCase> handlers =
        this->zone_->template AllocateVector<HandlerCase>(
            handler_table_imm.table_count);
    int i = 0;
    while (handle_iterator.has_next()) {
      HandlerCase handler = handle_iterator.next();

      if (!this->Validate(this->pc_, handler.tag)) {
        return 0;
      }
      handlers[i] = handler;

      // Remember the stack height so the temporarily pushed tag parameters
      // can be dropped again after the branch type-check.
      uint32_t stack_size = stack_.size();
      uint32_t push_count = 0;
      if (handler.kind == kOnSuspend) {
        const WasmTagSig* sig = handler.tag.tag->sig;
        stack_.EnsureMoreCapacity(static_cast<int>(sig->parameter_count()),
                                  this->zone_);
        for (ValueType type : sig->parameters()) Push(type);
        push_count += sig->parameter_count();

        if (!VALIDATE((this->Validate(this->pc_, handler.maybe_depth.br,
                                      control_depth())))) {
          return 0;
        }

        // The handler's target label must consume exactly the pushed values.
        Control* target = control_at(handler.maybe_depth.br.depth);
        if (!VALIDATE(push_count == target->br_merge()->arity)) {
          this->DecodeError(
              "handler generates %d operand%s, target block returns %d",
              push_count, push_count != 1 ? "s" : "",
              target->br_merge()->arity);
          return 0;
        }

        if (!VALIDATE((
                TypeCheckBranch<PushBranchValues::kYes, RewriteStackTypes::kNo>(
                    target)))) {
          return 0;
        }
        stack_.shrink_to(stack_size);
        DCHECK_LT(i, handler_table_imm.table_count);
      } else if (handler.kind != kSwitch) {
        this->DecodeError("invalid handler kind %d", handler.kind);
        return 0;
      }
      i++;
    }
    // The continuation might return, treat Resume like a function call.
    const FunctionSig* contFunSig =
        this->module_->signature(imm.cont_type->contfun_typeindex());

    // TODO(fgm): uncomment when implementing resume
    // PoppedArgVector args =
    PopArgs(contFunSig);
    // Value* returns =
    PushReturns(contFunSig);
    // CALL_INTERFACE_IF_OK_AND_REACHABLE(ContResume, imm.index, handlers,
    // args, returns);
    return 1 + imm.length + handle_iterator.length();
  }
4403
    // Body of `suspend` (WasmFX, prototype): validates the tag and applies
    // the tag's signature like a call (parameters popped, results pushed).
    // The interface call is still a TODO.
    CHECK_PROTOTYPE_OPCODE(wasmfx);
    this->detected_->add_wasmfx();

    TagIndexImmediate imm(this, this->pc_ + 1, validate);
    if (!this->Validate(this->pc_ + 1, imm)) return 0;

    const FunctionSig* sig = imm.tag->ToFunctionSig();

    // TODO(fgm): uncomment when implementing suspend
    // PoppedArgVector args =
    PopArgs(sig);

    // Value* returns =
    PushReturns(sig);

    // CALL_INTERFACE_IF_OK_AND_REACHABLE(Suspend, imm, args.data());

    return 1 + imm.length;
  }
4424
    // Body of the numeric-prefix dispatcher: reads the full (prefixed)
    // opcode, records reftypes feature usage for the table opcodes that
    // belong to that proposal, and forwards to {DecodeNumericOpcode}.
    auto [full_opcode, opcode_length] =
        this->template read_prefixed_opcode<ValidationTag>(this->pc_,
                                                           "numeric index");
    if (full_opcode == kExprTableGrow || full_opcode == kExprTableSize ||
        full_opcode == kExprTableFill) {
      this->detected_->add_reftypes();
    }
    trace_msg->AppendOpcode(full_opcode);
    return DecodeNumericOpcode(full_opcode, opcode_length);
  }
4436
    // Body of the asm.js-prefix dispatcher: reads the full opcode and
    // forwards to {DecodeAsmJsOpcode}.
    auto [full_opcode, opcode_length] =
        this->template read_prefixed_opcode<ValidationTag>(this->pc_,
                                                           "asmjs index");
    trace_msg->AppendOpcode(full_opcode);
    return DecodeAsmJsOpcode(full_opcode, opcode_length);
  }
4444
  // Decodes the SIMD-prefix dispatcher: bails out if SIMD is unsupported,
  // reads the full opcode, records fp16 / relaxed-simd feature usage, and
  // forwards to {DecodeSimdOpcode}.
  DECODE(Simd) {
    this->detected_->add_simd();
    if (v8_flags.correctness_fuzzer_suppressions) {
      FATAL("Aborting on missing Wasm SIMD support");
    }
    this->DecodeError("Wasm SIMD unsupported");
    return 0;
    }
    auto [full_opcode, opcode_length] =
        this->template read_prefixed_opcode<ValidationTag>(this->pc_);
    if (!VALIDATE(this->ok())) return 0;
    trace_msg->AppendOpcode(full_opcode);
    if (WasmOpcodes::IsFP16SimdOpcode(full_opcode)) {
      this->detected_->add_fp16();
    } else if (WasmOpcodes::IsRelaxedSimdOpcode(full_opcode)) {
      this->detected_->add_relaxed_simd();
    }
    return DecodeSimdOpcode(full_opcode, opcode_length);
  }
4465
  // Decodes the atomics-prefix dispatcher: records threads feature usage,
  // reads the full opcode, and forwards to {DecodeAtomicOpcode}.
  DECODE(Atomic) {
    this->detected_->add_threads();
    auto [full_opcode, opcode_length] =
        this->template read_prefixed_opcode<ValidationTag>(this->pc_,
                                                           "atomic index");
    trace_msg->AppendOpcode(full_opcode);
    return DecodeAtomicOpcode(full_opcode, opcode_length);
  }
4474
    // Body of the GC-prefix dispatcher: reads the full opcode, then routes
    // stringref opcodes (which share the GC prefix) to the stringref decoder
    // and everything else to the GC decoder.
    auto [full_opcode, opcode_length] =
        this->template read_prefixed_opcode<ValidationTag>(this->pc_,
                                                           "gc index");
    trace_msg->AppendOpcode(full_opcode);
    // If we are validating we could have read an illegal opcode. Handle that
    // separately.
    if (!VALIDATE(full_opcode != 0)) {
      DCHECK(this->failed());
      return 0;
    } else if (full_opcode >= kExprStringNewUtf8) {
      CHECK_PROTOTYPE_OPCODE(stringref);
      return DecodeStringRefOpcode(full_opcode, opcode_length);
    } else {
      this->detected_->add_gc();
      return DecodeGCOpcode(full_opcode, opcode_length);
    }
  }
4493
4494#define SIMPLE_PROTOTYPE_CASE(name, ...) \
4495 DECODE(name) { return BuildSimplePrototypeOperator(opcode); }
4497#undef SIMPLE_PROTOTYPE_CASE
4498
4499#undef DECODE
4500
  // Handler installed (in constant-expression mode) for opcodes that are not
  // allowed in constant expressions; always fails decoding.
  static int NonConstError(WasmFullDecoder* decoder, WasmOpcode opcode) {
    decoder->DecodeError("opcode %s is not allowed in constant expressions",
                         WasmOpcodes::OpcodeName(opcode));
    return 0;
  }
4506
  // Fallback handler for opcodes without a table entry; always fails.
  static int UnknownOpcodeError(WasmFullDecoder* decoder, WasmOpcode opcode) {
    decoder->DecodeError("Invalid opcode 0x%x", opcode);
    return 0;
  }
4511
4513
4514 // Ideally we would use template specialization for the different opcodes, but
4515 // GCC does not allow to specialize templates in class scope
4516 // (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=85282), and specializing
4517 // outside the class is not allowed for non-specialized classes.
4518 // Hence just list all implementations explicitly here, which also gives more
4519 // freedom to use the same implementation for different opcodes.
4520#define DECODE_IMPL(opcode) DECODE_IMPL2(kExpr##opcode, opcode)
4521#define DECODE_IMPL2(opcode, name) \
4522 if (idx == opcode) { \
4523 if constexpr (decoding_mode == kConstantExpression) { \
4524 return &WasmFullDecoder::NonConstError; \
4525 } else { \
4526 return &WasmFullDecoder::Decode##name; \
4527 } \
4528 }
4529#define DECODE_IMPL_CONST(opcode) DECODE_IMPL_CONST2(kExpr##opcode, opcode)
4530#define DECODE_IMPL_CONST2(opcode, name) \
4531 if (idx == opcode) return &WasmFullDecoder::Decode##name
4532
4533 static constexpr OpcodeHandler GetOpcodeHandlerTableEntry(size_t idx) {
4534 DECODE_IMPL(Nop);
4535#define BUILD_SIMPLE_OPCODE(op, ...) DECODE_IMPL(op);
4537#undef BUILD_SIMPLE_OPCODE
4538#define BUILD_SIMPLE_EXTENDED_CONST_OPCODE(op, ...) DECODE_IMPL_CONST(op);
4540#undef BUILD_SIMPLE_EXTENDED_CONST_OPCODE
4542 DECODE_IMPL(Rethrow);
4544 DECODE_IMPL(Try);
4545 DECODE_IMPL(TryTable);
4546 DECODE_IMPL(ThrowRef);
4547 DECODE_IMPL(Catch);
4548 DECODE_IMPL(Delegate);
4549 DECODE_IMPL(CatchAll);
4550 DECODE_IMPL(ContNew);
4551 DECODE_IMPL(Resume);
4553 DECODE_IMPL(BrOnNull);
4554 DECODE_IMPL(BrOnNonNull);
4555 DECODE_IMPL(Loop);
4556 DECODE_IMPL(If);
4557 DECODE_IMPL(Else);
4558 DECODE_IMPL_CONST(End);
4559 DECODE_IMPL(Select);
4560 DECODE_IMPL(SelectWithType);
4561 DECODE_IMPL(Br);
4562 DECODE_IMPL(BrIf);
4563 DECODE_IMPL(BrTable);
4564 DECODE_IMPL(Return);
4565 DECODE_IMPL(Unreachable);
4566 DECODE_IMPL(NopForTestingUnsupportedInLiftoff);
4567 DECODE_IMPL_CONST(I32Const);
4568 DECODE_IMPL_CONST(I64Const);
4569 DECODE_IMPL_CONST(F32Const);
4570 DECODE_IMPL_CONST(F64Const);
4571 DECODE_IMPL_CONST(RefNull);
4572 DECODE_IMPL(RefIsNull);
4573 DECODE_IMPL_CONST(RefFunc);
4574 DECODE_IMPL(RefAsNonNull);
4575 DECODE_IMPL(RefEq);
4576 DECODE_IMPL(LocalGet);
4577 DECODE_IMPL(LocalSet);
4578 DECODE_IMPL(LocalTee);
4579 DECODE_IMPL(Drop);
4580 DECODE_IMPL_CONST(GlobalGet);
4581 DECODE_IMPL(GlobalSet);
4582 DECODE_IMPL(TableGet);
4583 DECODE_IMPL(TableSet);
4584#define DECODE_LOAD_MEM(op, ...) DECODE_IMPL2(kExpr##op, LoadMem);
4586#undef DECODE_LOAD_MEM
4587#define DECODE_STORE_MEM(op, ...) DECODE_IMPL2(kExpr##op, StoreMem);
4589#undef DECODE_LOAD_MEM
4590 DECODE_IMPL(MemoryGrow);
4591 DECODE_IMPL(MemorySize);
4592 DECODE_IMPL(CallFunction);
4593 DECODE_IMPL(CallIndirect);
4594 DECODE_IMPL(ReturnCall);
4595 DECODE_IMPL(ReturnCallIndirect);
4596 DECODE_IMPL(CallRef);
4597 DECODE_IMPL(ReturnCallRef);
4598 DECODE_IMPL2(kNumericPrefix, Numeric);
4599 DECODE_IMPL2(kAsmJsPrefix, AsmJs);
4600 DECODE_IMPL_CONST2(kSimdPrefix, Simd);
4601 DECODE_IMPL2(kAtomicPrefix, Atomic);
4602 DECODE_IMPL_CONST2(kGCPrefix, GC);
4603#define SIMPLE_PROTOTYPE_CASE(name, ...) DECODE_IMPL(name);
4605#undef SIMPLE_PROTOTYPE_CASE
4606 return &WasmFullDecoder::UnknownOpcodeError;
4607 }
4608
4609#undef DECODE_IMPL
4610#undef DECODE_IMPL2
4611
    // Compile-time dispatch table for all 256 one-byte opcodes, built from
    // {GetOpcodeHandlerTableEntry}.
    static constexpr std::array<OpcodeHandler, 256> kOpcodeHandlers =
        base::make_array<256>(GetOpcodeHandlerTableEntry);
    return kOpcodeHandlers[opcode];
  }
4617
  // Marks the rest of the current control block as unreachable (e.g. after
  // `br`, `unreachable`, or a tail call) and drops values pushed inside it.
  void EndControl() {
    DCHECK(!control_.empty());
    Control* current = &control_.back();
    stack_.shrink_to(current->stack_depth);
    current->reachability = kUnreachable;
    current_code_reachable_and_ok_ = false;
  }
4625
  // Initializes {merge} with {arity} values produced by {get_val}. The values
  // are stored inline for arity 1 and in a zone-allocated array for arity >
  // 1; passing nullptr for {get_val} is only valid for arity 0.
  template <typename func>
  V8_INLINE void InitMerge(Merge<Value>* merge, uint32_t arity, func get_val) {
    merge->arity = arity;
    if constexpr (std::is_null_pointer_v<func>) {
      DCHECK_EQ(0, arity);
    } else if (arity == 1) {
      merge->vals.first = get_val(0);
    } else if (arity > 1) {
      merge->vals.array = this->zone()->template AllocateArray<Value>(arity);
      for (uint32_t i = 0; i < arity; i++) {
        merge->vals.array[i] = get_val(i);
      }
    }
  }
4640
  // Fast path: only fall back to the slow path when fewer than {count}
  // values are available above the current block's stack base.
  // In reachable code, check if there are at least {count} values on the stack.
  // In unreachable code, if there are less than {count} values on the stack,
  // insert a number of unreachable values underneath the current values equal
  // to the difference, and return that number.
    uint32_t limit = control_.back().stack_depth;
    if (V8_LIKELY(stack_.size() >= count + limit)) return 0;
    return EnsureStackArguments_Slow(count);
  }
4650
    // Slow path of {EnsureStackArguments}: reports an error in reachable
    // code; in unreachable code, shifts the existing values up and fills the
    // gap below them with unreachable (bottom-typed) values.
    uint32_t limit = control_.back().stack_depth;
    if (!VALIDATE(control_.back().unreachable())) {
      NotEnoughArgumentsError(count, stack_.size() - limit);
    }
    // Silently create unreachable values out of thin air underneath the
    // existing stack values. To do so, we have to move existing stack values
    // upwards in the stack, then instantiate the new Values as
    // {UnreachableValue}.
    int current_values = stack_.size() - limit;
    int additional_values = count - current_values;
    DCHECK_GT(additional_values, 0);
    // Ensure that after this operation there is still room for one more value.
    // Callers might not expect this operation to push values on the stack
    // (because it only does so in exceptional cases).
    stack_.EnsureMoreCapacity(additional_values + 1, this->zone_);
    Value unreachable_value = UnreachableValue(this->pc_);
    for (int i = 0; i < additional_values; ++i) stack_.push(unreachable_value);
    if (current_values > 0) {
      // Move the current values up to the end of the stack, and create
      // unreachable values below.
      Value* stack_base = stack_value(current_values + additional_values);
      for (int i = current_values - 1; i >= 0; i--) {
        stack_base[additional_values + i] = stack_base[i];
      }
      for (int i = 0; i < additional_values; i++) {
        stack_base[i] = UnreachableValue(this->pc_);
      }
    }
    return additional_values;
  }
4682
    // Body of parameter validation: ensures {sig}'s parameters are present on
    // the stack and type-checks each one in place (without popping).
    int num_params = static_cast<int>(sig->parameter_count());
    EnsureStackArguments(num_params);
    Value* param_base = stack_.end() - num_params;
    for (int i = 0; i < num_params; i++) {
      ValidateStackValue(i, param_base[i], sig->GetParam(i));
    }
  }
4691
4692 // Drops a number of stack elements equal to the {sig}'s parameter count (0 if
4693 // {sig} is null), or all of them if less are present.
4694 V8_INLINE void DropArgs(const FunctionSig* sig) {
4695 int count = static_cast<int>(sig->parameter_count());
4696 Drop(count);
4697 }
4698
    // Body of {PopArgs} for struct initializers: validates one stack value
    // per struct field (against the unpacked field type) and pops them all,
    // returning them by copy.
    int count = static_cast<int>(type->field_count());
    EnsureStackArguments(count);
    DCHECK_LE(control_.back().stack_depth, stack_size());
    DCHECK_GE(stack_size() - control_.back().stack_depth, count);
    Value* args_base = stack_.end() - count;
    for (int i = 0; i < count; i++) {
      ValidateStackValue(i, args_base[i], type->field(i).Unpacked());
    }
    // Note: Popping from the {FastZoneVector} does not invalidate the old (now
    // out-of-range) elements.
    stack_.pop(count);
    return PoppedArgVector{base::VectorOf(args_base, count)};
  }
  // Drops a number of stack elements equal to the struct's field count, or all
  // of them if less are present.
  V8_INLINE void DropArgs(const StructType* type) {
    Drop(static_cast<int>(type->field_count()));
  }
4718
  // Pops arguments as required by signature, returning them by copy as a
  // vector.
    // Validate each argument slot against the corresponding parameter type,
    // then pop them all at once.
    int count = static_cast<int>(sig->parameter_count());
    EnsureStackArguments(count);
    DCHECK_LE(control_.back().stack_depth, stack_size());
    DCHECK_GE(stack_size() - control_.back().stack_depth, count);
    Value* args_base = stack_.end() - count;
    for (int i = 0; i < count; ++i) {
      ValidateStackValue(i, args_base[i], sig->GetParam(i));
    }
    // Note: Popping from the {FastZoneVector} does not invalidate the old (now
    // out-of-range) elements.
    stack_.pop(count);
    return PoppedArgVector{base::VectorOf(args_base, count)};
  }
4735
    // Body of {PushControl}: opens a new control block of the given kind.
    // The block's input values stay on the stack (below its stack depth), are
    // retyped to the block's declared input types, and are used to initialize
    // the block's start merge; the end merge is initialized from the declared
    // output types.
    DCHECK(!control_.empty());
    ValidateParameters(&imm.sig);
    uint32_t consumed_values = static_cast<uint32_t>(imm.sig.parameter_count());
    uint32_t stack_depth = stack_.size();
    DCHECK_LE(consumed_values, stack_depth);
    uint32_t inner_stack_depth = stack_depth - consumed_values;
    DCHECK_LE(control_.back().stack_depth, inner_stack_depth);

    uint32_t init_stack_depth = this->locals_initialization_stack_depth();
    Reachability reachability = control_.back().innerReachability();
    control_.EnsureMoreCapacity(1, this->zone_);
    control_.emplace_back(this->zone_, kind, inner_stack_depth,
                          init_stack_depth, this->pc_, reachability);
    Control* new_block = &control_.back();

    Value* arg_base = stack_.end() - consumed_values;
    // Update the type of input nodes to the more general types expected by the
    // block. In particular, in unreachable code, the input would have bottom
    // type otherwise.
    for (uint32_t i = 0; i < consumed_values; ++i) {
      DCHECK_IMPLIES(this->ok(), IsSubtypeOf(arg_base[i].type, imm.in_type(i),
                                             this->module_) ||
                                     arg_base[i].type == kWasmBottom);
      arg_base[i].type = imm.in_type(i);
    }

    // Initialize start- and end-merges of {c} with values according to the
    // in- and out-types of {c} respectively.
    const uint8_t* pc = this->pc_;
    InitMerge(&new_block->end_merge, imm.out_arity(), [pc, &imm](uint32_t i) {
      return Value{pc, imm.out_type(i)};
    });
    InitMerge(&new_block->start_merge, imm.in_arity(),
              [arg_base](uint32_t i) { return arg_base[i]; });
    return new_block;
  }
4773
  // Closes the current (innermost) control block: pushes the block's end
  // merge values for the parent, rolls back locals-initialization state, and
  // updates reachability of the code following the block.
  void PopControl() {
    // This cannot be the outermost control block.
    DCHECK_LT(1, control_.size());
    Control* c = &control_.back();
    DCHECK_LE(c->stack_depth, stack_.size());

    // - In non-unreachable code, a loop just leaves the values on the stack.
    // - In unreachable code, it is not guaranteed that we have Values of the
    //   correct types on the stack, so we have to make sure we do. Their values
    //   do not matter, so we might as well push the (uninitialized) values of
    //   the loop's end merge.
    if (!c->is_loop() || c->unreachable()) {
      PushMergeValues(c, &c->end_merge);
    }
    RollbackLocalsInitialization(c);

    bool parent_reached =
        c->reachable() || c->end_merge.reached || c->is_onearmed_if();
    control_.pop();
    // If the parent block was reachable before, but the popped control does not
    // return to here, this block becomes "spec only reachable".
    if (!parent_reached) SetSucceedingCodeDynamicallyUnreachable();
    current_code_reachable_and_ok_ =
        VALIDATE(this->ok()) && control_.back().reachable();
  }
4801
// Decodes a plain memory load: reads the memarg immediate, pops the address,
// pushes the loaded value, and forwards to the interface unless the access
// is statically out of bounds. Returns the total opcode length (prefix plus
// immediate), or 0 on validation failure.
4802 int DecodeLoadMem(LoadType type, int prefix_len = 1) {
// NOTE(review): original source line 4803 (the declaration of {imm}, the
// MemoryAccessImmediate being initialized below) is elided by this view.
4804 MakeMemoryAccessImmediate(prefix_len, type.size_log_2());
4805 if (!this->Validate(this->pc_ + prefix_len, imm)) return 0;
// The index type depends on the memory (i32 for wasm32, i64 for memory64).
4806 ValueType address_type = MemoryAddressType(imm.memory);
4807 Value index = Pop(address_type);
4808 Value* result = Push(type.value_type());
// A statically out-of-bounds access already emitted a trap; skip the load.
4809 if (V8_LIKELY(
4810 !CheckStaticallyOutOfBounds(imm.memory, type.size(), imm.offset))) {
4811 CALL_INTERFACE_IF_OK_AND_REACHABLE(LoadMem, type, imm, index, result);
4812 }
4813 return prefix_len + imm.length;
4814 }
4815
// NOTE(review): original source line 4816 — the first line of this
// function's signature (name and leading parameters, presumably the
// LoadType and LoadTransformationKind) — is elided by this view.
4817 uint32_t opcode_length) {
4818 // Load extends always load 64-bits.
4819 uint32_t max_alignment =
4820 transform == LoadTransformationKind::kExtend ? 3 : type.size_log_2();
// NOTE(review): original source line 4821 ({imm} declaration for the
// MemoryAccessImmediate initialized below) is elided by this view.
4822 MakeMemoryAccessImmediate(opcode_length, max_alignment);
4823 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
4824 ValueType address_type = MemoryAddressType(imm.memory);
4825 Value index = Pop(address_type);
// All load-transform instructions produce an s128 value.
4826 Value* result = Push(kWasmS128);
// Extending loads read 8 bytes regardless of the declared load type.
4827 uintptr_t op_size =
4828 transform == LoadTransformationKind::kExtend ? 8 : type.size();
4829 if (V8_LIKELY(
4830 !CheckStaticallyOutOfBounds(imm.memory, op_size, imm.offset))) {
4831 CALL_INTERFACE_IF_OK_AND_REACHABLE(LoadTransform, type, transform, imm,
4832 index, result);
4833 }
4834 return opcode_length + imm.length;
4835 }
4836
4837 int DecodeLoadLane(WasmOpcode opcode, LoadType type, uint32_t opcode_length) {
4838 MemoryAccessImmediate mem_imm =
4839 MakeMemoryAccessImmediate(opcode_length, type.size_log_2());
4840 if (!this->Validate(this->pc_ + opcode_length, mem_imm)) return 0;
4841 SimdLaneImmediate lane_imm(this, this->pc_ + opcode_length + mem_imm.length,
4842 validate);
4843 if (!this->Validate(this->pc_ + opcode_length, opcode, lane_imm)) return 0;
4844 ValueType address_type = MemoryAddressType(mem_imm.memory);
4845 auto [index, v128] = Pop(address_type, kWasmS128);
4846
4847 Value* result = Push(kWasmS128);
4848 if (V8_LIKELY(!CheckStaticallyOutOfBounds(mem_imm.memory, type.size(),
4849 mem_imm.offset))) {
4850 CALL_INTERFACE_IF_OK_AND_REACHABLE(LoadLane, type, v128, index, mem_imm,
4851 lane_imm.lane, result);
4852 }
4853 return opcode_length + mem_imm.length + lane_imm.length;
4854 }
4855
// NOTE(review): original source line 4856 — the first line of this
// function's signature (presumably `int DecodeStoreLane(WasmOpcode opcode,
// StoreType type,`) — is elided by this view; confirm against full source.
4857 uint32_t opcode_length) {
4858 MemoryAccessImmediate mem_imm =
4859 MakeMemoryAccessImmediate(opcode_length, type.size_log_2());
4860 if (!this->Validate(this->pc_ + opcode_length, mem_imm)) return 0;
// The lane index immediate follows directly after the memory immediate.
4861 SimdLaneImmediate lane_imm(this, this->pc_ + opcode_length + mem_imm.length,
4862 validate);
4863 if (!this->Validate(this->pc_ + opcode_length, opcode, lane_imm)) return 0;
4864 ValueType address_type = MemoryAddressType(mem_imm.memory);
// Pop the address and the vector whose lane is stored; stores push nothing.
4865 auto [index, v128] = Pop(address_type, kWasmS128);
4866
// A statically out-of-bounds access already emitted a trap; skip the store.
4867 if (V8_LIKELY(!CheckStaticallyOutOfBounds(mem_imm.memory, type.size(),
4868 mem_imm.offset))) {
4869 CALL_INTERFACE_IF_OK_AND_REACHABLE(StoreLane, type, mem_imm, index, v128,
4870 lane_imm.lane);
4871 }
4872 return opcode_length + mem_imm.length + lane_imm.length;
4873 }
4874
4875 bool CheckStaticallyOutOfBounds(const WasmMemory* memory, uint64_t size,
4876 uint64_t offset) {
4877 const bool statically_oob =
4878 !base::IsInBounds<uint64_t>(offset, size, memory->max_memory_size);
4879 if (V8_UNLIKELY(statically_oob)) {
4880 CALL_INTERFACE_IF_OK_AND_REACHABLE(Trap, TrapReason::kTrapMemOutOfBounds);
4881 SetSucceedingCodeDynamicallyUnreachable();
4882 }
4883 return statically_oob;
4884 }
4885
// Decodes a plain memory store: reads the memarg immediate, pops the value
// and the address, and forwards to the interface unless the access is
// statically out of bounds. Returns the total opcode length (prefix plus
// immediate), or 0 on validation failure.
4886 int DecodeStoreMem(StoreType store, int prefix_len = 1) {
// NOTE(review): original source line 4887 (the declaration of {imm}, the
// MemoryAccessImmediate being initialized below) is elided by this view.
4888 MakeMemoryAccessImmediate(prefix_len, store.size_log_2());
4889 if (!this->Validate(this->pc_ + prefix_len, imm)) return 0;
// The index type depends on the memory (i32 for wasm32, i64 for memory64).
4890 ValueType address_type = MemoryAddressType(imm.memory);
4891 auto [index, value] = Pop(address_type, store.value_type());
4892 if (V8_LIKELY(!CheckStaticallyOutOfBounds(imm.memory, store.size(),
4893 imm.offset))) {
4894 CALL_INTERFACE_IF_OK_AND_REACHABLE(StoreMem, store, imm, index, value);
4895 }
4896 return prefix_len + imm.length;
4897 }
4898
// Decodes s128.const: reads the 16-byte immediate and pushes an s128 value.
// Returns the consumed byte count (opcode plus 16 bytes of immediate).
4899 uint32_t SimdConstOp(uint32_t opcode_length) {
4900 Simd128Immediate imm(this, this->pc_ + opcode_length, validate);
4901 Value* result = Push(kWasmS128);
// NOTE(review): original source line 4902 is elided by this view —
// presumably the interface call materializing the constant; verify against
// the full source.
4903 return opcode_length + kSimd128Size;
4904 }
4905
4906 uint32_t SimdExtractLane(WasmOpcode opcode, ValueType type,
4907 uint32_t opcode_length) {
4908 SimdLaneImmediate imm(this, this->pc_ + opcode_length, validate);
4909 if (!this->Validate(this->pc_ + opcode_length, opcode, imm)) return 0;
4910 Value input = Pop(kWasmS128);
4911 Value* result = Push(type);
4912 CALL_INTERFACE_IF_OK_AND_REACHABLE(SimdLaneOp, opcode, imm,
4913 base::VectorOf({input}), result);
4914 return opcode_length + imm.length;
4915 }
4916
// Decodes a SIMD replace-lane instruction: reads and validates the lane
// index immediate, pops the s128 vector and the scalar replacement value,
// and pushes the updated s128. Returns the consumed byte count, 0 on error.
4917 uint32_t SimdReplaceLane(WasmOpcode opcode, ValueType type,
4918 uint32_t opcode_length) {
4919 SimdLaneImmediate imm(this, this->pc_ + opcode_length, validate);
4920 if (!this->Validate(this->pc_ + opcode_length, opcode, imm)) return 0;
4921 auto [v128, lane_val] = Pop(kWasmS128, type);
4922 Value* result = Push(kWasmS128);
// NOTE(review): original source line 4923 is elided by this view — it is
// the call-site prefix (presumably CALL_INTERFACE_IF_OK_AND_REACHABLE()
// whose argument list continues on the next line; verify against source.
4924 SimdLaneOp, opcode, imm, base::VectorOf({v128, lane_val}), result);
4925 return opcode_length + imm.length;
4926 }
4927
4928 uint32_t Simd8x16ShuffleOp(uint32_t opcode_length) {
4929 Simd128Immediate imm(this, this->pc_ + opcode_length, validate);
4930 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
4931 auto [input0, input1] = Pop(kWasmS128, kWasmS128);
4932 Value* result = Push(kWasmS128);
4933 CALL_INTERFACE_IF_OK_AND_REACHABLE(Simd8x16ShuffleOp, imm, input0, input1,
4934 result);
4935 return opcode_length + 16;
4936 }
4937
// Dispatches a decoded 0xfd-prefixed SIMD opcode to the matching decode
// helper. Returns the number of bytes consumed after the prefix (immediates
// included), or 0 on validation failure. In constant-expression mode only
// s128.const is permitted.
4938 uint32_t DecodeSimdOpcode(WasmOpcode opcode, uint32_t opcode_length) {
4939 if constexpr (decoding_mode == kConstantExpression) {
4940 // Currently, only s128.const is allowed in constant expressions.
4941 if (opcode != kExprS128Const) {
4942 this->DecodeError("opcode %s is not allowed in constant expressions",
4943 this->SafeOpcodeNameAt(this->pc()));
4944 return 0;
4945 }
4946 return SimdConstOp(opcode_length);
4947 }
4948 // opcode_length is the number of bytes that this SIMD-specific opcode takes
4949 // up in the LEB128 encoded form.
4950 switch (opcode) {
4951 case kExprF64x2ExtractLane:
4952 return SimdExtractLane(opcode, kWasmF64, opcode_length);
// The fp16 cases are flag-gated: on success they fall through to the
// regular handler of the corresponding f32 opcode shape.
4953 case kExprF16x8ExtractLane: {
4954 if (!v8_flags.experimental_wasm_fp16) {
4955 this->DecodeError(
4956 "invalid simd opcode: 0x%x, "
4957 "enable with --experimental-wasm-fp16",
4958 opcode);
4959 return 0;
4960 }
4961 [[fallthrough]];
4962 }
4963 case kExprF32x4ExtractLane:
4964 return SimdExtractLane(opcode, kWasmF32, opcode_length);
4965 case kExprI64x2ExtractLane:
4966 return SimdExtractLane(opcode, kWasmI64, opcode_length);
4967 case kExprI32x4ExtractLane:
4968 case kExprI16x8ExtractLaneS:
4969 case kExprI16x8ExtractLaneU:
4970 case kExprI8x16ExtractLaneS:
4971 case kExprI8x16ExtractLaneU:
4972 return SimdExtractLane(opcode, kWasmI32, opcode_length);
4973 case kExprF64x2ReplaceLane:
4974 return SimdReplaceLane(opcode, kWasmF64, opcode_length);
4975 case kExprF16x8ReplaceLane: {
4976 if (!v8_flags.experimental_wasm_fp16) {
4977 this->DecodeError(
4978 "invalid simd opcode: 0x%x, "
4979 "enable with --experimental-wasm-fp16",
4980 opcode);
4981 return 0;
4982 }
4983 [[fallthrough]];
4984 }
4985 case kExprF32x4ReplaceLane:
4986 return SimdReplaceLane(opcode, kWasmF32, opcode_length);
4987 case kExprI64x2ReplaceLane:
4988 return SimdReplaceLane(opcode, kWasmI64, opcode_length);
4989 case kExprI32x4ReplaceLane:
4990 case kExprI16x8ReplaceLane:
4991 case kExprI8x16ReplaceLane:
4992 return SimdReplaceLane(opcode, kWasmI32, opcode_length);
4993 case kExprI8x16Shuffle:
4994 return Simd8x16ShuffleOp(opcode_length);
// Memory-touching SIMD opcodes reuse the scalar load/store decoders.
4995 case kExprS128LoadMem:
4996 return DecodeLoadMem(LoadType::kS128Load, opcode_length);
4997 case kExprS128StoreMem:
4998 return DecodeStoreMem(StoreType::kS128Store, opcode_length);
4999 case kExprS128Load32Zero:
5000 return DecodeLoadTransformMem(LoadType::kI32Load,
5001 LoadTransformationKind::kZeroExtend,
5002 opcode_length);
5003 case kExprS128Load64Zero:
5004 return DecodeLoadTransformMem(LoadType::kI64Load,
5005 LoadTransformationKind::kZeroExtend,
5006 opcode_length);
5007 case kExprS128Load8Splat:
5008 return DecodeLoadTransformMem(LoadType::kI32Load8S,
5009 LoadTransformationKind::kSplat,
5010 opcode_length);
5011 case kExprS128Load16Splat:
5012 return DecodeLoadTransformMem(LoadType::kI32Load16S,
5013 LoadTransformationKind::kSplat,
5014 opcode_length);
5015 case kExprS128Load32Splat:
5016 return DecodeLoadTransformMem(
5017 LoadType::kI32Load, LoadTransformationKind::kSplat, opcode_length);
5018 case kExprS128Load64Splat:
5019 return DecodeLoadTransformMem(
5020 LoadType::kI64Load, LoadTransformationKind::kSplat, opcode_length);
5021 case kExprS128Load8x8S:
5022 return DecodeLoadTransformMem(LoadType::kI32Load8S,
5023 LoadTransformationKind::kExtend,
5024 opcode_length);
5025 case kExprS128Load8x8U:
5026 return DecodeLoadTransformMem(LoadType::kI32Load8U,
5027 LoadTransformationKind::kExtend,
5028 opcode_length);
5029 case kExprS128Load16x4S:
5030 return DecodeLoadTransformMem(LoadType::kI32Load16S,
5031 LoadTransformationKind::kExtend,
5032 opcode_length);
5033 case kExprS128Load16x4U:
5034 return DecodeLoadTransformMem(LoadType::kI32Load16U,
5035 LoadTransformationKind::kExtend,
5036 opcode_length);
5037 case kExprS128Load32x2S:
5038 return DecodeLoadTransformMem(LoadType::kI64Load32S,
5039 LoadTransformationKind::kExtend,
5040 opcode_length);
5041 case kExprS128Load32x2U:
5042 return DecodeLoadTransformMem(LoadType::kI64Load32U,
5043 LoadTransformationKind::kExtend,
5044 opcode_length);
5045 case kExprS128Load8Lane: {
5046 return DecodeLoadLane(opcode, LoadType::kI32Load8S, opcode_length);
5047 }
5048 case kExprS128Load16Lane: {
5049 return DecodeLoadLane(opcode, LoadType::kI32Load16S, opcode_length);
5050 }
5051 case kExprS128Load32Lane: {
5052 return DecodeLoadLane(opcode, LoadType::kI32Load, opcode_length);
5053 }
5054 case kExprS128Load64Lane: {
5055 return DecodeLoadLane(opcode, LoadType::kI64Load, opcode_length);
5056 }
5057 case kExprS128Store8Lane: {
5058 return DecodeStoreLane(opcode, StoreType::kI32Store8, opcode_length);
5059 }
5060 case kExprS128Store16Lane: {
5061 return DecodeStoreLane(opcode, StoreType::kI32Store16, opcode_length);
5062 }
5063 case kExprS128Store32Lane: {
5064 return DecodeStoreLane(opcode, StoreType::kI32Store, opcode_length);
5065 }
5066 case kExprS128Store64Lane: {
5067 return DecodeStoreLane(opcode, StoreType::kI64Store, opcode_length);
5068 }
5069 case kExprS128Const:
5070 return SimdConstOp(opcode_length);
// All fp16 value operations share one flag check and then use the generic
// signature-driven path in the default case below.
5071 case kExprF16x8Splat:
5072 case kExprF16x8Abs:
5073 case kExprF16x8Neg:
5074 case kExprF16x8Sqrt:
5075 case kExprF16x8Ceil:
5076 case kExprF16x8Floor:
5077 case kExprF16x8Trunc:
5078 case kExprF16x8NearestInt:
5079 case kExprF16x8Eq:
5080 case kExprF16x8Ne:
5081 case kExprF16x8Lt:
5082 case kExprF16x8Gt:
5083 case kExprF16x8Le:
5084 case kExprF16x8Ge:
5085 case kExprF16x8Add:
5086 case kExprF16x8Sub:
5087 case kExprF16x8Mul:
5088 case kExprF16x8Div:
5089 case kExprF16x8Min:
5090 case kExprF16x8Max:
5091 case kExprF16x8Pmin:
5092 case kExprF16x8Pmax:
5093 case kExprI16x8SConvertF16x8:
5094 case kExprI16x8UConvertF16x8:
5095 case kExprF16x8SConvertI16x8:
5096 case kExprF16x8UConvertI16x8:
5097 case kExprF16x8DemoteF32x4Zero:
5098 case kExprF16x8DemoteF64x2Zero:
5099 case kExprF32x4PromoteLowF16x8:
5100 case kExprF16x8Qfma:
5101 case kExprF16x8Qfms: {
5102 if (!v8_flags.experimental_wasm_fp16) {
5103 this->DecodeError(
5104 "invalid simd opcode: 0x%x, "
5105 "enable with --experimental-wasm-fp16",
5106 opcode);
5107 return 0;
5108 }
5109 [[fallthrough]];
5110 }
// Generic path: pop/push according to the opcode's static signature and
// hand the operation to the interface. No immediates, so only the opcode
// bytes themselves are consumed.
5111 default: {
5112 const FunctionSig* sig = WasmOpcodes::Signature(opcode);
5113 if (!VALIDATE(sig != nullptr)) {
5114 this->DecodeError("invalid simd opcode");
5115 return 0;
5116 }
5117 PoppedArgVector args = PopArgs(sig);
5118 Value* results = sig->return_count() == 0 ? nullptr : PushReturns(sig);
5119 CALL_INTERFACE_IF_OK_AND_REACHABLE(SimdOp, opcode, args.data(),
5120 results);
5121 return opcode_length;
5122 }
5123 }
5124 }
5125
5126 // Returns true if type checking will always fail, either because the types
5127 // are unrelated or because the target_type is one of the null sentinels and
5128 // conversion to null does not succeed.
5129 bool TypeCheckAlwaysFails(Value obj, HeapType expected_type,
5130 bool null_succeeds) {
5131 bool types_unrelated =
5132 !IsSubtypeOf(ValueType::Ref(expected_type), obj.type, this->module_) &&
5133 !IsSubtypeOf(obj.type, ValueType::RefNull(expected_type),
5134 this->module_);
5135 // For "unrelated" types the check can still succeed for the null value on
5136 // instructions treating null as a successful check.
5137 // TODO(12868): For string views, this implementation anticipates that
5138 // https://github.com/WebAssembly/stringref/issues/40 will be resolved
5139 // by making the views standalone types.
5140 return (types_unrelated &&
5141 (!null_succeeds || !obj.type.is_nullable() ||
5142 obj.type.is_string_view() || expected_type.is_string_view())) ||
5143 ((!null_succeeds || !obj.type.is_nullable()) &&
5144 (expected_type.representation() == HeapType::kNone ||
5145 expected_type.representation() == HeapType::kNoFunc ||
5146 expected_type.representation() == HeapType::kNoExtern ||
5147 expected_type.representation() == HeapType::kNoExn));
5148 }
5149
5150 // Checks if {obj} is a subtype of type, thus checking will always
5151 // succeed.
// NOTE(review): the signature line (original source line 5152) is elided by
// this view; the body suggests a predicate over (Value obj, HeapType type) —
// confirm against the full source.
5153 return IsSubtypeOf(obj.type, ValueType::RefNull(type), this->module_);
5154 }
5155
// Guard macro for opcode handlers that are illegal in constant expressions:
// in kConstantExpression decoding mode it reports an error and returns 0
// (consumed-bytes convention for failed decodes) from the enclosing handler.
// No comments inside the macro itself — they would break the backslash
// line continuations.
5156#define NON_CONST_ONLY \
5157 if constexpr (decoding_mode == kConstantExpression) { \
5158 this->DecodeError("opcode %s is not allowed in constant expressions", \
5159 this->SafeOpcodeNameAt(this->pc())); \
5160 return 0; \
5161 }
5162
// NOTE(review): the signature line (original source line 5163) is elided by
// this view; from the body, this takes the index of a described type
// ({described_index}) and returns the popped descriptor Value — confirm
// against the full source.
5164 const TypeDefinition& type = this->module_->type(described_index);
// Types without a custom descriptor consume nothing; signal with a void
// placeholder value.
5165 if (!type.has_descriptor()) return Value{nullptr, kWasmVoid};
// Descriptors only exist when the custom-descriptors feature is enabled.
5166 DCHECK(this->enabled_.has_custom_descriptors());
// The descriptor operand must be an exact (non-subtyped) reference to the
// declared descriptor type, nullable.
5167 ValueType desc_type =
5168 ValueType::RefNull(this->module_->heap_type(type.descriptor)).AsExact();
5169 return Pop(desc_type);
5170 }
5171
5172 int DecodeGCOpcode(WasmOpcode opcode, uint32_t opcode_length) {
5173 // Bigger GC opcodes are handled via {DecodeStringRefOpcode}, so we can
5174 // assume here that opcodes are within [0xfb00, 0xfbff].
5175 // This assumption might help the big switch below.
5176 V8_ASSUME(opcode >> 8 == kGCPrefix);
5177 switch (opcode) {
5178 case kExprStructNew: {
5179 StructIndexImmediate imm(this, this->pc_ + opcode_length, validate);
5180 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
5181 Value descriptor = PopDescriptor(imm.index);
5182 PoppedArgVector args = PopArgs(imm.struct_type);
5183 Value* value =
5184 Push(ValueType::Ref(imm.heap_type()).AsExactIfProposalEnabled());
5185 CALL_INTERFACE_IF_OK_AND_REACHABLE(StructNew, imm, descriptor,
5186 args.data(), value);
5187 return opcode_length + imm.length;
5188 }
5189 case kExprStructNewDefault: {
5190 StructIndexImmediate imm(this, this->pc_ + opcode_length, validate);
5191 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
5192 if (ValidationTag::validate) {
5193 for (uint32_t i = 0; i < imm.struct_type->field_count(); i++) {
5194 ValueType ftype = imm.struct_type->field(i);
5195 if (!VALIDATE(ftype.is_defaultable())) {
5196 this->DecodeError(
5197 "%s: struct type %d has field %d of non-defaultable type %s",
5198 WasmOpcodes::OpcodeName(opcode), imm.index.index, i,
5199 ftype.name().c_str());
5200 return 0;
5201 }
5202 }
5203 }
5204 Value descriptor = PopDescriptor(imm.index);
5205 Value* value =
5206 Push(ValueType::Ref(imm.heap_type()).AsExactIfProposalEnabled());
5207 CALL_INTERFACE_IF_OK_AND_REACHABLE(StructNewDefault, imm, descriptor,
5208 value);
5209 return opcode_length + imm.length;
5210 }
5211 case kExprStructGet: {
5213 FieldImmediate field(this, this->pc_ + opcode_length, validate);
5214 if (!this->Validate(this->pc_ + opcode_length, field)) return 0;
5215 ValueType field_type =
5216 field.struct_imm.struct_type->field(field.field_imm.index);
5217 if (!VALIDATE(!field_type.is_packed())) {
5218 this->DecodeError(
5219 "struct.get: Immediate field %d of type %d has packed type %s. "
5220 "Use struct.get_s or struct.get_u instead.",
5221 field.field_imm.index, field.struct_imm.index.index,
5222 field_type.name().c_str());
5223 return 0;
5224 }
5225 Value struct_obj =
5226 Pop(ValueType::RefNull(field.struct_imm.heap_type()));
5227 Value* value = Push(field_type);
5228 CALL_INTERFACE_IF_OK_AND_REACHABLE(StructGet, struct_obj, field, true,
5229 value);
5230 return opcode_length + field.length;
5231 }
5232 case kExprStructGetU:
5233 case kExprStructGetS: {
5235 FieldImmediate field(this, this->pc_ + opcode_length, validate);
5236 if (!this->Validate(this->pc_ + opcode_length, field)) return 0;
5237 ValueType field_type =
5238 field.struct_imm.struct_type->field(field.field_imm.index);
5239 if (!VALIDATE(field_type.is_packed())) {
5240 this->DecodeError(
5241 "%s: Immediate field %d of type %d has non-packed type %s. Use "
5242 "struct.get instead.",
5243 WasmOpcodes::OpcodeName(opcode), field.field_imm.index,
5244 field.struct_imm.index, field_type.name().c_str());
5245 return 0;
5246 }
5247 Value struct_obj =
5248 Pop(ValueType::RefNull(field.struct_imm.heap_type()));
5249 Value* value = Push(field_type.Unpacked());
5250 CALL_INTERFACE_IF_OK_AND_REACHABLE(StructGet, struct_obj, field,
5251 opcode == kExprStructGetS, value);
5252 return opcode_length + field.length;
5253 }
5254 case kExprStructSet: {
5256 FieldImmediate field(this, this->pc_ + opcode_length, validate);
5257 if (!this->Validate(this->pc_ + opcode_length, field)) return 0;
5258 const StructType* struct_type = field.struct_imm.struct_type;
5259 if (!VALIDATE(struct_type->mutability(field.field_imm.index))) {
5260 this->DecodeError("struct.set: Field %d of type %d is immutable.",
5261 field.field_imm.index,
5262 field.struct_imm.index.index);
5263 return 0;
5264 }
5265 auto [struct_obj, field_value] =
5266 Pop(ValueType::RefNull(field.struct_imm.heap_type()),
5267 struct_type->field(field.field_imm.index).Unpacked());
5268 CALL_INTERFACE_IF_OK_AND_REACHABLE(StructSet, struct_obj, field,
5269 field_value);
5270 return opcode_length + field.length;
5271 }
5272 case kExprArrayNew: {
5273 ArrayIndexImmediate imm(this, this->pc_ + opcode_length, validate);
5274 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
5275 auto [initial_value, length] =
5276 Pop(imm.array_type->element_type().Unpacked(), kWasmI32);
5277 Value* value =
5278 Push(ValueType::Ref(imm.heap_type()).AsExactIfProposalEnabled());
5279 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayNew, imm, length, initial_value,
5280 value);
5281 return opcode_length + imm.length;
5282 }
5283 case kExprArrayNewDefault: {
5284 ArrayIndexImmediate imm(this, this->pc_ + opcode_length, validate);
5285 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
5286 if (!VALIDATE(imm.array_type->element_type().is_defaultable())) {
5287 this->DecodeError(
5288 "%s: array type %d has non-defaultable element type %s",
5289 WasmOpcodes::OpcodeName(opcode), imm.index.index,
5290 imm.array_type->element_type().name().c_str());
5291 return 0;
5292 }
5293 Value length = Pop(kWasmI32);
5294 Value* value =
5295 Push(ValueType::Ref(imm.heap_type()).AsExactIfProposalEnabled());
5296 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayNewDefault, imm, length, value);
5297 return opcode_length + imm.length;
5298 }
5299 case kExprArrayNewData: {
5300 // TODO(14616): Add check that array sharedness == segment sharedness?
5302 ArrayIndexImmediate array_imm(this, this->pc_ + opcode_length,
5303 validate);
5304 if (!this->Validate(this->pc_ + opcode_length, array_imm)) return 0;
5305 ValueType element_type = array_imm.array_type->element_type();
5306 if (element_type.is_reference()) {
5307 this->DecodeError(
5308 "array.new_data can only be used with numeric-type arrays, found "
5309 "array type #%d instead",
5310 array_imm.index);
5311 return 0;
5312 }
5313 const uint8_t* data_index_pc =
5314 this->pc_ + opcode_length + array_imm.length;
5315 IndexImmediate data_segment(this, data_index_pc, "data segment",
5316 validate);
5317 if (!this->ValidateDataSegment(data_index_pc, data_segment)) return 0;
5318
5319 auto [offset, length] = Pop(kWasmI32, kWasmI32);
5320
5321 Value* array = Push(
5322 ValueType::Ref(array_imm.heap_type()).AsExactIfProposalEnabled());
5323 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayNewSegment, array_imm,
5324 data_segment, offset, length, array);
5325 return opcode_length + array_imm.length + data_segment.length;
5326 }
5327 case kExprArrayNewElem: {
5328 // TODO(14616): Add check that array sharedness == segment sharedness?
5330 ArrayIndexImmediate array_imm(this, this->pc_ + opcode_length,
5331 validate);
5332 if (!this->Validate(this->pc_ + opcode_length, array_imm)) return 0;
5333 ValueType element_type = array_imm.array_type->element_type();
5334 if (element_type.is_numeric()) {
5335 this->DecodeError(
5336 "array.new_elem can only be used with reference-type arrays, "
5337 "found array type #%d instead",
5338 array_imm.index);
5339 return 0;
5340 }
5341 const uint8_t* elem_index_pc =
5342 this->pc_ + opcode_length + array_imm.length;
5343 IndexImmediate elem_segment(this, elem_index_pc, "element segment",
5344 validate);
5345 if (!this->ValidateElementSegment(elem_index_pc, elem_segment)) {
5346 return 0;
5347 }
5348
5349 ValueType elem_segment_type =
5350 this->module_->elem_segments[elem_segment.index].type;
5351 if (V8_UNLIKELY(
5352 !IsSubtypeOf(elem_segment_type, element_type, this->module_))) {
5353 this->DecodeError(
5354 "array.new_elem: segment type %s is not a subtype of array "
5355 "element type %s",
5356 elem_segment_type.name().c_str(), element_type.name().c_str());
5357 return 0;
5358 }
5359
5360 auto [offset, length] = Pop(kWasmI32, kWasmI32);
5361 Value* array = Push(
5362 ValueType::Ref(array_imm.heap_type()).AsExactIfProposalEnabled());
5363 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayNewSegment, array_imm,
5364 elem_segment, offset, length, array);
5365 return opcode_length + array_imm.length + elem_segment.length;
5366 }
5367 case kExprArrayInitData: {
5369 // TODO(14616): Add check that array sharedness == segment sharedness?
5370 ArrayIndexImmediate array_imm(this, this->pc_ + opcode_length,
5371 validate);
5372 if (!this->Validate(this->pc_ + opcode_length, array_imm)) return 0;
5373 if (!array_imm.array_type->mutability()) {
5374 this->DecodeError(
5375 "array.init_data can only be used with mutable arrays, found "
5376 "array type #%d instead",
5377 array_imm.index);
5378 return 0;
5379 }
5380 ValueType element_type = array_imm.array_type->element_type();
5381 if (element_type.is_reference()) {
5382 this->DecodeError(
5383 "array.init_data can only be used with numeric-type arrays, "
5384 "found array type #%d instead",
5385 array_imm.index);
5386 return 0;
5387 }
5388 const uint8_t* data_index_pc =
5389 this->pc_ + opcode_length + array_imm.length;
5390 IndexImmediate data_segment(this, data_index_pc, "data segment",
5391 validate);
5392 if (!this->ValidateDataSegment(data_index_pc, data_segment)) return 0;
5393
5394 auto [array, array_index, data_offset, length] =
5395 Pop(ValueType::RefNull(array_imm.heap_type()), kWasmI32, kWasmI32,
5396 kWasmI32);
5397 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayInitSegment, array_imm,
5398 data_segment, array, array_index,
5399 data_offset, length);
5400 return opcode_length + array_imm.length + data_segment.length;
5401 }
5402 case kExprArrayInitElem: {
5404 // TODO(14616): Add check that array sharedness == segment sharedness?
5405 ArrayIndexImmediate array_imm(this, this->pc_ + opcode_length,
5406 validate);
5407 if (!this->Validate(this->pc_ + opcode_length, array_imm)) return 0;
5408 if (!array_imm.array_type->mutability()) {
5409 this->DecodeError(
5410 "array.init_elem can only be used with mutable arrays, found "
5411 "array type #%d instead",
5412 array_imm.index);
5413 return 0;
5414 }
5415 ValueType element_type = array_imm.array_type->element_type();
5416 if (element_type.is_numeric()) {
5417 this->DecodeError(
5418 "array.init_elem can only be used with reference-type arrays, "
5419 "found array type #%d instead",
5420 array_imm.index);
5421 return 0;
5422 }
5423 const uint8_t* elem_index_pc =
5424 this->pc_ + opcode_length + array_imm.length;
5425 IndexImmediate elem_segment(this, elem_index_pc, "element segment",
5426 validate);
5427 if (!this->ValidateElementSegment(elem_index_pc, elem_segment)) {
5428 return 0;
5429 }
5430 ValueType segment_type =
5431 this->module_->elem_segments[elem_segment.index].type;
5432 if (!VALIDATE(IsSubtypeOf(segment_type, element_type, this->module_))) {
5433 this->DecodeError(
5434 "array.init_elem: segment type %s is not a subtype of array "
5435 "element type %s",
5436 segment_type.name().c_str(), element_type.name().c_str());
5437 return 0;
5438 }
5439
5440 auto [array, array_index, elem_offset, length] =
5441 Pop(ValueType::RefNull(array_imm.heap_type()), kWasmI32, kWasmI32,
5442 kWasmI32);
5443 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayInitSegment, array_imm,
5444 elem_segment, array, array_index,
5445 elem_offset, length);
5446 return opcode_length + array_imm.length + elem_segment.length;
5447 }
5448 case kExprArrayGetS:
5449 case kExprArrayGetU: {
5451 ArrayIndexImmediate imm(this, this->pc_ + opcode_length, validate);
5452 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
5453 if (!VALIDATE(imm.array_type->element_type().is_packed())) {
5454 this->DecodeError(
5455 "%s: Immediate array type %d has non-packed type %s. Use "
5456 "array.get instead.",
5457 WasmOpcodes::OpcodeName(opcode), imm.index,
5458 imm.array_type->element_type().name().c_str());
5459 return 0;
5460 }
5461 auto [array_obj, index] =
5462 Pop(ValueType::RefNull(imm.heap_type()), kWasmI32);
5463 Value* value = Push(imm.array_type->element_type().Unpacked());
5464 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayGet, array_obj, imm, index,
5465 opcode == kExprArrayGetS, value);
5466 return opcode_length + imm.length;
5467 }
5468 case kExprArrayGet: {
5470 ArrayIndexImmediate imm(this, this->pc_ + opcode_length, validate);
5471 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
5472 if (!VALIDATE(!imm.array_type->element_type().is_packed())) {
5473 this->DecodeError(
5474 "array.get: Immediate array type %d has packed type %s. Use "
5475 "array.get_s or array.get_u instead.",
5476 imm.index, imm.array_type->element_type().name().c_str());
5477 return 0;
5478 }
5479 auto [array_obj, index] =
5480 Pop(ValueType::RefNull(imm.heap_type()), kWasmI32);
5481 Value* value = Push(imm.array_type->element_type());
5482 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayGet, array_obj, imm, index,
5483 true, value);
5484 return opcode_length + imm.length;
5485 }
5486 case kExprArraySet: {
5488 ArrayIndexImmediate imm(this, this->pc_ + opcode_length, validate);
5489 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
5490 if (!VALIDATE(imm.array_type->mutability())) {
5491 this->DecodeError("array.set: immediate array type %d is immutable",
5492 imm.index.index);
5493 return 0;
5494 }
5495 auto [array_obj, index, value] =
5496 Pop(ValueType::RefNull(imm.heap_type()), kWasmI32,
5497 imm.array_type->element_type().Unpacked());
5498 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArraySet, array_obj, imm, index,
5499 value);
5500 return opcode_length + imm.length;
5501 }
5502 case kExprArrayLen: {
5504 Value array_obj = Pop(kWasmArrayRef);
5505 Value* value = Push(kWasmI32);
5506 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayLen, array_obj, value);
5507 return opcode_length;
5508 }
5509 case kExprArrayCopy: {
5511 ArrayIndexImmediate dst_imm(this, this->pc_ + opcode_length, validate);
5512 if (!this->Validate(this->pc_ + opcode_length, dst_imm)) return 0;
5513 if (!VALIDATE(dst_imm.array_type->mutability())) {
5514 this->DecodeError(
5515 "array.copy: immediate destination array type #%d is immutable",
5516 dst_imm.index.index);
5517 return 0;
5518 }
5519 ArrayIndexImmediate src_imm(
5520 this, this->pc_ + opcode_length + dst_imm.length, validate);
5521 if (!this->Validate(this->pc_ + opcode_length + dst_imm.length,
5522 src_imm)) {
5523 return 0;
5524 }
5525 if (!IsSubtypeOf(src_imm.array_type->element_type(),
5526 dst_imm.array_type->element_type(), this->module_)) {
5527 this->DecodeError(
5528 "array.copy: source array's #%d element type is not a subtype of "
5529 "destination array's #%d element type",
5530 src_imm.index, dst_imm.index);
5531 return 0;
5532 }
5533 auto [dst, dst_index, src, src_index, length] =
5534 Pop(ValueType::RefNull(dst_imm.heap_type()), kWasmI32,
5535 ValueType::RefNull(src_imm.heap_type()), kWasmI32, kWasmI32);
5536 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayCopy, dst, dst_index, src,
5537 src_index, src_imm, length);
5538 return opcode_length + dst_imm.length + src_imm.length;
5539 }
5540 case kExprArrayFill: {
5542 ArrayIndexImmediate array_imm(this, this->pc_ + opcode_length,
5543 validate);
5544 if (!this->Validate(this->pc_ + opcode_length, array_imm)) return 0;
5545 if (!VALIDATE(array_imm.array_type->mutability())) {
5546 this->DecodeError("array.init: immediate array type #%d is immutable",
5547 array_imm.index.index);
5548 return 0;
5549 }
5550
5551 auto [array, offset, value, length] =
5552 Pop(ValueType::RefNull(array_imm.heap_type()), kWasmI32,
5553 array_imm.array_type->element_type().Unpacked(), kWasmI32);
5554 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayFill, array_imm, array, offset,
5555 value, length);
5556 return opcode_length + array_imm.length;
5557 }
5558 case kExprArrayNewFixed: {
5559 ArrayIndexImmediate array_imm(this, this->pc_ + opcode_length,
5560 validate);
5561 if (!this->Validate(this->pc_ + opcode_length, array_imm)) return 0;
5562 IndexImmediate length_imm(this,
5563 this->pc_ + opcode_length + array_imm.length,
5564 "array.new_fixed length", validate);
5565 uint32_t elem_count = length_imm.index;
5566 if (!VALIDATE(elem_count <= kV8MaxWasmArrayNewFixedLength)) {
5567 this->DecodeError(
5568 "Requested length %u for array.new_fixed too large, maximum is "
5569 "%zu",
5571 return 0;
5572 }
5573 ValueType element_type = array_imm.array_type->element_type();
5574 std::vector<ValueType> element_types(elem_count,
5575 element_type.Unpacked());
5576 FunctionSig element_sig(0, elem_count, element_types.data());
5577 PoppedArgVector elements = PopArgs(&element_sig);
5578 Value* result = Push(
5579 ValueType::Ref(array_imm.heap_type()).AsExactIfProposalEnabled());
5580 CALL_INTERFACE_IF_OK_AND_REACHABLE(ArrayNewFixed, array_imm, length_imm,
5581 elements.data(), result);
5582 return opcode_length + array_imm.length + length_imm.length;
5583 }
5584 case kExprRefI31: {
5585 Value input = Pop(kWasmI32);
5586 Value* value = Push(ValueType::Ref(kWasmI31Ref));
5587 CALL_INTERFACE_IF_OK_AND_REACHABLE(RefI31, input, value);
5588 return opcode_length;
5589 }
5590 case kExprI31GetS: {
5592 Value i31 = Pop(kWasmI31Ref);
5593 Value* value = Push(kWasmI32);
5594 CALL_INTERFACE_IF_OK_AND_REACHABLE(I31GetS, i31, value);
5595 return opcode_length;
5596 }
5597 case kExprI31GetU: {
5599 Value i31 = Pop(kWasmI31Ref);
5600 Value* value = Push(kWasmI32);
5601 CALL_INTERFACE_IF_OK_AND_REACHABLE(I31GetU, i31, value);
5602 return opcode_length;
5603 }
5604 case kExprRefGetDesc: {
5606 CHECK_PROTOTYPE_OPCODE(custom_descriptors);
5607 // We may need to generalize this to any TypeIndex in the future, but
5608 // for now only structs can have descriptors.
5609 StructIndexImmediate imm(this, this->pc_ + opcode_length, validate);
5610 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
5611 const TypeDefinition& type = this->module_->type(imm.index);
5612 if (!VALIDATE(type.has_descriptor())) {
5613 this->DecodeError(
5614 this->pc_ + opcode_length,
5615 "Invalid type for ref.get_desc: type %s has no custom descriptor",
5616 imm.heap_type().name().c_str());
5617 return 0;
5618 }
5619 Value ref = Pop(ValueType::RefNull(imm.heap_type()));
5620 Value* desc =
5621 Push(ValueType::Ref(this->module_->heap_type(type.descriptor))
5622 .AsExact(ref.type.exactness()));
5623 CALL_INTERFACE_IF_OK_AND_REACHABLE(RefGetDesc, ref, desc);
5624 return opcode_length + imm.length;
5625 }
5626 case kExprRefCastDesc:
5627 case kExprRefCastDescNull: {
5629 CHECK_PROTOTYPE_OPCODE(custom_descriptors);
5630 HeapTypeImmediate imm(this->enabled_, this, this->pc_ + opcode_length,
5631 validate);
5632 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
5633 if (!VALIDATE(imm.type.has_index())) {
5634 this->DecodeError(
5635 this->pc_ + opcode_length,
5636 "ref.cast_desc: immediate type must have an index, but was %s",
5637 imm.type.name().c_str());
5638 return 0;
5639 }
5640 ModuleTypeIndex expected_desc_index =
5641 this->module_->type(imm.type.ref_index()).descriptor;
5642 if (!VALIDATE(expected_desc_index.valid())) {
5643 this->DecodeError(
5644 this->pc_ + opcode_length,
5645 "ref.cast_desc: immediate type %s must have a descriptor",
5646 imm.type.name().c_str());
5647 return 0;
5648 }
5649 ValueType expected_desc_type =
5650 ValueType::RefNull(this->module_->heap_type(expected_desc_index))
5651 .AsExact(imm.type.exactness());
5652 Value desc = Pop(expected_desc_type);
5653 // We will have to generalize the object's expected type to "top type of
5654 // imm.type" if/when values outside the anyref hierarchy can have custom
5655 // descriptors. This DCHECK should point that out when the time comes:
5656 DCHECK_EQ(imm.type.ref_type_kind(), RefTypeKind::kStruct);
5657 ValueType expected_obj_type = ValueType::Generic(
5658 GenericKind::kAny, kNullable, imm.type.is_shared());
5659 Value obj = Pop(expected_obj_type);
5660
5661 bool null_succeeds = (opcode == kExprRefCastDescNull);
5662 ValueType target_type = ValueType::RefMaybeNull(
5664 Value* value = Push(target_type);
5665
5666 // TODO(403372470): Do we need to special-case casts that always fail
5667 // or always succeed?
5668
5669 CALL_INTERFACE_IF_OK_AND_REACHABLE(RefCastDesc, obj, desc, value);
5670
5671 return opcode_length + imm.length;
5672 }
5673 case kExprRefCast:
5674 case kExprRefCastNull: {
5676 HeapTypeImmediate imm(this->enabled_, this, this->pc_ + opcode_length,
5677 validate);
5678 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
5679 opcode_length += imm.length;
5680
5681 Value obj = Pop();
5682
5683 HeapType target_type = imm.type;
5684
5685 if (!VALIDATE((obj.type.is_object_reference() &&
5686 IsSameTypeHierarchy(obj.type.heap_type(), target_type,
5687 this->module_)) ||
5688 obj.type.is_bottom())) {
5689 this->DecodeError(obj.pc(),
5690 "Invalid types for %s: %s of type %s has to "
5691 "be in the same reference type hierarchy as %s",
5692 WasmOpcodes::OpcodeName(opcode),
5693 SafeOpcodeNameAt(obj.pc()), obj.type.name().c_str(),
5694 ValueType::Ref(target_type).name().c_str());
5695 return 0;
5696 }
5697 if (!VALIDATE(!target_type.is_string_view())) {
5698 // TODO(12868): This reflects the current state of discussion at
5699 // https://github.com/WebAssembly/stringref/issues/40
5700 // It is suboptimal because it allows classifying a stringview_wtf16
5701 // as a stringref. This would be solved by making the views types
5702 // that aren't subtypes of anyref, which is one of the possible
5703 // resolutions of that discussion.
5704 this->DecodeError(
5705 this->pc_,
5706 "Invalid type for %s: string views are not classifiable",
5707 WasmOpcodes::OpcodeName(opcode));
5708 return 0;
5709 }
5710
5711 bool null_succeeds = opcode == kExprRefCastNull;
5712 Value* value = Push(ValueType::RefMaybeNull(
5713 target_type, null_succeeds ? kNullable : kNonNullable));
5714 if (current_code_reachable_and_ok_) {
5715 // This logic ensures that code generation can assume that functions
5716 // can only be cast to function types, and data objects to data types.
5717 if (V8_UNLIKELY(TypeCheckAlwaysSucceeds(obj, target_type))) {
5718 if (obj.type.is_nullable() && !null_succeeds) {
5719 CALL_INTERFACE(AssertNotNullTypecheck, obj, value);
5720 } else {
5721 CALL_INTERFACE(Forward, obj, value);
5722 }
5723 } else if (V8_UNLIKELY(TypeCheckAlwaysFails(obj, target_type,
5724 null_succeeds))) {
5725 // Unrelated types. The only way this will not trap is if the object
5726 // is null.
5727 if (obj.type.is_nullable() && null_succeeds) {
5728 CALL_INTERFACE(AssertNullTypecheck, obj, value);
5729 } else {
5730 CALL_INTERFACE(Trap, TrapReason::kTrapIllegalCast);
5731 // We know that the following code is not reachable, but according
5732 // to the spec it technically is. Set it to spec-only reachable.
5733 SetSucceedingCodeDynamicallyUnreachable();
5734 }
5735 } else {
5736 if (target_type.is_index()) {
5737 CALL_INTERFACE(RefCast, obj, value);
5738 } else {
5739 CALL_INTERFACE(RefCastAbstract, obj, target_type, value,
5741 }
5742 }
5743 }
5744 return opcode_length;
5745 }
5746 case kExprRefTestNull:
5747 case kExprRefTest: {
5749 HeapTypeImmediate imm(this->enabled_, this, this->pc_ + opcode_length,
5750 validate);
5751 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
5752 opcode_length += imm.length;
5753
5754 Value obj = Pop();
5755 HeapType target_type = imm.type;
5756 Value* result = Push(kWasmI32);
5757
5758 if (!VALIDATE((obj.type.is_object_reference() &&
5759 IsSameTypeHierarchy(obj.type.heap_type(), target_type,
5760 this->module_)) ||
5761 obj.type.is_bottom())) {
5762 this->DecodeError(obj.pc(),
5763 "Invalid types for %s: %s of type %s has to "
5764 "be in the same reference type hierarchy as %s",
5765 WasmOpcodes::OpcodeName(opcode),
5766 SafeOpcodeNameAt(obj.pc()), obj.type.name().c_str(),
5767 ValueType::Ref(target_type).name().c_str());
5768 return 0;
5769 }
5770 if (!VALIDATE(!target_type.is_string_view())) {
5771 // TODO(12868): This reflects the current state of discussion at
5772 // https://github.com/WebAssembly/stringref/issues/40
5773 // It is suboptimal because it allows classifying a stringview_wtf16
5774 // as a stringref. This would be solved by making the views types
5775 // that aren't subtypes of anyref, which is one of the possible
5776 // resolutions of that discussion.
5777 this->DecodeError(
5778 this->pc_,
5779 "Invalid type for %s: string views are not classifiable",
5780 WasmOpcodes::OpcodeName(opcode));
5781 return 0;
5782 }
5783 bool null_succeeds = opcode == kExprRefTestNull;
5784 if (V8_LIKELY(current_code_reachable_and_ok_)) {
5785 // This logic ensures that code generation can assume that functions
5786 // can only be cast to function types, and data objects to data types.
5787 if (V8_UNLIKELY(TypeCheckAlwaysSucceeds(obj, target_type))) {
5788 // Type checking can still fail for null.
5789 if (obj.type.is_nullable() && !null_succeeds) {
5790 // We abuse ref.as_non_null, which isn't otherwise used as a unary
5791 // operator, as a sentinel for the negation of ref.is_null.
5792 CALL_INTERFACE(UnOp, kExprRefAsNonNull, obj, result);
5793 } else {
5794 CALL_INTERFACE(Drop);
5795 CALL_INTERFACE(I32Const, result, 1);
5796 }
5797 } else if (V8_UNLIKELY(TypeCheckAlwaysFails(obj, target_type,
5798 null_succeeds))) {
5799 CALL_INTERFACE(Drop);
5800 CALL_INTERFACE(I32Const, result, 0);
5801 } else {
5802 if (imm.type.is_index()) {
5803 CALL_INTERFACE(RefTest, imm.type, obj, result, null_succeeds);
5804 } else {
5805 CALL_INTERFACE(RefTestAbstract, obj, target_type, result,
5807 }
5808 }
5809 }
5810 return opcode_length;
5811 }
5812 case kExprRefCastNop: {
5814 // Temporary non-standard instruction, for performance experiments.
5815 if (!VALIDATE(v8_flags.experimental_wasm_ref_cast_nop)) {
5816 this->DecodeError(
5817 "Invalid opcode 0xfb4c (enable with "
5818 "--experimental-wasm-ref-cast-nop)");
5819 return 0;
5820 }
5821 HeapTypeImmediate imm(this->enabled_, this, this->pc_ + opcode_length,
5822 validate);
5823 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
5824 opcode_length += imm.length;
5825 HeapType target_type = imm.type;
5826 Value obj = Pop();
5827 if (!VALIDATE((obj.type.is_object_reference() &&
5828 IsSameTypeHierarchy(obj.type.heap_type(), target_type,
5829 this->module_)) ||
5830 obj.type.is_bottom())) {
5831 this->DecodeError(obj.pc(),
5832 "Invalid types for %s: %s of type %s has to "
5833 "be in the same reference type hierarchy as %s",
5834 WasmOpcodes::OpcodeName(opcode),
5835 SafeOpcodeNameAt(obj.pc()), obj.type.name().c_str(),
5836 ValueType::Ref(target_type).name().c_str());
5837 return 0;
5838 }
5839 Value* value = Push(ValueType::Ref(target_type));
5840 CALL_INTERFACE_IF_OK_AND_REACHABLE(Forward, obj, value);
5841 return opcode_length;
5842 }
5843 case kExprBrOnCast:
5844 case kExprBrOnCastFail: {
5846 return ParseBrOnCast(opcode, opcode_length);
5847 }
5848 case kExprBrOnCastDesc:
5849 case kExprBrOnCastDescFail: {
5851 CHECK_PROTOTYPE_OPCODE(custom_descriptors);
5852 return ParseBrOnCast(opcode, opcode_length);
5853 }
5854 case kExprAnyConvertExtern: {
5855 Value extern_val = Pop(kWasmExternRef);
5856 ValueType intern_type = ValueType::RefMaybeNull(
5857 kWasmAnyRef, Nullability(extern_val.type.is_nullable()));
5858 Value* intern_val = Push(intern_type);
5859 CALL_INTERFACE_IF_OK_AND_REACHABLE(UnOp, kExprAnyConvertExtern,
5860 extern_val, intern_val);
5861 return opcode_length;
5862 }
5863 case kExprExternConvertAny: {
5864 Value val = Pop(kWasmAnyRef);
5865 ValueType extern_type = ValueType::RefMaybeNull(
5866 kWasmExternRef, Nullability(val.type.is_nullable()));
5867 Value* extern_val = Push(extern_type);
5868 CALL_INTERFACE_IF_OK_AND_REACHABLE(UnOp, kExprExternConvertAny, val,
5869 extern_val);
5870 return opcode_length;
5871 }
5872 default:
5873 this->DecodeError("invalid gc opcode: %x", opcode);
5874 return 0;
5875 }
5876 }
5877
// Distinguishes whether an array element access reads or writes the element;
// passed to PopPackedArray (see the string opcode decoders below) so the
// appropriate check can be applied to the popped array operand.
 5878 enum class WasmArrayAccess { kRead, kWrite };
5879
// Decodes and validates a single br_on_cast / br_on_cast_fail instruction,
// including the custom-descriptor variants br_on_cast_desc /
// br_on_cast_desc_fail. Immediate order: flags byte, branch depth, source
// heap type, target heap type. Returns the accumulated immediate length
// on success, 0 on validation failure.
 5880 int ParseBrOnCast(WasmOpcode opcode, uint32_t pc_offset) {
 5881 BrOnCastImmediate flags_imm(this, this->pc_ + pc_offset, validate);
 5882 pc_offset += flags_imm.length;
 5883 BrOnCastFlags flags = flags_imm.flags;
 5884
 5885 BranchDepthImmediate branch_depth(this, this->pc_ + pc_offset, validate);
 5886 if (!this->Validate(this->pc_ + pc_offset, branch_depth, control_.size())) {
 5887 return 0;
 5888 }
 5889 pc_offset += branch_depth.length;
 5890
 // The nullability of source and target is carried in the flags byte, not
 // in the heap type immediates themselves.
 5891 HeapTypeImmediate src_imm(this->enabled_, this, this->pc_ + pc_offset,
 5892 validate);
 5893 if (!this->Validate(this->pc_ + pc_offset, src_imm)) return 0;
 5894 pc_offset += src_imm.length;
 5895 ValueType src_type = ValueType::RefMaybeNull(
 5896 src_imm.type, flags.src_is_null ? kNullable : kNonNullable);
 5897
 5898 const uint8_t* target_imm_pc = this->pc_ + pc_offset;
 5899 HeapTypeImmediate target_imm(this->enabled_, this, target_imm_pc, validate);
 5900 if (!this->Validate(target_imm_pc, target_imm)) return 0;
 5901 pc_offset += target_imm.length;
 5902 bool null_succeeds = flags.res_is_null;
 5903 ValueType target_type = ValueType::RefMaybeNull(
 5904 target_imm.type, null_succeeds ? kNullable : kNonNullable);
 5905
 // The cast target must refine the declared source type.
 5906 if (!VALIDATE(IsSubtypeOf(target_type, src_type, this->module_))) {
 5907 this->DecodeError("invalid types for %s: %s is not a subtype of %s",
 5908 WasmOpcodes::OpcodeName(opcode),
 5909 target_type.name().c_str(), src_type.name().c_str());
 5910 return 0;
 5911 }
 5912
 // The desc variants additionally pop a descriptor reference; the target
 // type must be a user-defined (indexed) type that declares a descriptor.
 5913 Value descriptor{nullptr, kWasmVoid};
 5914 if (opcode == kExprBrOnCastDesc || opcode == kExprBrOnCastDescFail) {
 5915 if (!VALIDATE(target_imm.type.has_index())) {
 5916 this->DecodeError(
 5917 target_imm_pc, "%s: target type must have an index, but was %s",
 5918 WasmOpcodes::OpcodeName(opcode), target_imm.type.name().c_str());
 5919 return 0;
 5920 }
 5921 ModuleTypeIndex target_desc_index =
 5922 this->module_->type(target_imm.type.ref_index()).descriptor;
 5923 if (!VALIDATE(target_desc_index.valid())) {
 5924 this->DecodeError(
 5925 target_imm_pc, "%s: target type %s must have a descriptor",
 5926 WasmOpcodes::OpcodeName(opcode), target_imm.type.name().c_str());
 5927 return 0;
 5928 }
 5929 ValueType desc_type =
 5930 ValueType::RefNull(this->module_->heap_type(target_desc_index))
 5931 .AsExact(target_type.exactness());
 5932 descriptor = Pop(desc_type);
 5933 }
 5934
 5935 Value obj = Pop(src_type);
 5936
 // The operand must live in the same reference type hierarchy as the
 // target (bottom is tolerated for unreachable code).
 5937 if (!VALIDATE(
 5938 (obj.type.is_object_reference() &&
 5939 IsSameTypeHierarchy(obj.type.heap_type(), target_type.heap_type(),
 5940 this->module_)) ||
 5941 obj.type.is_bottom())) {
 5942 this->DecodeError(obj.pc(),
 5943 "invalid types for %s: %s of type %s has to "
 5944 "be in the same reference type hierarchy as %s",
 5945 WasmOpcodes::OpcodeName(opcode),
 5946 SafeOpcodeNameAt(obj.pc()), obj.type.name().c_str(),
 5947 target_type.name().c_str());
 5948 return 0;
 5949 }
 5950
 5951 Control* c = control_at(branch_depth.depth);
 // The branch target must receive (at least) the cast result.
 5952 if (c->br_merge()->arity == 0) {
 5953 this->DecodeError("%s must target a branch of arity at least 1",
 5954 WasmOpcodes::OpcodeName(opcode));
 5955 return 0;
 5956 }
 5957
 // Branch-on-success variants: push the narrowed value for the branch,
 // then restore the (possibly refined) source-typed value on fallthrough.
 5958 if (opcode == kExprBrOnCast || opcode == kExprBrOnCastDesc) {
 5959 Value* value_on_branch = Push(target_type);
 5960 if (!VALIDATE(
 5961 (TypeCheckBranch<PushBranchValues::kYes, RewriteStackTypes::kYes>(
 5962 c)))) {
 5963 return 0;
 5964 }
 5965 if (V8_LIKELY(current_code_reachable_and_ok_)) {
 5966 // This logic ensures that code generation can assume that functions
 5967 // can only be cast to function types, and data objects to data types.
 5968 if (V8_UNLIKELY(
 5969 TypeCheckAlwaysSucceeds(obj, target_type.heap_type()))) {
 5970 // The branch will still not be taken on null if not
 5971 // {null_succeeds}.
 5972 if (obj.type.is_nullable() && !null_succeeds) {
 5973 CALL_INTERFACE(BrOnNonNull, obj, value_on_branch,
 5974 branch_depth.depth, false);
 5975 } else {
 5976 CALL_INTERFACE(Forward, obj, value_on_branch);
 5977 CALL_INTERFACE(BrOrRet, branch_depth.depth);
 5978 // We know that the following code is not reachable, but according
 5979 // to the spec it technically is. Set it to spec-only reachable.
 5980 SetSucceedingCodeDynamicallyUnreachable();
 5981 }
 5982 c->br_merge()->reached = true;
 5983 } else if (V8_LIKELY(!TypeCheckAlwaysFails(obj, target_type.heap_type(),
 5984 null_succeeds))) {
 5985 if (opcode == kExprBrOnCastDesc) {
 5986 CALL_INTERFACE(BrOnCastDesc, target_imm.type, obj, descriptor,
 5987 value_on_branch, branch_depth.depth, null_succeeds);
 5988 } else if (target_imm.type.is_index()) {
 5989 CALL_INTERFACE(BrOnCast, target_imm.type, obj, value_on_branch,
 5990 branch_depth.depth, null_succeeds);
 5991 } else {
 5992 CALL_INTERFACE(BrOnCastAbstract, obj, target_type.heap_type(),
 5993 value_on_branch, branch_depth.depth, null_succeeds);
 5994 }
 5995 c->br_merge()->reached = true;
 5996 }
 5997 }
 5998
 5999 Drop(*value_on_branch);
 6000 Push(obj); // Restore stack state on fallthrough.
 6001 // The fallthrough type is the source type as specified in the br_on_cast
 6002 // instruction. This can be a super type of the stack value. Furthermore
 6003 // nullability gets refined to non-nullable if the cast target is
 6004 // nullable, meaning the branch will be taken on null.
 6005 DCHECK(!src_type.heap_type().is_bottom());
 6006 bool fallthrough_nullable = flags.src_is_null && !flags.res_is_null;
 6007 stack_value(1)->type = ValueType::RefMaybeNull(
 6008 src_type.heap_type(),
 6009 fallthrough_nullable ? kNullable : kNonNullable);
 6010 CALL_INTERFACE_IF_OK_AND_REACHABLE(Forward, obj, stack_value(1));
 6011 return pc_offset;
 6012
 6013 } else {
 6014 DCHECK(opcode == kExprBrOnCastFail || opcode == kExprBrOnCastDescFail);
 6015 // The branch type is set based on the source type immediate (independent
 6016 // of the actual stack value). If the target type is nullable, the branch
 6017 // type is non-nullable.
 6018 Push(flags.res_is_null ? src_type.AsNonNull() : src_type);
 6019 CALL_INTERFACE_IF_OK_AND_REACHABLE(Forward, obj, stack_value(1));
 6020
 6021 if (!VALIDATE(
 6022 (TypeCheckBranch<PushBranchValues::kYes, RewriteStackTypes::kYes>(
 6023 c)))) {
 6024 return 0;
 6025 }
 6026
 // On fallthrough (cast failed is NOT taken) the value has the target
 // type; on branch it keeps the (possibly non-null-refined) source type.
 6027 Value result_on_fallthrough = CreateValue(target_type);
 6028 if (V8_LIKELY(current_code_reachable_and_ok_)) {
 6029 // This logic ensures that code generation can assume that functions
 6030 // can only be cast between compatible types.
 6031 if (V8_UNLIKELY(TypeCheckAlwaysFails(obj, target_type.heap_type(),
 6032 null_succeeds))) {
 6033 // The types are incompatible (i.e. neither of the two types is a
 6034 // subtype of the other). Always branch.
 6035 CALL_INTERFACE(Forward, obj, stack_value(1));
 6036 CALL_INTERFACE(BrOrRet, branch_depth.depth);
 6037 // We know that the following code is not reachable, but according
 6038 // to the spec it technically is. Set it to spec-only reachable.
 6039 SetSucceedingCodeDynamicallyUnreachable();
 6040 c->br_merge()->reached = true;
 6041 } else if (V8_UNLIKELY(
 6042 TypeCheckAlwaysSucceeds(obj, target_type.heap_type()))) {
 6043 // The branch can still be taken on null.
 6044 if (obj.type.is_nullable() && !null_succeeds) {
 6045 CALL_INTERFACE(BrOnNull, obj, branch_depth.depth, true,
 6046 &result_on_fallthrough);
 6047 c->br_merge()->reached = true;
 6048 } else {
 6049 // Otherwise, the type check always succeeds. Do not branch. Also,
 6050 // make sure the object remains on the stack.
 6051 result_on_fallthrough = obj;
 6052 }
 6053 } else {
 6054 if (opcode == kExprBrOnCastDescFail) {
 6055 CALL_INTERFACE(BrOnCastDescFail, target_imm.type, obj, descriptor,
 6056 &result_on_fallthrough, branch_depth.depth,
 6058 } else if (target_imm.type.is_index()) {
 6059 CALL_INTERFACE(BrOnCastFail, target_imm.type, obj,
 6060 &result_on_fallthrough, branch_depth.depth,
 6062 } else {
 6063 CALL_INTERFACE(BrOnCastFailAbstract, obj, target_type.heap_type(),
 6064 &result_on_fallthrough, branch_depth.depth,
 6066 }
 6067 c->br_merge()->reached = true;
 6068 }
 6069 }
 6070 // Make sure the correct value is on the stack state on fallthrough.
 6071 Drop(obj);
 6072 Push(result_on_fallthrough);
 6073 return pc_offset;
 6074 }
 6075 }
6076
 // Decodes the string.new_utf8 / _utf8_try / _lossy_utf8 / _wtf8 family:
 // pops (address, byte-size) from the given memory and pushes a stringref.
 // The "try" variant produces a nullable string instead of trapping on
 // invalid UTF-8. Returns opcode length plus immediate length, 0 on error.
 6078 uint32_t opcode_length) {
 6080 bool null_on_invalid = variant == unibrow::Utf8Variant::kUtf8NoTrap;
 6081 MemoryIndexImmediate imm(this, this->pc_ + opcode_length, validate);
 6082 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
 // Address operand width depends on memory32 vs. memory64.
 6083 ValueType addr_type = MemoryAddressType(imm.memory);
 6084 auto [offset, size] = Pop(addr_type, kWasmI32);
 6085 Value* result = Push(ValueType::RefMaybeNull(
 6086 kWasmStringRef, null_on_invalid ? kNullable : kNonNullable));
 6087 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringNewWtf8, imm, variant, offset,
 6088 size, result);
 6089 return opcode_length + imm.length;
 6090 }
6091
 // Decodes string.measure_utf8 / _wtf8: pops a stringref and pushes the
 // i32 byte length measured under the given UTF-8 variant.
 6093 uint32_t opcode_length) {
 6095 Value str = Pop(kWasmStringRef);
 6096 Value* result = Push(kWasmI32);
 6097 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringMeasureWtf8, variant, str, result);
 6098 return opcode_length;
 6099 }
6100
 // Decodes string.encode_utf8 / _lossy_utf8 / _wtf8: pops (string, address)
 // and pushes the i32 number of bytes written into the given memory.
 6102 uint32_t opcode_length) {
 6104 MemoryIndexImmediate imm(this, this->pc_ + opcode_length, validate);
 6105 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
 // Address operand width depends on memory32 vs. memory64.
 6106 ValueType addr_type = MemoryAddressType(imm.memory);
 6107 auto [str, addr] = Pop(kWasmStringRef, addr_type);
 6108 Value* result = Push(kWasmI32);
 6109 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringEncodeWtf8, imm, variant, str,
 6110 addr, result);
 6111 return opcode_length + imm.length;
 6112 }
6113
 // Decodes stringview_wtf8.encode_*: pops (view, address, start position,
 // byte budget) and pushes (next position, bytes written).
 6115 uint32_t opcode_length) {
 6117 MemoryIndexImmediate imm(this, this->pc_ + opcode_length, validate);
 6118 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
 6119 ValueType addr_type = MemoryAddressType(imm.memory);
 6120 auto [view, addr, pos, bytes] =
 6121 Pop(kWasmStringViewWtf8, addr_type, kWasmI32, kWasmI32);
 // Two results: the updated position and the number of bytes written.
 6122 Value* next_pos = Push(kWasmI32);
 6123 Value* bytes_out = Push(kWasmI32);
 6124 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringViewWtf8Encode, imm, variant, view,
 6125 addr, pos, bytes, next_pos, bytes_out);
 6126 return opcode_length + imm.length;
 6127 }
6128
 // Decodes string.new_*_array: pops (i8 array, start, end) and pushes a
 // stringref decoded from the array slice. The "try" variant yields a
 // nullable string instead of trapping on invalid UTF-8.
 6130 uint32_t opcode_length) {
 // Operands are popped top-down: end, start, then the source array.
 6132 Value end = Pop(2, kWasmI32);
 6133 Value start = Pop(1, kWasmI32);
 6134 Value array = PopPackedArray(0, kWasmI8, WasmArrayAccess::kRead);
 6135 bool null_on_invalid = variant == unibrow::Utf8Variant::kUtf8NoTrap;
 6136 Value* result = Push(ValueType::RefMaybeNull(
 6137 kWasmStringRef, null_on_invalid ? kNullable : kNonNullable));
 6138 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringNewWtf8Array, variant, array,
 6139 start, end, result);
 6140 return opcode_length;
 6141 }
6142
 // Decodes string.encode_*_array: pops (string, mutable i8 array, start)
 // and pushes the i32 number of bytes written into the array.
 6144 uint32_t opcode_length) {
 // Operands are popped top-down: start, the destination array, the string.
 6146 Value start = Pop(2, kWasmI32);
 6147 Value array = PopPackedArray(1, kWasmI8, WasmArrayAccess::kWrite);
 6148 Value str = Pop(0, kWasmStringRef);
 6149 Value* result = Push(kWasmI32);
 6150 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringEncodeWtf8Array, variant, str,
 6151 array, start, result);
 6152 return opcode_length;
 6153 }
6154
 // Dispatches one stringref-proposal opcode (0xfb prefix space). Pops the
 // operands, pushes the results, and forwards to the interface. Returns the
 // total length of the instruction (opcode + immediates), 0 on error.
 6155 int DecodeStringRefOpcode(WasmOpcode opcode, uint32_t opcode_length) {
 6156 // Fast check for out-of-range opcodes (only allow 0xfbXX).
 6157 // This might help the big switch below.
 6158 if (!VALIDATE((opcode >> 8) == kGCPrefix)) {
 6159 this->DecodeError("invalid stringref opcode: %x", opcode);
 6160 return 0;
 6161 }
 6162
 6163 switch (opcode) {
 // The UTF-8 flavored opcodes share helpers, parameterized by the
 // unibrow::Utf8Variant which selects trap/replace/accept-surrogate
 // behavior for invalid byte sequences.
 6164 case kExprStringNewUtf8:
 6165 return DecodeStringNewWtf8(unibrow::Utf8Variant::kUtf8, opcode_length);
 6166 case kExprStringNewUtf8Try:
 6167 return DecodeStringNewWtf8(unibrow::Utf8Variant::kUtf8NoTrap,
 6168 opcode_length);
 6169 case kExprStringNewLossyUtf8:
 6170 return DecodeStringNewWtf8(unibrow::Utf8Variant::kLossyUtf8,
 6171 opcode_length);
 6172 case kExprStringNewWtf8:
 6173 return DecodeStringNewWtf8(unibrow::Utf8Variant::kWtf8, opcode_length);
 6174 case kExprStringNewWtf16: {
 6176 MemoryIndexImmediate imm(this, this->pc_ + opcode_length, validate);
 6177 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
 6178 ValueType addr_type = MemoryAddressType(imm.memory);
 6179 auto [offset, size] = Pop(addr_type, kWasmI32);
 6180 Value* result = Push(kWasmRefString);
 6181 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringNewWtf16, imm, offset, size,
 6182 result);
 6183 return opcode_length + imm.length;
 6184 }
 6185 case kExprStringConst: {
 6186 StringConstImmediate imm(this, this->pc_ + opcode_length, validate);
 6187 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
 6188 Value* result = Push(kWasmRefString);
 6189 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringConst, imm, result);
 6190 return opcode_length + imm.length;
 6191 }
 6192 case kExprStringMeasureUtf8:
 6193 return DecodeStringMeasureWtf8(unibrow::Utf8Variant::kUtf8,
 6194 opcode_length);
 6195 case kExprStringMeasureWtf8:
 6196 return DecodeStringMeasureWtf8(unibrow::Utf8Variant::kWtf8,
 6197 opcode_length);
 6198 case kExprStringMeasureWtf16: {
 6200 Value str = Pop(kWasmStringRef);
 6201 Value* result = Push(kWasmI32);
 6202 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringMeasureWtf16, str, result);
 6203 return opcode_length;
 6204 }
 6205 case kExprStringEncodeUtf8:
 6206 return DecodeStringEncodeWtf8(unibrow::Utf8Variant::kUtf8,
 6207 opcode_length);
 6208 case kExprStringEncodeLossyUtf8:
 6209 return DecodeStringEncodeWtf8(unibrow::Utf8Variant::kLossyUtf8,
 6210 opcode_length);
 6211 case kExprStringEncodeWtf8:
 6212 return DecodeStringEncodeWtf8(unibrow::Utf8Variant::kWtf8,
 6213 opcode_length);
 6214 case kExprStringEncodeWtf16: {
 6216 MemoryIndexImmediate imm(this, this->pc_ + opcode_length, validate);
 6217 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
 6218 ValueType addr_type = MemoryAddressType(imm.memory);
 6219 auto [str, addr] = Pop(kWasmStringRef, addr_type);
 6220 Value* result = Push(kWasmI32);
 6221 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringEncodeWtf16, imm, str, addr,
 6222 result);
 6223 return opcode_length + imm.length;
 6224 }
 6225 case kExprStringConcat: {
 6227 auto [head, tail] = Pop(kWasmStringRef, kWasmStringRef);
 6228 Value* result = Push(kWasmRefString);
 6229 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringConcat, head, tail, result);
 6230 return opcode_length;
 6231 }
 6232 case kExprStringEq: {
 6234 auto [a, b] = Pop(kWasmStringRef, kWasmStringRef);
 6235 Value* result = Push(kWasmI32);
 6237 return opcode_length;
 6238 }
 6239 case kExprStringIsUSVSequence: {
 6241 Value str = Pop(kWasmStringRef);
 6242 Value* result = Push(kWasmI32);
 6243 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringIsUSVSequence, str, result);
 6244 return opcode_length;
 6245 }
 // The string.as_* opcodes convert a stringref into one of the three
 // non-nullable view types for repeated positional access.
 6246 case kExprStringAsWtf8: {
 6248 Value str = Pop(kWasmStringRef);
 6250 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringAsWtf8, str, result);
 6251 return opcode_length;
 6252 }
 6253 case kExprStringViewWtf8Advance: {
 6255 auto [view, pos, bytes] = Pop(kWasmStringViewWtf8, kWasmI32, kWasmI32);
 6256 Value* result = Push(kWasmI32);
 6257 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringViewWtf8Advance, view, pos,
 6258 bytes, result);
 6259 return opcode_length;
 6260 }
 6261 case kExprStringViewWtf8EncodeUtf8:
 6262 return DecodeStringViewWtf8Encode(unibrow::Utf8Variant::kUtf8,
 6263 opcode_length);
 6264 case kExprStringViewWtf8EncodeLossyUtf8:
 6265 return DecodeStringViewWtf8Encode(unibrow::Utf8Variant::kLossyUtf8,
 6266 opcode_length);
 6267 case kExprStringViewWtf8EncodeWtf8:
 6268 return DecodeStringViewWtf8Encode(unibrow::Utf8Variant::kWtf8,
 6269 opcode_length);
 6270 case kExprStringViewWtf8Slice: {
 6272 auto [view, start, end] = Pop(kWasmStringViewWtf8, kWasmI32, kWasmI32);
 6273 Value* result = Push(kWasmRefString);
 6274 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringViewWtf8Slice, view, start,
 6275 end, result);
 6276 return opcode_length;
 6277 }
 6278 case kExprStringAsWtf16: {
 6280 Value str = Pop(kWasmStringRef);
 6282 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringAsWtf16, str, result);
 6283 return opcode_length;
 6284 }
 6285 case kExprStringViewWtf16Length: {
 6287 Value view = Pop(kWasmStringViewWtf16);
 6288 Value* result = Push(kWasmI32);
 // Length of a wtf16 view is the same interface event as measuring
 // the underlying string in wtf16 code units.
 6289 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringMeasureWtf16, view, result);
 6290 return opcode_length;
 6291 }
 6292 case kExprStringViewWtf16GetCodeunit: {
 6294 auto [view, pos] = Pop(kWasmStringViewWtf16, kWasmI32);
 6295 Value* result = Push(kWasmI32);
 6296 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringViewWtf16GetCodeUnit, view,
 6297 pos, result);
 6298 return opcode_length;
 6299 }
 6300 case kExprStringViewWtf16Encode: {
 6302 MemoryIndexImmediate imm(this, this->pc_ + opcode_length, validate);
 6303 if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
 6304 ValueType addr_type = MemoryAddressType(imm.memory);
 6305 auto [view, addr, pos, codeunits] =
 6306 Pop(kWasmStringViewWtf16, addr_type, kWasmI32, kWasmI32);
 6307 Value* result = Push(kWasmI32);
 6308 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringViewWtf16Encode, imm, view,
 6309 addr, pos, codeunits, result);
 6310 return opcode_length + imm.length;
 6311 }
 6312 case kExprStringViewWtf16Slice: {
 6314 auto [view, start, end] = Pop(kWasmStringViewWtf16, kWasmI32, kWasmI32);
 6315 Value* result = Push(kWasmRefString);
 6316 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringViewWtf16Slice, view, start,
 6317 end, result);
 6318 return opcode_length;
 6319 }
 6320 case kExprStringAsIter: {
 6322 Value str = Pop(kWasmStringRef);
 6324 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringAsIter, str, result);
 6325 return opcode_length;
 6326 }
 6327 case kExprStringViewIterNext: {
 6329 Value view = Pop(kWasmStringViewIter);
 6330 Value* result = Push(kWasmI32);
 6331 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringViewIterNext, view, result);
 6332 return opcode_length;
 6333 }
 6334 case kExprStringViewIterAdvance: {
 6336 auto [view, codepoints] = Pop(kWasmStringViewIter, kWasmI32);
 6337 Value* result = Push(kWasmI32);
 6338 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringViewIterAdvance, view,
 6339 codepoints, result);
 6340 return opcode_length;
 6341 }
 6342 case kExprStringViewIterRewind: {
 6344 auto [view, codepoints] = Pop(kWasmStringViewIter, kWasmI32);
 6345 Value* result = Push(kWasmI32);
 6346 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringViewIterRewind, view,
 6347 codepoints, result);
 6348 return opcode_length;
 6349 }
 6350 case kExprStringViewIterSlice: {
 6352 auto [view, codepoints] = Pop(kWasmStringViewIter, kWasmI32);
 6353 Value* result = Push(kWasmRefString);
 6354 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringViewIterSlice, view,
 6355 codepoints, result);
 6356 return opcode_length;
 6357 }
 6358 case kExprStringNewUtf8Array:
 6359 return DecodeStringNewWtf8Array(unibrow::Utf8Variant::kUtf8,
 6360 opcode_length);
 6361 case kExprStringNewUtf8ArrayTry:
 6362 return DecodeStringNewWtf8Array(unibrow::Utf8Variant::kUtf8NoTrap,
 6363 opcode_length);
 6364 case kExprStringNewLossyUtf8Array:
 6365 return DecodeStringNewWtf8Array(unibrow::Utf8Variant::kLossyUtf8,
 6366 opcode_length);
 6367 case kExprStringNewWtf8Array:
 6368 return DecodeStringNewWtf8Array(unibrow::Utf8Variant::kWtf8,
 6369 opcode_length);
 6370 case kExprStringNewWtf16Array: {
 // Operands are popped top-down: end, start, then the i16 source array.
 6372 Value end = Pop(2, kWasmI32);
 6373 Value start = Pop(1, kWasmI32);
 6374 Value array = PopPackedArray(0, kWasmI16, WasmArrayAccess::kRead);
 6375 Value* result = Push(kWasmRefString);
 6376 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringNewWtf16Array, array, start,
 6377 end, result);
 6378 return opcode_length;
 6379 }
 6380 case kExprStringEncodeUtf8Array:
 6381 return DecodeStringEncodeWtf8Array(unibrow::Utf8Variant::kUtf8,
 6382 opcode_length);
 6383 case kExprStringEncodeLossyUtf8Array:
 6384 return DecodeStringEncodeWtf8Array(unibrow::Utf8Variant::kLossyUtf8,
 6385 opcode_length);
 6386 case kExprStringEncodeWtf8Array:
 6387 return DecodeStringEncodeWtf8Array(unibrow::Utf8Variant::kWtf8,
 6388 opcode_length);
 6389 case kExprStringEncodeWtf16Array: {
 6391 Value start = Pop(2, kWasmI32);
 6392 Value array = PopPackedArray(1, kWasmI16, WasmArrayAccess::kWrite);
 6393 Value str = Pop(0, kWasmStringRef);
 6394 Value* result = Push(kWasmI32);
 6395 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringEncodeWtf16Array, str, array,
 6396 start, result);
 6397 return opcode_length;
 6398 }
 6399 case kExprStringCompare: {
 6401 auto [lhs, rhs] = Pop(kWasmStringRef, kWasmStringRef);
 6402 Value* result = Push(kWasmI32);
 6403 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringCompare, lhs, rhs, result);
 6404 return opcode_length;
 6405 }
 6406 case kExprStringFromCodePoint: {
 6408 Value code_point = Pop(kWasmI32);
 6409 Value* result = Push(kWasmRefString);
 6410 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringFromCodePoint, code_point,
 6411 result);
 6412 return opcode_length;
 6413 }
 6414 case kExprStringHash: {
 6416 Value string = Pop(kWasmStringRef);
 6417 Value* result = Push(kWasmI32);
 6418 CALL_INTERFACE_IF_OK_AND_REACHABLE(StringHash, string, result);
 6419 return opcode_length;
 6420 }
 6421 default:
 6422 this->DecodeError("invalid stringref opcode: %x", opcode);
 6423 return 0;
 6424 }
 6425 }
6426#undef NON_CONST_ONLY
6427
 // Decodes one opcode from the threads proposal (0xfe prefix space): atomic
 // loads/stores/RMWs, wait/notify, and atomic.fence. Returns the total
 // instruction length (opcode + immediates), 0 on error.
 6428 uint32_t DecodeAtomicOpcode(WasmOpcode opcode, uint32_t opcode_length) {
 6429 // Fast check for out-of-range opcodes (only allow 0xfeXX).
 6430 if (!VALIDATE((opcode >> 8) == kAtomicPrefix)) {
 6431 this->DecodeError("invalid atomic opcode: 0x%x", opcode);
 6432 return 0;
 6433 }
 6434
 // First resolve the machine type of the memory access; the macro lists
 // expand one case per atomic opcode, all falling through to the shared
 // memory-access handling below the switch.
 6435 MachineType memtype;
 6436 switch (opcode) {
 6437#define CASE_ATOMIC_STORE_OP(Name, Type) \
 6438 case kExpr##Name: { \
 6439 memtype = MachineType::Type(); \
 6440 break; /* to generic mem access code below */ \
 6441 }
 6443#undef CASE_ATOMIC_STORE_OP
 6444#define CASE_ATOMIC_OP(Name, Type) \
 6445 case kExpr##Name: { \
 6446 memtype = MachineType::Type(); \
 6447 break; /* to generic mem access code below */ \
 6448 }
 6450#undef CASE_ATOMIC_OP
 // atomic.fence has no memory access; it carries a single reserved
 // byte that must be zero.
 6451 case kExprAtomicFence: {
 6452 uint8_t zero = this->template read_u8<ValidationTag>(
 6453 this->pc_ + opcode_length, "zero");
 6454 if (!VALIDATE(zero == 0)) {
 6455 this->DecodeError(this->pc_ + opcode_length,
 6456 "invalid atomic operand");
 6457 return 0;
 6458 }
 6460 return 1 + opcode_length;
 6461 }
 6462 default:
 6463 // This path is only possible if we are validating.
 6464 V8_ASSUME(ValidationTag::validate);
 6465 this->DecodeError("invalid atomic opcode: 0x%x", opcode);
 6466 return 0;
 6467 }
 6468
 6469 const uint32_t element_size_log2 =
 6472 MakeMemoryAccessImmediate(opcode_length, element_size_log2);
 // Note: `false` converts to 0, the error return value of this function.
 6473 if (!this->Validate(this->pc_ + opcode_length, imm)) return false;
 // Atomic accesses must be naturally aligned (alignment == access size).
 // NOTE(review): no early return after DecodeError here — presumably the
 // decoder's error state stops further processing; confirm.
 6474 if (!VALIDATE(imm.alignment == element_size_log2)) {
 6475 this->DecodeError(this->pc_,
 6476 "invalid alignment for atomic operation; expected "
 6477 "alignment is %u, actual alignment is %u",
 6478 element_size_log2, imm.alignment);
 6479 }
 6480
 // The signature depends on whether the accessed memory is memory64.
 6481 const FunctionSig* sig =
 6482 WasmOpcodes::SignatureForAtomicOp(opcode, imm.memory->is_memory64());
 6483 V8_ASSUME(sig != nullptr);
 6484 PoppedArgVector args = PopArgs(sig);
 6485 Value* result = sig->return_count() ? Push(sig->GetReturn()) : nullptr;
 // Skip emitting the operation if the access is statically out of bounds.
 6486 if (V8_LIKELY(!CheckStaticallyOutOfBounds(imm.memory, memtype.MemSize(),
 6487 imm.offset))) {
 6488 CALL_INTERFACE_IF_OK_AND_REACHABLE(AtomicOp, opcode, args.data(),
 6489 sig->parameter_count(), imm, result);
 6490 }
 6491
 6492 return opcode_length + imm.length;
 6493 }
6494
  // Decodes a 0xFC-prefixed ("numeric") instruction: saturating float-to-int
  // conversions, bulk memory/table operations, and fp16 memory accesses.
  // Returns the total instruction length, or 0 on validation failure.
  unsigned DecodeNumericOpcode(WasmOpcode opcode, uint32_t opcode_length) {
    // Fast check for out-of-range opcodes (only allow 0xfcXX).
    // This avoids a dynamic check in signature lookup, and might also help the
    // big switch below.
    if (!VALIDATE((opcode >> 8) == kNumericPrefix)) {
      this->DecodeError("invalid numeric opcode: 0x%x", opcode);
      return 0;
    }

    const FunctionSig* sig = WasmOpcodes::Signature(opcode);
    switch (opcode) {
      // Saturating conversions carry no immediates and behave like plain
      // unary operators.
      case kExprI32SConvertSatF32:
      case kExprI32UConvertSatF32:
      case kExprI32SConvertSatF64:
      case kExprI32UConvertSatF64:
      case kExprI64SConvertSatF32:
      case kExprI64UConvertSatF32:
      case kExprI64SConvertSatF64:
      case kExprI64UConvertSatF64: {
        BuildSimpleOperator(opcode, sig);
        return opcode_length;
      }
      case kExprMemoryInit: {
        MemoryInitImmediate imm(this, this->pc_ + opcode_length, validate);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        // The destination address type depends on memory32 vs. memory64.
        ValueType mem_type = MemoryAddressType(imm.memory.memory);
        auto [dst, offset, size] = Pop(mem_type, kWasmI32, kWasmI32);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(MemoryInit, imm, dst, offset, size);
        return opcode_length + imm.length;
      }
      case kExprDataDrop: {
        IndexImmediate imm(this, this->pc_ + opcode_length,
                           "data segment index", validate);
        if (!this->ValidateDataSegment(this->pc_ + opcode_length, imm)) {
          return 0;
        }
        return opcode_length + imm.length;
      }
      case kExprMemoryCopy: {
        MemoryCopyImmediate imm(this, this->pc_ + opcode_length, validate);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        // Source and destination memories may differ in address type
        // (memory32 vs. memory64).
        ValueType dst_type = MemoryAddressType(imm.memory_dst.memory);
        ValueType src_type = MemoryAddressType(imm.memory_src.memory);
        // size_type = min(dst_type, src_type), where kI32 < kI64.
        ValueType size_type = dst_type == kWasmI32 ? kWasmI32 : src_type;

        auto [dst, src, size] = Pop(dst_type, src_type, size_type);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(MemoryCopy, imm, dst, src, size);
        return opcode_length + imm.length;
      }
      case kExprMemoryFill: {
        MemoryIndexImmediate imm(this, this->pc_ + opcode_length, validate);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ValueType mem_type = MemoryAddressType(imm.memory);
        // Stack: [dst_address, fill_value (i32), size].
        auto [dst, value, size] = Pop(mem_type, kWasmI32, mem_type);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(MemoryFill, imm, dst, value, size);
        return opcode_length + imm.length;
      }
      case kExprTableInit: {
        TableInitImmediate imm(this, this->pc_ + opcode_length, validate);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ValueType table_address_type = TableAddressType(imm.table.table);
        auto [dst, src, size] = Pop(table_address_type, kWasmI32, kWasmI32);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(TableInit, imm, dst, src, size);
        return opcode_length + imm.length;
      }
      case kExprElemDrop: {
        IndexImmediate imm(this, this->pc_ + opcode_length,
                           "element segment index", validate);
        if (!this->ValidateElementSegment(this->pc_ + opcode_length, imm)) {
          return 0;
        }
        return opcode_length + imm.length;
      }
      case kExprTableCopy: {
        TableCopyImmediate imm(this, this->pc_ + opcode_length, validate);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        // Analogous to memory.copy: tables may use i32 or i64 addressing.
        ValueType dst_type = TableAddressType(imm.table_dst.table);
        ValueType src_type = TableAddressType(imm.table_src.table);
        // size_type = min(dst_type, src_type), where kI32 < kI64.
        ValueType size_type = dst_type == kWasmI32 ? kWasmI32 : src_type;

        auto [dst, src, size] = Pop(dst_type, src_type, size_type);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(TableCopy, imm, dst, src, size);
        return opcode_length + imm.length;
      }
      case kExprTableGrow: {
        TableIndexImmediate imm(this, this->pc_ + opcode_length, validate);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ValueType table_address_type = TableAddressType(imm.table);
        // Stack: [init_value (table element type), delta]; pushes the old
        // size (or -1 on failure) with the table's address type.
        auto [value, delta] = Pop(imm.table->type, table_address_type);
        Value* result = Push(table_address_type);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(TableGrow, imm, value, delta,
                                           result);
        return opcode_length + imm.length;
      }
      case kExprTableSize: {
        TableIndexImmediate imm(this, this->pc_ + opcode_length, validate);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        Value* result = Push(TableAddressType(imm.table));
        return opcode_length + imm.length;
      }
      case kExprTableFill: {
        TableIndexImmediate imm(this, this->pc_ + opcode_length, validate);
        if (!this->Validate(this->pc_ + opcode_length, imm)) return 0;
        ValueType table_address_type = TableAddressType(imm.table);
        auto [start, value, count] =
            Pop(table_address_type, imm.table->type, table_address_type);
        CALL_INTERFACE_IF_OK_AND_REACHABLE(TableFill, imm, start, value, count);
        return opcode_length + imm.length;
      }
      // fp16 loads/stores are gated on the experimental flag; they reuse the
      // generic load/store decoding with a fixed max alignment of 2 bytes.
      case kExprF32LoadMemF16: {
        if (!v8_flags.experimental_wasm_fp16) {
          this->DecodeError(
              "invalid numeric opcode: 0x%x, "
              "enable with --experimental-wasm-fp16",
              opcode);
          return 0;
        }
        return DecodeLoadMem(LoadType::kF32LoadF16, 2);
      }
      case kExprF32StoreMemF16: {
        if (!v8_flags.experimental_wasm_fp16) {
          this->DecodeError(
              "invalid numeric opcode: 0x%x, "
              "enable with --experimental-wasm-fp16",
              opcode);
          return 0;
        }
        return DecodeStoreMem(StoreType::kF32StoreF16, 2);
      }
      default:
        this->DecodeError("invalid numeric opcode: 0x%x", opcode);
        return 0;
    }
  }
6634
  // Decodes an asm.js-compatibility opcode. These opcodes are only valid in
  // modules translated from asm.js; in any other module they are rejected.
  // Returns the instruction length, or 0 on error.
  unsigned DecodeAsmJsOpcode(WasmOpcode opcode, uint32_t opcode_length) {
    if ((opcode >> 8) != kAsmJsPrefix) {
      this->DecodeError("Invalid opcode: 0x%x", opcode);
      return 0;
    }

    switch (opcode) {
#define ASMJS_CASE(Op, ...) case kExpr##Op:
#undef ASMJS_CASE
      {
        // Deal with special asmjs opcodes.
        if (!VALIDATE(is_asmjs_module(this->module_)))
          break;
        const FunctionSig* asmJsSig = WasmOpcodes::AsmjsSignature(opcode);
        DCHECK_NOT_NULL(asmJsSig);
        // asm.js operators have no immediates; treat as simple operators.
        BuildSimpleOperator(opcode, asmJsSig);
        return opcode_length;
      }
      default:
        break;
    }
    // Fall-through from the validation failure above or unknown opcode.
    this->DecodeError("Invalid opcode: 0x%x", opcode);
    return 0;
  }
6660
6661 V8_INLINE Value CreateValue(ValueType type) { return Value{this->pc_, type}; }
6662
    // Pushes {value} onto the value stack and returns a pointer to the new
    // top-of-stack slot. In shared functions, only values of shared type may
    // be pushed.
    DCHECK_IMPLIES(this->ok(), value.type != kWasmVoid);
    if (!VALIDATE(!this->is_shared_ || IsShared(value.type, this->module_))) {
      this->DecodeError(value.pc(), "%s does not have a shared type",
                        SafeOpcodeNameAt(value.pc()));
      return nullptr;
    }
    // {stack_.EnsureMoreCapacity} should have been called before, either in the
    // central decoding loop, or individually if more than one element is
    // pushed.
    stack_.push(value);
    return &stack_.back();
  }
6676
6677 V8_INLINE Value* Push(ValueType type) { return Push(CreateValue(type)); }
6678
    // Resets the stack to the control block's base depth and re-pushes the
    // values described by {merge}. No-op when decoding constant expressions.
    if constexpr (decoding_mode == kConstantExpression) return;
    DCHECK_EQ(c, &control_.back());
    DCHECK(merge == &c->start_merge || merge == &c->end_merge);
    stack_.shrink_to(c->stack_depth);
    if (merge->arity == 1) {
      // Arity 1 stores the single value inline ({vals.first}).
      // {stack_.EnsureMoreCapacity} should have been called before in the
      // central decoding loop.
      Push(merge->vals.first);
    } else {
      stack_.EnsureMoreCapacity(merge->arity, this->zone_);
      for (uint32_t i = 0; i < merge->arity; i++) {
        Push(merge->vals.array[i]);
      }
    }
    DCHECK_EQ(c->stack_depth + merge->arity, stack_.size());
  }
6696
    // Pushes all {values} onto the stack, growing capacity once up front.
    stack_.EnsureMoreCapacity(static_cast<int>(values.size()), this->zone_);
    for (Value& value : values) Push(value);
  }
6701
    // Pushes one value per return type of {sig} and returns a pointer to the
    // first pushed slot.
    size_t return_count = sig->return_count();
    stack_.EnsureMoreCapacity(static_cast<int>(return_count), this->zone_);
    for (size_t i = 0; i < return_count; ++i) {
      Push(sig->GetReturn(i));
    }
    return stack_.end() - return_count;
  }
6710
  // We do not inline these functions because doing so causes a large binary
  // size increase. Not inlining them should not create a performance
  // degradation, because their invocations are guarded by V8_LIKELY.
                                             const char* expected) {
    // Reports a type mismatch for the {index}-th operand of the current
    // instruction: what was expected vs. what was found and where it came
    // from.
    this->DecodeError(val.pc(), "%s[%d] expected %s, found %s of type %s",
                      SafeOpcodeNameAt(this->pc_), index, expected,
                      SafeOpcodeNameAt(val.pc()), val.type.name().c_str());
  }
6720
                                             std::string expected) {
    // Convenience overload: forwards to the const char* variant.
    PopTypeError(index, val, expected.c_str());
  }
6725
                                             ValueType expected) {
    // Convenience overload: formats the expected ValueType as "type <name>".
    PopTypeError(index, val, ("type " + expected.name()).c_str());
  }
6730
                                                       int actual) {
    // Reports a stack underflow for the current instruction: {needed}
    // operands required, only {actual} present.
    DCHECK_LT(0, needed);
    DCHECK_LE(0, actual);
    DCHECK_LT(actual, needed);
    this->DecodeError(
        "not enough arguments on the stack for %s (need %d, got %d)",
        SafeOpcodeNameAt(this->pc_), needed, actual);
  }
6740
6741 V8_INLINE Value Pop(int index, ValueType expected) {
6742 Value value = Pop();
6743 ValidateStackValue(index, value, expected);
6744 return value;
6745 }
6746
  // Checks that {value} is a subtype of {expected}; reports a type error
  // otherwise. Bottom on either side always passes (polymorphic stack after
  // unreachable code).
  V8_INLINE void ValidateStackValue(int index, Value value,
                                    ValueType expected) {
    if (!VALIDATE(IsSubtypeOf(value.type, expected, this->module_) ||
                  value.type == kWasmBottom || expected == kWasmBottom)) {
      PopTypeError(index, value, expected);
    }
  }
6754
    // Pops the top stack value. Popping below the current control block's
    // base is only allowed in unreachable code, where a synthetic
    // "unreachable" value is returned instead.
    DCHECK(!control_.empty());
    uint32_t limit = control_.back().stack_depth;
    if (V8_UNLIKELY(stack_size() <= limit)) {
      // Popping past the current control start in reachable code.
      if (!VALIDATE(control_.back().unreachable())) {
        NotEnoughArgumentsError(1, 0);
      }
      return UnreachableValue(this->pc_);
    }
    Value top_of_stack = stack_.back();
    stack_.pop();
    return top_of_stack;
  }
6769
6770 V8_INLINE Value Peek(int depth, int index, ValueType expected) {
6771 Value value = Peek(depth);
6772 ValidateStackValue(index, value, expected);
6773 return value;
6774 }
6775
  // Returns the stack value {depth} elements below the top without popping.
  // Peeking below the current control block's base is only allowed in
  // unreachable function-body code; a synthetic value is returned then.
  V8_INLINE Value Peek(int depth = 0) {
    DCHECK(!control_.empty());
    uint32_t limit = control_.back().stack_depth;
    if (V8_UNLIKELY(stack_.size() <= limit + depth)) {
      // Peeking past the current control start in reachable code.
      if (!VALIDATE(decoding_mode == kFunctionBody &&
                    control_.back().unreachable())) {
        NotEnoughArgumentsError(depth + 1, stack_.size() - limit);
      }
      return UnreachableValue(this->pc_);
    }
    DCHECK_LT(depth, stack_.size());
    return *(stack_.end() - depth - 1);
  }
6790
6791 V8_INLINE Value Peek(ValueType expected) { return Peek(0, 0, expected); }
6792
  // Pop multiple values at once; faster than multiple individual {Pop}s.
  // Returns an array of the popped values if there are multiple, or the popped
  // value itself if a single type is passed.
  template <typename... ValueTypes>
  // Pop is only allowed to be called with ValueType parameters.
    requires((std::is_same_v<ValueType, ValueTypes> ||
              std::is_base_of_v<IndependentValueType, ValueTypes>) &&
             ...)
  V8_INLINE std::conditional_t<sizeof...(ValueTypes) == 1, Value,
                               std::array<Value, sizeof...(ValueTypes)>>
  Pop(ValueTypes... expected_types) {
    constexpr int kCount = sizeof...(ValueTypes);
    // Make sure at least {kCount} elements exist above the control base
    // (inserting synthetic values in unreachable code if needed).
    EnsureStackArguments(kCount);
    DCHECK_LE(control_.back().stack_depth, stack_size());
    DCHECK_GE(stack_size() - control_.back().stack_depth, kCount);
    // Note: Popping from the {FastZoneVector} does not invalidate the old (now
    // out-of-range) elements.
    stack_.pop(kCount);
    // Validate each popped slot left-to-right against its expected type; the
    // popped elements are still readable past {stack_.end()} (see note above).
    auto ValidateAndGetNextArg = [this, i = 0](ValueType type) mutable {
      ValidateStackValue(i, stack_.end()[i], type);
      return stack_.end()[i++];
    };
    return {ValidateAndGetNextArg(expected_types)...};
  }
6817
  // Pops a value that must be a reference to an array with the given
  // (possibly packed) element type; for writes, the array must additionally
  // be mutable. Reports a type error otherwise. {operand_index} is only used
  // for error messages.
  Value PopPackedArray(uint32_t operand_index, ValueType expected_element_type,
                       WasmArrayAccess access) {
    Value array = Pop();
    if (array.type.is_bottom()) {
      // We are in a polymorphic stack. Leave the stack as it is.
      DCHECK(!current_code_reachable_and_ok_);
      return array;
    }
    // Inputs of type "none" are okay due to implicit upcasting. The stringref
    // spec doesn't say this explicitly yet, but it's consistent with the rest
    // of Wasm. (Of course such inputs will trap at runtime.) See:
    // https://github.com/WebAssembly/stringref/issues/66
    if (array.type.is_reference_to(HeapType::kNone)) return array;
    if (VALIDATE(array.type.is_object_reference() && array.type.has_index())) {
      ModuleTypeIndex ref_index = array.type.ref_index();
      if (VALIDATE(this->module_->has_array(ref_index))) {
        const ArrayType* array_type = this->module_->array_type(ref_index);
        // Element type must match exactly; writes also require mutability.
        if (VALIDATE(array_type->element_type() == expected_element_type &&
                     (access == WasmArrayAccess::kRead ||
                      array_type->mutability()))) {
          return array;
        }
      }
    }
    // All checks failed: report e.g. "expected array of mutable i16".
    PopTypeError(operand_index, array,
                 (std::string("array of ") +
                  (access == WasmArrayAccess::kWrite ? "mutable " : "") +
                  expected_element_type.name())
                     .c_str());
    return array;
  }
6849
6850 // Drop the top {count} stack elements, or all of them if less than {count}
6851 // are present.
6852 V8_INLINE void Drop(int count = 1) {
6853 DCHECK(!control_.empty());
6854 uint32_t limit = control_.back().stack_depth;
6855 if (V8_UNLIKELY(stack_.size() < limit + count)) {
6856 // Pop what we can.
6857 count = std::min(count, static_cast<int>(stack_.size() - limit));
6858 }
6859 stack_.pop(count);
6860 }
6861 // Drop the top stack element if present. Takes a Value input for more
6862 // descriptive call sites.
6863 V8_INLINE void Drop(const Value& /* unused */) { Drop(1); }
6864
6866 kNonStrictCounting = false,
6867 kStrictCounting = true
6869
6876
  // Whether {TypeCheckStackAgainstMerge} should materialize the merge's
  // values on a polymorphic (unreachable) stack.
  enum class PushBranchValues : bool {
    kNo = false,
    kYes = true,
  };
  // Whether stack slots should be upcast in place to the merge target types.
  enum class RewriteStackTypes : bool {
    kNo = false,
    kYes = true,
  };
6885
  // - If the current code is reachable, check if the current stack values are
  //   compatible with {merge} based on their number and types. If
  //   {strict_count}, check that #(stack elements) == {merge->arity}, otherwise
  //   #(stack elements) >= {merge->arity}.
  // - If the current code is unreachable, check if any values that may exist on
  //   top of the stack are compatible with {merge}. If {push_branch_values},
  //   push back to the stack values based on the type of {merge} (this is
  //   needed for conditional branches due to their typing rules, and
  //   fallthroughs so that the outer control finds the expected values on the
  //   stack). TODO(manoskouk): We expect the unreachable-code behavior to
  //   change, either due to relaxation of dead code verification, or the
  //   introduction of subtyping.
  template <StackElementsCountMode strict_count,
            PushBranchValues push_branch_values, MergeType merge_type,
            RewriteStackTypes rewrite_types>
    uint32_t arity = merge->arity;
    uint32_t actual = stack_.size() - control_.back().stack_depth;
    // Handle trivial cases first. Arity 0 is the most common case.
    if (arity == 0 && (!strict_count || actual == 0)) return true;
    // Arity 1 is still common enough that we handle it separately (only doing
    // the most basic subtype check).
    if (arity == 1 && (strict_count ? actual == arity : actual >= arity)) {
      if (stack_.back().type == merge->vals.first.type) return true;
    }
    // Everything else (including all error cases) goes to the out-of-line
    // slow path.
    return TypeCheckStackAgainstMerge_Slow<strict_count, push_branch_values,
                                           merge_type, rewrite_types>(merge);
  }
6914
  // Slow path for {TypeCheckStackAgainstMerge}.
  template <StackElementsCountMode strict_count,
            PushBranchValues push_branch_values, MergeType merge_type,
            RewriteStackTypes rewrite_types>
                                                  Merge<Value>* merge) {
    // Human-readable label kind for error messages.
    constexpr const char* merge_description =
        merge_type == kBranchMerge     ? "branch"
        : merge_type == kReturnMerge   ? "return"
        : merge_type == kInitExprMerge ? "constant expression"
                                       : "fallthru";
    uint32_t arity = merge->arity;
    uint32_t actual = stack_.size() - control_.back().stack_depth;
    // Here we have to check for !unreachable(), because we need to typecheck as
    // if the current code is reachable even if it is spec-only reachable.
    if (V8_LIKELY(decoding_mode == kConstantExpression ||
                  !control_.back().unreachable())) {
      if (V8_UNLIKELY(strict_count ? actual != arity : actual < arity)) {
        this->DecodeError("expected %u elements on the stack for %s, found %u",
                          arity, merge_description, actual);
        return false;
      }
      // Typecheck the topmost {merge->arity} values on the stack.
      Value* stack_values = stack_.end() - arity;
      for (uint32_t i = 0; i < arity; ++i) {
        Value& val = stack_values[i];
        Value& old = (*merge)[i];
        if (!IsSubtypeOf(val.type, old.type, this->module_)) {
          this->DecodeError("type error in %s[%u] (expected %s, got %s)",
                            merge_description, i, old.type.name().c_str(),
                            val.type.name().c_str());
          return false;
        }
        if constexpr (static_cast<bool>(rewrite_types)) {
          // Upcast type on the stack to the target type of the label.
          val.type = old.type;
        }
      }
      return true;
    }
    // Unreachable code validation starts here.
    if (V8_UNLIKELY(strict_count && actual > arity)) {
      this->DecodeError("expected %u elements on the stack for %s, found %u",
                        arity, merge_description, actual);
      return false;
    }
    // TODO(manoskouk): Use similar code as above if we keep unreachable checks.
    // Typecheck whatever values do exist via {Peek} (which reports errors).
    for (int i = arity - 1, depth = 0; i >= 0; --i, ++depth) {
      Peek(depth, i, (*merge)[i].type);
    }
    if constexpr (static_cast<bool>(push_branch_values)) {
      uint32_t inserted_value_count =
          static_cast<uint32_t>(EnsureStackArguments(arity));
      if (inserted_value_count > 0) {
        // stack_.EnsureMoreCapacity() may have inserted unreachable values into
        // the bottom of the stack. If so, mark them with the correct type. If
        // drop values were also inserted, disregard them, as they will be
        // dropped anyway.
        Value* stack_base = stack_value(arity);
        for (uint32_t i = 0; i < std::min(arity, inserted_value_count); i++) {
          if (stack_base[i].type == kWasmBottom) {
            stack_base[i].type = (*merge)[i].type;
          }
        }
      }
    }
    // {Peek} above may have flagged errors; success means no error occurred.
    return VALIDATE(this->ok());
  }
6983
  // Type-checks the current stack against the function's return types and
  // ends the current control block. Returns false on a type error.
  template <StackElementsCountMode strict_count, MergeType merge_type>
  bool DoReturn() {
    if (!VALIDATE(
            (TypeCheckStackAgainstMerge<strict_count, PushBranchValues::kNo,
                                        merge_type, RewriteStackTypes::kNo>(
                &control_.front().end_merge)))) {
      return false;
    }
    DCHECK_IMPLIES(current_code_reachable_and_ok_,
                   stack_.size() >= this->sig_->return_count());
    // Code after a return is unreachable.
    EndControl();
    return true;
  }
6998
6999 int startrel(const uint8_t* ptr) {
7000 return static_cast<int>(ptr - this->start_);
7001 }
7002
    // Handles falling off the end of the current (non-loop) control block:
    // type-checks the stack against the block's end merge.
    Control* c = &control_.back();
    DCHECK_NE(c->kind, kControlLoop);
    if (!VALIDATE(TypeCheckFallThru())) return;
    // Only mark the merge as reached if this fallthrough is reachable.
    if (c->reachable()) c->end_merge.reached = true;
  }
7010
    // An if without an else acts as if the else branch were empty, so the
    // implicit else must pass the block's input values through unchanged:
    // arities must match and each start type must be a subtype of the
    // corresponding end type.
    DCHECK(c->is_onearmed_if());
    if (c->end_merge.arity != c->start_merge.arity) {
      this->DecodeError(c->pc(),
                        "start-arity and end-arity of one-armed if must match");
      return false;
    }
    for (uint32_t i = 0; i < c->start_merge.arity; ++i) {
      Value& start = c->start_merge[i];
      Value& end = c->end_merge[i];
      if (!IsSubtypeOf(start.type, end.type, this->module_)) {
        this->DecodeError("type error in merge[%u] (expected %s, got %s)", i,
                          end.type.name().c_str(), start.type.name().c_str());
        return false;
      }
    }
    return true;
  }
7029
    // Fallthrough requires an exact element count (strict counting) against
    // the end merge of the innermost control block.
    return TypeCheckStackAgainstMerge<kStrictCounting, PushBranchValues::kYes,
                                      kFallthroughMerge,
                                      RewriteStackTypes::kNo>(
        &control_.back().end_merge);
  }
7036
  // If the current code is reachable, check if the current stack values are
  // compatible with a jump to {c}, based on their number and types.
  // Otherwise, we have a polymorphic stack: check if any values that may exist
  // on top of the stack are compatible with {c}. If {push_branch_values},
  // push back to the stack values based on the type of {c} (this is needed for
  // conditional branches due to their typing rules, and fallthroughs so that
  // the outer control finds enough values on the stack).
  template <PushBranchValues push_branch_values,
            RewriteStackTypes rewrite_types>
    // Branches may leave extra values on the stack, hence non-strict counting.
    return TypeCheckStackAgainstMerge<kNonStrictCounting, push_branch_values,
                                      kBranchMerge, rewrite_types>(
        c->br_merge());
  }
7051
  // Called by the base decoder when the first error is recorded: stops the
  // decoding loop, marks code unreachable, and notifies the interface.
  void onFirstError() override {
    this->end_ = this->pc_;  // Terminate decoding loop.
    this->current_code_reachable_and_ok_ = false;
    TRACE(" !%s\n", this->error_.message().c_str());
    // Cannot use CALL_INTERFACE_* macros because we emitted an error.
    interface().OnFirstError(this);
  }
7059
  // There are currently no simple prototype operators.
    // Looks up the opcode's signature and emits it as a simple operator.
    const FunctionSig* sig = WasmOpcodes::Signature(opcode);
    return BuildSimpleOperator(opcode, sig);
  }
7065
    // Dispatches to the unary or binary helper based on {sig}'s parameter
    // count (only 1- and 2-parameter simple operators exist).
    DCHECK_GE(1, sig->return_count());
    if (sig->parameter_count() == 1) {
      // All current simple unary operators have exactly 1 return value.
      DCHECK_EQ(1, sig->return_count());
      return BuildSimpleOperator(opcode, sig->GetReturn(0), sig->GetParam(0));
    } else {
      DCHECK_EQ(2, sig->parameter_count());
      // Binary operators may have no return value (e.g. void results).
      ValueType ret = sig->return_count() == 0 ? kWasmVoid : sig->GetReturn(0);
      return BuildSimpleOperator(opcode, ret, sig->GetParam(0),
                                 sig->GetParam(1));
    }
  }
7079
                          ValueType arg_type) {
    // Unary operator: pop one argument, push one result, emit UnOp.
    DCHECK_NE(kWasmVoid, return_type);
    Value val = Pop(arg_type);
    Value* ret = Push(return_type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(UnOp, opcode, val, ret);
    // Simple operators are always exactly one byte long.
    return 1;
  }
7088
                          ValueType lhs_type, ValueType rhs_type) {
    // Binary operator: pop two arguments, push the result (if any), emit
    // BinOp.
    auto [lval, rval] = Pop(lhs_type, rhs_type);
    Value* ret = return_type == kWasmVoid ? nullptr : Push(return_type);
    CALL_INTERFACE_IF_OK_AND_REACHABLE(BinOp, opcode, lval, rval, ret);
    // Simple operators are always exactly one byte long.
    return 1;
  }
7096
7097#define DEFINE_SIMPLE_SIG_OPERATOR(sig, ...) \
7098 int BuildSimpleOperator_##sig(WasmOpcode opcode) { \
7099 return BuildSimpleOperator(opcode, __VA_ARGS__); \
7100 }
7102#undef DEFINE_SIMPLE_SIG_OPERATOR
7103
7104 static constexpr ValidationTag validate = {};
7105};
7106
7108 public:
7110 static constexpr DecodingMode decoding_mode = kFunctionBody;
7111 static constexpr bool kUsesPoppedArgs = false;
7115
7116#define DEFINE_EMPTY_CALLBACK(name, ...) \
7117 void name(FullDecoder* decoder, ##__VA_ARGS__) {}
7119#undef DEFINE_EMPTY_CALLBACK
7120};
7121
7122#undef CALL_INTERFACE_IF_OK_AND_REACHABLE
7123#undef CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE
7124#undef TRACE
7125#undef TRACE_INST_FORMAT
7126#undef VALIDATE
7127#undef CHECK_PROTOTYPE_OPCODE
7128
7129} // namespace v8::internal::wasm
7130
7131#endif // V8_WASM_FUNCTION_BODY_DECODER_IMPL_H_
Builtins::Kind kind
Definition builtins.cc:40
SourcePosition pos
constexpr MachineRepresentation representation() const
constexpr uint8_t MemSize() const
size_t return_count() const
Definition signature.h:93
T GetParam(size_t index) const
Definition signature.h:96
T GetReturn(size_t index=0) const
Definition signature.h:103
base::Vector< const T > returns() const
Definition signature.h:116
size_t parameter_count() const
Definition signature.h:94
base::Vector< const T > all() const
Definition signature.h:117
T * New(Args &&... args)
Definition zone.h:114
void DeleteArray(T *pointer, size_t length)
Definition zone.h:171
ValueType element_type() const
BranchTableIterator(Decoder *decoder, const BranchTableImmediate &imm)
uint64_t read_u64(const uint8_t *pc, Name< ValidationTag > msg="expected 8 bytes")
Definition decoder.h:153
const uint8_t * start() const
Definition decoder.h:407
std::pair< int64_t, uint32_t > read_i64v(const uint8_t *pc, Name< ValidationTag > name="signed LEB64")
Definition decoder.h:185
uint32_t read_u32(const uint8_t *pc, Name< ValidationTag > msg="expected 4 bytes")
Definition decoder.h:146
std::pair< int64_t, uint32_t > read_i33v(const uint8_t *pc, Name< ValidationTag > name="signed LEB33")
Definition decoder.h:193
std::pair< uint64_t, uint32_t > read_u64v(const uint8_t *pc, Name< ValidationTag > name="LEB64")
Definition decoder.h:177
void V8_NOINLINE V8_PRESERVE_MOST errorf(const char *format, Args... args)
Definition decoder.h:342
const uint8_t * pc() const
Definition decoder.h:408
uint8_t read_u8(const uint8_t *pc, Name< ValidationTag > msg="expected 1 byte")
Definition decoder.h:132
std::pair< uint32_t, uint32_t > read_u32v(const uint8_t *pc, Name< ValidationTag > name="LEB32")
Definition decoder.h:161
std::pair< int32_t, uint32_t > read_i32v(const uint8_t *pc, Name< ValidationTag > name="signed LEB32")
Definition decoder.h:169
const uint8_t * end() const
Definition decoder.h:426
std::pair< WasmOpcode, uint32_t > read_prefixed_opcode(const uint8_t *pc, Name< ValidationTag > name="prefixed opcode")
Definition decoder.h:202
void V8_NOINLINE V8_PRESERVE_MOST error(const char *msg)
Definition decoder.h:331
uint32_t buffer_offset() const
Definition decoder.h:419
EffectHandlerTableIterator(Decoder *decoder, const EffectHandlerTableImmediate &imm)
V8_NOINLINE V8_PRESERVE_MOST void Grow(int slots_needed, Zone *zone)
V8_INLINE void EnsureMoreCapacity(int slots_needed, Zone *zone)
static constexpr HeapType from_code(uint8_t code, bool is_shared)
Definition value-type.h:727
static constexpr HeapType Index(ModuleTypeIndex index, bool shared, RefTypeKind kind, Exactness exact=Exactness::kAnySubtype)
Definition value-type.h:716
constexpr Representation representation() const
Definition value-type.h:837
constexpr bool is_index() const
Definition value-type.h:840
std::string name() const
Definition value-type.h:773
constexpr ModuleTypeIndex ref_index() const
Definition value-type.h:762
constexpr std::nullptr_t data() const
bool mutability(uint32_t index) const
ValueType field(uint32_t index) const
TryTableIterator(Decoder *decoder, const TryTableImmediate &imm)
void Populate(bool shared, RefTypeKind kind)
Definition value-type.h:359
constexpr bool is_bottom() const
Definition value-type.h:426
constexpr bool is_packed() const
Definition value-type.h:444
constexpr bool is_reference() const
Definition value-type.h:600
constexpr bool is_shared() const
Definition value-type.h:403
constexpr bool is_string_view() const
Definition value-type.h:438
constexpr bool has_index() const
Definition value-type.h:367
constexpr Exactness exactness() const
Definition value-type.h:399
constexpr bool is_numeric() const
Definition value-type.h:373
constexpr bool is_defaultable() const
Definition value-type.h:452
V8_EXPORT_PRIVATE std::string name() const
constexpr RefTypeKind ref_type_kind() const
Definition value-type.h:406
constexpr ValueType AsExact(Exactness exact=Exactness::kExact) const
Definition value-type.h:924
constexpr HeapType heap_type() const
static constexpr ValueType RefNull(ModuleTypeIndex index, bool shared, RefTypeKind kind)
Definition value-type.h:895
ValueType AsExactIfProposalEnabled(Exactness exact=Exactness::kExact) const
Definition value-type.h:932
constexpr ValueType Unpacked() const
Definition value-type.h:944
static constexpr ValueType Ref(ModuleTypeIndex index, bool shared, RefTypeKind kind)
Definition value-type.h:887
constexpr ModuleTypeIndex ref_index() const
static constexpr ValueType RefMaybeNull(ModuleTypeIndex index, Nullability nullable, bool shared, RefTypeKind kind)
Definition value-type.h:903
constexpr ValueType AsNonNull() const
Definition value-type.h:917
bool Validate(const uint8_t *pc, BranchDepthImmediate &imm, size_t control_depth)
bool Validate(const uint8_t *pc, BlockTypeImmediate &imm)
bool Validate(const uint8_t *pc, StringConstImmediate &imm)
bool Validate(const uint8_t *pc, MemoryInitImmediate &imm)
bool Validate(const uint8_t *pc, BranchTableImmediate &imm)
bool Validate(const uint8_t *pc, GlobalIndexImmediate &imm)
bool Validate(const uint8_t *pc, StructIndexImmediate &imm)
static uint32_t OpcodeLength(WasmDecoder *decoder, const uint8_t *pc, ImmediateObservers &... ios)
bool Validate(const uint8_t *pc, HeapTypeImmediate &imm)
bool Validate(const uint8_t *pc, MemoryCopyImmediate &imm)
bool Validate(const uint8_t *pc, ArrayIndexImmediate &imm)
bool Validate(const uint8_t *pc, CallIndirectImmediate &imm)
bool ValidateLocal(const uint8_t *pc, IndexImmediate &imm)
bool Validate(const uint8_t *pc, MemoryAccessImmediate &imm)
bool ValidateValueType(const uint8_t *pc, ValueType type)
WasmDecoder(Zone *zone, const WasmModule *module, WasmEnabledFeatures enabled, WasmDetectedFeatures *detected, const FunctionSig *sig, bool is_shared, const uint8_t *start, const uint8_t *end, uint32_t buffer_offset=0)
base::Vector< ValueType > local_types() const
bool ValidateElementSegment(const uint8_t *pc, IndexImmediate &imm)
bool Validate(const uint8_t *pc, SigIndexImmediate &imm)
const std::pair< uint32_t, uint32_t > * current_inst_trace_
bool Validate(const uint8_t *pc, Simd128Immediate &imm)
bool Validate(const uint8_t *pc, WasmOpcode opcode, SimdLaneImmediate &imm)
bool ValidateFunction(const uint8_t *pc, IndexImmediate &imm)
bool ValidateDataSegment(const uint8_t *pc, IndexImmediate &imm)
bool Validate(const uint8_t *pc, CallFunctionImmediate &imm)
bool Validate(const uint8_t *pc, SelectTypeImmediate &imm)
bool Validate(const uint8_t *pc, MemoryIndexImmediate &imm)
bool ValidateCont(const uint8_t *pc, ContIndexImmediate &imm)
bool Validate(const uint8_t *pc, TagIndexImmediate &imm)
static BitVector * AnalyzeLoopAssignment(WasmDecoder *decoder, const uint8_t *pc, uint32_t locals_count, Zone *zone, bool *loop_is_innermost=nullptr)
bool Validate(const uint8_t *pc, FieldImmediate &imm)
bool Validate(const uint8_t *pc, TableCopyImmediate &imm)
bool CanReturnCall(const FunctionSig *target_sig)
bool Validate(const uint8_t *pc, TableInitImmediate &imm)
bool Validate(const uint8_t *pc, TableIndexImmediate &imm)
ValueType local_type(uint32_t index) const
bool ValidateHeapType(const uint8_t *pc, HeapType type)
static constexpr WasmEnabledFeatures All()
uint32_t DecodeAtomicOpcode(WasmOpcode opcode, uint32_t opcode_length)
static int UnknownOpcodeError(WasmFullDecoder *decoder, WasmOpcode opcode)
bool CheckStaticallyOutOfBounds(const WasmMemory *memory, uint64_t size, uint64_t offset)
V8_INLINE Value Pop(int index, ValueType expected)
V8_INLINE void ValidateParameters(const FunctionSig *sig)
Value PopPackedArray(uint32_t operand_index, ValueType expected_element_type, WasmArrayAccess access)
V8_NOINLINE V8_PRESERVE_MOST void PopTypeError(int index, Value val, std::string expected)
V8_INLINE bool TypeCheckStackAgainstMerge(Merge< Value > *merge)
bool TypeCheckAlwaysFails(Value obj, HeapType expected_type, bool null_succeeds)
int DecodeLoadLane(WasmOpcode opcode, LoadType type, uint32_t opcode_length)
int DecodeGCOpcode(WasmOpcode opcode, uint32_t opcode_length)
static Value UnreachableValue(const uint8_t *pc)
unsigned DecodeNumericOpcode(WasmOpcode opcode, uint32_t opcode_length)
int DecodeStringEncodeWtf8Array(unibrow::Utf8Variant variant, uint32_t opcode_length)
static constexpr OpcodeHandler GetOpcodeHandlerTableEntry(size_t idx)
unsigned DecodeAsmJsOpcode(WasmOpcode opcode, uint32_t opcode_length)
V8_INLINE void DropArgs(const StructType *type)
Control * PushControl(ControlKind kind, const BlockTypeImmediate &imm)
V8_INLINE void ValidateStackValue(int index, Value value, ValueType expected)
Value * PushReturns(const FunctionSig *sig)
V8_PRESERVE_MOST V8_NOINLINE bool TypeCheckStackAgainstMerge_Slow(Merge< Value > *merge)
int DecodeStringMeasureWtf8(unibrow::Utf8Variant variant, uint32_t opcode_length)
std::conditional_t< Interface::kUsesPoppedArgs, base::SmallVector< Value, 8 >, NoVector > PoppedArgVector
V8_NOINLINE V8_PRESERVE_MOST void PopTypeError(int index, Value val, const char *expected)
int DecodeStoreLane(WasmOpcode opcode, StoreType type, uint32_t opcode_length)
V8_INLINE Value Peek(ValueType expected)
V8_INLINE MemoryAccessImmediate MakeMemoryAccessImmediate(uint32_t pc_offset, uint32_t max_alignment)
V8_INLINE ValueType TableAddressType(const WasmTable *table)
Value PopDescriptor(ModuleTypeIndex described_index)
int DecodeStringNewWtf8(unibrow::Utf8Variant variant, uint32_t opcode_length)
V8_INLINE PoppedArgVector PopArgs(const FunctionSig *sig)
V8_INLINE Value Peek(int depth, int index, ValueType expected)
void InitializeInitializedLocalsTracking(int non_defaultable_locals)
V8_INLINE ValueType MemoryAddressType(const WasmMemory *memory)
V8_INLINE PoppedArgVector PopArgs(const StructType *type)
int DecodeStoreMem(StoreType store, int prefix_len=1)
V8_INLINE Value CreateValue(ValueType type)
int DecodeLoadMem(LoadType type, int prefix_len=1)
int BuildSimpleOperator(WasmOpcode opcode, ValueType return_type, ValueType lhs_type, ValueType rhs_type)
V8_NOINLINE V8_PRESERVE_MOST void NotEnoughArgumentsError(int needed, int actual)
uint32_t SimdExtractLane(WasmOpcode opcode, ValueType type, uint32_t opcode_length)
int BuildSimpleOperator(WasmOpcode opcode, const FunctionSig *sig)
V8_INLINE void InitMerge(Merge< Value > *merge, uint32_t arity, func get_val)
WasmFullDecoder(Zone *zone, const WasmModule *module, WasmEnabledFeatures enabled, WasmDetectedFeatures *detected, const FunctionBody &body, InterfaceArgs &&... interface_args)
FastZoneVector< uint32_t > locals_initializers_stack_
void PushMergeValues(Control *c, Merge< Value > *merge)
uint32_t DecodeSimdOpcode(WasmOpcode opcode, uint32_t opcode_length)
static int NonConstError(WasmFullDecoder *decoder, WasmOpcode opcode)
V8_NOINLINE V8_PRESERVE_MOST int EnsureStackArguments_Slow(int count)
bool TypeCheckAlwaysSucceeds(Value obj, HeapType type)
const char * SafeOpcodeNameAt(const uint8_t *pc)
int DecodeLoadTransformMem(LoadType type, LoadTransformationKind transform, uint32_t opcode_length)
int ParseBrOnCast(WasmOpcode opcode, uint32_t pc_offset)
int BuildSimpleOperator(WasmOpcode opcode, ValueType return_type, ValueType arg_type)
int DecodeStringRefOpcode(WasmOpcode opcode, uint32_t opcode_length)
int DecodeStringNewWtf8Array(unibrow::Utf8Variant variant, uint32_t opcode_length)
int DecodeStringEncodeWtf8(unibrow::Utf8Variant variant, uint32_t opcode_length)
V8_INLINE void DropArgs(const FunctionSig *sig)
uint32_t Simd8x16ShuffleOp(uint32_t opcode_length)
V8_INLINE void PushReturns(ReturnVector values)
int(*)(WasmFullDecoder *, WasmOpcode) OpcodeHandler
uint32_t SimdReplaceLane(WasmOpcode opcode, ValueType type, uint32_t opcode_length)
V8_NOINLINE V8_PRESERVE_MOST void PopTypeError(int index, Value val, ValueType expected)
uint32_t SimdConstOp(uint32_t opcode_length)
DECODE(NopForTestingUnsupportedInLiftoff)
int DecodeStringViewWtf8Encode(unibrow::Utf8Variant variant, uint32_t opcode_length)
Zone * zone_
base::OwnedVector< uint8_t > buffer_
Definition assembler.cc:111
uint8_t *const start_
Definition assembler.cc:131
const v8::base::TimeTicks end_
Definition sweeper.cc:54
int start
uint32_t count
int end
base::Vector< const DirectHandle< Object > > args
Definition execution.cc:74
refactor address components for immediate indexing make OptimizeMaglevOnNextCall optimize to turbofan instead of maglev filter for tracing turbofan compilation nullptr
#define CALL_INTERFACE(name,...)
#define CASE_ATOMIC_STORE_OP(Name, Type)
#define CALL_INTERFACE_IF_OK_AND_REACHABLE(name,...)
#define DEFINE_EMPTY_CALLBACK(name,...)
#define CASE_ATOMIC_OP(Name, Type)
#define VALIDATE(condition)
#define TRACE(...)
#define ATOMIC_STORE_OP_LIST(V)
#define DECODE_IMPL(opcode)
#define NON_CONST_ONLY
#define ATOMIC_OP_LIST(V)
#define DECODE_IMPL_CONST2(opcode, name)
#define DECLARE_OPCODE_CASE(name,...)
#define CALL_INTERFACE_IF_OK_AND_PARENT_REACHABLE(name,...)
#define INTERFACE_FUNCTIONS(F)
#define BUILD_SIMPLE_OPCODE(op, _, sig,...)
#define CHECK_PROTOTYPE_OPCODE(feat)
#define ASMJS_CASE(Op,...)
#define DECODE_IMPL_CONST(opcode)
#define DECODE_STORE_MEM(op,...)
#define DEFINE_SIMPLE_SIG_OPERATOR(sig,...)
#define SIMPLE_PROTOTYPE_CASE(name,...)
#define BUILD_SIMPLE_EXTENDED_CONST_OPCODE(op,...)
#define TRACE_INST_FORMAT
#define DECODE_LOAD_MEM(op,...)
#define DECODE_IMPL2(opcode, name)
OptionalOpIndex index
int32_t offset
TNode< Object > target
std::optional< TNode< JSArray > > a
Control control_
ZoneVector< RpoNumber > & result
#define TRACE(...)
bool null_succeeds
MovableLabel handler
static constexpr bool kUsesPoppedArgs
int num_locals_
ZoneVector< Entry > entries
int pc_offset
STL namespace.
Utf8Variant
Definition unicode.h:145
constexpr size_t RoundUpToPowerOfTwo(size_t value)
Definition bits.h:252
constexpr Vector< T > VectorOf(T *start, size_t size)
Definition vector.h:360
bool ValidateHeapType(Decoder *decoder, const uint8_t *pc, const WasmModule *module, HeapType type)
static void Populate(HeapType *unfinished_type, const WasmModule *module)
bool ValidateValueType(Decoder *decoder, const uint8_t *pc, const WasmModule *module, ValueType type)
std::pair< ValueType, uint32_t > read_value_type(Decoder *decoder, const uint8_t *pc, WasmEnabledFeatures enabled)
std::pair< HeapType, uint32_t > read_heap_type(Decoder *decoder, const uint8_t *pc, WasmEnabledFeatures enabled)
const std::pair< uint32_t, uint32_t > invalid_instruction_trace
constexpr uint8_t kSharedFlagCode
constexpr IndependentHeapType kWasmStringRef
constexpr size_t kV8MaxWasmFunctionLocals
Definition wasm-limits.h:52
bool IsSameTypeHierarchy(HeapType type1, HeapType type2, const WasmModule *module)
constexpr IndependentValueType kWasmI8
constexpr IndependentValueType kWasmF32
static constexpr LoadType GetLoadType(WasmOpcode opcode)
constexpr IndependentHeapType kWasmAnyRef
constexpr IndependentHeapType kWasmStringViewWtf8
V8_INLINE void DecodeError(Decoder *decoder, const uint8_t *pc, const char *str, Args &&... args)
bool is_asmjs_module(const WasmModule *module)
constexpr IndependentHeapType kWasmEqRef
constexpr IndependentHeapType kWasmExternRef
constexpr IndependentValueType kWasmI32
constexpr IndependentHeapType kWasmStringViewIter
constexpr IndependentHeapType kWasmContRef
constexpr size_t kV8MaxWasmArrayNewFixedLength
Definition wasm-limits.h:66
constexpr IndependentHeapType kWasmI31Ref
constexpr size_t kV8MaxWasmTypes
Definition wasm-limits.h:30
constexpr IndependentHeapType kWasmExnRef
constexpr size_t kV8MaxWasmFunctionBrTableSize
Definition wasm-limits.h:55
constexpr IndependentHeapType kWasmStringViewWtf16
constexpr IndependentHeapType kWasmVoid
constexpr IndependentHeapType kWasmNullContRef
V8_INLINE bool IsSubtypeOf(ValueType subtype, ValueType supertype, const WasmModule *sub_module, const WasmModule *super_module)
constexpr IndependentHeapType kWasmBottom
constexpr IndependentValueType kWasmS128
static constexpr StoreType GetStoreType(WasmOpcode opcode)
unsigned OpcodeLength(const uint8_t *pc, const uint8_t *end)
Signature< ValueType > FunctionSig
constexpr IndependentValueType kWasmF64
constexpr bool is_reference(ValueKind kind)
constexpr IndependentValueType kWasmI64
constexpr IndependentValueType kWasmI16
constexpr IndependentHeapType kWasmRefString
constexpr IndependentHeapType kWasmArrayRef
constexpr IndependentHeapType kWasmNullExnRef
constexpr int kSimd128Size
Definition globals.h:706
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in name
Definition flags.cc:2086
kWasmInternalFunctionIndirectPointerTag kProtectedInstanceDataOffset sig
bool IsShared(Tagged< Object > obj)
V8_EXPORT_PRIVATE FlagValues v8_flags
V8_EXPORT_PRIVATE constexpr int ElementSizeLog2Of(MachineRepresentation)
JSArrayBuffer::IsDetachableBit is_shared
constexpr uint32_t kMaxUInt32
Definition globals.h:387
RegExpError error_
#define V8_NOEXCEPT
#define PRINTF_FORMAT(format_param, dots_param)
#define UNREACHABLE()
Definition logging.h:67
#define FATAL(...)
Definition logging.h:47
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define CHECK_GE(lhs, rhs)
#define DCHECK_NULL(val)
Definition logging.h:491
#define DCHECK_WITH_MSG(condition, msg)
Definition logging.h:182
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define DCHECK_GT(v1, v2)
Definition logging.h:487
#define arraysize(array)
Definition macros.h:67
bool enabled_
Definition string.cc:1013
ArrayIndexImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
BlockTypeImmediate(const BlockTypeImmediate &)=delete
BlockTypeImmediate & operator=(const BlockTypeImmediate &)=delete
BlockTypeImmediate & operator=(BlockTypeImmediate &&)=delete
BlockTypeImmediate(WasmEnabledFeatures enabled, Decoder *decoder, const uint8_t *pc, ValidationTag={})
BlockTypeImmediate(BlockTypeImmediate &&)=delete
BrOnCastImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag={})
BranchDepthImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag={})
BranchTableImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag={})
CallFunctionImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
CallIndirectImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
union v8::internal::wasm::CatchCase::MaybeTagIndex maybe_tag
ContIndexImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
ControlBase(Zone *zone, ControlKind kind, uint32_t stack_depth, uint32_t init_stack_depth, const uint8_t *pc, Reachability reachability)
MOVE_ONLY_NO_DEFAULT_CONSTRUCTOR(ControlBase)
FieldImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
GlobalIndexImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
union v8::internal::wasm::HandlerCase::MaybeHandlerDepth maybe_depth
HeapTypeImmediate(WasmEnabledFeatures enabled, Decoder *decoder, const uint8_t *pc, ValidationTag={})
ImmF32Immediate(Decoder *decoder, const uint8_t *pc, ValidationTag={})
ImmF64Immediate(Decoder *decoder, const uint8_t *pc, ValidationTag={})
ImmI32Immediate(Decoder *decoder, const uint8_t *pc, ValidationTag={})
ImmI64Immediate(Decoder *decoder, const uint8_t *pc, ValidationTag={})
IndexImmediate(Decoder *decoder, const uint8_t *pc, const char *name, ValidationTag={})
V8_INLINE MemoryAccessImmediate(Decoder *decoder, const uint8_t *pc, uint32_t max_alignment, ValidationTag={})
V8_NOINLINE V8_PRESERVE_MOST void ConstructSlow(Decoder *decoder, const uint8_t *pc, uint32_t max_alignment)
MemoryCopyImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
MemoryIndexImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
MemoryInitImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
union v8::internal::wasm::Merge::@174 vals
static constexpr ModuleTypeIndex Invalid()
Definition value-type.h:73
SelectTypeImmediate(WasmEnabledFeatures enabled, Decoder *decoder, const uint8_t *pc, ValidationTag={})
SigIndexImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
Simd128Immediate(Decoder *decoder, const uint8_t *pc, ValidationTag={})
SimdLaneImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag={})
StringConstImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag={})
StructIndexImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
TableCopyImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
TableIndexImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
TableInitImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
TagIndexImmediate(Decoder *decoder, const uint8_t *pc, ValidationTag validate={})
TypeIndexImmediate(Decoder *decoder, const uint8_t *pc, const char *name, ValidationTag={})
constexpr bool valid() const
Definition value-type.h:58
ValueBase(const uint8_t *pc, ValueType type)
std::vector< TypeDefinition > types
HeapType heap_type(ModuleTypeIndex index) const
bool has_type(ModuleTypeIndex index) const
#define V8_INLINE
Definition v8config.h:500
#define V8_ASSUME
Definition v8config.h:533
#define V8_LIKELY(condition)
Definition v8config.h:661
#define V8_UNLIKELY(condition)
Definition v8config.h:660
#define V8_NOINLINE
Definition v8config.h:586
#define V8_PRESERVE_MOST
Definition v8config.h:598
std::unique_ptr< ValueMirror > value
std::unique_ptr< ValueMirror > key
wasm::ValueType type
const wasm::WasmModule * module_
#define FOREACH_SIMD_0_OPERAND_OPCODE(V)
#define FOREACH_ATOMIC_OPCODE(V)
#define FOREACH_GC_OPCODE(V)
#define FOREACH_SIMD_MEM_OPCODE(V)
#define FOREACH_SIMPLE_NON_CONST_OPCODE(V)
#define FOREACH_LOAD_MEM_OPCODE(V)
#define FOREACH_SIMD_MEM_1_OPERAND_OPCODE(V)
#define FOREACH_SIMPLE_PROTOTYPE_OPCODE(V)
#define FOREACH_NUMERIC_OPCODE(V)
#define FOREACH_ASMJS_COMPAT_OPCODE(V)
#define FOREACH_SIMD_OPCODE(V)
#define FOREACH_SIMD_1_OPERAND_OPCODE(V)
#define FOREACH_STORE_MEM_OPCODE(V)
#define FOREACH_ATOMIC_0_OPERAND_OPCODE(V)
#define FOREACH_SIMPLE_EXTENDED_CONST_OPCODE(V)
#define FOREACH_SIGNATURE(V)
#define FOREACH_SIMPLE_OPCODE(V)