// Upper bounds used when generating random Wasm modules; keeping these small
// bounds the size of generated modules and the fuzzer's search space.
// NOTE(review): this chunk appears to be a lossy extraction — the embedded
// original line numbers below skip ranges, so lines are missing. Confirm
// against the upstream file before making any code change here.
30constexpr int kMaxArrays = 3;
31constexpr int kMaxStructs = 4;
32constexpr int kMaxStructFields = 4;
33constexpr int kMaxGlobals = 64;
34constexpr uint32_t kMaxLocals = 32;
35constexpr int kMaxParameters = 15;
36constexpr int kMaxReturns = 15;
37constexpr int kMaxExceptions = 4;
38constexpr int kMaxTables = 4;
39constexpr int kMaxMemories = 4;
40constexpr int kMaxArraySize = 20;
41constexpr int kMaxPassiveDataSegments = 2;
42constexpr uint32_t kMaxRecursionDepth = 64;
43constexpr int kMaxCatchCases = 6;
// Caps generated table size at 32 or the --wasm-max-table-size flag,
// whichever is smaller.
// NOTE(review): the enclosing function header (original line ~45) is not
// visible in this extraction.
46 return std::min(
static_cast<int>(
v8_flags.wasm_max_table_size.value()), 32);
// Caps the number of generated functions at 4 or --max-wasm-functions,
// whichever is smaller. Closing brace is outside this view.
49int MaxNumOfFunctions() {
50 return std::min(
static_cast<int>(
v8_flags.max_wasm_functions.value()), 4);
// Builds a std::array from a parameter pack (element type/size deduced).
// NOTE(review): the return statement (original line ~82) is missing from
// this view.
79template <
typename... T>
80constexpr auto CreateArray(T... elements) {
81 std::array
result = {elements...};
// Concatenates any number of std::array<T, N>... into one flat
// std::array<T, sum-of-lengths>, copying element by element.
// NOTE(review): the trailing return and closing braces are not visible here.
86template <
typename T,
size_t...
N>
87constexpr auto ConcatArrays(std::array<T, N>... array) {
88 constexpr size_t kNumArrays =
sizeof...(array);
// Pointers to each source array's first element, in pack order.
89 std::array<T*, kNumArrays> kArrays = {&array[0]...};
90 constexpr size_t kLengths[kNumArrays] = {array.size()...};
91 constexpr size_t kSumOfLengths = (... + array.size());
93 std::array<T, kSumOfLengths>
result = {0};
94 size_t result_index = 0;
95 for (
size_t arr = 0; arr < kNumArrays; arr++) {
96 for (
size_t pos = 0;
pos < kLengths[arr];
pos++) {
97 result[result_index++] = kArrays[arr][
pos];
// --- DataRange (fragments) ---
// Wraps the fuzzer's input bytes; every random decision is drawn from this
// buffer (optionally falling back to an RNG). Copying is deleted so entropy
// is consumed exactly once; moving is allowed.
// NOTE(review): the class header and several member bodies (original lines
// 103-200) are only partially visible here.
108 base::RandomNumberGenerator
rng_;
// Seed the RNG from the data itself unless an explicit seed is given.
111 explicit DataRange(base::Vector<const uint8_t> data, int64_t seed = -1)
112 :
data_(data),
rng_(seed == -1 ? get<int64_t>() : seed) {}
113 DataRange(
const DataRange&) =
delete;
114 DataRange& operator=(
const DataRange&) =
delete;
124 DataRange& operator=(DataRange&& other)
V8_NOEXCEPT {
131 size_t size()
const {
return data_.size(); }
// split(): carve off a prefix of the remaining bytes as an independent
// DataRange with a derived seed (used for sub-generators).
135 uint16_t random_choice =
data_.size() > std::numeric_limits<uint8_t>::max()
// std::max with size_t{1} guards against modulo-by-zero on empty data.
138 uint16_t num_bytes = random_choice % std::max(
size_t{1},
data_.size());
139 int64_t new_seed =
rng_.initial_seed() ^
rng_.NextInt64();
140 DataRange
split(
data_.SubVector(0, num_bytes), new_seed);
// getPseudoRandom<T, max_bytes>(): random T; signed types are produced by
// casting from the unsigned counterpart to avoid UB on narrowing.
145 template <
typename T,
size_t max_
bytes = sizeof(T)>
146 T getPseudoRandom() {
147 static_assert(!std::is_same_v<T, bool>,
"bool needs special handling");
148 static_assert(max_bytes <=
sizeof(
T));
154 if constexpr (std::is_integral_v<T> && std::is_signed_v<T>) {
157 return static_cast<int8_t
>(getPseudoRandom<uint8_t>());
159 return static_cast<int16_t>(getPseudoRandom<uint16_t>());
161 return static_cast<int32_t>(getPseudoRandom<uint32_t>());
163 return static_cast<T
>(
164 getPseudoRandom<std::make_unsigned_t<T>, max_bytes>());
// get<T>(): consume up to sizeof(T) bytes from the buffer (clamped to what
// is left).
173 template <
typename T>
176 static_assert(!std::is_same_v<T, bool>,
"bool needs special handling");
182 const size_t num_bytes = std::min(
sizeof(T),
data_.size());
// Specialization for bool: one byte, reduced mod 2.
192bool DataRange::get() {
201 return get<uint8_t>() % 2;
// Named boolean enums so call sites of GetValueTypeHelper read as
// self-documenting flags instead of bare true/false.
// NOTE(review): the closing `};` of the first three enums is not visible in
// this extraction.
204enum IncludeNumericTypes {
205 kIncludeNumericTypes =
true,
206 kExcludeNumericTypes =
false
208enum IncludePackedTypes {
209 kIncludePackedTypes =
true,
210 kExcludePackedTypes =
false
212enum IncludeAllGenerics {
213 kIncludeAllGenerics =
true,
214 kExcludeSomeGenerics =
false
216enum IncludeS128 { kIncludeS128 =
true, kExcludeS128 =
false };
// Picks a random ValueType for the generated module. The candidate set is
// assembled according to the enabled options (SIMD, WasmGC) and the flag
// parameters, then one entry is chosen by a byte from `data`; ids past the
// built-in list select a user-defined (indexed) type.
220ValueType GetValueTypeHelper(WasmModuleGenerationOptions options,
221 DataRange* data, uint32_t num_nullable_types,
222 uint32_t num_non_nullable_types,
223 IncludeNumericTypes include_numeric_types,
224 IncludePackedTypes include_packed_types,
225 IncludeAllGenerics include_all_generics,
226 IncludeS128 include_s128 = kIncludeS128) {
228 base::SmallVector<ValueType, 32> types;
231 if (include_numeric_types) {
// i32 appears three times to weight the choice towards i32.
234 types.insert(types.end(),
235 {kWasmI32, kWasmI32, kWasmI32, kWasmI64, kWasmF32, kWasmF64});
238 if (options.generate_simd() && include_s128) {
// Nullability is only randomized when WasmGC is on.
252 const bool nullable = options.generate_wasm_gc() ? data->get<
bool>() : false;
254 if (options.generate_wasm_gc()) {
257 if (include_numeric_types && include_packed_types) {
258 types.insert(types.end(), {kWasmI8, kWasmI16});
262 types.insert(types.end(),
263 {kWasmNullRef, kWasmNullExternRef, kWasmNullFuncRef});
265 if (nullable || include_all_generics) {
266 types.insert(types.end(), {kWasmStructRef, kWasmArrayRef, kWasmAnyRef,
267 kWasmEqRef, kWasmExternRef});
274 const uint32_t num_user_defined_types =
275 nullable ? num_nullable_types : num_non_nullable_types;
280 data->get<uint8_t>() % (types.size() + num_user_defined_types);
// Ids beyond the built-in list map onto user-defined module types.
284 if (chosen_id >= types.size()) {
287 ModuleTypeIndex{chosen_id -
static_cast<uint32_t
>(types.size())},
295 return types[chosen_id];
// Convenience wrapper with the default flag combination.
298ValueType GetValueType(WasmModuleGenerationOptions options, DataRange* data,
299 uint32_t num_types) {
300 return GetValueTypeHelper(options, data, num_types, num_types,
301 kIncludeNumericTypes, kExcludePackedTypes,
302 kIncludeAllGenerics);
// Adds a passive data segment of 0..64 random bytes to the module builder.
305void GeneratePassiveDataSegment(DataRange* range, WasmModuleBuilder* builder) {
306 int length = range->get<uint8_t>() % 65;
307 ZoneVector<uint8_t>
data(length, builder->zone());
309 data[
i] = range->getPseudoRandom<uint8_t>();
311 builder->AddPassiveDataSegment(data.data(),
312 static_cast<uint32_t
>(data.size()));
// Adds an element segment of 0..10 entries for the given indexed reference
// type and returns its segment index.
315uint32_t GenerateRefTypeElementSegment(DataRange* range,
316 WasmModuleBuilder* builder,
317 ValueType element_type) {
318 DCHECK(element_type.is_object_reference());
319 DCHECK(element_type.has_index());
320 WasmModuleBuilder::WasmElemSegment segment(
321 builder->zone(), element_type,
false,
323 size_t element_count = range->get<uint8_t>() % 11;
324 for (
size_t i = 0;
i < element_count; ++
i) {
325 segment.entries.emplace_back(
327 element_type.ref_index().index);
329 return builder->AddElementSegment(std::move(segment));
// Produces a random list of up to kMaxParameters value types.
332std::vector<ValueType> GenerateTypes(WasmModuleGenerationOptions options,
333 DataRange* data, uint32_t num_ref_types) {
334 std::vector<ValueType> types;
335 int num_params =
int{data->get<uint8_t>()} % (kMaxParameters + 1);
336 types.reserve(num_params);
337 for (
int i = 0;
i < num_params; ++
i) {
338 types.push_back(GetValueType(options, data, num_ref_types));
// CreateSignature: builds a FunctionSig from param/return type vectors.
// NOTE(review): the function header (original line ~343) is missing from
// this view.
344 base::Vector<const ValueType> param_types,
345 base::Vector<const ValueType> return_types) {
347 for (
auto& type : param_types) {
348 builder.AddParam(type);
350 for (
auto& type : return_types) {
351 builder.AddReturn(type);
353 return builder.Get();
// Pointer-to-member types for BodyGen's expression generators: plain
// generators and heap-type generators (which report success via bool).
357using GenerateFn =
void (BodyGen::*)(DataRange*);
358using GenerateFnWithHeap =
bool (BodyGen::*)(HeapType, DataRange*,
Nullability);
// Pre-concatenated tables of generator alternatives, one per combination of
// module-generation options (MVP only, +SIMD, +WasmGC, or all). Built once
// at constexpr time so GetAlternatives is a cheap switch.
362template <
size_t kNumMVP,
size_t kAdditionalSimd,
size_t kAdditionalWasmGC>
363class GeneratorAlternativesPerOption {
364 static constexpr size_t kNumSimd = kNumMVP + kAdditionalSimd;
365 static constexpr size_t kNumWasmGC = kNumMVP + kAdditionalWasmGC;
369 constexpr GeneratorAlternativesPerOption(
370 std::array<GenerateFn, kNumMVP> mvp,
371 std::array<GenerateFn, kAdditionalSimd> simd,
372 std::array<GenerateFn, kAdditionalWasmGC> wasmgc)
374 simd_(ConcatArrays(mvp, simd)),
375 wasmgc_(ConcatArrays(mvp, wasmgc)),
376 all_(ConcatArrays(mvp, ConcatArrays(simd, wasmgc))) {}
// Selects the table matching the enabled option bits.
378 constexpr base::Vector<const GenerateFn> GetAlternatives(
379 WasmModuleGenerationOptions options)
const {
380 switch (options.ToIntegral()) {
382 return base::VectorOf(
mvp_);
383 case 1 << kGenerateSIMD:
384 return base::VectorOf(
simd_);
385 case 1 << kGenerateWasmGC:
386 return base::VectorOf(
wasmgc_);
387 case (1 << kGenerateSIMD) | (1 << kGenerateWasmGC):
394 const std::array<GenerateFn, kNumMVP>
mvp_;
395 const std::array<GenerateFn, kNumSimd>
simd_;
396 const std::array<GenerateFn, kNumWasmGC>
wasmgc_;
397 const std::array<GenerateFn, kNumAll>
all_;
// Deduction guide: infer the three size parameters from the ctor arguments.
401template <
size_t kNumMVP,
size_t kAdditionalSimd,
size_t kAdditionalWasmGC>
402GeneratorAlternativesPerOption(std::array<GenerateFn, kNumMVP>,
403 std::array<GenerateFn, kAdditionalSimd>,
404 std::array<GenerateFn, kAdditionalWasmGC>)
405 -> GeneratorAlternativesPerOption<kNumMVP, kAdditionalSimd,
// op<Op, Args...>: generate the arguments, then emit opcode Op.
// NOTE(review): the template head and the Emit call are not visible here.
410 void op(DataRange* data) {
411 Generate<Args...>(
data);
// RAII helper: emits a block/loop/if/try header on construction, tracks the
// branch types on gen->blocks_, and pops (and, presumably, emits `end`) on
// destruction.
417 BlockScope(BodyGen*
gen, WasmOpcode block_type,
418 base::Vector<const ValueType> param_types,
419 base::Vector<const ValueType> result_types,
420 base::Vector<const ValueType> br_types,
bool emit_end =
true)
422 gen->blocks_.emplace_back(br_types.begin(), br_types.end());
423 gen->builder_->EmitByte(block_type);
// Shorthand encodings: void block type, or a single result type byte;
// anything else needs a full signature index.
425 if (param_types.size() == 0 && result_types.size() == 0) {
426 gen->builder_->EmitValueType(kWasmVoid);
429 if (param_types.size() == 0 && result_types.size() == 1) {
430 gen->builder_->EmitValueType(result_types[0]);
434 Zone* zone =
gen->builder_->builder()->zone();
435 FunctionSig::Builder builder(zone, result_types.size(),
437 for (
auto& type : param_types) {
439 builder.AddParam(type);
441 for (
auto& type : result_types) {
443 builder.AddReturn(type);
446 const bool is_final =
true;
447 ModuleTypeIndex sig_id =
448 gen->builder_->builder()->AddSignature(sig, is_final);
449 gen->builder_->EmitI32V(sig_id);
// Destructor body fragment: unwind the tracked block stack.
454 gen_->blocks_.pop_back();
// --- Structured-control generators (block / loop / finite_loop / if / try).
// Each emits the control header via BlockScope, then recursively generates a
// body producing the required types.
462 void block(base::Vector<const ValueType> param_types,
463 base::Vector<const ValueType> return_types, DataRange* data) {
464 BlockScope block_scope(
this, kExprBlock, param_types, return_types,
466 ConsumeAndGenerate(param_types, return_types, data);
// Template shorthand: a no-param block returning T (or nothing for kVoid).
469 template <ValueKind T>
470 void block(DataRange* data) {
471 if constexpr (T == kVoid) {
474 block({}, base::VectorOf({ValueType::Primitive(T)}), data);
478 void loop(base::Vector<const ValueType> param_types,
479 base::Vector<const ValueType> return_types, DataRange* data) {
480 BlockScope block_scope(
this, kExprLoop, param_types, return_types,
482 ConsumeAndGenerate(param_types, return_types, data);
485 template <ValueKind T>
486 void loop(DataRange* data) {
487 if constexpr (T == kVoid) {
490 loop({}, base::VectorOf({ValueType::Primitive(T)}), data);
// finite_loop: a loop with an explicit 1..8 iteration counter in a local so
// the generated program always terminates.
494 void finite_loop(base::Vector<const ValueType> param_types,
495 base::Vector<const ValueType> return_types,
498 int kLoopConstant = data->get<uint8_t>() % 8 + 1;
499 uint32_t counter =
builder_->AddLocal(kWasmI32);
500 builder_->EmitI32Const(kLoopConstant);
504 BlockScope loop_scope(
this, kExprLoop, param_types, return_types,
// Stash the loop parameters into locals (in reverse, popping the stack).
510 for (
auto it = param_types.rbegin(); it != param_types.rend(); it++) {
511 uint32_t local =
builder_->AddLocal(*it);
516 Generate(kWasmVoid, data);
527 BlockScope if_scope(
this, kExprIf, {}, {}, {});
528 Generate(param_types, data);
533 Generate(return_types, data);
537 template <ValueKind T>
538 void finite_loop(DataRange* data) {
539 if constexpr (T == kVoid) {
540 finite_loop({}, {},
data);
542 finite_loop({}, base::VectorOf({ValueType::Primitive(T)}), data);
546 enum IfType { kIf, kIfElse };
// if_: emit condition, then the taken (and optionally else) arm.
548 void if_(base::Vector<const ValueType> param_types,
549 base::Vector<const ValueType> return_types, IfType type,
553 Generate(kWasmI32, data);
554 BlockScope block_scope(
this, kExprIf, param_types, return_types,
556 ConsumeAndGenerate(param_types, return_types, data);
557 if (type == kIfElse) {
559 ConsumeAndGenerate(param_types, return_types, data);
563 template <ValueKind T, IfType type>
564 void if_(DataRange* data) {
565 static_assert(T == kVoid || type == kIfElse,
566 "if without else cannot produce a value");
568 T == kVoid ? base::Vector<ValueType>{}
569 : base::VectorOf({ValueType::Primitive(T)}),
// Legacy try/catch/delegate generator (pre-exnref exception handling).
573 void try_block_helper(ValueType return_type, DataRange* data) {
574 bool has_catch_all = data->get<
bool>();
576 data->get<uint8_t>() % (
builder_->builder()->NumTags() + 1);
// delegate is only valid without catch clauses.
577 bool is_delegate = num_catch == 0 && !has_catch_all && data->get<
bool>();
581 base::Vector<const ValueType> return_type_vec =
582 return_type.kind() == kVoid ? base::Vector<ValueType>{}
583 : base::VectorOf(&return_type, 1);
584 BlockScope block_scope(
this, kExprTry, {}, return_type_vec, return_type_vec,
586 int control_depth =
static_cast<int>(
blocks_.size()) - 1;
587 Generate(return_type, data);
589 for (
int i = 0;
i < num_catch; ++
i) {
592 ConsumeAndGenerate(exception_type->parameters(), return_type_vec, data);
596 Generate(return_type, data);
602 int delegate_depth = data->get<uint8_t>() % (
blocks_.size() - 1);
603 builder_->EmitWithU32V(kExprDelegate, delegate_depth);
608 template <ValueKind T>
609 void try_block(DataRange* data) {
610 try_block_helper(ValueType::Primitive(T), data);
// try_table generator, built recursively: one wrapper block per catch case,
// innermost emits the try_table itself with all catch clauses.
620 void try_table_rec(base::Vector<const ValueType> param_types,
621 base::Vector<const ValueType> return_types,
622 base::Vector<CatchCase> catch_cases,
size_t i,
// Base case: all catch wrapper blocks are open; emit the try_table.
625 if (
i == catch_cases.
size()) {
628 blocks_.emplace_back(return_types.begin(), return_types.end());
629 const bool is_final =
true;
630 ModuleTypeIndex try_sig_index =
builder_->builder()->AddSignature(
631 CreateSignature(
builder_->builder()->zone(), param_types,
635 builder_->EmitU32V(
static_cast<uint32_t
>(catch_cases.size()));
636 for (
size_t j = 0; j < catch_cases.size(); ++j) {
// Catch j branches to its wrapper block, counted from innermost out.
641 builder_->EmitByte(catch_cases.size() - j - 1);
643 ConsumeAndGenerate(param_types, return_types, data);
// Skip over the catch handlers on the non-throwing path.
646 builder_->EmitWithI32V(kExprBr,
static_cast<int32_t>(catch_cases.size()));
// Recursive case: wrapper block whose result is the tag's payload (if any)
// plus the exnref (if caught with ref).
662 size_t return_count =
663 (has_tag ? type->parameter_count() : 0) + (has_ref ? 1 : 0);
665 builder_->builder()->zone()->AllocateVector<ValueType>(return_count);
667 std::copy_n(type->parameters().begin(), type->parameter_count(),
668 block_returns.begin());
672 BlockScope
block(
this, kExprBlock, param_types, block_returns,
674 try_table_rec(param_types, return_types, catch_cases,
i + 1, data);
// Handler body for this catch case, then rejoin the surrounding block.
678 ConsumeAndGenerate(block_returns, return_types, data);
679 builder_->EmitWithU32V(kExprBr,
static_cast<uint32_t
>(
i));
// Entry point: pick 0..kMaxCatchCases-1 random catch cases, then recurse.
682 void try_table_block_helper(base::Vector<const ValueType> param_types,
683 base::Vector<const ValueType> return_types,
685 uint8_t num_catch = data->get<uint8_t>() % kMaxCatchCases;
687 builder_->builder()->zone()->AllocateVector<CatchCase>(num_catch);
688 for (
int i = 0;
i < num_catch; ++
i) {
689 catch_cases[
i].tag_index =
690 data->get<uint8_t>() %
builder_->builder()->NumTags();
691 catch_cases[
i].kind =
695 BlockScope block_scope(
this, kExprBlock, param_types, return_types,
697 try_table_rec(param_types, return_types, catch_cases, 0, data);
700 template <ValueKind T>
701 void try_table_block(DataRange* data) {
702 ValueType return_types_arr[1] = {ValueType::Primitive(T)};
703 auto return_types = base::VectorOf(return_types_arr, T == kVoid ? 0 : 1);
// Without the exnref feature fall back to some other block construct.
704 if (!
v8_flags.experimental_wasm_exnref) {
706 any_block({}, return_types,
data);
709 try_table_block_helper({}, return_types,
data);
// any_block: randomly pick one of the structured-control generators; the
// try_table case is only available when exnref is enabled (6 vs 5 choices).
712 void any_block(base::Vector<const ValueType> param_types,
713 base::Vector<const ValueType> return_types, DataRange* data) {
714 uint8_t available_cases =
v8_flags.experimental_wasm_exnref ? 6 : 5;
715 uint8_t block_type = data->get<uint8_t>() % available_cases;
716 switch (block_type) {
718 block(param_types, return_types, data);
721 loop(param_types, return_types, data);
724 finite_loop(param_types, return_types, data);
// Plain `if` (no else) is only type-correct when params == results.
727 if (param_types == return_types) {
728 if_({}, {}, kIf,
data);
733 if_(param_types, return_types, kIfElse, data);
736 try_table_block_helper(param_types, return_types, data);
// --- Branch generators. Each picks a random enclosing block, produces the
// values that block's branch expects, then emits the branch with the
// relative depth (stack is innermost-last, hence size() - 1 - target).
741 void br(DataRange* data) {
744 const uint32_t target_block = data->get<uint8_t>() %
blocks_.size();
745 const auto break_types = base::VectorOf(
blocks_[target_block]);
747 Generate(break_types, data);
749 kExprBr,
static_cast<uint32_t
>(
blocks_.size()) - 1 - target_block);
// br_if: like br but with an i32 condition on top of the stack; afterwards
// the break values must be reconciled with the wanted result kind.
752 template <ValueKind wanted_kind>
753 void br_if(DataRange* data) {
756 const uint32_t target_block = data->get<uint8_t>() %
blocks_.size();
757 const auto break_types = base::VectorOf(
blocks_[target_block]);
759 Generate(break_types, data);
760 Generate(kWasmI32, data);
762 kExprBrIf,
static_cast<uint32_t
>(
blocks_.size()) - 1 - target_block);
766 ? base::Vector<ValueType>{}
767 : base::VectorOf({ValueType::Primitive(wanted_kind)}),
771 template <ValueKind wanted_kind>
772 void br_on_null(DataRange* data) {
774 const uint32_t target_block = data->get<uint8_t>() %
blocks_.size();
775 const auto break_types = base::VectorOf(
blocks_[target_block]);
776 Generate(break_types, data);
780 static_cast<uint32_t
>(
blocks_.size()) - 1 - target_block);
785 ? base::Vector<ValueType>{}
786 : base::VectorOf({ValueType::Primitive(wanted_kind)}),
// br_on_non_null requires the target block to expect a reference as its
// last value; otherwise fall back to a plain value of the wanted kind.
790 template <ValueKind wanted_kind>
791 void br_on_non_null(DataRange* data) {
793 const uint32_t target_block = data->get<uint8_t>() %
blocks_.size();
794 const auto break_types = base::VectorOf(
blocks_[target_block]);
795 if (break_types.empty() ||
796 !break_types[break_types.size() - 1].is_reference()) {
798 Generate<wanted_kind>(data);
801 Generate(break_types, data);
804 static_cast<uint32_t
>(
blocks_.size()) - 1 - target_block);
// On fall-through the non-reference prefix of the break types remains.
806 break_types.SubVector(0, break_types.size() - 1),
808 ? base::Vector<ValueType>{}
809 : base::VectorOf({ValueType::Primitive(wanted_kind)}),
// br_table: open 1..8 nested blocks, then emit a br_table whose entries
// (chosen per random bits) jump among them.
813 void br_table(ValueType result_type, DataRange* data) {
814 const uint8_t block_count = 1 + data->get<uint8_t>() % 8;
// Need at least block_count random bits, two per entry.
817 block_count > 4 ? data->get<
uint16_t>() : data->get<uint8_t>();
818 for (
size_t i = 0;
i < block_count; ++
i) {
820 builder_->EmitValueType(result_type);
822 if (result_type != kWasmVoid) {
823 blocks_.back().push_back(result_type);
828 if ((entry_bits & 3) == 3) {
829 Generate(kWasmVoid, data);
834 Generate(result_type, data);
835 Generate(kWasmI32, data);
// entry_count branch targets plus one default target.
837 uint32_t entry_count = 1 + data->get<uint8_t>() % 8;
839 for (
size_t i = 0;
i < entry_count + 1; ++
i) {
840 builder_->EmitU32V(data->get<uint8_t>() % block_count);
843 uint8_t exit_bits = result_type ==
kWasmVoid ? 0 : data->get<uint8_t>();
844 for (
size_t i = 0;
i < block_count; ++
i) {
848 Generate(result_type, data);
856 template <ValueKind wanted_kind>
857 void br_table(DataRange* data) {
859 wanted_kind == kVoid ? kWasmVoid : ValueType::Primitive(wanted_kind),
// return: produce the function's full return list, then (presumably) emit
// kExprReturn — the Emit line is not visible in this extraction.
863 void return_op(DataRange* data) {
864 auto returns =
builder_->signature()->returns();
865 Generate(returns, data);
// Maximum legal alignment (log2 of the natural access width) for each
// memory opcode, per the Wasm memarg rules: 16-byte ops -> 4, 8-byte -> 3,
// 4-byte -> 2, 2-byte -> 1, 1-byte -> 0.
// NOTE(review): the `return N;` lines between the case groups are missing
// from this extraction; only the case labels survived.
869 constexpr static uint8_t max_alignment(WasmOpcode memop) {
// 16-byte (v128) accesses.
871 case kExprS128LoadMem:
872 case kExprS128StoreMem:
// 8-byte accesses.
874 case kExprI64LoadMem:
875 case kExprF64LoadMem:
876 case kExprI64StoreMem:
877 case kExprF64StoreMem:
878 case kExprI64AtomicStore:
879 case kExprI64AtomicLoad:
880 case kExprI64AtomicAdd:
881 case kExprI64AtomicSub:
882 case kExprI64AtomicAnd:
883 case kExprI64AtomicOr:
884 case kExprI64AtomicXor:
885 case kExprI64AtomicExchange:
886 case kExprI64AtomicCompareExchange:
887 case kExprS128Load8x8S:
888 case kExprS128Load8x8U:
889 case kExprS128Load16x4S:
890 case kExprS128Load16x4U:
891 case kExprS128Load32x2S:
892 case kExprS128Load32x2U:
893 case kExprS128Load64Splat:
894 case kExprS128Load64Zero:
// 4-byte accesses.
896 case kExprI32LoadMem:
897 case kExprI64LoadMem32S:
898 case kExprI64LoadMem32U:
899 case kExprF32LoadMem:
900 case kExprI32StoreMem:
901 case kExprI64StoreMem32:
902 case kExprF32StoreMem:
903 case kExprI32AtomicStore:
904 case kExprI64AtomicStore32U:
905 case kExprI32AtomicLoad:
906 case kExprI64AtomicLoad32U:
907 case kExprI32AtomicAdd:
908 case kExprI32AtomicSub:
909 case kExprI32AtomicAnd:
910 case kExprI32AtomicOr:
911 case kExprI32AtomicXor:
912 case kExprI32AtomicExchange:
913 case kExprI32AtomicCompareExchange:
914 case kExprI64AtomicAdd32U:
915 case kExprI64AtomicSub32U:
916 case kExprI64AtomicAnd32U:
917 case kExprI64AtomicOr32U:
918 case kExprI64AtomicXor32U:
919 case kExprI64AtomicExchange32U:
920 case kExprI64AtomicCompareExchange32U:
921 case kExprS128Load32Splat:
922 case kExprS128Load32Zero:
// 2-byte accesses.
924 case kExprI32LoadMem16S:
925 case kExprI32LoadMem16U:
926 case kExprI64LoadMem16S:
927 case kExprI64LoadMem16U:
928 case kExprI32StoreMem16:
929 case kExprI64StoreMem16:
930 case kExprI32AtomicStore16U:
931 case kExprI64AtomicStore16U:
932 case kExprI32AtomicLoad16U:
933 case kExprI64AtomicLoad16U:
934 case kExprI32AtomicAdd16U:
935 case kExprI32AtomicSub16U:
936 case kExprI32AtomicAnd16U:
937 case kExprI32AtomicOr16U:
938 case kExprI32AtomicXor16U:
939 case kExprI32AtomicExchange16U:
940 case kExprI32AtomicCompareExchange16U:
941 case kExprI64AtomicAdd16U:
942 case kExprI64AtomicSub16U:
943 case kExprI64AtomicAnd16U:
944 case kExprI64AtomicOr16U:
945 case kExprI64AtomicXor16U:
946 case kExprI64AtomicExchange16U:
947 case kExprI64AtomicCompareExchange16U:
948 case kExprS128Load16Splat:
// 1-byte accesses.
950 case kExprI32LoadMem8S:
951 case kExprI32LoadMem8U:
952 case kExprI64LoadMem8S:
953 case kExprI64LoadMem8U:
954 case kExprI32StoreMem8:
955 case kExprI64StoreMem8:
956 case kExprI32AtomicStore8U:
957 case kExprI64AtomicStore8U:
958 case kExprI32AtomicLoad8U:
959 case kExprI64AtomicLoad8U:
960 case kExprI32AtomicAdd8U:
961 case kExprI32AtomicSub8U:
962 case kExprI32AtomicAnd8U:
963 case kExprI32AtomicOr8U:
964 case kExprI32AtomicXor8U:
965 case kExprI32AtomicExchange8U:
966 case kExprI32AtomicCompareExchange8U:
967 case kExprI64AtomicAdd8U:
968 case kExprI64AtomicSub8U:
969 case kExprI64AtomicAnd8U:
970 case kExprI64AtomicOr8U:
971 case kExprI64AtomicXor8U:
972 case kExprI64AtomicExchange8U:
973 case kExprI64AtomicCompareExchange8U:
974 case kExprS128Load8Splat:
// memop<memory_op, arg_kinds...>: emits a random memory access. Atomics use
// the maximum (natural) alignment; others pick a random legal alignment.
982 void memop(DataRange* data) {
984 const bool is_atomic = memory_op >> 8 == kAtomicPrefix;
985 const uint8_t align = is_atomic ? max_alignment(memory_op)
986 : data->getPseudoRandom<uint8_t>() %
987 (max_alignment(memory_op) + 1);
989 uint8_t memory_index =
990 data->get<uint8_t>() %
builder_->builder()->NumMemories();
// Occasionally (1/256) pick a huge offset to exercise OOB handling; for
// memory64 use up to 33 bits.
994 if ((
offset & 0xff) == 0xff) {
996 ? data->getPseudoRandom<uint64_t>() & 0x1ffffffff
997 : data->getPseudoRandom<uint32_t>();
// Index type follows the memory's address type (i64 for memory64).
1001 builder_->builder()->IsMemory64(memory_index)
1002 ? Generate<kI64, arg_kinds...>(data)
1003 : Generate<kI32, arg_kinds...>(data);
1007 if (WasmOpcodes::IsPrefixOpcode(
static_cast<WasmOpcode>(memory_op >> 8))) {
1008 DCHECK(memory_op >> 8 == kAtomicPrefix || memory_op >> 8 == kSimdPrefix);
1009 builder_->EmitWithPrefix(memory_op);
// op_with_prefix<Op, Args...>: arguments, then a prefix-encoded opcode.
1019 void op_with_prefix(DataRange* data) {
1020 Generate<Args...>(
data);
// v128.const with 16 random bytes.
1024 void simd_const(DataRange* data) {
1025 builder_->EmitWithPrefix(kExprS128Const);
1027 builder_->EmitByte(data->getPseudoRandom<uint8_t>());
// SIMD lane ops: immediate lane index in [0, lanes).
1032 void simd_lane_op(DataRange* data) {
1033 Generate<Args...>(
data);
1035 builder_->EmitByte(data->get<uint8_t>() % lanes);
1039 void simd_lane_memop(DataRange* data) {
1041 memop<Op, Args...>(
data);
1042 builder_->EmitByte(data->get<uint8_t>() % lanes);
// i8x16.shuffle: two v128 inputs and 16 lane indices in [0, 32).
1045 void simd_shuffle(DataRange* data) {
1046 Generate<kS128, kS128>(data);
1047 builder_->EmitWithPrefix(kExprI8x16Shuffle);
1049 builder_->EmitByte(
static_cast<uint8_t
>(data->get<uint8_t>() % 32));
1053 void drop(DataRange* data) {
1062 enum CallKind { kCallDirect, kCallIndirect, kCallRef };
// Typed shorthands dispatching to the general call() below.
1064 template <ValueKind wanted_kind>
1065 void call(DataRange* data) {
1066 call(data, ValueType::Primitive(wanted_kind), kCallDirect);
1069 template <ValueKind wanted_kind>
1070 void call_indirect(DataRange* data) {
1071 call(data, ValueType::Primitive(wanted_kind), kCallIndirect);
1074 template <ValueKind wanted_kind>
1075 void call_ref(DataRange* data) {
1076 call(data, ValueType::Primitive(wanted_kind), kCallRef);
// Emits the numeric conversion opcode from src to dst, looked up in a 4x4
// table indexed by (dst, src) over {i32, i64, f32, f64}.
1079 void Convert(ValueType src, ValueType dst) {
1080 auto idx = [](ValueType t) ->
int {
1094 static constexpr WasmOpcode kConvertOpcodes[] = {
1096 kExprNop, kExprI32ConvertI64, kExprI32SConvertF32, kExprI32SConvertF64,
1098 kExprI64SConvertI32, kExprNop, kExprI64SConvertF32, kExprI64SConvertF64,
1100 kExprF32SConvertI32, kExprF32SConvertI64, kExprNop, kExprF32ConvertF64,
1102 kExprF64SConvertI32, kExprF64SConvertI64, kExprF64ConvertF32, kExprNop};
1103 int arr_idx = idx(dst) << 2 | idx(src);
1104 builder_->Emit(kConvertOpcodes[arr_idx]);
// Scans tables starting from a random index and returns the first funcref
// table; FATALs if none exists (table 0 is expected to be funcref).
1107 int choose_function_table_index(DataRange* data) {
1108 int table_count =
builder_->builder()->NumTables();
1109 int start = data->get<uint8_t>() % table_count;
1110 for (
int i = 0;
i < table_count; ++
i) {
1111 int index = (
start +
i) % table_count;
1112 if (
builder_->builder()->GetTableType(index).is_reference_to(
1117 FATAL(
"No funcref table found; table index 0 is expected to be funcref");
// General call generator: random callee, argument generation, then direct /
// indirect / ref call — as a tail call when the signatures' returns match
// (high bit of random_byte), else a normal call. Finally reconciles the
// callee's returns with the wanted result kind.
1120 void call(DataRange* data, ValueType wanted_kind, CallKind call_kind) {
1121 uint8_t random_byte = data->get<uint8_t>();
1122 int func_index = random_byte %
functions_.size();
1123 ModuleTypeIndex sig_index =
functions_[func_index];
1126 for (
size_t i = 0;
i <
sig->parameter_count(); ++
i) {
1127 Generate(
sig->GetParam(
i), data);
// Tail calls require the callee's returns to equal ours exactly.
1132 bool use_return_call = random_byte > 127;
1133 if (use_return_call &&
1134 std::equal(
sig->returns().begin(),
sig->returns().end(),
1135 builder_->signature()->returns().begin(),
1136 builder_->signature()->returns().end())) {
1137 if (call_kind == kCallDirect) {
1138 builder_->EmitWithU32V(kExprReturnCall,
1139 NumImportedFunctions() + func_index);
1140 }
else if (call_kind == kCallIndirect) {
// Table index operand is i64 for table64.
1143 uint32_t table_index = choose_function_table_index(data);
1144 builder_->builder()->IsTable64(table_index)
1145 ?
builder_->EmitI64Const(func_index)
1146 :
builder_->EmitI32Const(func_index);
1147 builder_->EmitWithU32V(kExprReturnCallIndirect, sig_index);
1151 HeapType::Index(sig_index, kNotShared, RefTypeKind::kFunction),
1153 builder_->EmitWithU32V(kExprReturnCallRef, sig_index);
// Non-tail-call path mirrors the above with the plain call opcodes.
1157 if (call_kind == kCallDirect) {
1158 builder_->EmitWithU32V(kExprCallFunction,
1159 NumImportedFunctions() + func_index);
1160 }
else if (call_kind == kCallIndirect) {
1163 uint32_t table_index = choose_function_table_index(data);
1164 builder_->builder()->IsTable64(table_index)
1165 ?
builder_->EmitI64Const(func_index)
1166 :
builder_->EmitI32Const(func_index);
1167 builder_->EmitWithU32V(kExprCallIndirect, sig_index);
1171 HeapType::Index(sig_index, kNotShared, RefTypeKind::kFunction),
1173 builder_->EmitWithU32V(kExprCallRef, sig_index);
// Callee returned nothing but a value is wanted: generate one separately.
1176 if (
sig->return_count() == 0 && wanted_kind != kWasmVoid) {
1178 Generate(wanted_kind, data);
// Otherwise drop or convert the returned values to match wanted_kind.
1181 if (wanted_kind == kWasmVoid) {
1183 for (
size_t i = 0;
i <
sig->return_count(); ++
i) {
1189 base::VectorOf(&wanted_kind, wanted_kind == kWasmVoid ? 0 : 1);
1190 ConsumeAndGenerate(
sig->returns(), wanted_types, data);
// Var: (index, type) pair for a local or global; kWasmVoid type marks "no
// suitable variable found".
1195 ValueType type = kWasmVoid;
1197 Var(uint32_t index, ValueType type) : index(index), type(type) {}
1198 bool is_valid()
const {
return type !=
kWasmVoid; }
// Maps a non-null ref type to its nullable counterpart; other types pass
// through unchanged.
1201 ValueType AsNullable(ValueType original) {
1202 if (!original.is_ref())
return original;
1203 return original.AsNullable();
// Picks a random local, optionally restricted to a type (exact or
// nullable-compatible); linear probe with wrap-around, invalid Var if none.
1206 Var GetRandomLocal(DataRange* data, ValueType type = kWasmTop) {
1207 const size_t locals_count = all_locals_count();
1208 if (locals_count == 0)
return {};
1209 uint32_t start_index = data->get<uint8_t>() % locals_count;
1210 uint32_t index = start_index;
1213 while (type != kWasmTop && local_type(index) != type &&
1214 AsNullable(local_type(index)) != type) {
1215 index = (index + 1) % locals_count;
1216 if (index == start_index)
return {};
1218 return {
index, local_type(index)};
1221 constexpr static bool is_convertible_kind(ValueKind
kind) {
// Shared implementation of local.get/set/tee: pick a local, generate the
// value for set/tee, emit the opcode, convert the result if needed.
1225 template <ValueKind wanted_kind>
1226 void local_op(DataRange* data, WasmOpcode opcode) {
1227 static_assert(wanted_kind == kVoid || is_convertible_kind(wanted_kind));
1228 Var local = GetRandomLocal(data);
// No usable local: fall back to generating a plain value (or nothing).
1231 if (!local.is_valid() || !is_convertible_kind(local.type.kind())) {
1232 if (wanted_kind == kVoid)
return;
1233 return Generate<wanted_kind>(data);
1236 if (opcode != kExprLocalGet) Generate(local.type, data);
1237 builder_->EmitWithU32V(opcode, local.index);
1238 if (wanted_kind != kVoid && local.type.kind() != wanted_kind) {
1239 Convert(local.type, ValueType::Primitive(wanted_kind));
1243 template <ValueKind wanted_kind>
1244 void get_local(DataRange* data) {
1245 static_assert(wanted_kind != kVoid,
"illegal type");
1246 local_op<wanted_kind>(data, kExprLocalGet);
1249 void set_local(DataRange* data) { local_op<kVoid>(data, kExprLocalSet); }
1251 template <ValueKind wanted_kind>
1252 void tee_local(DataRange* data) {
1253 local_op<wanted_kind>(data, kExprLocalTee);
// Copies a value through a chain of up to ~9 same-typed locals — stresses
// the register allocator / local handling in the compilers under test.
1264 void shift_locals_to(DataRange* data, Var start_local) {
1265 const uint32_t max_shift = data->get<uint8_t>() % 8 + 2;
1266 const auto [start_index,
type] = start_local;
1267 const size_t locals_count = all_locals_count();
1268 uint32_t previous_index = start_index;
1269 uint32_t index = start_index;
1270 for (uint32_t
i = 0;
i < max_shift; ++
i) {
// Find the next local of the same type (do-while search).
1272 index = (index + 1) % locals_count;
1273 }
while (local_type(index) != type);
// Wrapped all the way around: no other local of this type.
1276 if (index == start_index)
break;
1278 builder_->EmitSetLocal(previous_index);
1279 previous_index =
index;
1283 void shift_locals(DataRange* data) {
1284 const Var local = GetRandomLocal(data);
1285 if (local.type == kWasmVoid ||
1289 shift_locals_to(data, local);
// Constant emitters; num_bytes limits how much entropy each constant eats.
1292 template <
size_t num_
bytes>
1293 void i32_const(DataRange* data) {
1294 builder_->EmitI32Const(data->getPseudoRandom<int32_t, num_bytes>());
1297 template <
size_t num_
bytes>
1298 void i64_const(DataRange* data) {
1299 builder_->EmitI64Const(data->getPseudoRandom<int64_t, num_bytes>());
// Picks a random global, optionally restricted to mutable ones (required
// for global.set).
1302 Var GetRandomGlobal(DataRange* data,
bool ensure_mutable) {
1304 if (ensure_mutable) {
1309 index = data->get<uint8_t>() %
globals_.size();
1312 return {
index, type};
// Shared implementation of global.get/set; kVoid wanted_kind means "set".
1315 template <ValueKind wanted_kind>
1316 void global_op(DataRange* data) {
1317 static_assert(wanted_kind == kVoid || is_convertible_kind(wanted_kind));
1318 constexpr bool is_set = wanted_kind == kVoid;
1319 Var global = GetRandomGlobal(data, is_set);
1322 if (!global.is_valid() || !is_convertible_kind(global.type.kind())) {
1323 if (wanted_kind == kVoid)
return;
1324 return Generate<wanted_kind>(data);
1327 if (is_set) Generate(global.type, data);
1328 builder_->EmitWithU32V(is_set ? kExprGlobalSet : kExprGlobalGet,
1330 if (!is_set && global.type.kind() != wanted_kind) {
1331 Convert(global.type, ValueType::Primitive(wanted_kind));
1335 template <ValueKind wanted_kind>
1336 void get_global(DataRange* data) {
1337 static_assert(wanted_kind != kVoid,
"illegal type");
1338 global_op<wanted_kind>(data);
// Typed select: two candidate values plus an i32 condition, then the
// select-with-type encoding (one explicit result type).
1341 template <ValueKind select_kind>
1342 void select_with_type(DataRange* data) {
1343 static_assert(select_kind != kVoid,
"illegal kind for select");
1344 Generate<select_kind, select_kind, kI32>(data);
1346 uint8_t num_types = 1;
1347 builder_->EmitWithU8U8(kExprSelectWithType, num_types,
1348 ValueType::Primitive(select_kind).value_type_code());
1351 void set_global(DataRange* data) { global_op<kVoid>(data); }
1353 void throw_or_rethrow(DataRange* data) {
1354 bool rethrow = data->get<
bool>();
1356 int control_depth =
static_cast<int>(
blocks_.size() - 1);
1358 data->get<uint8_t>() %
static_cast<int>(
catch_blocks_.size());
1359 builder_->EmitWithU32V(kExprRethrow,
1362 int tag = data->get<uint8_t>() %
builder_->builder()->NumTags();
1364 Generate(exception_sig->parameters(), data);
1365 builder_->EmitWithU32V(kExprThrow, tag);
1370 void sequence(DataRange* data) {
1371 Generate<Types...>(
data);
1374 void memory_size(DataRange* data) {
1375 uint8_t memory_index =
1376 data->get<uint8_t>() %
builder_->builder()->NumMemories();
1378 builder_->EmitWithU8(kExprMemorySize, memory_index);
1381 if (
builder_->builder()->IsMemory64(memory_index)) {
1382 builder_->Emit(kExprI32ConvertI64);
1386 void grow_memory(DataRange* data) {
1387 uint8_t memory_index =
1388 data->get<uint8_t>() %
builder_->builder()->NumMemories();
1391 builder_->builder()->IsMemory64(memory_index) ? Generate<kI64>(data)
1392 : Generate<
kI32>(data);
1393 builder_->EmitWithU8(kExprMemoryGrow, memory_index);
1396 if (
builder_->builder()->IsMemory64(memory_index)) {
1397 builder_->Emit(kExprI32ConvertI64);
// NOTE(review): decimated listing — interior lines missing; comments only.
// Emits a null reference of the given heap type (body not fully visible).
1401 void ref_null(HeapType type, DataRange* data) {
// Tries to produce a matching local via local.get; occasionally shifts
// locals first to vary data flow. Returns false on failure (presumably —
// early-out lines not visible; TODO confirm).
1406 bool get_local_ref(HeapType type, DataRange* data, Nullability nullable) {
1407 Var local = GetRandomLocal(data, ValueType::RefMaybeNull(type, nullable));
// 1-in-8 chance to shuffle locals before reading, for more varied modules.
1412 if (data->get<uint8_t>() % 8 == 1) {
1413 shift_locals_to(data, local);
1415 builder_->EmitWithU32V(kExprLocalGet, local.index);
// Allocates a new struct/array/function reference for an indexed heap type.
// Dispatches on whether `index` names a struct, an array, or (fallthrough)
// a function signature.
1422 bool new_object(HeapType type, DataRange* data, Nullability nullable) {
1425 ModuleTypeIndex index = type.ref_index();
1426 bool new_default = data->get<
bool>();
1428 if (
builder_->builder()->IsStructType(index)) {
1429 const StructType* struct_gen =
builder_->builder()->GetStructType(index);
1430 int field_count = struct_gen->field_count();
// struct.new_default requires every field type to be defaultable.
1431 bool can_be_defaultable = std::all_of(
1432 struct_gen->fields().begin(), struct_gen->fields().end(),
1433 [](ValueType type) ->
bool { return type.is_defaultable(); });
1435 if (new_default && can_be_defaultable) {
1436 builder_->EmitWithPrefix(kExprStructNewDefault);
// Otherwise generate one value per field, then struct.new.
1439 for (
int i = 0;
i < field_count;
i++) {
1440 Generate(struct_gen->field(
i).Unpacked(), data);
1442 builder_->EmitWithPrefix(kExprStructNew);
1445 }
else if (
builder_->builder()->IsArrayType(index)) {
1446 ValueType element_type =
1447 builder_->builder()->GetArrayType(index)->element_type();
1448 bool can_be_defaultable = element_type.is_defaultable();
// Candidate array-allocation opcodes; array.new_default is last so it can
// be excluded below when the element type is not defaultable.
1450 kExprArrayNew, kExprArrayNewFixed,
1451 kExprArrayNewData, kExprArrayNewElem,
1452 kExprArrayNewDefault,
1454 size_t op_size =
arraysize(array_new_op);
1455 if (!can_be_defaultable) --op_size;
1456 switch (array_new_op[data->get<uint8_t>() % op_size]) {
1457 case kExprArrayNewElem:
1458 case kExprArrayNewData: {
// array.new_elem needs a nullable indexed reference element type…
1462 if (element_type.is_reference() && element_type.is_nullable() &&
1463 element_type.has_index()) {
1465 uint32_t element_segment = GenerateRefTypeElementSegment(
1466 data,
builder_->builder(), element_type);
1472 builder_->EmitWithPrefix(kExprArrayNewElem);
1474 builder_->EmitU32V(element_segment);
1476 }
// …while array.new_data needs a numeric (non-reference) element type,
// sourced from a passive data segment (created on demand).
else if (!element_type.is_reference()) {
1478 if (
builder_->builder()->NumDataSegments() == 0) {
1479 GeneratePassiveDataSegment(data,
builder_->builder());
1482 data->get<uint8_t>() %
builder_->builder()->NumDataSegments();
1485 builder_->EmitWithPrefix(kExprArrayNewData);
// Fallback (neither applies): plain array.new with a length capped by
// kMaxArraySize (the length expression visible here is constant).
1493 Generate(element_type.Unpacked(), data);
1494 Generate(kWasmI32, data);
1495 builder_->EmitI32Const(kMaxArraySize);
1497 builder_->EmitWithPrefix(kExprArrayNew);
1500 case kExprArrayNewFixed: {
// Element count bounded by remaining fuzzer input to avoid huge bodies.
1501 size_t element_count =
1502 std::min(
static_cast<size_t>(data->get<uint8_t>()), data->size());
1503 for (
size_t i = 0;
i < element_count; ++
i) {
1504 Generate(element_type.Unpacked(), data);
1506 builder_->EmitWithPrefix(kExprArrayNewFixed);
1508 builder_->EmitU32V(
static_cast<uint32_t
>(element_count));
1511 case kExprArrayNewDefault:
1512 Generate(kWasmI32, data);
1513 builder_->EmitI32Const(kMaxArraySize);
1515 builder_->EmitWithPrefix(kExprArrayNewDefault);
1519 FATAL(
"Unimplemented opcode");
// Function-reference case: indexed types are laid out arrays+structs first,
// so subtracting those sizes yields a declared-function index.
1527 uint32_t declared_func_index =
1528 index.index -
static_cast<uint32_t
>(
arrays_.size() +
structs_.size());
1529 size_t num_functions =
builder_->builder()->NumDeclaredFunctions();
// Scan (with wrap-around) for a function of the wanted signature and emit
// ref.func with its absolute (imports included) index.
1531 for (
size_t i = 0;
i < num_functions; ++
i) {
1533 ->GetFunction(declared_func_index)
1535 uint32_t absolute_func_index =
1536 NumImportedFunctions() + declared_func_index;
1537 builder_->EmitWithU32V(kExprRefFunc, absolute_func_index);
1540 declared_func_index = (declared_func_index + 1) % num_functions;
// No matching function found: fall back to ref.null of the indexed type.
1543 builder_->EmitWithI32V(kExprRefNull, index.index);
// NOTE(review): decimated listing — interior lines missing; comments only.
// Shared emitter for table.set/size/grow/fill: generates the operand types
// (substituting the table's concrete type for kWasmFuncRef placeholders),
// then emits the opcode; converts i64 results to i32 for table64.
1552 void table_op(uint32_t index, std::vector<ValueType> types, DataRange* data,
1553 WasmOpcode opcode) {
1554 DCHECK(opcode == kExprTableSet || opcode == kExprTableSize ||
1555 opcode == kExprTableGrow || opcode == kExprTableFill);
1556 for (
size_t i = 0;
i < types.size();
i++) {
// kWasmFuncRef acts as a placeholder for "this table's element type".
1559 if (types[
i] == kWasmFuncRef) {
1560 types[
i] =
builder_->builder()->GetTableType(index);
1563 Generate(base::VectorOf(types), data);
1564 if (opcode == kExprTableSet) {
// table64 size/grow produce i64; narrow so callers always see i32.
1573 if ((opcode == kExprTableSize || opcode == kExprTableGrow) &&
1574 builder_->builder()->IsTable64(index)) {
1575 builder_->Emit(kExprI32ConvertI64);
// Returns the index/address type of the given table (body not visible;
// presumably i64 for table64, i32 otherwise — TODO confirm).
1579 ValueType table_address_type(
int table_index) {
// Picks a uniformly random table and pairs it with its address type.
1583 std::pair<int, ValueType> select_random_table(DataRange* data) {
1584 int num_tables =
builder_->builder()->NumTables();
1586 int index = data->get<uint8_t>() % num_tables;
1587 ValueType address_type = table_address_type(index);
1589 return {
index, address_type};
// Emits table.get from a random table whose element type matches exactly;
// returns false (lines not visible) when no table qualifies.
1592 bool table_get(HeapType type, DataRange* data, Nullability nullable) {
1593 ValueType needed_type = ValueType::RefMaybeNull(type, nullable);
1594 int table_count =
builder_->builder()->NumTables();
// Collect indices of tables with the exact needed element type.
1596 ZoneVector<uint32_t> table(
builder_->builder()->zone());
1597 for (
int i = 0;
i < table_count;
i++) {
1598 if (
builder_->builder()->GetTableType(
i) == needed_type) {
1602 if (table.empty()) {
1606 table[data->get<uint8_t>() %
static_cast<int>(table.size())];
1607 ValueType address_type = table_address_type(table_index);
1608 Generate(address_type, data);
// Thin wrappers dispatching to table_op for each table instruction.
1614 void table_set(DataRange* data) {
1615 auto [table_index, address_type] = select_random_table(data);
1619 void table_size(DataRange* data) {
1620 auto [table_index,
_] = select_random_table(data);
1621 table_op(table_index, {},
data, kExprTableSize);
1624 void table_grow(DataRange* data) {
1625 auto [table_index, address_type] = select_random_table(data);
1629 void table_fill(DataRange* data) {
1630 auto [table_index, address_type] = select_random_table(data);
// Emits table.copy between two (possibly identical) tables that share a
// randomly chosen element type (funcref or externref).
1635 void table_copy(DataRange* data) {
1636 ValueType needed_type = data->get<
bool>() ? kWasmFuncRef :
kWasmExternRef;
1637 int table_count =
builder_->builder()->NumTables();
1638 ZoneVector<uint32_t> table(
builder_->builder()->zone());
1639 for (
int i = 0;
i < table_count;
i++) {
1640 if (
builder_->builder()->GetTableType(
i) == needed_type) {
1644 if (table.empty()) {
1647 int first_index = data->get<uint8_t>() %
static_cast<int>(table.size());
1648 int second_index = data->get<uint8_t>() %
static_cast<int>(table.size());
// dst/src/count operands use each table's own address type; the count's
// type (result_addrtype) is derived on a line not visible here.
1649 ValueType first_addrtype = table_address_type(table[first_index]);
1650 ValueType second_addrtype = table_address_type(table[second_index]);
1651 ValueType result_addrtype =
1653 Generate(first_addrtype, data);
1654 Generate(second_addrtype, data);
1655 Generate(result_addrtype, data);
1656 builder_->EmitWithPrefix(kExprTableCopy);
1657 builder_->EmitU32V(table[first_index]);
1658 builder_->EmitU32V(table[second_index]);
// NOTE(review): decimated listing — interior lines missing; comments only.
// Emits array.get(_s/_u) yielding `value_type` from a randomly chosen array
// type with a matching (unpacked) element type; returns false when none exists.
1661 bool array_get_helper(ValueType value_type, DataRange* data) {
1662 WasmModuleBuilder* builder =
builder_->builder();
1663 ZoneVector<ModuleTypeIndex> array_indices(builder->zone());
1665 for (ModuleTypeIndex
i :
arrays_) {
1666 DCHECK(builder->IsArrayType(
i));
1667 if (builder->GetArrayType(
i)->element_type().Unpacked() == value_type) {
1668 array_indices.push_back(
i);
1672 if (!array_indices.empty()) {
1673 int index = data->get<uint8_t>() %
static_cast<int>(array_indices.size());
1674 GenerateRef(HeapType::Index(array_indices[index], kNotShared,
1675 RefTypeKind::kArray),
1677 Generate(kWasmI32, data);
// Packed element types need the signed/unsigned get variants.
1678 if (builder->GetArrayType(array_indices[index])
1681 builder_->EmitWithPrefix(data->get<
bool>() ? kExprArrayGetS
1685 builder_->EmitWithPrefix(kExprArrayGet);
1687 builder_->EmitU32V(array_indices[index]);
// Typed wrapper: falls back to generating the value directly when no
// suitable array type exists.
1694 template <ValueKind wanted_kind>
1695 void array_get(DataRange* data) {
1696 bool got_array_value =
1697 array_get_helper(ValueType::Primitive(wanted_kind), data);
1698 if (!got_array_value) {
1699 Generate<wanted_kind>(data);
1702 bool array_get_ref(HeapType type, DataRange* data, Nullability nullable) {
1703 ValueType needed_type = ValueType::RefMaybeNull(type, nullable);
1704 return array_get_helper(needed_type, data);
// Emits i31.get_s or i31.get_u on a generated i31ref.
1707 void i31_get(DataRange* data) {
1708 GenerateRef(kWasmI31Ref, data);
1709 if (data->get<
bool>()) {
1710 builder_->EmitWithPrefix(kExprI31GetS);
1712 builder_->EmitWithPrefix(kExprI31GetU);
1716 void array_len(DataRange* data) {
1718 GenerateRef(kWasmArrayRef, data);
1719 builder_->EmitWithPrefix(kExprArrayLen);
// Emits array.copy: dst ref, dst index, src ref, src index, count.
// (How array_index is chosen is on lines not visible here.)
1722 void array_copy(DataRange* data) {
1727 ModuleTypeIndex array_index =
1730 GenerateRef(HeapType::Index(array_index, kNotShared, RefTypeKind::kArray),
1732 Generate(kWasmI32, data);
1733 GenerateRef(HeapType::Index(array_index, kNotShared, RefTypeKind::kArray),
1735 Generate(kWasmI32, data);
1736 Generate(kWasmI32, data);
1737 builder_->EmitWithPrefix(kExprArrayCopy);
// Emits array.fill: ref, offset, fill value (of element type), count.
1742 void array_fill(DataRange* data) {
1744 ModuleTypeIndex array_index =
1747 ValueType element_type =
builder_->builder()
1748 ->GetArrayType(array_index)
1751 GenerateRef(HeapType::Index(array_index, kNotShared, RefTypeKind::kArray),
1753 Generate(kWasmI32, data);
1754 Generate(element_type, data);
1755 Generate(kWasmI32, data);
1756 builder_->EmitWithPrefix(kExprArrayFill);
// Emits array.init_data from a (possibly freshly created) passive data
// segment; only valid for numeric element types (reference case bails).
1760 void array_init_data(DataRange* data) {
1762 ModuleTypeIndex array_index =
1765 const ArrayType* array_type =
1766 builder_->builder()->GetArrayType(array_index);
1767 DCHECK(array_type->mutability());
1768 ValueType element_type = array_type->element_type().Unpacked();
1769 if (element_type.is_reference()) {
1772 if (
builder_->builder()->NumDataSegments() == 0) {
1773 GeneratePassiveDataSegment(data,
builder_->builder());
1777 data->get<uint8_t>() %
builder_->builder()->NumDataSegments();
1779 Generate({ValueType::RefNull(array_index, kNotShared, RefTypeKind::kArray),
1782 builder_->EmitWithPrefix(kExprArrayInitData);
// Emits array.init_elem from a generated element segment; requires a
// nullable indexed reference element type.
1787 void array_init_elem(DataRange* data) {
1789 ModuleTypeIndex array_index =
1792 const ArrayType* array_type =
1793 builder_->builder()->GetArrayType(array_index);
1794 DCHECK(array_type->mutability());
1795 ValueType element_type = array_type->element_type().Unpacked();
1799 if (!element_type.is_reference() || element_type.is_non_nullable() ||
1800 !element_type.has_index()) {
1804 uint32_t element_segment =
1805 GenerateRefTypeElementSegment(data,
builder_->builder(), element_type);
1809 Generate({ValueType::RefNull(array_index, kNotShared, RefTypeKind::kArray),
1813 builder_->EmitWithPrefix(kExprArrayInitElem);
1815 builder_->EmitU32V(element_segment);
// Emits array.set on a random *mutable* array type; bails silently when
// none of the module's array types is mutable.
1818 void array_set(DataRange* data) {
1819 WasmModuleBuilder* builder =
builder_->builder();
1820 ZoneVector<ModuleTypeIndex> array_indices(builder->zone());
1821 for (ModuleTypeIndex
i :
arrays_) {
1822 DCHECK(builder->IsArrayType(
i));
1823 if (builder->GetArrayType(
i)->mutability()) {
1824 array_indices.push_back(
i);
1828 if (array_indices.empty()) {
1832 int index = data->get<uint8_t>() %
static_cast<int>(array_indices.size());
1834 HeapType::Index(array_indices[index], kNotShared, RefTypeKind::kArray),
1836 Generate(kWasmI32, data);
1838 builder->GetArrayType(array_indices[index])->element_type().Unpacked(),
1840 builder_->EmitWithPrefix(kExprArraySet);
1841 builder_->EmitU32V(array_indices[index]);
// NOTE(review): decimated listing — interior lines missing; comments only.
// Emits struct.get(_s/_u) yielding `value_type` from a randomly chosen
// (struct type, field) pair whose field type matches; returns false when
// no such field exists in the module.
1844 bool struct_get_helper(ValueType value_type, DataRange* data) {
1845 WasmModuleBuilder* builder =
builder_->builder();
// Parallel vectors: field_index[k] belongs to struct_index[k].
1846 ZoneVector<uint32_t> field_index(builder->zone());
1847 ZoneVector<ModuleTypeIndex> struct_index(builder->zone());
1849 DCHECK(builder->IsStructType(
i));
1850 int field_count = builder->GetStructType(
i)->field_count();
1851 for (
int index = 0; index < field_count; index++) {
1853 if (builder->GetStructType(
i)->field(index) == value_type) {
1854 field_index.push_back(index);
1855 struct_index.push_back(
i);
1859 if (!field_index.empty()) {
1860 int index = data->get<uint8_t>() %
static_cast<int>(field_index.size());
1861 GenerateRef(HeapType::Index(struct_index[index], kNotShared,
1862 RefTypeKind::kStruct),
// Packed field types require the signed/unsigned get variants.
1864 if (builder->GetStructType(struct_index[index])
1865 ->field(field_index[index])
1867 builder_->EmitWithPrefix(data->get<
bool>() ? kExprStructGetS
1870 builder_->EmitWithPrefix(kExprStructGet);
1872 builder_->EmitU32V(struct_index[index]);
1873 builder_->EmitU32V(field_index[index]);
// Typed wrapper: falls back to generating the value directly when no
// suitable struct field exists.
1879 template <ValueKind wanted_kind>
1880 void struct_get(DataRange* data) {
1881 bool got_struct_value =
1882 struct_get_helper(ValueType::Primitive(wanted_kind), data);
1883 if (!got_struct_value) {
1884 Generate<wanted_kind>(data);
1888 bool struct_get_ref(HeapType type, DataRange* data, Nullability nullable) {
1889 ValueType needed_type = ValueType::RefMaybeNull(type, nullable);
1890 return struct_get_helper(needed_type, data);
// Emits ref.cast(_null) from the target type's top type down to `type`.
1893 bool ref_cast(HeapType type, DataRange* data, Nullability nullable) {
1894 HeapType input_type = top_type(type);
1895 GenerateRef(input_type, data);
1896 builder_->EmitWithPrefix(nullable ? kExprRefCastNull : kExprRefCast);
// NOTE(review): decimated listing — interior lines missing; comments only.
// Maps a heap type to the top of its subtype hierarchy (any / extern / exn /
// func); return statements between the case labels are not visible here.
1901 HeapType top_type(HeapType type) {
1902 switch (type.representation()) {
1903 case HeapType::kAny:
1905 case HeapType::kArray:
1906 case HeapType::kStruct:
1907 case HeapType::kI31:
1908 case HeapType::kNone:
1910 case HeapType::kExtern:
1911 case HeapType::kNoExtern:
1913 case HeapType::kExn:
1914 case HeapType::kNoExn:
1916 case HeapType::kFunc:
1917 case HeapType::kNoFunc:
// Indexed types: signatures top out at func; otherwise struct/array → any.
1921 if (
builder_->builder()->IsSignature(type.ref_index())) {
1925 builder_->builder()->IsArrayType(type.ref_index()));
// Picks a random (not necessarily proper) subtype of `type`: for abstract
// types it chooses among the module's struct/array types plus the abstract
// subtypes; for indexed types it scans declared supertype relations.
1930 HeapType choose_sub_type(HeapType type, DataRange* data) {
1931 switch (type.representation()) {
1932 case HeapType::kAny: {
1934 GenericKind::kAny, GenericKind::kEq, GenericKind::kArray,
1935 GenericKind::kStruct, GenericKind::kI31, GenericKind::kNone,
1938 data->get<uint8_t>() %
// choice indexes arrays_ first, then structs_, then the generic kinds.
1941 if (choice <
arrays_.size()) {
1942 return HeapType::Index(
arrays_[choice], kNotShared,
1943 RefTypeKind::kArray);
1947 return HeapType::Index(
structs_[choice], kNotShared,
1948 RefTypeKind::kStruct);
1951 return HeapType::Generic(generic_types[choice], kNotShared);
1953 case HeapType::kEq: {
1955 GenericKind::kEq, GenericKind::kArray, GenericKind::kStruct,
1956 GenericKind::kI31, GenericKind::kNone,
1959 data->get<uint8_t>() %
1962 if (choice <
arrays_.size()) {
1963 return HeapType::Index(
arrays_[choice], kNotShared,
1964 RefTypeKind::kArray);
1968 return HeapType::Index(
structs_[choice], kNotShared,
1969 RefTypeKind::kStruct);
1972 return HeapType::Generic(generic_types[choice], kNotShared);
1974 case HeapType::kStruct: {
1976 GenericKind::kStruct,
1979 const size_t type_count =
structs_.size();
1980 const size_t choice =
1981 data->get<uint8_t>() % (type_count +
arraysize(generic_types));
1982 return choice >= type_count
1983 ? HeapType::Generic(generic_types[choice - type_count],
1988 case HeapType::kArray: {
1990 GenericKind::kArray,
1993 const size_t type_count =
arrays_.size();
1994 const size_t choice =
1995 data->get<uint8_t>() % (type_count +
arraysize(generic_types));
1996 return choice >= type_count
1997 ? HeapType::Generic(generic_types[choice - type_count],
2002 case HeapType::kFunc: {
2003 constexpr GenericKind generic_types[] = {GenericKind::kFunc,
2004 GenericKind::kNoFunc};
2006 const size_t choice =
2007 data->get<uint8_t>() % (type_count +
arraysize(generic_types));
2008 return choice >= type_count
2009 ? HeapType::Generic(generic_types[choice - type_count],
2014 case HeapType::kExtern:
// Heavily biased towards extern over noextern (threshold 25 of 255).
2016 return HeapType::Generic(data->get<uint8_t>() > 25
2017 ? GenericKind::kExtern
2018 : GenericKind::kNoExtern,
2021 if (!type.is_index()) {
// Indexed type: collect all declared direct subtypes and pick one;
// keep `type` itself when it has no subtypes.
2027 std::vector<ModuleTypeIndex> subtypes;
2028 uint32_t type_count =
builder_->builder()->NumTypes();
2029 for (uint32_t
i = 0;
i < type_count; ++
i) {
2030 if (
builder_->builder()->GetSuperType(
i) == type.ref_index()) {
2031 subtypes.push_back(ModuleTypeIndex{
i});
2034 if (subtypes.empty())
return type;
2035 return HeapType::Index(subtypes[data->get<uint8_t>() % subtypes.size()],
2036 kNotShared, type.ref_type_kind());
// NOTE(review): decimated listing — interior lines missing; comments only.
// Emits br_on_cast or br_on_cast_fail targeting a random enclosing block
// whose last result is a reference; generates the non-branching fallthrough
// so the stack stays consistent on both paths.
2040 bool br_on_cast(HeapType type, DataRange* data, Nullability nullable) {
2042 const uint32_t target_block = data->get<uint8_t>() %
blocks_.size();
// blocks_ is outer-to-inner; convert to a relative branch depth.
2043 const uint32_t block_index =
2044 static_cast<uint32_t
>(
blocks_.size()) - 1 - target_block;
2045 const auto break_types = base::VectorOf(
blocks_[target_block]);
2046 if (break_types.empty()) {
// The cast operates on the block's last result type, which must be a ref.
2049 ValueType break_type = break_types.last();
2050 if (!break_type.is_reference()) {
// Produce all block results except the last; the cast produces that one.
2054 Generate(break_types.SubVector(0, break_types.size() - 1), data);
2055 if (data->get<
bool>()) {
// br_on_cast: source is the top type of the target; branch taken on match.
2057 HeapType source_type = top_type(break_type.heap_type());
2058 const bool source_is_nullable = data->get<
bool>();
2059 GenerateRef(source_type, data,
2060 source_is_nullable ? kNullable : kNonNullable);
// Target may only be nullable if both source and break type allow it.
2061 const bool target_is_nullable =
2062 source_is_nullable && break_type.is_nullable() && data->get<
bool>();
2063 builder_->EmitWithPrefix(kExprBrOnCast);
// Flags immediate: bit 0 = source nullable, bit 1 = target nullable.
2064 builder_->EmitU32V(source_is_nullable + (target_is_nullable << 1));
2066 builder_->EmitHeapType(source_type);
2067 builder_->EmitHeapType(break_type.heap_type());
// Fallthrough value has the (uncast) source type; consume it and
// regenerate the value this expression is supposed to produce.
2071 base::SmallVector<ValueType, 32> fallthrough_types(break_types);
2072 fallthrough_types.back() = ValueType::RefMaybeNull(
2073 source_type, source_is_nullable ? kNullable : kNonNullable);
2074 ConsumeAndGenerate(base::VectorOf(fallthrough_types), {},
data);
2076 GenerateRef(type, data, nullable);
// br_on_cast_fail: branch taken when the cast to a random subtype fails.
2079 HeapType source_type = break_type.heap_type();
2080 const bool source_is_nullable = data->get<
bool>();
2081 GenerateRef(source_type, data,
2082 source_is_nullable ? kNullable : kNonNullable);
2083 const bool target_is_nullable =
2084 source_is_nullable &&
2085 (!break_type.is_nullable() || data->get<
bool>());
2086 HeapType target_type = choose_sub_type(source_type, data);
2088 builder_->EmitWithPrefix(kExprBrOnCastFail);
2089 builder_->EmitU32V(source_is_nullable + (target_is_nullable << 1));
2091 builder_->EmitHeapType(source_type);
2092 builder_->EmitHeapType(target_type);
// Fallthrough (cast succeeded) carries the narrowed target type.
2094 base::SmallVector<ValueType, 32> fallthrough_types(break_types);
2095 fallthrough_types.back() = ValueType::RefMaybeNull(
2096 target_type, target_is_nullable ? kNullable : kNonNullable);
2097 ConsumeAndGenerate(base::VectorOf(fallthrough_types), {},
data);
2099 GenerateRef(type, data, nullable);
// Emits any.convert_extern; only applicable when the wanted type is kAny.
// Non-nullable requests need extra handling (lines not visible).
2104 bool any_convert_extern(HeapType type, DataRange* data,
2105 Nullability nullable) {
2106 if (type.representation() != HeapType::kAny) {
2109 GenerateRef(kWasmExternRef, data);
2110 builder_->EmitWithPrefix(kExprAnyConvertExtern);
2111 if (nullable == kNonNullable) {
// Generates a nullable ref then (presumably) ref.as_non_null — emit line
// not visible; TODO confirm against full source.
2117 bool ref_as_non_null(HeapType type, DataRange* data, Nullability nullable) {
2118 GenerateRef(type, data, kNullable);
// NOTE(review): decimated listing — interior lines missing; comments only.
// Emits struct.set on a random mutable field of a random struct type;
// silently bails when the chosen struct has no mutable fields.
2123 void struct_set(DataRange* data) {
2124 WasmModuleBuilder* builder =
builder_->builder();
2126 ModuleTypeIndex struct_index =
2128 DCHECK(builder->IsStructType(struct_index));
2129 const StructType* struct_type = builder->GetStructType(struct_index);
// Collect the indices of mutable fields only.
2130 ZoneVector<uint32_t> field_indices(builder->zone());
2131 for (uint32_t
i = 0;
i < struct_type->field_count();
i++) {
2132 if (struct_type->mutability(
i)) {
2133 field_indices.push_back(
i);
2136 if (field_indices.empty()) {
2140 field_indices[data->get<uint8_t>() % field_indices.size()];
2141 GenerateRef(HeapType::Index(struct_index, kNotShared, RefTypeKind::kStruct),
2143 Generate(struct_type->field(field_index).Unpacked(), data);
2144 builder_->EmitWithPrefix(kExprStructSet);
// Emits ref.is_null on a generated anyref (emit line not visible).
2149 void ref_is_null(DataRange* data) {
2150 GenerateRef(kWasmAnyRef, data);
// Emits ref.test(_null) against a random struct, array, or generic type.
2154 template <WasmOpcode opcode>
2155 void ref_test(DataRange* data) {
2156 GenerateRef(kWasmAnyRef, data);
// type_choice indexes structs first, then arrays, then generic kinds.
2160 size_t num_all_types = num_types +
arraysize(generic_types);
2161 size_t type_choice = data->get<uint8_t>() % num_all_types;
2163 if (type_choice <
structs_.size()) {
2168 if (type_choice <
arrays_.size()) {
2172 type_choice -=
arrays_.size();
2173 builder_->EmitU32V(generic_types[type_choice]);
// Emits ref.eq on two generated eqrefs (emit line not visible).
2176 void ref_eq(DataRange* data) {
2177 GenerateRef(kWasmEqRef, data);
2178 GenerateRef(kWasmEqRef, data);
// Calls an imported JS string builtin by (absolute) function index.
2182 void call_string_import(uint32_t index) {
2183 builder_->EmitWithU32V(kExprCallFunction, index);
// NOTE(review): decimated listing — interior lines missing; comments only.
// The methods below generate the operands for the imported JS String
// builtins (string refs are externref; char-code arrays are indexed array
// types) and then — on lines not visible here — call the matching import,
// presumably via call_string_import; TODO confirm against full source.
2186 void string_cast(DataRange* data) {
2187 GenerateRef(kWasmExternRef, data);
2191 void string_test(DataRange* data) {
2192 GenerateRef(kWasmExternRef, data);
2196 void string_fromcharcode(DataRange* data) {
2197 Generate(kWasmI32, data);
2201 void string_fromcodepoint(DataRange* data) {
2202 Generate(kWasmI32, data);
2206 void string_charcodeat(DataRange* data) {
2207 GenerateRef(kWasmExternRef, data);
2208 Generate(kWasmI32, data);
2212 void string_codepointat(DataRange* data) {
2213 GenerateRef(kWasmExternRef, data);
2214 Generate(kWasmI32, data);
2218 void string_length(DataRange* data) {
2219 GenerateRef(kWasmExternRef, data);
2223 void string_concat(DataRange* data) {
2224 GenerateRef(kWasmExternRef, data);
2225 GenerateRef(kWasmExternRef, data);
2229 void string_substring(DataRange* data) {
2230 GenerateRef(kWasmExternRef, data);
2231 Generate(kWasmI32, data);
2232 Generate(kWasmI32, data);
2236 void string_equals(DataRange* data) {
2237 GenerateRef(kWasmExternRef, data);
2238 GenerateRef(kWasmExternRef, data);
2242 void string_compare(DataRange* data) {
2243 GenerateRef(kWasmExternRef, data);
2244 GenerateRef(kWasmExternRef, data);
2248 void string_fromcharcodearray(DataRange* data) {
2250 RefTypeKind::kArray),
2252 Generate(kWasmI32, data);
2253 Generate(kWasmI32, data);
2257 void string_intocharcodearray(DataRange* data) {
2258 GenerateRef(kWasmExternRef, data);
2260 RefTypeKind::kArray),
2262 Generate(kWasmI32, data);
2266 void string_measureutf8(DataRange* data) {
2267 GenerateRef(kWasmExternRef, data);
2271 void string_intoutf8array(DataRange* data) {
2272 GenerateRef(kWasmExternRef, data);
2274 RefTypeKind::kArray),
2276 Generate(kWasmI32, data);
2280 void string_toutf8array(DataRange* data) {
2281 GenerateRef(kWasmExternRef, data);
2285 void string_fromutf8array(DataRange* data) {
2287 RefTypeKind::kArray),
2289 Generate(kWasmI32, data);
2290 Generate(kWasmI32, data);
// NOTE(review): decimated listing — interior lines missing; comments only.
// Picks one generator member-function pointer from `alternatives` using one
// byte of fuzzer input and invokes it. Constrained to array-like containers
// of GenerateFn.
2294 template <
typename Arr>
2295 requires requires(
const Arr& arr) {
2296 { arr.size() } -> std::convertible_to<std::size_t>;
2297 { arr.data()[0] } -> std::convertible_to<GenerateFn>;
2299 void GenerateOneOf(
const Arr& alternatives, DataRange* data) {
// alternatives.size() must fit in a uint8_t so the modulo is unbiased-ish.
2300 DCHECK_LT(alternatives.size(), std::numeric_limits<uint8_t>::max());
2301 const auto which = data->get<uint8_t>();
2303 GenerateFn alternate = alternatives[which % alternatives.size()];
2304 (this->*alternate)(data);
// Overload: resolves the alternative set for the active generation options
// (MVP / SIMD / GC) first, then dispatches as above.
2307 template <
size_t... kAlternativesSizes>
2308 void GenerateOneOf(
const GeneratorAlternativesPerOption<
2309 kAlternativesSizes...>& alternatives_per_option,
2311 return GenerateOneOf(alternatives_per_option.GetAlternatives(
options_),
// Reference-typed variant: alternatives may fail (return false), so try
// them round-robin starting at a random index; fall back to ref.null for
// nullable requests. The extra +1 slot directly selects ref.null.
2317 template <
typename Arr>
2318 requires requires(
const Arr& arr) {
2319 { arr.size() } -> std::convertible_to<std::size_t>;
2320 { arr.data()[0] } -> std::convertible_to<GenerateFnWithHeap>;
2322 bool GenerateOneOf(
const Arr& alternatives, HeapType type, DataRange* data,
2323 Nullability nullability) {
2324 DCHECK_LT(alternatives.size(), std::numeric_limits<uint8_t>::max());
2326 size_t index = data->get<uint8_t>() % (alternatives.size() + 1);
2328 if (nullability && index == alternatives.size()) {
2329 ref_null(type, data);
// First pass: from the random start index to the end…
2333 for (
size_t i = index;
i < alternatives.
size();
i++) {
2334 if ((this->*alternatives[
i])(type, data, nullability)) {
// …second pass: wrap around from the beginning.
2339 for (
size_t i = 0;
i <
index;
i++) {
2340 if ((this->*alternatives[
i])(type, data, nullability)) {
// All alternatives failed: null is the last resort for nullable types.
2345 if (nullability == kNullable) {
2346 ref_null(type, data);
// RAII guard bumping BodyGen::recursion_depth for the scope's lifetime;
// pairs with recursion_limit_reached() to bound expression nesting.
2353 struct GeneratorRecursionScope {
2354 explicit GeneratorRecursionScope(BodyGen*
gen) :
gen(
gen) {
2355 ++
gen->recursion_depth;
2358 ~GeneratorRecursionScope() {
2360 --
gen->recursion_depth;
// NOTE(review): decimated listing — interior lines missing; comments only.
// Constructs a body generator for one function: records the module-level
// entities it may reference and allocates up to kMaxLocals random locals.
2366 BodyGen(WasmModuleGenerationOptions options, WasmFunctionBuilder*
fn,
2367 const std::vector<ModuleTypeIndex>& functions,
2368 const std::vector<ValueType>& globals,
2369 const std::vector<uint8_t>& mutable_globals,
2370 const std::vector<ModuleTypeIndex>& structs,
2371 const std::vector<ModuleTypeIndex>& arrays,
2372 const StringImports& strings, DataRange* data)
// Loop over the signature's return types (body lines not visible).
2383 for (
size_t i = 0;
i <
sig->return_count(); ++
i) {
// Random number of locals, each of a random value type, registered on fn.
2386 locals_.resize(data->get<uint8_t>() % kMaxLocals);
2387 uint32_t num_types =
static_cast<uint32_t
>(
2389 for (ValueType& local :
locals_) {
2390 local = GetValueType(options, data, num_types);
2391 fn->AddLocal(local);
2395 int NumImportedFunctions() {
2396 return builder_->builder()->NumImportedFunctions();
// Total addressable locals (params + declared locals); body not visible.
2400 size_t all_locals_count()
const {
// Unified local lookup: indices below the parameter count address the
// signature's parameters, the rest address locals_.
2405 ValueType local_type(uint32_t index)
const {
2406 size_t num_params =
builder_->signature()->parameter_count();
2407 return index < num_params ?
builder_->signature()->GetParam(index)
2408 :
locals_[index - num_params];
// NOTE(review): decimated listing — interior lines missing; comments only.
// Emits one random void-typed expression. The alternative tables are built
// once (static constexpr) and grouped per feature option: MVP core ops,
// SIMD stores, and WasmGC mutations.
2420 void GenerateVoid(DataRange* data) {
2421 GeneratorRecursionScope rec_scope(
this);
// Void needs no fallback value, so simply stop at the recursion/data limit.
2422 if (recursion_limit_reached() || data->size() == 0)
return;
2424 static constexpr auto kMvpAlternatives =
2425 CreateArray(&BodyGen::sequence<kVoid, kVoid>,
2426 &BodyGen::sequence<kVoid, kVoid, kVoid, kVoid>,
2427 &BodyGen::sequence<kVoid, kVoid, kVoid, kVoid, kVoid, kVoid,
2429 &BodyGen::block<kVoid>,
2430 &BodyGen::loop<kVoid>,
2431 &BodyGen::finite_loop<kVoid>,
2432 &BodyGen::if_<kVoid, kIf>,
2433 &BodyGen::if_<kVoid, kIfElse>,
2435 &BodyGen::br_if<kVoid>,
2436 &BodyGen::br_on_null<kVoid>,
2437 &BodyGen::br_on_non_null<kVoid>,
2438 &BodyGen::br_table<kVoid>,
2439 &BodyGen::try_table_block<kVoid>,
2440 &BodyGen::return_op,
2442 &BodyGen::memop<kExprI32StoreMem, kI32>,
2443 &BodyGen::memop<kExprI32StoreMem8, kI32>,
2444 &BodyGen::memop<kExprI32StoreMem16, kI32>,
2445 &BodyGen::memop<kExprI64StoreMem, kI64>,
2446 &BodyGen::memop<kExprI64StoreMem8, kI64>,
2447 &BodyGen::memop<kExprI64StoreMem16, kI64>,
2448 &BodyGen::memop<kExprI64StoreMem32, kI64>,
2449 &BodyGen::memop<kExprF32StoreMem, kF32>,
2450 &BodyGen::memop<kExprF64StoreMem, kF64>,
2451 &BodyGen::memop<kExprI32AtomicStore, kI32>,
2452 &BodyGen::memop<kExprI32AtomicStore8U, kI32>,
2453 &BodyGen::memop<kExprI32AtomicStore16U, kI32>,
2454 &BodyGen::memop<kExprI64AtomicStore, kI64>,
2455 &BodyGen::memop<kExprI64AtomicStore8U, kI64>,
2456 &BodyGen::memop<kExprI64AtomicStore16U, kI64>,
2457 &BodyGen::memop<kExprI64AtomicStore32U, kI64>,
2461 &BodyGen::call<kVoid>,
2462 &BodyGen::call_indirect<kVoid>,
2463 &BodyGen::call_ref<kVoid>,
2465 &BodyGen::set_local,
2466 &BodyGen::set_global,
2467 &BodyGen::throw_or_rethrow,
2468 &BodyGen::try_block<kVoid>,
2470 &BodyGen::shift_locals,
2472 &BodyGen::table_set,
2473 &BodyGen::table_fill,
2474 &BodyGen::table_copy);
// SIMD-only alternatives: stores of s128 values and lanes.
2476 static constexpr auto kSimdAlternatives =
2477 CreateArray(&BodyGen::memop<kExprS128StoreMem, kS128>,
2478 &BodyGen::simd_lane_memop<kExprS128Store8Lane, 16, kS128>,
2479 &BodyGen::simd_lane_memop<kExprS128Store16Lane, 8, kS128>,
2480 &BodyGen::simd_lane_memop<kExprS128Store32Lane, 4, kS128>,
2481 &BodyGen::simd_lane_memop<kExprS128Store64Lane, 2, kS128>);
// GC-only alternatives: struct/array mutations.
2483 static constexpr auto kWasmGCAlternatives =
2484 CreateArray(&BodyGen::struct_set,
2485 &BodyGen::array_set,
2486 &BodyGen::array_copy,
2487 &BodyGen::array_fill,
2488 &BodyGen::array_init_data,
2489 &BodyGen::array_init_elem);
2491 static constexpr GeneratorAlternativesPerOption kAlternativesPerOptions{
2492 kMvpAlternatives, kSimdAlternatives, kWasmGCAlternatives};
2494 GenerateOneOf(kAlternativesPerOptions, data);
// NOTE(review): decimated listing — interior lines missing; comments only.
// Emits one random i32-typed expression. At the recursion/data limit it
// bottoms out with an i32.const whose magnitude is bounded by a random
// bit width (mask derivation line not fully visible).
2497 void GenerateI32(DataRange* data) {
2498 GeneratorRecursionScope rec_scope(
this);
2499 if (recursion_limit_reached() || data->size() <= 1) {
// 1..32 significant bits, so small constants are common.
2504 uint8_t size = 1 + (data->getPseudoRandom<uint8_t>() & 31);
2506 builder_->EmitI32Const(data->getPseudoRandom<uint32_t>() &
mask);
2510 static constexpr auto kMvpAlternatives = CreateArray(
// Constants of 1..4 significant bytes.
2511 &BodyGen::i32_const<1>,
2512 &BodyGen::i32_const<2>,
2513 &BodyGen::i32_const<3>,
2514 &BodyGen::i32_const<4>,
2516 &BodyGen::sequence<kI32, kVoid>,
2517 &BodyGen::sequence<kVoid, kI32>,
2518 &BodyGen::sequence<kVoid, kI32, kVoid>,
// Comparisons (all produce i32 regardless of operand type).
2520 &BodyGen::op<kExprI32Eqz, kI32>,
2521 &BodyGen::op<kExprI32Eq, kI32, kI32>,
2522 &BodyGen::op<kExprI32Ne, kI32, kI32>,
2523 &BodyGen::op<kExprI32LtS, kI32, kI32>,
2524 &BodyGen::op<kExprI32LtU, kI32, kI32>,
2525 &BodyGen::op<kExprI32GeS, kI32, kI32>,
2526 &BodyGen::op<kExprI32GeU, kI32, kI32>,
2528 &BodyGen::op<kExprI64Eqz, kI64>,
2529 &BodyGen::op<kExprI64Eq, kI64, kI64>,
2530 &BodyGen::op<kExprI64Ne, kI64, kI64>,
2531 &BodyGen::op<kExprI64LtS, kI64, kI64>,
2532 &BodyGen::op<kExprI64LtU, kI64, kI64>,
2533 &BodyGen::op<kExprI64GeS, kI64, kI64>,
2534 &BodyGen::op<kExprI64GeU, kI64, kI64>,
2536 &BodyGen::op<kExprF32Eq, kF32, kF32>,
2537 &BodyGen::op<kExprF32Ne, kF32, kF32>,
2538 &BodyGen::op<kExprF32Lt, kF32, kF32>,
2539 &BodyGen::op<kExprF32Ge, kF32, kF32>,
2541 &BodyGen::op<kExprF64Eq, kF64, kF64>,
2542 &BodyGen::op<kExprF64Ne, kF64, kF64>,
2543 &BodyGen::op<kExprF64Lt, kF64, kF64>,
2544 &BodyGen::op<kExprF64Ge, kF64, kF64>,
// Arithmetic, division (can trap), bitwise, shifts, rotates.
2546 &BodyGen::op<kExprI32Add, kI32, kI32>,
2547 &BodyGen::op<kExprI32Sub, kI32, kI32>,
2548 &BodyGen::op<kExprI32Mul, kI32, kI32>,
2550 &BodyGen::op<kExprI32DivS, kI32, kI32>,
2551 &BodyGen::op<kExprI32DivU, kI32, kI32>,
2552 &BodyGen::op<kExprI32RemS, kI32, kI32>,
2553 &BodyGen::op<kExprI32RemU, kI32, kI32>,
2555 &BodyGen::op<kExprI32And, kI32, kI32>,
2556 &BodyGen::op<kExprI32Ior, kI32, kI32>,
2557 &BodyGen::op<kExprI32Xor, kI32, kI32>,
2558 &BodyGen::op<kExprI32Shl, kI32, kI32>,
2559 &BodyGen::op<kExprI32ShrU, kI32, kI32>,
2560 &BodyGen::op<kExprI32ShrS, kI32, kI32>,
2561 &BodyGen::op<kExprI32Ror, kI32, kI32>,
2562 &BodyGen::op<kExprI32Rol, kI32, kI32>,
2564 &BodyGen::op<kExprI32Clz, kI32>,
2565 &BodyGen::op<kExprI32Ctz, kI32>,
2566 &BodyGen::op<kExprI32Popcnt, kI32>,
// Conversions into i32 (trapping and saturating variants).
2568 &BodyGen::op<kExprI32ConvertI64, kI64>,
2569 &BodyGen::op<kExprI32SConvertF32, kF32>,
2570 &BodyGen::op<kExprI32UConvertF32, kF32>,
2571 &BodyGen::op<kExprI32SConvertF64, kF64>,
2572 &BodyGen::op<kExprI32UConvertF64, kF64>,
2573 &BodyGen::op<kExprI32ReinterpretF32, kF32>,
2575 &BodyGen::op_with_prefix<kExprI32SConvertSatF32, kF32>,
2576 &BodyGen::op_with_prefix<kExprI32UConvertSatF32, kF32>,
2577 &BodyGen::op_with_prefix<kExprI32SConvertSatF64, kF64>,
2578 &BodyGen::op_with_prefix<kExprI32UConvertSatF64, kF64>,
// Control constructs producing an i32.
2580 &BodyGen::block<kI32>,
2581 &BodyGen::loop<kI32>,
2582 &BodyGen::finite_loop<kI32>,
2583 &BodyGen::if_<kI32, kIfElse>,
2584 &BodyGen::br_if<kI32>,
2585 &BodyGen::br_on_null<kI32>,
2586 &BodyGen::br_on_non_null<kI32>,
2587 &BodyGen::br_table<kI32>,
2588 &BodyGen::try_table_block<kI32>,
// Plain and atomic memory loads / RMW ops.
2590 &BodyGen::memop<kExprI32LoadMem>,
2591 &BodyGen::memop<kExprI32LoadMem8S>,
2592 &BodyGen::memop<kExprI32LoadMem8U>,
2593 &BodyGen::memop<kExprI32LoadMem16S>,
2594 &BodyGen::memop<kExprI32LoadMem16U>,
2596 &BodyGen::memop<kExprI32AtomicLoad>,
2597 &BodyGen::memop<kExprI32AtomicLoad8U>,
2598 &BodyGen::memop<kExprI32AtomicLoad16U>,
2599 &BodyGen::memop<kExprI32AtomicAdd, kI32>,
2600 &BodyGen::memop<kExprI32AtomicSub, kI32>,
2601 &BodyGen::memop<kExprI32AtomicAnd, kI32>,
2602 &BodyGen::memop<kExprI32AtomicOr, kI32>,
2603 &BodyGen::memop<kExprI32AtomicXor, kI32>,
2604 &BodyGen::memop<kExprI32AtomicExchange, kI32>,
2605 &BodyGen::memop<kExprI32AtomicCompareExchange, kI32, kI32>,
2606 &BodyGen::memop<kExprI32AtomicAdd8U, kI32>,
2607 &BodyGen::memop<kExprI32AtomicSub8U, kI32>,
2608 &BodyGen::memop<kExprI32AtomicAnd8U, kI32>,
2609 &BodyGen::memop<kExprI32AtomicOr8U, kI32>,
2610 &BodyGen::memop<kExprI32AtomicXor8U, kI32>,
2611 &BodyGen::memop<kExprI32AtomicExchange8U, kI32>,
2612 &BodyGen::memop<kExprI32AtomicCompareExchange8U, kI32, kI32>,
2613 &BodyGen::memop<kExprI32AtomicAdd16U, kI32>,
2614 &BodyGen::memop<kExprI32AtomicSub16U, kI32>,
2615 &BodyGen::memop<kExprI32AtomicAnd16U, kI32>,
2616 &BodyGen::memop<kExprI32AtomicOr16U, kI32>,
2617 &BodyGen::memop<kExprI32AtomicXor16U, kI32>,
2618 &BodyGen::memop<kExprI32AtomicExchange16U, kI32>,
2619 &BodyGen::memop<kExprI32AtomicCompareExchange16U, kI32, kI32>,
2621 &BodyGen::memory_size,
2622 &BodyGen::grow_memory,
2624 &BodyGen::get_local<kI32>,
2625 &BodyGen::tee_local<kI32>,
2626 &BodyGen::get_global<kI32>,
2627 &BodyGen::op<kExprSelect, kI32, kI32, kI32>,
2628 &BodyGen::select_with_type<kI32>,
2630 &BodyGen::call<kI32>,
2631 &BodyGen::call_indirect<kI32>,
2632 &BodyGen::call_ref<kI32>,
2633 &BodyGen::try_block<kI32>,
2635 &BodyGen::table_size,
2636 &BodyGen::table_grow);
// SIMD-only alternatives that reduce an s128 to an i32.
2638 static constexpr auto kSimdAlternatives =
2639 CreateArray(&BodyGen::op_with_prefix<kExprV128AnyTrue, kS128>,
2640 &BodyGen::op_with_prefix<kExprI8x16AllTrue, kS128>,
2641 &BodyGen::op_with_prefix<kExprI8x16BitMask, kS128>,
2642 &BodyGen::op_with_prefix<kExprI16x8AllTrue, kS128>,
2643 &BodyGen::op_with_prefix<kExprI16x8BitMask, kS128>,
2644 &BodyGen::op_with_prefix<kExprI32x4AllTrue, kS128>,
2645 &BodyGen::op_with_prefix<kExprI32x4BitMask, kS128>,
2646 &BodyGen::op_with_prefix<kExprI64x2AllTrue, kS128>,
2647 &BodyGen::op_with_prefix<kExprI64x2BitMask, kS128>,
2648 &BodyGen::simd_lane_op<kExprI8x16ExtractLaneS, 16, kS128>,
2649 &BodyGen::simd_lane_op<kExprI8x16ExtractLaneU, 16, kS128>,
2650 &BodyGen::simd_lane_op<kExprI16x8ExtractLaneS, 8, kS128>,
2651 &BodyGen::simd_lane_op<kExprI16x8ExtractLaneU, 8, kS128>,
2652 &BodyGen::simd_lane_op<kExprI32x4ExtractLane, 4, kS128>);
// GC/stringref-era alternatives producing i32 (tests, lengths, compares).
2654 static constexpr auto kWasmGCAlternatives =
2655 CreateArray(&BodyGen::i31_get,
2657 &BodyGen::struct_get<kI32>,
2658 &BodyGen::array_get<kI32>,
2659 &BodyGen::array_len,
2661 &BodyGen::ref_is_null,
2663 &BodyGen::ref_test<kExprRefTest>,
2664 &BodyGen::ref_test<kExprRefTestNull>,
2666 &BodyGen::string_test,
2667 &BodyGen::string_charcodeat,
2668 &BodyGen::string_codepointat,
2669 &BodyGen::string_length,
2670 &BodyGen::string_equals,
2671 &BodyGen::string_compare,
2672 &BodyGen::string_intocharcodearray,
2673 &BodyGen::string_intoutf8array,
2674 &BodyGen::string_measureutf8);
2676 static constexpr GeneratorAlternativesPerOption kAlternativesPerOptions{
2677 kMvpAlternatives, kSimdAlternatives, kWasmGCAlternatives};
2679 GenerateOneOf(kAlternativesPerOptions, data);
// Emits a random expression of type i64 into the current function body.
// If the recursion budget is spent (see recursion_limit_reached()) or the
// fuzzer input is nearly exhausted, falls back to a single i64.const fed by
// pseudo-random bytes; otherwise picks one generator out of the per-option
// alternative tables (MVP / SIMD / WasmGC) via GenerateOneOf.
// NOTE(review): this chunk appears to be a lossy extraction -- the `return;`
// and closing-brace lines after the EmitI64Const fallback (and elsewhere)
// look dropped; verify against the upstream file before building.
2682 void GenerateI64(DataRange* data) {
2683 GeneratorRecursionScope rec_scope(
this);
2684 if (recursion_limit_reached() || data->size() <= 1) {
2685 builder_->EmitI64Const(data->getPseudoRandom<int64_t>());
// Alternatives always available (Wasm MVP plus threads/atomics opcodes).
2689 static constexpr auto kMvpAlternatives = CreateArray(
// Constants with 1..8 byte encodings.
2690 &BodyGen::i64_const<1>,
2691 &BodyGen::i64_const<2>,
2692 &BodyGen::i64_const<3>,
2693 &BodyGen::i64_const<4>,
2694 &BodyGen::i64_const<5>,
2695 &BodyGen::i64_const<6>,
2696 &BodyGen::i64_const<7>,
2697 &BodyGen::i64_const<8>,
// Sequences interleaving void-typed filler around the i64 value.
2699 &BodyGen::sequence<kI64, kVoid>,
2700 &BodyGen::sequence<kVoid, kI64>,
2701 &BodyGen::sequence<kVoid, kI64, kVoid>,
// Binary arithmetic.
2703 &BodyGen::op<kExprI64Add, kI64, kI64>,
2704 &BodyGen::op<kExprI64Sub, kI64, kI64>,
2705 &BodyGen::op<kExprI64Mul, kI64, kI64>,
// Division/remainder (may trap at runtime; intentional for fuzzing).
2707 &BodyGen::op<kExprI64DivS, kI64, kI64>,
2708 &BodyGen::op<kExprI64DivU, kI64, kI64>,
2709 &BodyGen::op<kExprI64RemS, kI64, kI64>,
2710 &BodyGen::op<kExprI64RemU, kI64, kI64>,
// Bitwise and shift/rotate ops.
2712 &BodyGen::op<kExprI64And, kI64, kI64>,
2713 &BodyGen::op<kExprI64Ior, kI64, kI64>,
2714 &BodyGen::op<kExprI64Xor, kI64, kI64>,
2715 &BodyGen::op<kExprI64Shl, kI64, kI64>,
2716 &BodyGen::op<kExprI64ShrU, kI64, kI64>,
2717 &BodyGen::op<kExprI64ShrS, kI64, kI64>,
2718 &BodyGen::op<kExprI64Ror, kI64, kI64>,
2719 &BodyGen::op<kExprI64Rol, kI64, kI64>,
// Unary bit-counting ops.
2721 &BodyGen::op<kExprI64Clz, kI64>,
2722 &BodyGen::op<kExprI64Ctz, kI64>,
2723 &BodyGen::op<kExprI64Popcnt, kI64>,
// Saturating float->i64 conversions (non-trapping, prefixed opcodes).
2725 &BodyGen::op_with_prefix<kExprI64SConvertSatF32, kF32>,
2726 &BodyGen::op_with_prefix<kExprI64UConvertSatF32, kF32>,
2727 &BodyGen::op_with_prefix<kExprI64SConvertSatF64, kF64>,
2728 &BodyGen::op_with_prefix<kExprI64UConvertSatF64, kF64>,
// Control-flow constructs producing an i64.
2730 &BodyGen::block<kI64>,
2731 &BodyGen::loop<kI64>,
2732 &BodyGen::finite_loop<kI64>,
2733 &BodyGen::if_<kI64, kIfElse>,
2734 &BodyGen::br_if<kI64>,
2735 &BodyGen::br_on_null<kI64>,
2736 &BodyGen::br_on_non_null<kI64>,
2737 &BodyGen::br_table<kI64>,
2738 &BodyGen::try_table_block<kI64>,
// Plain memory loads (full and narrow widths).
2740 &BodyGen::memop<kExprI64LoadMem>,
2741 &BodyGen::memop<kExprI64LoadMem8S>,
2742 &BodyGen::memop<kExprI64LoadMem8U>,
2743 &BodyGen::memop<kExprI64LoadMem16S>,
2744 &BodyGen::memop<kExprI64LoadMem16U>,
2745 &BodyGen::memop<kExprI64LoadMem32S>,
2746 &BodyGen::memop<kExprI64LoadMem32U>,
// Atomic loads and read-modify-write ops (full, 8/16/32-bit variants).
2748 &BodyGen::memop<kExprI64AtomicLoad>,
2749 &BodyGen::memop<kExprI64AtomicLoad8U>,
2750 &BodyGen::memop<kExprI64AtomicLoad16U>,
2751 &BodyGen::memop<kExprI64AtomicLoad32U>,
2752 &BodyGen::memop<kExprI64AtomicAdd, kI64>,
2753 &BodyGen::memop<kExprI64AtomicSub, kI64>,
2754 &BodyGen::memop<kExprI64AtomicAnd, kI64>,
2755 &BodyGen::memop<kExprI64AtomicOr, kI64>,
2756 &BodyGen::memop<kExprI64AtomicXor, kI64>,
2757 &BodyGen::memop<kExprI64AtomicExchange, kI64>,
2758 &BodyGen::memop<kExprI64AtomicCompareExchange, kI64, kI64>,
2759 &BodyGen::memop<kExprI64AtomicAdd8U, kI64>,
2760 &BodyGen::memop<kExprI64AtomicSub8U, kI64>,
2761 &BodyGen::memop<kExprI64AtomicAnd8U, kI64>,
2762 &BodyGen::memop<kExprI64AtomicOr8U, kI64>,
2763 &BodyGen::memop<kExprI64AtomicXor8U, kI64>,
2764 &BodyGen::memop<kExprI64AtomicExchange8U, kI64>,
2765 &BodyGen::memop<kExprI64AtomicCompareExchange8U, kI64, kI64>,
2766 &BodyGen::memop<kExprI64AtomicAdd16U, kI64>,
2767 &BodyGen::memop<kExprI64AtomicSub16U, kI64>,
2768 &BodyGen::memop<kExprI64AtomicAnd16U, kI64>,
2769 &BodyGen::memop<kExprI64AtomicOr16U, kI64>,
2770 &BodyGen::memop<kExprI64AtomicXor16U, kI64>,
2771 &BodyGen::memop<kExprI64AtomicExchange16U, kI64>,
2772 &BodyGen::memop<kExprI64AtomicCompareExchange16U, kI64, kI64>,
2773 &BodyGen::memop<kExprI64AtomicAdd32U, kI64>,
2774 &BodyGen::memop<kExprI64AtomicSub32U, kI64>,
2775 &BodyGen::memop<kExprI64AtomicAnd32U, kI64>,
2776 &BodyGen::memop<kExprI64AtomicOr32U, kI64>,
2777 &BodyGen::memop<kExprI64AtomicXor32U, kI64>,
2778 &BodyGen::memop<kExprI64AtomicExchange32U, kI64>,
2779 &BodyGen::memop<kExprI64AtomicCompareExchange32U, kI64, kI64>,
// Locals / globals / select.
2781 &BodyGen::get_local<kI64>,
2782 &BodyGen::tee_local<kI64>,
2783 &BodyGen::get_global<kI64>,
2784 &BodyGen::op<kExprSelect, kI64, kI64, kI32>,
2785 &BodyGen::select_with_type<kI64>,
// Calls and exception handling.
2787 &BodyGen::call<kI64>,
2788 &BodyGen::call_indirect<kI64>,
2789 &BodyGen::call_ref<kI64>,
2790 &BodyGen::try_block<kI64>);
// Only alternative requiring SIMD: extract an i64 lane from a v128.
2792 static constexpr auto kSimdAlternatives =
2793 CreateArray(&BodyGen::simd_lane_op<kExprI64x2ExtractLane, 2, kS128>);
// Alternatives requiring WasmGC: read an i64 field/element.
2795 static constexpr auto kWasmGCAlternatives =
2796 CreateArray(&BodyGen::struct_get<kI64>,
2797 &BodyGen::array_get<kI64>);
2799 static constexpr GeneratorAlternativesPerOption kAlternativesPerOptions{
2800 kMvpAlternatives, kSimdAlternatives, kWasmGCAlternatives};
2802 GenerateOneOf(kAlternativesPerOptions, data);
// Emits a random expression of type f32. Falls back to a single f32.const
// when the recursion budget or input bytes run low; otherwise chooses one
// generator from the per-option tables (MVP / SIMD / WasmGC).
// NOTE(review): extraction appears lossy here too (missing `return;` /
// closing braces after the fallback) -- verify against upstream.
2805 void GenerateF32(DataRange* data) {
2806 GeneratorRecursionScope rec_scope(
this);
2807 if (recursion_limit_reached() || data->size() <=
sizeof(
float)) {
2808 builder_->EmitF32Const(data->getPseudoRandom<
float>());
2812 static constexpr auto kMvpAlternatives = CreateArray(
// Sequences interleaving void-typed filler around the f32 value.
2813 &BodyGen::sequence<kF32, kVoid>, &BodyGen::sequence<kVoid, kF32>,
2814 &BodyGen::sequence<kVoid, kF32, kVoid>,
// Unary and binary float arithmetic.
2816 &BodyGen::op<kExprF32Abs, kF32>,
2817 &BodyGen::op<kExprF32Neg, kF32>,
2818 &BodyGen::op<kExprF32Ceil, kF32>,
2819 &BodyGen::op<kExprF32Floor, kF32>,
2820 &BodyGen::op<kExprF32Trunc, kF32>,
2821 &BodyGen::op<kExprF32NearestInt, kF32>,
2822 &BodyGen::op<kExprF32Sqrt, kF32>,
2823 &BodyGen::op<kExprF32Add, kF32, kF32>,
2824 &BodyGen::op<kExprF32Sub, kF32, kF32>,
2825 &BodyGen::op<kExprF32Mul, kF32, kF32>,
2826 &BodyGen::op<kExprF32Div, kF32, kF32>,
2827 &BodyGen::op<kExprF32Min, kF32, kF32>,
2828 &BodyGen::op<kExprF32Max, kF32, kF32>,
2829 &BodyGen::op<kExprF32CopySign, kF32, kF32>,
// Conversions from integer / f64, plus bit reinterpretation.
2831 &BodyGen::op<kExprF32SConvertI32, kI32>,
2832 &BodyGen::op<kExprF32UConvertI32, kI32>,
2833 &BodyGen::op<kExprF32SConvertI64, kI64>,
2834 &BodyGen::op<kExprF32UConvertI64, kI64>,
2835 &BodyGen::op<kExprF32ConvertF64, kF64>,
2836 &BodyGen::op<kExprF32ReinterpretI32, kI32>,
// Control-flow constructs producing an f32.
2838 &BodyGen::block<kF32>,
2839 &BodyGen::loop<kF32>,
2840 &BodyGen::finite_loop<kF32>,
2841 &BodyGen::if_<kF32, kIfElse>,
2842 &BodyGen::br_if<kF32>,
2843 &BodyGen::br_on_null<kF32>,
2844 &BodyGen::br_on_non_null<kF32>,
2845 &BodyGen::br_table<kF32>,
2846 &BodyGen::try_table_block<kF32>,
// Memory load.
2848 &BodyGen::memop<kExprF32LoadMem>,
// Locals / globals / select.
2850 &BodyGen::get_local<kF32>,
2851 &BodyGen::tee_local<kF32>,
2852 &BodyGen::get_global<kF32>,
2853 &BodyGen::op<kExprSelect, kF32, kF32, kI32>,
2854 &BodyGen::select_with_type<kF32>,
// Calls and exception handling.
2856 &BodyGen::call<kF32>,
2857 &BodyGen::call_indirect<kF32>,
2858 &BodyGen::call_ref<kF32>,
2859 &BodyGen::try_block<kF32>);
// SIMD-only alternative: extract an f32 lane from a v128.
2861 static constexpr auto kSimdAlternatives =
2862 CreateArray(&BodyGen::simd_lane_op<kExprF32x4ExtractLane, 4, kS128>);
// WasmGC-only alternatives: read an f32 struct field / array element.
2864 static constexpr auto kWasmGCAlternatives =
2865 CreateArray(&BodyGen::struct_get<kF32>,
2866 &BodyGen::array_get<kF32>);
2868 static constexpr GeneratorAlternativesPerOption kAlternativesPerOptions{
2869 kMvpAlternatives, kSimdAlternatives, kWasmGCAlternatives};
2871 GenerateOneOf(kAlternativesPerOptions, data);
// Emits a random expression of type f64; structurally parallel to
// GenerateF32 above (fallback f64.const, then one of the per-option
// alternative tables).
// NOTE(review): same lossy-extraction caveat as the sibling generators.
2874 void GenerateF64(DataRange* data) {
2875 GeneratorRecursionScope rec_scope(
this);
2876 if (recursion_limit_reached() || data->size() <=
sizeof(
double)) {
2877 builder_->EmitF64Const(data->getPseudoRandom<
double>());
2881 static constexpr auto kMvpAlternatives = CreateArray(
// Sequences interleaving void-typed filler around the f64 value.
2882 &BodyGen::sequence<kF64, kVoid>, &BodyGen::sequence<kVoid, kF64>,
2883 &BodyGen::sequence<kVoid, kF64, kVoid>,
// Unary and binary float arithmetic.
2885 &BodyGen::op<kExprF64Abs, kF64>,
2886 &BodyGen::op<kExprF64Neg, kF64>,
2887 &BodyGen::op<kExprF64Ceil, kF64>,
2888 &BodyGen::op<kExprF64Floor, kF64>,
2889 &BodyGen::op<kExprF64Trunc, kF64>,
2890 &BodyGen::op<kExprF64NearestInt, kF64>,
2891 &BodyGen::op<kExprF64Sqrt, kF64>,
2892 &BodyGen::op<kExprF64Add, kF64, kF64>,
2893 &BodyGen::op<kExprF64Sub, kF64, kF64>,
2894 &BodyGen::op<kExprF64Mul, kF64, kF64>,
2895 &BodyGen::op<kExprF64Div, kF64, kF64>,
2896 &BodyGen::op<kExprF64Min, kF64, kF64>,
2897 &BodyGen::op<kExprF64Max, kF64, kF64>,
2898 &BodyGen::op<kExprF64CopySign, kF64, kF64>,
// Conversions from integer / f32, plus bit reinterpretation.
2900 &BodyGen::op<kExprF64SConvertI32, kI32>,
2901 &BodyGen::op<kExprF64UConvertI32, kI32>,
2902 &BodyGen::op<kExprF64SConvertI64, kI64>,
2903 &BodyGen::op<kExprF64UConvertI64, kI64>,
2904 &BodyGen::op<kExprF64ConvertF32, kF32>,
2905 &BodyGen::op<kExprF64ReinterpretI64, kI64>,
// Control-flow constructs producing an f64.
2907 &BodyGen::block<kF64>,
2908 &BodyGen::loop<kF64>,
2909 &BodyGen::finite_loop<kF64>,
2910 &BodyGen::if_<kF64, kIfElse>,
2911 &BodyGen::br_if<kF64>,
2912 &BodyGen::br_on_null<kF64>,
2913 &BodyGen::br_on_non_null<kF64>,
2914 &BodyGen::br_table<kF64>,
2915 &BodyGen::try_table_block<kF64>,
// Memory load.
2917 &BodyGen::memop<kExprF64LoadMem>,
// Locals / globals / select.
2919 &BodyGen::get_local<kF64>,
2920 &BodyGen::tee_local<kF64>,
2921 &BodyGen::get_global<kF64>,
2922 &BodyGen::op<kExprSelect, kF64, kF64, kI32>,
2923 &BodyGen::select_with_type<kF64>,
// Calls and exception handling.
2925 &BodyGen::call<kF64>,
2926 &BodyGen::call_indirect<kF64>,
2927 &BodyGen::call_ref<kF64>,
2928 &BodyGen::try_block<kF64>);
// SIMD-only alternative: extract an f64 lane from a v128.
2930 static constexpr auto kSimdAlternatives =
2931 CreateArray(&BodyGen::simd_lane_op<kExprF64x2ExtractLane, 2, kS128>);
// WasmGC-only alternatives: read an f64 struct field / array element.
2933 static constexpr auto kWasmGCAlternatives =
2934 CreateArray(&BodyGen::struct_get<kF64>,
2935 &BodyGen::array_get<kF64>);
2937 static constexpr GeneratorAlternativesPerOption kAlternativesPerOptions{
2938 kMvpAlternatives, kSimdAlternatives, kWasmGCAlternatives};
2940 GenerateOneOf(kAlternativesPerOptions, data);
// Emits a random expression of type v128 (SIMD). On recursion/data
// exhaustion it emits i8x16.splat as a cheap fallback (presumably splatting
// a previously generated i32 -- the feeding code is not visible in this
// extraction; verify upstream). Unlike the scalar generators there is a
// single `alternatives` table: this function is only reachable when SIMD is
// enabled, so no per-option split is needed here.
// NOTE(review): several multi-line entries near the end (RelaxedLaneSelect,
// DotI8x16I7x16AddS) are missing their final template argument line in this
// extraction; verify against upstream before compiling.
2943 void GenerateS128(DataRange* data) {
2945 GeneratorRecursionScope rec_scope(
this);
2946 if (recursion_limit_reached() || data->size() <=
sizeof(int32_t)) {
2950 builder_->EmitWithPrefix(kExprI8x16Splat);
2954 constexpr auto alternatives = CreateArray(
// v128 constant and per-lane replacement for each lane shape.
2955 &BodyGen::simd_const,
2956 &BodyGen::simd_lane_op<kExprI8x16ReplaceLane, 16, kS128, kI32>,
2957 &BodyGen::simd_lane_op<kExprI16x8ReplaceLane, 8, kS128, kI32>,
2958 &BodyGen::simd_lane_op<kExprI32x4ReplaceLane, 4, kS128, kI32>,
2959 &BodyGen::simd_lane_op<kExprI64x2ReplaceLane, 2, kS128, kI64>,
2960 &BodyGen::simd_lane_op<kExprF32x4ReplaceLane, 4, kS128, kF32>,
2961 &BodyGen::simd_lane_op<kExprF64x2ReplaceLane, 2, kS128, kF64>,
// i8x16 lane ops: splat, comparisons, shifts, saturating arithmetic.
2963 &BodyGen::op_with_prefix<kExprI8x16Splat, kI32>,
2964 &BodyGen::op_with_prefix<kExprI8x16Eq, kS128, kS128>,
2965 &BodyGen::op_with_prefix<kExprI8x16Ne, kS128, kS128>,
2966 &BodyGen::op_with_prefix<kExprI8x16LtS, kS128, kS128>,
2967 &BodyGen::op_with_prefix<kExprI8x16LtU, kS128, kS128>,
2968 &BodyGen::op_with_prefix<kExprI8x16GtS, kS128, kS128>,
2969 &BodyGen::op_with_prefix<kExprI8x16GtU, kS128, kS128>,
2970 &BodyGen::op_with_prefix<kExprI8x16LeS, kS128, kS128>,
2971 &BodyGen::op_with_prefix<kExprI8x16LeU, kS128, kS128>,
2972 &BodyGen::op_with_prefix<kExprI8x16GeS, kS128, kS128>,
2973 &BodyGen::op_with_prefix<kExprI8x16GeU, kS128, kS128>,
2974 &BodyGen::op_with_prefix<kExprI8x16Abs, kS128>,
2975 &BodyGen::op_with_prefix<kExprI8x16Neg, kS128>,
2976 &BodyGen::op_with_prefix<kExprI8x16Shl, kS128, kI32>,
2977 &BodyGen::op_with_prefix<kExprI8x16ShrS, kS128, kI32>,
2978 &BodyGen::op_with_prefix<kExprI8x16ShrU, kS128, kI32>,
2979 &BodyGen::op_with_prefix<kExprI8x16Add, kS128, kS128>,
2980 &BodyGen::op_with_prefix<kExprI8x16AddSatS, kS128, kS128>,
2981 &BodyGen::op_with_prefix<kExprI8x16AddSatU, kS128, kS128>,
2982 &BodyGen::op_with_prefix<kExprI8x16Sub, kS128, kS128>,
2983 &BodyGen::op_with_prefix<kExprI8x16SubSatS, kS128, kS128>,
2984 &BodyGen::op_with_prefix<kExprI8x16SubSatU, kS128, kS128>,
2985 &BodyGen::op_with_prefix<kExprI8x16MinS, kS128, kS128>,
2986 &BodyGen::op_with_prefix<kExprI8x16MinU, kS128, kS128>,
2987 &BodyGen::op_with_prefix<kExprI8x16MaxS, kS128, kS128>,
2988 &BodyGen::op_with_prefix<kExprI8x16MaxU, kS128, kS128>,
2989 &BodyGen::op_with_prefix<kExprI8x16RoundingAverageU, kS128, kS128>,
2990 &BodyGen::op_with_prefix<kExprI8x16Popcnt, kS128>,
// i16x8 lane ops, including extended multiplies and pairwise adds.
2992 &BodyGen::op_with_prefix<kExprI16x8Splat, kI32>,
2993 &BodyGen::op_with_prefix<kExprI16x8Eq, kS128, kS128>,
2994 &BodyGen::op_with_prefix<kExprI16x8Ne, kS128, kS128>,
2995 &BodyGen::op_with_prefix<kExprI16x8LtS, kS128, kS128>,
2996 &BodyGen::op_with_prefix<kExprI16x8LtU, kS128, kS128>,
2997 &BodyGen::op_with_prefix<kExprI16x8GtS, kS128, kS128>,
2998 &BodyGen::op_with_prefix<kExprI16x8GtU, kS128, kS128>,
2999 &BodyGen::op_with_prefix<kExprI16x8LeS, kS128, kS128>,
3000 &BodyGen::op_with_prefix<kExprI16x8LeU, kS128, kS128>,
3001 &BodyGen::op_with_prefix<kExprI16x8GeS, kS128, kS128>,
3002 &BodyGen::op_with_prefix<kExprI16x8GeU, kS128, kS128>,
3003 &BodyGen::op_with_prefix<kExprI16x8Abs, kS128>,
3004 &BodyGen::op_with_prefix<kExprI16x8Neg, kS128>,
3005 &BodyGen::op_with_prefix<kExprI16x8Shl, kS128, kI32>,
3006 &BodyGen::op_with_prefix<kExprI16x8ShrS, kS128, kI32>,
3007 &BodyGen::op_with_prefix<kExprI16x8ShrU, kS128, kI32>,
3008 &BodyGen::op_with_prefix<kExprI16x8Add, kS128, kS128>,
3009 &BodyGen::op_with_prefix<kExprI16x8AddSatS, kS128, kS128>,
3010 &BodyGen::op_with_prefix<kExprI16x8AddSatU, kS128, kS128>,
3011 &BodyGen::op_with_prefix<kExprI16x8Sub, kS128, kS128>,
3012 &BodyGen::op_with_prefix<kExprI16x8SubSatS, kS128, kS128>,
3013 &BodyGen::op_with_prefix<kExprI16x8SubSatU, kS128, kS128>,
3014 &BodyGen::op_with_prefix<kExprI16x8Mul, kS128, kS128>,
3015 &BodyGen::op_with_prefix<kExprI16x8MinS, kS128, kS128>,
3016 &BodyGen::op_with_prefix<kExprI16x8MinU, kS128, kS128>,
3017 &BodyGen::op_with_prefix<kExprI16x8MaxS, kS128, kS128>,
3018 &BodyGen::op_with_prefix<kExprI16x8MaxU, kS128, kS128>,
3019 &BodyGen::op_with_prefix<kExprI16x8RoundingAverageU, kS128, kS128>,
3020 &BodyGen::op_with_prefix<kExprI16x8ExtMulLowI8x16S, kS128, kS128>,
3021 &BodyGen::op_with_prefix<kExprI16x8ExtMulLowI8x16U, kS128, kS128>,
3022 &BodyGen::op_with_prefix<kExprI16x8ExtMulHighI8x16S, kS128, kS128>,
3023 &BodyGen::op_with_prefix<kExprI16x8ExtMulHighI8x16U, kS128, kS128>,
3024 &BodyGen::op_with_prefix<kExprI16x8Q15MulRSatS, kS128, kS128>,
3025 &BodyGen::op_with_prefix<kExprI16x8ExtAddPairwiseI8x16S, kS128>,
3026 &BodyGen::op_with_prefix<kExprI16x8ExtAddPairwiseI8x16U, kS128>,
// i32x4 lane ops.
3028 &BodyGen::op_with_prefix<kExprI32x4Splat, kI32>,
3029 &BodyGen::op_with_prefix<kExprI32x4Eq, kS128, kS128>,
3030 &BodyGen::op_with_prefix<kExprI32x4Ne, kS128, kS128>,
3031 &BodyGen::op_with_prefix<kExprI32x4LtS, kS128, kS128>,
3032 &BodyGen::op_with_prefix<kExprI32x4LtU, kS128, kS128>,
3033 &BodyGen::op_with_prefix<kExprI32x4GtS, kS128, kS128>,
3034 &BodyGen::op_with_prefix<kExprI32x4GtU, kS128, kS128>,
3035 &BodyGen::op_with_prefix<kExprI32x4LeS, kS128, kS128>,
3036 &BodyGen::op_with_prefix<kExprI32x4LeU, kS128, kS128>,
3037 &BodyGen::op_with_prefix<kExprI32x4GeS, kS128, kS128>,
3038 &BodyGen::op_with_prefix<kExprI32x4GeU, kS128, kS128>,
3039 &BodyGen::op_with_prefix<kExprI32x4Abs, kS128>,
3040 &BodyGen::op_with_prefix<kExprI32x4Neg, kS128>,
3041 &BodyGen::op_with_prefix<kExprI32x4Shl, kS128, kI32>,
3042 &BodyGen::op_with_prefix<kExprI32x4ShrS, kS128, kI32>,
3043 &BodyGen::op_with_prefix<kExprI32x4ShrU, kS128, kI32>,
3044 &BodyGen::op_with_prefix<kExprI32x4Add, kS128, kS128>,
3045 &BodyGen::op_with_prefix<kExprI32x4Sub, kS128, kS128>,
3046 &BodyGen::op_with_prefix<kExprI32x4Mul, kS128, kS128>,
3047 &BodyGen::op_with_prefix<kExprI32x4MinS, kS128, kS128>,
3048 &BodyGen::op_with_prefix<kExprI32x4MinU, kS128, kS128>,
3049 &BodyGen::op_with_prefix<kExprI32x4MaxS, kS128, kS128>,
3050 &BodyGen::op_with_prefix<kExprI32x4MaxU, kS128, kS128>,
3051 &BodyGen::op_with_prefix<kExprI32x4DotI16x8S, kS128, kS128>,
3052 &BodyGen::op_with_prefix<kExprI32x4ExtMulLowI16x8S, kS128, kS128>,
3053 &BodyGen::op_with_prefix<kExprI32x4ExtMulLowI16x8U, kS128, kS128>,
3054 &BodyGen::op_with_prefix<kExprI32x4ExtMulHighI16x8S, kS128, kS128>,
3055 &BodyGen::op_with_prefix<kExprI32x4ExtMulHighI16x8U, kS128, kS128>,
3056 &BodyGen::op_with_prefix<kExprI32x4ExtAddPairwiseI16x8S, kS128>,
3057 &BodyGen::op_with_prefix<kExprI32x4ExtAddPairwiseI16x8U, kS128>,
// i64x2 lane ops (no unsigned comparisons exist for this shape).
3059 &BodyGen::op_with_prefix<kExprI64x2Splat, kI64>,
3060 &BodyGen::op_with_prefix<kExprI64x2Eq, kS128, kS128>,
3061 &BodyGen::op_with_prefix<kExprI64x2Ne, kS128, kS128>,
3062 &BodyGen::op_with_prefix<kExprI64x2LtS, kS128, kS128>,
3063 &BodyGen::op_with_prefix<kExprI64x2GtS, kS128, kS128>,
3064 &BodyGen::op_with_prefix<kExprI64x2LeS, kS128, kS128>,
3065 &BodyGen::op_with_prefix<kExprI64x2GeS, kS128, kS128>,
3066 &BodyGen::op_with_prefix<kExprI64x2Abs, kS128>,
3067 &BodyGen::op_with_prefix<kExprI64x2Neg, kS128>,
3068 &BodyGen::op_with_prefix<kExprI64x2Shl, kS128, kI32>,
3069 &BodyGen::op_with_prefix<kExprI64x2ShrS, kS128, kI32>,
3070 &BodyGen::op_with_prefix<kExprI64x2ShrU, kS128, kI32>,
3071 &BodyGen::op_with_prefix<kExprI64x2Add, kS128, kS128>,
3072 &BodyGen::op_with_prefix<kExprI64x2Sub, kS128, kS128>,
3073 &BodyGen::op_with_prefix<kExprI64x2Mul, kS128, kS128>,
3074 &BodyGen::op_with_prefix<kExprI64x2ExtMulLowI32x4S, kS128, kS128>,
3075 &BodyGen::op_with_prefix<kExprI64x2ExtMulLowI32x4U, kS128, kS128>,
3076 &BodyGen::op_with_prefix<kExprI64x2ExtMulHighI32x4S, kS128, kS128>,
3077 &BodyGen::op_with_prefix<kExprI64x2ExtMulHighI32x4U, kS128, kS128>,
// f32x4 lane ops.
3079 &BodyGen::op_with_prefix<kExprF32x4Splat, kF32>,
3080 &BodyGen::op_with_prefix<kExprF32x4Eq, kS128, kS128>,
3081 &BodyGen::op_with_prefix<kExprF32x4Ne, kS128, kS128>,
3082 &BodyGen::op_with_prefix<kExprF32x4Lt, kS128, kS128>,
3083 &BodyGen::op_with_prefix<kExprF32x4Gt, kS128, kS128>,
3084 &BodyGen::op_with_prefix<kExprF32x4Le, kS128, kS128>,
3085 &BodyGen::op_with_prefix<kExprF32x4Ge, kS128, kS128>,
3086 &BodyGen::op_with_prefix<kExprF32x4Abs, kS128>,
3087 &BodyGen::op_with_prefix<kExprF32x4Neg, kS128>,
3088 &BodyGen::op_with_prefix<kExprF32x4Sqrt, kS128>,
3089 &BodyGen::op_with_prefix<kExprF32x4Add, kS128, kS128>,
3090 &BodyGen::op_with_prefix<kExprF32x4Sub, kS128, kS128>,
3091 &BodyGen::op_with_prefix<kExprF32x4Mul, kS128, kS128>,
3092 &BodyGen::op_with_prefix<kExprF32x4Div, kS128, kS128>,
3093 &BodyGen::op_with_prefix<kExprF32x4Min, kS128, kS128>,
3094 &BodyGen::op_with_prefix<kExprF32x4Max, kS128, kS128>,
3095 &BodyGen::op_with_prefix<kExprF32x4Pmin, kS128, kS128>,
3096 &BodyGen::op_with_prefix<kExprF32x4Pmax, kS128, kS128>,
3097 &BodyGen::op_with_prefix<kExprF32x4Ceil, kS128>,
3098 &BodyGen::op_with_prefix<kExprF32x4Floor, kS128>,
3099 &BodyGen::op_with_prefix<kExprF32x4Trunc, kS128>,
3100 &BodyGen::op_with_prefix<kExprF32x4NearestInt, kS128>,
// f64x2 lane ops.
3102 &BodyGen::op_with_prefix<kExprF64x2Splat, kF64>,
3103 &BodyGen::op_with_prefix<kExprF64x2Eq, kS128, kS128>,
3104 &BodyGen::op_with_prefix<kExprF64x2Ne, kS128, kS128>,
3105 &BodyGen::op_with_prefix<kExprF64x2Lt, kS128, kS128>,
3106 &BodyGen::op_with_prefix<kExprF64x2Gt, kS128, kS128>,
3107 &BodyGen::op_with_prefix<kExprF64x2Le, kS128, kS128>,
3108 &BodyGen::op_with_prefix<kExprF64x2Ge, kS128, kS128>,
3109 &BodyGen::op_with_prefix<kExprF64x2Abs, kS128>,
3110 &BodyGen::op_with_prefix<kExprF64x2Neg, kS128>,
3111 &BodyGen::op_with_prefix<kExprF64x2Sqrt, kS128>,
3112 &BodyGen::op_with_prefix<kExprF64x2Add, kS128, kS128>,
3113 &BodyGen::op_with_prefix<kExprF64x2Sub, kS128, kS128>,
3114 &BodyGen::op_with_prefix<kExprF64x2Mul, kS128, kS128>,
3115 &BodyGen::op_with_prefix<kExprF64x2Div, kS128, kS128>,
3116 &BodyGen::op_with_prefix<kExprF64x2Min, kS128, kS128>,
3117 &BodyGen::op_with_prefix<kExprF64x2Max, kS128, kS128>,
3118 &BodyGen::op_with_prefix<kExprF64x2Pmin, kS128, kS128>,
3119 &BodyGen::op_with_prefix<kExprF64x2Pmax, kS128, kS128>,
3120 &BodyGen::op_with_prefix<kExprF64x2Ceil, kS128>,
3121 &BodyGen::op_with_prefix<kExprF64x2Floor, kS128>,
3122 &BodyGen::op_with_prefix<kExprF64x2Trunc, kS128>,
3123 &BodyGen::op_with_prefix<kExprF64x2NearestInt, kS128>,
// Float <-> float and float <-> int lane conversions.
3125 &BodyGen::op_with_prefix<kExprF64x2PromoteLowF32x4, kS128>,
3126 &BodyGen::op_with_prefix<kExprF64x2ConvertLowI32x4S, kS128>,
3127 &BodyGen::op_with_prefix<kExprF64x2ConvertLowI32x4U, kS128>,
3128 &BodyGen::op_with_prefix<kExprF32x4DemoteF64x2Zero, kS128>,
3129 &BodyGen::op_with_prefix<kExprI32x4TruncSatF64x2SZero, kS128>,
3130 &BodyGen::op_with_prefix<kExprI32x4TruncSatF64x2UZero, kS128>,
// Integer widening conversions.
3132 &BodyGen::op_with_prefix<kExprI64x2SConvertI32x4Low, kS128>,
3133 &BodyGen::op_with_prefix<kExprI64x2SConvertI32x4High, kS128>,
3134 &BodyGen::op_with_prefix<kExprI64x2UConvertI32x4Low, kS128>,
3135 &BodyGen::op_with_prefix<kExprI64x2UConvertI32x4High, kS128>,
3137 &BodyGen::op_with_prefix<kExprI32x4SConvertF32x4, kS128>,
3138 &BodyGen::op_with_prefix<kExprI32x4UConvertF32x4, kS128>,
3139 &BodyGen::op_with_prefix<kExprF32x4SConvertI32x4, kS128>,
3140 &BodyGen::op_with_prefix<kExprF32x4UConvertI32x4, kS128>,
// Narrowing conversions.
3142 &BodyGen::op_with_prefix<kExprI8x16SConvertI16x8, kS128, kS128>,
3143 &BodyGen::op_with_prefix<kExprI8x16UConvertI16x8, kS128, kS128>,
3144 &BodyGen::op_with_prefix<kExprI16x8SConvertI32x4, kS128, kS128>,
3145 &BodyGen::op_with_prefix<kExprI16x8UConvertI32x4, kS128, kS128>,
3147 &BodyGen::op_with_prefix<kExprI16x8SConvertI8x16Low, kS128>,
3148 &BodyGen::op_with_prefix<kExprI16x8SConvertI8x16High, kS128>,
3149 &BodyGen::op_with_prefix<kExprI16x8UConvertI8x16Low, kS128>,
3150 &BodyGen::op_with_prefix<kExprI16x8UConvertI8x16High, kS128>,
3151 &BodyGen::op_with_prefix<kExprI32x4SConvertI16x8Low, kS128>,
3152 &BodyGen::op_with_prefix<kExprI32x4SConvertI16x8High, kS128>,
3153 &BodyGen::op_with_prefix<kExprI32x4UConvertI16x8Low, kS128>,
3154 &BodyGen::op_with_prefix<kExprI32x4UConvertI16x8High, kS128>,
// Whole-vector bitwise logic and select.
3156 &BodyGen::op_with_prefix<kExprS128Not, kS128>,
3157 &BodyGen::op_with_prefix<kExprS128And, kS128, kS128>,
3158 &BodyGen::op_with_prefix<kExprS128AndNot, kS128, kS128>,
3159 &BodyGen::op_with_prefix<kExprS128Or, kS128, kS128>,
3160 &BodyGen::op_with_prefix<kExprS128Xor, kS128, kS128>,
3161 &BodyGen::op_with_prefix<kExprS128Select, kS128, kS128, kS128>,
// Shuffle / swizzle.
3163 &BodyGen::simd_shuffle,
3164 &BodyGen::op_with_prefix<kExprI8x16Swizzle, kS128, kS128>,
// SIMD memory loads (full, extending, splat, zero-extend, lane).
3166 &BodyGen::memop<kExprS128LoadMem>,
3167 &BodyGen::memop<kExprS128Load8x8S>,
3168 &BodyGen::memop<kExprS128Load8x8U>,
3169 &BodyGen::memop<kExprS128Load16x4S>,
3170 &BodyGen::memop<kExprS128Load16x4U>,
3171 &BodyGen::memop<kExprS128Load32x2S>,
3172 &BodyGen::memop<kExprS128Load32x2U>,
3173 &BodyGen::memop<kExprS128Load8Splat>,
3174 &BodyGen::memop<kExprS128Load16Splat>,
3175 &BodyGen::memop<kExprS128Load32Splat>,
3176 &BodyGen::memop<kExprS128Load64Splat>,
3177 &BodyGen::memop<kExprS128Load32Zero>,
3178 &BodyGen::memop<kExprS128Load64Zero>,
3179 &BodyGen::simd_lane_memop<kExprS128Load8Lane, 16, kS128>,
3180 &BodyGen::simd_lane_memop<kExprS128Load16Lane, 8, kS128>,
3181 &BodyGen::simd_lane_memop<kExprS128Load32Lane, 4, kS128>,
3182 &BodyGen::simd_lane_memop<kExprS128Load64Lane, 2, kS128>,
// Relaxed-SIMD ops (result may be implementation-defined by design).
3184 &BodyGen::op_with_prefix<kExprI8x16RelaxedSwizzle, kS128, kS128>,
3185 &BodyGen::op_with_prefix<kExprI8x16RelaxedLaneSelect, kS128, kS128,
3187 &BodyGen::op_with_prefix<kExprI16x8RelaxedLaneSelect, kS128, kS128,
3189 &BodyGen::op_with_prefix<kExprI32x4RelaxedLaneSelect, kS128, kS128,
3191 &BodyGen::op_with_prefix<kExprI64x2RelaxedLaneSelect, kS128, kS128,
3193 &BodyGen::op_with_prefix<kExprF32x4Qfma, kS128, kS128, kS128>,
3194 &BodyGen::op_with_prefix<kExprF32x4Qfms, kS128, kS128, kS128>,
3195 &BodyGen::op_with_prefix<kExprF64x2Qfma, kS128, kS128, kS128>,
3196 &BodyGen::op_with_prefix<kExprF64x2Qfms, kS128, kS128, kS128>,
3197 &BodyGen::op_with_prefix<kExprF32x4RelaxedMin, kS128, kS128>,
3198 &BodyGen::op_with_prefix<kExprF32x4RelaxedMax, kS128, kS128>,
3199 &BodyGen::op_with_prefix<kExprF64x2RelaxedMin, kS128, kS128>,
3200 &BodyGen::op_with_prefix<kExprF64x2RelaxedMax, kS128, kS128>,
3201 &BodyGen::op_with_prefix<kExprI32x4RelaxedTruncF32x4S, kS128>,
3202 &BodyGen::op_with_prefix<kExprI32x4RelaxedTruncF32x4U, kS128>,
3203 &BodyGen::op_with_prefix<kExprI32x4RelaxedTruncF64x2SZero, kS128>,
3204 &BodyGen::op_with_prefix<kExprI32x4RelaxedTruncF64x2UZero, kS128>,
3205 &BodyGen::op_with_prefix<kExprI16x8DotI8x16I7x16S, kS128, kS128>,
3206 &BodyGen::op_with_prefix<kExprI32x4DotI8x16I7x16AddS, kS128, kS128,
3209 GenerateOneOf(alternatives, data);
// Runtime dispatch: generates a random expression of the dynamically given
// ValueType by forwarding to the kind-specific generator. Reference kinds
// forward to GenerateRef with the matching nullability.
// NOTE(review): the `case k...:` labels of this switch were dropped by the
// lossy extraction; only the dispatch calls remain visible.
3212 void Generate(ValueType type, DataRange* data) {
3213 switch (type.kind()) {
3215 return GenerateVoid(data);
3217 return GenerateI32(data);
3219 return GenerateI64(data);
3221 return GenerateF32(data);
3223 return GenerateF64(data);
3225 return GenerateS128(data);
3227 return GenerateRef(type.heap_type(), data, kNullable);
3229 return GenerateRef(type.heap_type(), data, kNonNullable);
// Compile-time dispatch counterpart of Generate(ValueType, ...): the value
// kind is a template parameter, so the branch is resolved statically.
// NOTE(review): the `if constexpr` / `case` selector lines were dropped by
// the lossy extraction; only the dispatch calls remain visible.
3235 template <ValueKind kind>
3236 constexpr void Generate(DataRange* data) {
3239 return GenerateVoid(data);
3241 return GenerateI32(data);
3243 return GenerateI64(data);
3245 return GenerateF32(data);
3247 return GenerateF64(data);
3249 return GenerateS128(data);
// Variadic overload: generates one value per kind in the pack. The first
// kind consumes a split-off slice of the input data; the remaining kinds
// recurse on the rest, so each value draws from independent bytes.
// NOTE(review): the `template <ValueKind T1, ValueKind T2, ValueKind...Ts>`
// header line appears to have been dropped by the lossy extraction.
3258 void Generate(DataRange* data) {
3260 auto first_data = data->split();
3261 Generate<T1>(&first_data);
3262 Generate<
T2, Ts...>(
data);
// Generates a random reference-typed expression for the given heap type.
// On recursion/data exhaustion, emits ref.null when the type is nullable
// (the non-nullable fallback path is not visible in this extraction).
// Otherwise dispatches on the abstract heap type (any/array/struct/eq/func/
// i31/exn/extern/none-variants) or, in the default case, on a concrete
// indexed type. Several closing braces, `return`s and `case` labels were
// dropped by the lossy extraction -- verify against upstream.
3265 void GenerateRef(HeapType type, DataRange* data,
3266 Nullability nullability = kNullable) {
3267 std::optional<GeneratorRecursionScope> rec_scope;
3269 rec_scope.emplace(
this);
3272 if (recursion_limit_reached() || data->size() == 0) {
3273 if (nullability == kNullable) {
3274 ref_null(type, data);
// Generator tables shared by the cases below. "indexed" = concrete
// user-defined struct/array/function types; "func_any" additionally
// allows table.get and extern->any conversion; "other" is the subset
// valid for the remaining abstract types.
3281 constexpr auto alternatives_indexed_type =
3282 CreateArray(&BodyGen::new_object,
3283 &BodyGen::get_local_ref,
3284 &BodyGen::array_get_ref,
3285 &BodyGen::struct_get_ref,
3287 &BodyGen::ref_as_non_null,
3288 &BodyGen::br_on_cast);
3290 constexpr auto alternatives_func_any =
3291 CreateArray(&BodyGen::table_get,
3292 &BodyGen::get_local_ref,
3293 &BodyGen::array_get_ref,
3294 &BodyGen::struct_get_ref,
3296 &BodyGen::any_convert_extern,
3297 &BodyGen::ref_as_non_null,
3298 &BodyGen::br_on_cast);
3300 constexpr auto alternatives_other =
3301 CreateArray(&BodyGen::array_get_ref,
3302 &BodyGen::get_local_ref,
3303 &BodyGen::struct_get_ref,
3305 &BodyGen::ref_as_non_null,
3306 &BodyGen::br_on_cast);
3308 switch (type.representation()) {
// anyref: weighted choice between struct/array data, i31, or a
// fallback generator from the func_any table.
3310 case HeapType::kAny: {
3315 const uint8_t num_data_types =
3317 const uint8_t emit_i31ref = 2;
3318 const uint8_t fallback_to_anyref = 2;
3319 uint8_t random = data->get<uint8_t>() %
3320 (num_data_types + emit_i31ref + fallback_to_anyref);
3326 if (random >= num_data_types + emit_i31ref) {
3327 if (GenerateOneOf(alternatives_func_any, type, data, nullability)) {
3330 random = data->get<uint8_t>() % (num_data_types + emit_i31ref);
3333 GenerateRef(kWasmStructRef, data, nullability);
3334 }
else if (random < num_data_types) {
3335 GenerateRef(kWasmArrayRef, data, nullability);
3337 GenerateRef(kWasmI31Ref, data, nullability);
// arrayref: pick one of the module's array types or fall back.
3341 case HeapType::kArray: {
3342 constexpr uint8_t fallback_to_dataref = 1;
3344 data->get<uint8_t>() % (
arrays_.size() + fallback_to_dataref);
3347 if (random >=
arrays_.size()) {
3348 if (GenerateOneOf(alternatives_other, type, data, nullability))
3350 random = data->get<uint8_t>() %
arrays_.size();
3354 GenerateRef(HeapType::Index(index, kNotShared, RefTypeKind::kArray),
// structref: pick one of the module's struct types or fall back.
3358 case HeapType::kStruct: {
3359 constexpr uint8_t fallback_to_dataref = 2;
3361 data->get<uint8_t>() % (
structs_.size() + fallback_to_dataref);
3365 if (GenerateOneOf(alternatives_other, type, data, nullability)) {
3368 random = data->get<uint8_t>() %
structs_.size();
3372 GenerateRef(HeapType::Index(index, kNotShared, RefTypeKind::kStruct),
// eqref: concrete struct/array index, i31, or fallback; the chosen
// index is classified as array or struct via the builder.
3376 case HeapType::kEq: {
3378 const uint8_t emit_i31ref = 2;
3379 constexpr uint8_t fallback_to_eqref = 1;
3380 uint8_t random = data->get<uint8_t>() %
3381 (num_types + emit_i31ref + fallback_to_eqref);
3384 if (random >= num_types + emit_i31ref) {
3385 if (GenerateOneOf(alternatives_other, type, data, nullability)) {
3388 random = data->get<uint8_t>() % (num_types + emit_i31ref);
3390 if (random < num_types) {
3394 if (
builder_->builder()->IsArrayType(random)) {
3395 kind = RefTypeKind::kArray;
3396 }
else if (
builder_->builder()->IsStructType(random)) {
3397 kind = RefTypeKind::kStruct;
3405 GenerateRef(kWasmI31Ref, data, nullability);
// funcref: pick one of the module's functions (by signature) or
// fall back to the func_any generators.
3409 case HeapType::kFunc: {
3410 uint32_t random = data->get<uint8_t>() % (
functions_.size() + 1);
3414 if (GenerateOneOf(alternatives_func_any, type, data, nullability)) {
3417 random = data->get<uint8_t>() %
functions_.size();
3421 GenerateRef(HeapType::Index(signature_index, kNotShared,
3422 RefTypeKind::kFunction),
// i31ref: either a fallback generator or ref.i31 of a fresh i32.
3426 case HeapType::kI31: {
3429 if (data->get<
bool>() &&
3430 GenerateOneOf(alternatives_other, type, data, nullability)) {
3433 Generate(kWasmI32, data);
3434 builder_->EmitWithPrefix(kExprRefI31);
// exnref: only ref.null is generated here.
3437 case HeapType::kExn: {
3439 ref_null(type, data);
3440 if (nullability == kNonNullable) {
// externref: either convert an anyref, or (with WasmGC strings
// enabled) produce a string via one of seven string generators.
3445 case HeapType::kExtern: {
3446 uint8_t choice = data->get<uint8_t>();
3449 GenerateRef(kWasmAnyRef, data);
3450 builder_->EmitWithPrefix(kExprExternConvertAny);
3451 if (nullability == kNonNullable) {
3457 if (choice < 230 &&
options_.generate_wasm_gc()) {
3458 uint8_t subchoice = choice % 7;
3459 switch (subchoice) {
3461 return string_cast(data);
3463 return string_fromcharcode(data);
3465 return string_fromcodepoint(data);
3467 return string_concat(data);
3469 return string_substring(data);
3471 return string_fromcharcodearray(data);
3473 return string_fromutf8array(data);
// Bottom types: only null inhabits them.
3479 case HeapType::kNoExtern:
3480 case HeapType::kNoFunc:
3481 case HeapType::kNone:
3482 case HeapType::kNoExn:
3483 ref_null(type, data);
3484 if (nullability == kNonNullable) {
// Default: a concrete indexed type; occasionally emit a string-to-
// utf8-array instead, otherwise use the indexed-type generators.
3493 data->get<uint8_t>() < 32) {
3495 return string_toutf8array(data);
3497 GenerateOneOf(alternatives_indexed_type, type, data, nullability);
// Generates a reference of a randomly chosen top heap type.
// NOTE(review): the initializer list of `top_types` was dropped by the
// lossy extraction; verify its contents upstream.
3503 void GenerateRef(DataRange* data) {
3504 constexpr HeapType top_types[] = {
3509 HeapType type = top_types[data->get<uint8_t>() %
arraysize(top_types)];
3510 GenerateRef(type, data);
// Produces a random list of value types by delegating to the free function
// fuzzing::GenerateTypes.
// NOTE(review): the argument lines of the delegated call were dropped by
// the lossy extraction.
3513 std::vector<ValueType> GenerateTypes(DataRange* data) {
3514 return fuzzing::GenerateTypes(
// Generates expressions producing the given list of result types.
// With probability 1/32 wraps everything in a block with random parameter
// types (feeding them first, then emitting the block). Otherwise: zero
// types -> a void expression; one type -> a single value; multiple types ->
// split the list at a random index and generate each half from independent
// data (the first half from a split-off DataRange slice).
3520 void Generate(base::Vector<const ValueType> types, DataRange* data) {
3524 bool generate_block = data->get<uint8_t>() % 32 == 1;
3525 if (generate_block) {
3526 GeneratorRecursionScope rec_scope(
this);
3527 if (!recursion_limit_reached()) {
3528 const auto param_types = GenerateTypes(data);
3529 Generate(base::VectorOf(param_types), data);
3530 any_block(base::VectorOf(param_types), types, data);
3535 if (types.size() == 0) {
3536 Generate(kWasmVoid, data);
3539 if (types.size() == 1) {
3540 Generate(types[0], data);
// Multiple types: recursively generate both halves.
3546 size_t split_index = data->get<uint8_t>() % (types.size() - 1) + 1;
3547 base::Vector<const ValueType> lower_half = types.SubVector(0, split_index);
3548 base::Vector<const ValueType> upper_half =
3549 types.SubVector(split_index, types.size());
3550 DataRange first_range = data->split();
3551 Generate(lower_half, &first_range);
3552 Generate(upper_half, data);
// Convenience overload: reinterprets an initializer_list of ValueTypeBase
// as a vector of ValueType and forwards to the vector overload above.
3554 void Generate(std::initializer_list<ValueTypeBase> types, DataRange* data) {
3555 base::Vector<const ValueType> cast_types = base::VectorOf<const ValueType>(
3556 static_cast<const ValueType*
>(types.begin()), types.size());
3557 return Generate(cast_types, data);
// Consumes a value of `type` currently on the stack: stores it into the
// first local, then the first parameter, with a matching type (via
// local.set). Locals come after parameters in the wasm index space, hence
// the `local_offset + num_params` adjustment.
// NOTE(review): loop increments, `return`s and the final fallthrough
// (presumably a drop) were lost in this extraction; verify upstream.
3560 void Consume(ValueType type) {
3564 size_t num_params =
builder_->signature()->parameter_count();
3565 for (uint32_t local_offset = 0; local_offset <
locals_.size();
3567 if (
locals_[local_offset] == type) {
3568 uint32_t local_index =
static_cast<uint32_t
>(local_offset + num_params);
3569 builder_->EmitWithU32V(kExprLocalSet, local_index);
// Fall back to storing into a parameter of the same type.
3573 for (uint32_t param_index = 0; param_index < num_params; ++param_index) {
3574 if (
builder_->signature()->GetParam(param_index) == type) {
3575 builder_->EmitWithU32V(kExprLocalSet, param_index);
// Given values of `param_types` on the stack, consumes them and produces
// values of `return_types`. If the first return type is not a primitive
// (per the local `primitive` predicate, whose body is lost in this
// extraction) or either list is empty, all parameters are consumed and the
// returns generated from scratch. Otherwise: count the leading primitive
// parameters, pick one of them (or -1 for none) as `return_index`, consume
// the stack values above it, Convert() the chain down to slot 0, convert
// slot 0 into the first return type, and generate the remaining returns.
3585 void ConsumeAndGenerate(base::Vector<const ValueType> param_types,
3586 base::Vector<const ValueType> return_types,
3592 auto primitive = [](ValueType t) ->
bool {
3604 if (return_types.size() == 0 || param_types.size() == 0 ||
3605 !primitive(return_types[0])) {
3606 for (
auto iter = param_types.rbegin(); iter != param_types.rend();
3610 Generate(return_types, data);
// Count leading primitive-typed parameters eligible for reuse.
3614 int bottom_primitives = 0;
3616 while (
static_cast<int>(param_types.size()) > bottom_primitives &&
3617 primitive(param_types[bottom_primitives])) {
3618 bottom_primitives++;
3621 bottom_primitives > 0 ? (data->get<uint8_t>() % bottom_primitives) : -1;
// Consume everything above the chosen slot (top of stack first).
3622 for (
int i =
static_cast<int>(param_types.size() - 1);
i > return_index;
3624 Consume(param_types[
i]);
// Convert the remaining chain downward toward slot 0.
3626 for (
int i = return_index;
i > 0; --
i) {
3627 Convert(param_types[
i], param_types[
i - 1]);
3631 DCHECK(!return_types.empty());
3632 if (return_index >= 0) {
3633 Convert(param_types[0], return_types[0]);
3634 Generate(return_types + 1, data);
3636 Generate(return_types, data);
3640 void InitializeNonDefaultableLocals(DataRange* data) {
3642 if (!
locals_[
i].is_defaultable()) {
3643 GenerateRef(
locals_[
i].heap_type(), data, kNonNullable);
3645 kExprLocalSet,
i +
static_cast<uint32_t
>(
3646 builder_->signature()->parameter_count()));
3653 bool recursion_limit_reached() {
3672WasmInitExpr GenerateInitExpr(
Zone* zone, DataRange& range,
3673 WasmModuleBuilder* builder, ValueType type,
3674 const std::vector<ModuleTypeIndex>& structs,
3675 const std::vector<ModuleTypeIndex>& arrays,
3680 explicit ModuleGen(
Zone* zone, WasmModuleGenerationOptions options,
3681 WasmModuleBuilder*
fn, DataRange* module_range,
3682 uint8_t num_functions, uint8_t num_structs,
3683 uint8_t num_arrays, uint8_t num_signatures)
3691 num_types_(num_signatures + num_structs + num_arrays) {}
3694 void GenerateRandomMemories() {
3695 int num_memories = 1 + (
module_range_->get<uint8_t>() % kMaxMemories);
3696 for (
int i = 0;
i < num_memories;
i++) {
3698 bool mem64 = random_byte & 1;
3699 bool has_maximum = random_byte & 2;
3701 uint32_t max_supported_pages =
3703 uint32_t min_pages =
3706 uint32_t max_pages =
3708 (max_supported_pages + 1));
3710 builder_->AddMemory64(min_pages, max_pages);
3712 builder_->AddMemory(min_pages, max_pages);
3725 std::map<uint8_t, uint8_t> GenerateRandomRecursiveGroups(
3726 uint8_t kNumDefaultArrayTypes) {
3728 std::map<uint8_t, uint8_t> explicit_rec_groups;
3729 uint8_t current_type_index = 0;
3732 for (uint8_t
i = 0;
i < kNumDefaultArrayTypes;
i++) {
3733 explicit_rec_groups.emplace(current_type_index, current_type_index);
3734 builder_->AddRecursiveTypeGroup(current_type_index++, 1);
3743 DCHECK_GE(group_start, current_type_index);
3744 current_type_index = group_start;
3747 uint8_t group_size =
3750 for (uint8_t
i = group_start;
i < group_start + group_size;
i++) {
3751 explicit_rec_groups.emplace(
i, group_start + group_size - 1);
3753 builder_->AddRecursiveTypeGroup(group_start, group_size);
3754 current_type_index += group_size;
3757 return explicit_rec_groups;
3761 void GenerateRandomStructs(
3762 const std::map<uint8_t, uint8_t>& explicit_rec_groups,
3763 std::vector<ModuleTypeIndex>& struct_types, uint8_t& current_type_index,
3764 uint8_t kNumDefaultArrayTypes) {
3765 uint8_t last_struct_type_index = current_type_index +
num_structs_;
3766 for (; current_type_index < last_struct_type_index; current_type_index++) {
3767 auto rec_group = explicit_rec_groups.find(current_type_index);
3768 uint8_t current_rec_group_end = rec_group != explicit_rec_groups.end()
3770 : current_type_index;
3773 uint8_t num_fields =
3776 uint32_t existing_struct_types =
3777 current_type_index - kNumDefaultArrayTypes;
3778 if (existing_struct_types > 0 &&
module_range_->get<
bool>()) {
3780 existing_struct_types +
3781 kNumDefaultArrayTypes};
3782 num_fields +=
builder_->GetStructType(supertype)->field_count();
3784 StructType::Builder struct_builder(
zone_, num_fields,
false);
3787 uint32_t field_index = 0;
3788 if (supertype != kNoSuperType) {
3789 const StructType* parent =
builder_->GetStructType(supertype);
3790 for (; field_index < parent->field_count(); ++field_index) {
3793 struct_builder.AddField(parent->field(field_index),
3794 parent->mutability(field_index));
3797 for (; field_index < num_fields; field_index++) {
3809 ValueType type = GetValueTypeHelper(
3811 current_type_index, kIncludeNumericTypes, kIncludePackedTypes,
3812 kExcludeSomeGenerics);
3815 struct_builder.AddField(type, mutability);
3817 StructType* struct_fuz = struct_builder.Build();
3819 ModuleTypeIndex index =
3820 builder_->AddStructType(struct_fuz,
false, supertype);
3821 struct_types.push_back(index);
3826 void GenerateRandomArrays(
3827 const std::map<uint8_t, uint8_t>& explicit_rec_groups,
3828 std::vector<ModuleTypeIndex>& array_types, uint8_t& current_type_index) {
3829 uint32_t last_struct_type_index = current_type_index +
num_structs_;
3831 current_type_index++) {
3832 auto rec_group = explicit_rec_groups.find(current_type_index);
3833 uint8_t current_rec_group_end = rec_group != explicit_rec_groups.end()
3835 : current_type_index;
3838 current_type_index, kIncludeNumericTypes,
3839 kIncludePackedTypes, kExcludeSomeGenerics);
3841 if (current_type_index > last_struct_type_index &&
3844 uint8_t existing_array_types =
3845 current_type_index - last_struct_type_index;
3846 supertype = ModuleTypeIndex{
3847 last_struct_type_index +
3851 type =
builder_->GetArrayType(supertype)->element_type();
3853 ArrayType* array_fuz =
zone_->New<ArrayType>(
type,
true);
3855 ModuleTypeIndex index =
3856 builder_->AddArrayType(array_fuz,
false, supertype);
3857 array_types.push_back(index);
3861 enum SigKind { kFunctionSig, kExceptionSig };
3863 FunctionSig* GenerateSig(SigKind sig_kind,
int num_types) {
3865 int num_params =
int{
module_range_->get<uint8_t>()} % (kMaxParameters + 1);
3867 sig_kind == kFunctionSig
3871 FunctionSig::Builder builder(
zone_, num_returns, num_params);
3872 for (
int i = 0;
i < num_returns; ++
i) {
3875 for (
int i = 0;
i < num_params; ++
i) {
3878 return builder.Get();
3882 void GenerateRandomFunctionSigs(
3883 const std::map<uint8_t, uint8_t>& explicit_rec_groups,
3884 std::vector<ModuleTypeIndex>& function_signatures,
3885 uint8_t& current_type_index,
bool kIsFinal) {
3890 for (; current_type_index <
num_types_; current_type_index++) {
3891 auto rec_group = explicit_rec_groups.find(current_type_index);
3892 uint8_t current_rec_group_end = rec_group != explicit_rec_groups.end()
3894 : current_type_index;
3895 FunctionSig*
sig = GenerateSig(kFunctionSig, current_rec_group_end + 1);
3896 ModuleTypeIndex signature_index =
3897 builder_->ForceAddSignature(sig, kIsFinal);
3898 function_signatures.push_back(signature_index);
3910 StringImports AddImportedStringImports() {
3911 static constexpr ModuleTypeIndex kArrayI8{0};
3912 static constexpr ModuleTypeIndex kArrayI16{1};
3913 StringImports strings;
3914 strings.array_i8 = kArrayI8;
3915 strings.array_i16 = kArrayI16;
3919 static constexpr ValueType kRefA8 =
3920 ValueType::Ref(kArrayI8, kNotShared, RefTypeKind::kArray);
3921 static constexpr ValueType kRefNullA8 =
3922 ValueType::RefNull(kArrayI8, kNotShared, RefTypeKind::kArray);
3923 static constexpr ValueType kRefNullA16 =
3924 ValueType::RefNull(kArrayI16, kNotShared, RefTypeKind::kArray);
3928 static constexpr ValueType kReps_e_i[] = {kRefExtern,
kI32};
3929 static constexpr ValueType kReps_e_rr[] = {kRefExtern, kExternRef,
3931 static constexpr ValueType kReps_e_rii[] = {kRefExtern, kExternRef,
kI32,
3933 static constexpr ValueType kReps_i_ri[] = {
kI32, kExternRef,
kI32};
3934 static constexpr ValueType kReps_i_rr[] = {
kI32, kExternRef, kExternRef};
3935 static constexpr ValueType kReps_from_a16[] = {kRefExtern, kRefNullA16,
3937 static constexpr ValueType kReps_from_a8[] = {kRefExtern, kRefNullA8,
kI32,
3939 static constexpr ValueType kReps_into_a16[] = {
kI32, kExternRef,
3941 static constexpr ValueType kReps_into_a8[] = {
kI32, kExternRef, kRefNullA8,
3943 static constexpr ValueType kReps_to_a8[] = {kRefA8, kExternRef};
3945 static constexpr FunctionSig kSig_e_i(1, 1, kReps_e_i);
3946 static constexpr FunctionSig kSig_e_r(1, 1, kReps_e_rr);
3947 static constexpr FunctionSig kSig_e_rr(1, 2, kReps_e_rr);
3948 static constexpr FunctionSig kSig_e_rii(1, 3, kReps_e_rii);
3950 static constexpr FunctionSig kSig_i_r(1, 1, kReps_i_ri);
3951 static constexpr FunctionSig kSig_i_ri(1, 2, kReps_i_ri);
3952 static constexpr FunctionSig kSig_i_rr(1, 2, kReps_i_rr);
3953 static constexpr FunctionSig kSig_from_a16(1, 3, kReps_from_a16);
3954 static constexpr FunctionSig kSig_from_a8(1, 3, kReps_from_a8);
3955 static constexpr FunctionSig kSig_into_a16(1, 3, kReps_into_a16);
3956 static constexpr FunctionSig kSig_into_a8(1, 3, kReps_into_a8);
3957 static constexpr FunctionSig kSig_to_a8(1, 1, kReps_to_a8);
3959 static constexpr base::Vector<const char>
kJsString =
3960 base::StaticCharVector(
"wasm:js-string");
3961 static constexpr base::Vector<const char>
kTextDecoder =
3962 base::StaticCharVector(
"wasm:text-decoder");
3963 static constexpr base::Vector<const char>
kTextEncoder =
3964 base::StaticCharVector(
"wasm:text-encoder");
3966#define STRINGFUNC(name, sig, group) \
3967 strings.name = builder_->AddImport(base::CStrVector(#name), &sig, group)
3977 STRINGFUNC(substring, kSig_e_rii, kJsString);
3993 void GenerateRandomTables(
const std::vector<ModuleTypeIndex>& array_types,
3994 const std::vector<ModuleTypeIndex>& struct_types) {
3995 int num_tables =
module_range_->get<uint8_t>() % kMaxTables + 1;
3999 "Too many tables. Use more random bits to choose their address type.");
4001 for (
int i = 0;
i < num_tables;
i++) {
4002 uint32_t min_size =
i == 0
4006 module_range_->get<uint8_t>() % (max_table_size - min_size) +
4012 bool force_funcref =
i == 0;
4018 kExcludePackedTypes, kIncludeAllGenerics);
4019 bool use_initializer =
4022 bool use_table64 = are_table64 & 1;
4024 AddressType address_type =
4025 use_table64 ? AddressType::kI64 : AddressType::kI32;
4026 uint32_t table_index =
4029 type, min_size, max_size,
4031 struct_types, array_types, 0),
4033 :
builder_->AddTable(type, min_size, max_size, address_type);
4034 if (type.is_reference_to(HeapType::kFunc)) {
4039 WasmInitExpr init_expr =
builder_->IsTable64(table_index)
4040 ? WasmInitExpr(
static_cast<int64_t
>(0))
4041 : WasmInitExpr(static_cast<
int32_t>(0));
4042 WasmModuleBuilder::WasmElemSegment segment(
zone_, type, table_index,
4044 for (
int entry_index = 0; entry_index < static_cast<int>(min_size);
4046 segment.entries.emplace_back(
4047 WasmModuleBuilder::WasmElemSegment::Entry::kRefFuncEntry,
4051 builder_->AddElementSegment(std::move(segment));
4057 std::tuple<std::vector<ValueType>, std::vector<uint8_t>>
4058 GenerateRandomGlobals(
const std::vector<ModuleTypeIndex>& array_types,
4059 const std::vector<ModuleTypeIndex>& struct_types) {
4060 int num_globals =
module_range_->get<uint8_t>() % (kMaxGlobals + 1);
4061 std::vector<ValueType> globals;
4062 std::vector<uint8_t> mutable_globals;
4063 globals.reserve(num_globals);
4064 mutable_globals.reserve(num_globals);
4066 for (
int i = 0;
i < num_globals; ++
i) {
4069 const bool mutability = (
module_range_->get<uint8_t>() % 8) != 0;
4070 builder_->AddGlobal(type, mutability,
4072 type, struct_types, array_types, 0));
4073 globals.push_back(type);
4074 if (mutability) mutable_globals.push_back(
static_cast<uint8_t
>(
i));
4077 return {globals, mutable_globals};
4082 const WasmModuleGenerationOptions
options_;
4091WasmInitExpr GenerateStructNewInitExpr(
4092 Zone* zone, DataRange& range, WasmModuleBuilder* builder,
4093 ModuleTypeIndex index,
const std::vector<ModuleTypeIndex>& structs,
4094 const std::vector<ModuleTypeIndex>& arrays, uint32_t
recursion_depth) {
4095 const StructType* struct_type = builder->GetStructType(index);
4096 bool use_new_default =
4097 std::all_of(struct_type->fields().begin(), struct_type->fields().end(),
4098 [](ValueType type) { return type.is_defaultable(); }) &&
4101 if (use_new_default) {
4104 ZoneVector<WasmInitExpr>* elements =
4105 zone->New<ZoneVector<WasmInitExpr>>(zone);
4106 int field_count = struct_type->field_count();
4107 for (
int field_index = 0; field_index < field_count; field_index++) {
4108 elements->push_back(GenerateInitExpr(
4109 zone, range, builder, struct_type->field(field_index), structs,
4116WasmInitExpr GenerateArrayInitExpr(
Zone* zone, DataRange& range,
4117 WasmModuleBuilder* builder,
4118 ModuleTypeIndex index,
4119 const std::vector<ModuleTypeIndex>& structs,
4120 const std::vector<ModuleTypeIndex>& arrays,
4122 constexpr int kMaxArrayLength = 20;
4123 uint8_t choice = range.get<uint8_t>() % 3;
4124 ValueType element_type = builder->GetArrayType(index)->element_type();
4126 size_t element_count = range.get<uint8_t>() % kMaxArrayLength;
4127 if (!element_type.is_defaultable()) {
4134 ZoneVector<WasmInitExpr>* elements =
4135 zone->New<ZoneVector<WasmInitExpr>>(zone);
4136 for (
size_t i = 0;
i < element_count;
i++) {
4137 elements->push_back(GenerateInitExpr(zone, range, builder, element_type,
4142 }
else if (choice == 1 || !element_type.is_defaultable()) {
4144 WasmInitExpr length = WasmInitExpr(range.get<uint8_t>() % kMaxArrayLength);
4145 WasmInitExpr init = GenerateInitExpr(zone, range, builder, element_type,
4149 WasmInitExpr length = WasmInitExpr(range.get<uint8_t>() % kMaxArrayLength);
4154WasmInitExpr GenerateInitExpr(
Zone* zone, DataRange& range,
4155 WasmModuleBuilder* builder, ValueType type,
4156 const std::vector<ModuleTypeIndex>& structs,
4157 const std::vector<ModuleTypeIndex>& arrays,
4159 switch (type.kind()) {
4164 return WasmInitExpr(int32_t{0});
4167 uint8_t choice = range.get<uint8_t>() % 6;
4172 if (choice % 2 == 0 && builder->NumGlobals()) {
4174 int num_globals = builder->NumGlobals();
4175 int start_index = range.get<uint8_t>() % num_globals;
4176 for (
int i = 0;
i < num_globals; ++
i) {
4177 int index = (start_index +
i) % num_globals;
4178 if (builder->GetGlobalType(index) == type &&
4179 !builder->IsMutableGlobal(index)) {
4185 return WasmInitExpr(range.getPseudoRandom<int32_t>());
4192 GenerateInitExpr(zone, range, builder,
kWasmI32, structs, arrays,
4194 GenerateInitExpr(zone, range, builder,
kWasmI32, structs, arrays,
4200 return WasmInitExpr(int64_t{0});
4203 uint8_t choice = range.get<uint8_t>() % 6;
4208 return WasmInitExpr(range.get<int64_t>());
4215 GenerateInitExpr(zone, range, builder,
kWasmI64, structs, arrays,
4217 GenerateInitExpr(zone, range, builder,
kWasmI64, structs, arrays,
4223 return WasmInitExpr(0.0f);
4225 return WasmInitExpr(0.0);
4228 return WasmInitExpr(s128_const);
4231 bool null_only =
false;
4232 switch (type.heap_representation()) {
4242 null_only || (range.get<uint8_t>() % 4 == 0)) {
4248 switch (type.heap_representation()) {
4250 ModuleTypeIndex index =
4251 structs[range.get<uint8_t>() % structs.size()];
4252 return GenerateStructNewInitExpr(zone, range, builder, index, structs,
4260 range.get<uint8_t>() % 4 == 3) {
4262 zone, GenerateInitExpr(zone, range, builder,
4270 uint8_t choice = range.get<uint8_t>() % 3;
4275 return GenerateInitExpr(
4276 zone, range, builder,
4282 range.get<uint32_t>() % (builder->NumDeclaredFunctions() +
4283 builder->NumImportedFunctions());
4288 zone, GenerateInitExpr(zone, range, builder,
4294 zone, GenerateInitExpr(zone, range, builder,
kWasmI32, structs,
4297 ModuleTypeIndex index = arrays[range.get<uint8_t>() % arrays.size()];
4298 return GenerateArrayInitExpr(zone, range, builder, index, structs,
4306 ModuleTypeIndex index = type.ref_index();
4307 if (builder->IsStructType(index)) {
4308 return GenerateStructNewInitExpr(zone, range, builder, index,
4310 }
else if (builder->IsArrayType(index)) {
4311 return GenerateArrayInitExpr(zone, range, builder, index, structs,
4314 DCHECK(builder->IsSignature(index));
4315 for (
int i = 0;
i < builder->NumDeclaredFunctions(); ++
i) {
4316 if (builder->GetFunction(
i)->sig_index() == index) {
4318 builder->NumImportedFunctions() +
i);
4340 Zone* zone, WasmModuleGenerationOptions options,
4349 DataRange module_range(data);
4350 DataRange functions_range = module_range.split();
4351 std::vector<ModuleTypeIndex> function_signatures;
4354 int max_num_functions = MaxNumOfFunctions();
4356 uint8_t num_functions = 1 + (module_range.get<uint8_t>() % max_num_functions);
4364 uint8_t num_structs = 0;
4365 uint8_t num_arrays = 0;
4366 std::vector<ModuleTypeIndex> array_types;
4367 std::vector<ModuleTypeIndex> struct_types;
4372 constexpr uint8_t kNumDefaultArrayTypesForWasmGC = 2;
4373 if (options.generate_wasm_gc()) {
4376 num_structs = 1 + module_range.get<uint8_t>() % kMaxStructs;
4377 num_arrays = kNumDefaultArrayTypesForWasmGC +
4378 module_range.get<uint8_t>() % kMaxArrays;
4381 uint8_t num_signatures = num_functions;
4382 ModuleGen gen_module(zone, options, &builder, &module_range, num_functions,
4383 num_structs, num_arrays, num_signatures);
4388 gen_module.GenerateRandomMemories();
4390 uint8_t current_type_index = 0;
4393 std::map<uint8_t, uint8_t> explicit_rec_groups;
4394 if (options.generate_wasm_gc()) {
4396 explicit_rec_groups = gen_module.GenerateRandomRecursiveGroups(
4397 kNumDefaultArrayTypesForWasmGC);
4405 array_types.push_back(kArrayI8);
4408 array_types.push_back(kArrayI16);
4410 static_assert(kNumDefaultArrayTypesForWasmGC == kArrayI16.index + 1);
4411 current_type_index = kNumDefaultArrayTypesForWasmGC;
4414 gen_module.GenerateRandomStructs(explicit_rec_groups, struct_types,
4416 kNumDefaultArrayTypesForWasmGC);
4417 DCHECK_EQ(current_type_index, kNumDefaultArrayTypesForWasmGC + num_structs);
4420 gen_module.GenerateRandomArrays(explicit_rec_groups, array_types,
4421 current_type_index);
4422 DCHECK_EQ(current_type_index, num_structs + num_arrays);
4426 constexpr bool kIsFinal =
true;
4429 function_signatures.push_back(
4431 current_type_index++;
4434 gen_module.GenerateRandomFunctionSigs(
4435 explicit_rec_groups, function_signatures, current_type_index, kIsFinal);
4436 DCHECK_EQ(current_type_index, num_functions + num_structs + num_arrays);
4439 int num_exceptions = 1 + (module_range.get<uint8_t>() % kMaxExceptions);
4445 StringImports strings = options.generate_wasm_gc()
4446 ? gen_module.AddImportedStringImports()
4451 std::vector<WasmFunctionBuilder*> functions;
4452 functions.reserve(num_functions);
4453 for (uint8_t
i = 0;
i < num_functions;
i++) {
4460 functions.push_back(builder.
AddFunction(function_signatures[
i]));
4467 gen_module.GenerateRandomTables(array_types, struct_types);
4470 auto [globals, mutable_globals] =
4471 gen_module.GenerateRandomGlobals(array_types, struct_types);
4474 int num_data_segments = module_range.get<uint8_t>() % kMaxPassiveDataSegments;
4475 for (
int i = 0;
i < num_data_segments;
i++) {
4476 GeneratePassiveDataSegment(&module_range, &builder);
4480 for (
int i = 0;
i < num_functions; ++
i) {
4484 DataRange function_range =
i != num_functions - 1
4485 ? functions_range.split()
4486 : std::move(functions_range);
4487 BodyGen gen_body(options, f, function_signatures, globals, mutable_globals,
4488 struct_types, array_types, strings, &function_range);
4491 sig->return_count());
4492 gen_body.InitializeNonDefaultableLocals(&function_range);
4493 gen_body.Generate(return_types, &function_range);
4507 constexpr WasmModuleGenerationOptions options =
4508 WasmModuleGenerationOptions::All();
4511 DataRange module_range(data);
4512 std::vector<ModuleTypeIndex> function_signatures;
4513 std::vector<ModuleTypeIndex> array_types;
4514 std::vector<ModuleTypeIndex> struct_types;
4516 int num_globals = 1 + module_range.get<uint8_t>() % (kMaxGlobals + 1);
4518 uint8_t num_functions = num_globals;
4519 *
count = num_functions;
4523 uint8_t num_structs = 1 + module_range.get<uint8_t>() % kMaxStructs;
4524 uint8_t num_arrays = 1 + module_range.get<uint8_t>() % kMaxArrays;
4525 uint16_t num_types = num_functions + num_structs + num_arrays;
4527 uint8_t current_type_index = 0;
4530 uint8_t last_struct_type = current_type_index + num_structs;
4531 for (; current_type_index < last_struct_type; current_type_index++) {
4533 uint8_t num_fields = module_range.get<uint8_t>() % (kMaxStructFields + 1);
4535 uint32_t existing_struct_types = current_type_index;
4536 if (existing_struct_types > 0 && module_range.get<
bool>()) {
4539 num_fields += builder.
GetStructType(supertype)->field_count();
4545 uint32_t field_index = 0;
4548 for (; field_index < parent->
field_count(); ++field_index) {
4553 for (; field_index < num_fields; field_index++) {
4555 options, &module_range, current_type_index, current_type_index,
4556 kIncludeNumericTypes, kIncludePackedTypes, kExcludeSomeGenerics);
4560 if (current_type_index >= 2 && type.is_non_nullable()) {
4561 type = type.AsNullable();
4564 bool mutability = module_range.get<
bool>();
4565 struct_builder.
AddField(type, mutability);
4569 struct_types.push_back(index);
4572 for (; current_type_index < num_structs + num_arrays; current_type_index++) {
4574 options, &module_range, current_type_index, current_type_index,
4575 kIncludeNumericTypes, kIncludePackedTypes, kExcludeSomeGenerics);
4577 if (current_type_index > last_struct_type && module_range.get<
bool>()) {
4578 uint32_t existing_array_types = current_type_index - last_struct_type;
4581 (module_range.get<uint8_t>() % existing_array_types)};
4582 type = builder.
GetArrayType(supertype)->element_type();
4586 array_types.push_back(index);
4590 constexpr bool kIsFinal =
true;
4591 std::vector<ValueType> globals;
4592 for (; current_type_index < num_types; current_type_index++) {
4593 ValueType return_type = GetValueTypeHelper(
4594 options, &module_range, num_types - num_globals,
4595 num_types - num_globals, kIncludeNumericTypes, kExcludePackedTypes,
4596 kIncludeAllGenerics, kExcludeS128);
4597 globals.push_back(return_type);
4604 function_signatures.push_back(signature_index);
4607 std::vector<WasmFunctionBuilder*> functions;
4608 functions.reserve(num_functions);
4609 for (uint8_t
i = 0;
i < num_functions;
i++) {
4610 functions.push_back(builder.
AddFunction(function_signatures[
i]));
4614 std::vector<uint8_t> mutable_globals;
4615 std::vector<WasmInitExpr> init_exprs;
4616 init_exprs.reserve(num_globals);
4617 mutable_globals.reserve(num_globals);
4618 CHECK_EQ(globals.size(), num_globals);
4619 uint64_t mutabilities = module_range.get<uint64_t>();
4620 for (
int i = 0;
i < num_globals; ++
i) {
4623 const bool mutability = mutabilities & 1;
4626 zone, module_range, &builder, type, struct_types, array_types, 0);
4627 init_exprs.push_back(init_expr);
4631 {buffer.begin(), len});
4632 if (mutability) mutable_globals.push_back(
static_cast<uint8_t
>(
i));
4637 for (
int i = 0;
i < num_functions; ++
i) {
4642 builder.
AddExport({buffer.begin(), len}, f);
4653 if (a->return_count() != b->
return_count())
return false;
4654 for (
size_t i = 0;
i < a->return_count(); ++
i) {
4655 if (a->GetReturn(
i) != b->
GetReturn(
i))
return false;
4660void EmitDeoptAndReturnValues(BodyGen gen_body, WasmFunctionBuilder* f,
4662 ModuleTypeIndex target_sig_index,
4663 uint32_t global_index, uint32_t table_index,
4664 bool use_table64, DataRange* data) {
4670 uint32_t returns_split = data->get<uint8_t>() % (return_types.
size() + 1);
4671 if (returns_split) {
4672 gen_body.Generate(return_types.
SubVector(0, returns_split), data);
4674 gen_body.Generate(target_sig->parameters(), data);
4675 f->EmitWithU32V(kExprGlobalGet, global_index);
4677 f->Emit(kExprI64UConvertI32);
4680 bool same_returns = HasSameReturns(target_sig, f->signature());
4681 size_t option_count = (same_returns + 1) * 2;
4682 switch (data->get<uint8_t>() % option_count) {
4685 f->Emit(kExprTableGet);
4686 f->EmitU32V(table_index);
4687 f->EmitWithPrefix(kExprRefCast);
4688 f->EmitI32V(target_sig_index);
4689 f->EmitWithU32V(kExprCallRef, target_sig_index);
4693 f->EmitWithU32V(kExprCallIndirect, target_sig_index);
4694 f->EmitByte(table_index);
4698 f->Emit(kExprTableGet);
4699 f->EmitU32V(table_index);
4700 f->EmitWithPrefix(kExprRefCast);
4701 f->EmitI32V(target_sig_index);
4702 f->EmitWithU32V(kExprReturnCallRef, target_sig_index);
4706 f->EmitWithU32V(kExprReturnCallIndirect, target_sig_index);
4707 f->EmitByte(table_index);
4712 gen_body.ConsumeAndGenerate(target_sig->returns(),
4716void EmitCallAndReturnValues(BodyGen gen_body, WasmFunctionBuilder* f,
4717 WasmFunctionBuilder* callee, uint32_t table_index,
4718 bool use_table64, DataRange* data) {
4719 const FunctionSig* callee_sig = callee->signature();
4720 uint32_t callee_index =
4721 callee->func_index() + gen_body.NumImportedFunctions();
4723 base::Vector<const ValueType> return_types = f->signature()->returns();
4726 uint32_t returns_split = data->get<uint8_t>() % (return_types.size() + 1);
4727 if (returns_split) {
4728 gen_body.Generate(return_types.SubVector(0, returns_split), data);
4730 gen_body.Generate(callee_sig->parameters(), data);
4732 bool same_returns = HasSameReturns(callee_sig, f->signature());
4733 size_t option_count = (same_returns + 1) * 3;
4734 switch (data->get<uint8_t>() % option_count) {
4736 f->EmitWithU32V(kExprCallFunction, callee_index);
4739 f->EmitWithU32V(kExprRefFunc, callee_index);
4740 f->EmitWithU32V(kExprCallRef, callee->sig_index());
4745 use_table64 ? f->EmitI64Const(callee->func_index())
4746 : f->EmitI32Const(callee->func_index());
4747 f->EmitWithU32V(kExprCallIndirect, callee->sig_index());
4748 f->EmitByte(table_index);
4751 f->EmitWithU32V(kExprReturnCall, callee_index);
4754 f->EmitWithU32V(kExprRefFunc, callee_index);
4755 f->EmitWithU32V(kExprReturnCallRef, callee->sig_index());
4760 use_table64 ? f->EmitI64Const(callee->func_index())
4761 : f->EmitI32Const(callee->func_index());
4762 f->EmitWithU32V(kExprReturnCallIndirect, callee->sig_index());
4763 f->EmitByte(table_index);
4768 gen_body.ConsumeAndGenerate(callee_sig->returns(),
4769 return_types.SubVectorFrom(returns_split), data);
4775 std::vector<std::string>& callees, std::vector<std::string>& inlinees) {
4777 constexpr WasmModuleGenerationOptions options =
4778 WasmModuleGenerationOptions::All();
4781 DataRange range(data);
4782 std::vector<ModuleTypeIndex> function_signatures;
4783 std::vector<ModuleTypeIndex> array_types;
4784 std::vector<ModuleTypeIndex> struct_types;
4786 const int kMaxCallTargets = 5;
4787 const int kMaxInlinees = 3;
4790 const int num_call_targets = 2 + range.get<uint8_t>() % (kMaxCallTargets - 1);
4791 const int num_inlinees = range.get<uint8_t>() % (kMaxInlinees + 1);
4794 uint8_t num_functions = 1 + num_inlinees + num_call_targets;
4797 uint8_t num_signatures = 2 + num_inlinees;
4799 uint8_t num_structs = 1 + range.get<uint8_t>() % kMaxStructs;
4803 constexpr uint8_t kNumDefaultArrayTypesForWasmGC = 2;
4804 uint8_t num_arrays =
4805 range.get<uint8_t>() % kMaxArrays + kNumDefaultArrayTypesForWasmGC;
4807 uint16_t num_types = num_structs + num_arrays;
4809 uint8_t current_type_index = kNumDefaultArrayTypesForWasmGC;
4812 ModuleGen gen_module(zone, options, &builder, &range, num_functions,
4813 num_structs, num_arrays, num_signatures);
4815 gen_module.GenerateRandomMemories();
4816 std::map<uint8_t, uint8_t> explicit_rec_groups =
4817 gen_module.GenerateRandomRecursiveGroups(kNumDefaultArrayTypesForWasmGC);
4824 array_types.push_back(kArrayI8);
4827 array_types.push_back(kArrayI16);
4829 static_assert(kNumDefaultArrayTypesForWasmGC == kArrayI16.index + 1);
4830 gen_module.GenerateRandomStructs(explicit_rec_groups, struct_types,
4832 kNumDefaultArrayTypesForWasmGC);
4833 DCHECK_EQ(current_type_index, kNumDefaultArrayTypesForWasmGC + num_structs);
4834 gen_module.GenerateRandomArrays(explicit_rec_groups, array_types,
4835 current_type_index);
4836 DCHECK_EQ(current_type_index, num_structs + num_arrays);
4839 std::vector<ValueType> return_types =
4840 GenerateTypes(options, &range, num_types);
4841 constexpr bool kIsFinal =
true;
4848 function_signatures.reserve(num_call_targets);
4849 for (
int i = 0;
i < num_call_targets; ++
i) {
4852 function_signatures.push_back(target_sig_index);
4858 uint8_t use_same_return = range.get<uint8_t>();
4859 for (
int i = 0;
i < num_inlinees; ++
i) {
4860 if ((use_same_return & (1 <<
i)) == 0) {
4861 return_types = GenerateTypes(options, &range, num_types);
4867 function_signatures.push_back(
4875 function_signatures.push_back(builder.ForceAddSignature(main_sig, kIsFinal));
4877 DCHECK_EQ(function_signatures.back().index,
4878 num_structs + num_arrays + num_signatures - 1);
4883 StringImports strings = gen_module.AddImportedStringImports();
4886 std::vector<WasmFunctionBuilder*> functions;
4887 DCHECK_EQ(num_functions, function_signatures.size());
4888 functions.reserve(num_functions);
4889 for (uint8_t
i = 0;
i < num_functions;
i++) {
4890 functions.push_back(builder.AddFunction(function_signatures[
i]));
4893 uint32_t num_entries = num_call_targets + num_inlinees;
4894 bool use_table64 = range.get<
bool>();
4895 AddressType address_type =
4897 uint32_t table_index =
4898 builder.AddTable(
kWasmFuncRef, num_entries, num_entries, address_type);
4899 WasmModuleBuilder::WasmElemSegment segment(
4901 use_table64 ? WasmInitExpr(int64_t{0}) : WasmInitExpr(0));
4902 for (uint32_t
i = 0;
i < num_entries;
i++) {
4903 segment.entries.emplace_back(
4905 builder.NumImportedFunctions() +
i);
4907 builder.AddElementSegment(std::move(segment));
4909 gen_module.GenerateRandomTables(array_types, struct_types);
4914 uint32_t global_index =
4915 builder.AddExportedGlobal(
kWasmI32,
true, WasmInitExpr(0),
4919 for (
int i = 0;
i < num_inlinees; ++
i) {
4920 uint32_t declared_func_index =
i + num_call_targets;
4921 WasmFunctionBuilder* f = functions[declared_func_index];
4922 DataRange function_range = range.split();
4923 BodyGen gen_body(options, f, function_signatures, {}, {}, struct_types,
4924 array_types, strings, &function_range);
4925 gen_body.InitializeNonDefaultableLocals(&function_range);
4928 EmitDeoptAndReturnValues(gen_body, f, target_sig, target_sig_index,
4929 global_index, table_index, use_table64,
4933 uint32_t callee_declared_index = declared_func_index - 1;
4934 EmitCallAndReturnValues(gen_body, f, functions[callee_declared_index],
4935 table_index, use_table64, &function_range);
4938 auto buffer = zone->AllocateVector<
char>(32);
4940 builder.AddExport({buffer.begin(), len}, f);
4941 inlinees.emplace_back(buffer.begin(), len);
4946 uint32_t declared_func_index = num_functions - 1;
4947 WasmFunctionBuilder* f = functions[declared_func_index];
4948 DataRange function_range = range.split();
4949 BodyGen gen_body(options, f, function_signatures, {}, {}, struct_types,
4950 array_types, strings, &function_range);
4951 gen_body.InitializeNonDefaultableLocals(&function_range);
4953 f->EmitWithU32V(kExprLocalGet, 0);
4954 f->EmitWithU32V(kExprGlobalSet, 0);
4956 if (num_inlinees == 0) {
4958 EmitDeoptAndReturnValues(gen_body, f, target_sig, target_sig_index,
4959 global_index, table_index, use_table64,
4963 uint32_t callee_declared_index = declared_func_index - 1;
4964 EmitCallAndReturnValues(gen_body, f, functions[callee_declared_index],
4965 table_index, use_table64, &function_range);
4976 for (
int i = 0;
i < num_call_targets; ++
i) {
4977 WasmFunctionBuilder* f = functions[
i];
4978 DataRange function_range = range.split();
4979 BodyGen gen_body(options, f, function_signatures, {}, {}, struct_types,
4980 array_types, strings, &function_range);
4982 base::Vector<const ValueType> target_return_types(
sig->returns().begin(),
4983 sig->return_count());
4984 gen_body.InitializeNonDefaultableLocals(&function_range);
4985 gen_body.Generate(target_return_types, &function_range);
4988 auto buffer = zone->AllocateVector<
char>(32);
4990 builder.AddExport({buffer.begin(), len}, f);
4991 callees.emplace_back(buffer.begin(), len);
4994 ZoneBuffer buffer{zone};
4995 builder.WriteTo(&buffer);