v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
wasm-module.h
Go to the documentation of this file.
1// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_WASM_WASM_MODULE_H_
6#define V8_WASM_WASM_MODULE_H_
7
8#if !V8_ENABLE_WEBASSEMBLY
9#error This header should only be included if WebAssembly is enabled.
10#endif // !V8_ENABLE_WEBASSEMBLY
11
12#include <map>
13#include <memory>
14#include <optional>
15
17#include "src/base/vector.h"
19#include "src/common/globals.h"
20#include "src/handles/handles.h"
29
30namespace v8::internal {
31class WasmModuleObject;
32}
33
34namespace v8::internal::wasm {
35
36using WasmName = base::Vector<const char>;
37
38struct AsmJsOffsets;
39class ErrorThrower;
40#if V8_ENABLE_DRUMBRAKE
41class WasmInterpreterRuntime;
42#endif // V8_ENABLE_DRUMBRAKE
43class WellKnownImportsList;
44class TypeCanonicalizer;
45
// Address width of a wasm memory or table: 32-bit ("i32") or 64-bit ("i64",
// memory64/table64 proposal).
enum class AddressType : uint8_t { kI32, kI64 };
47
48inline constexpr const char* AddressTypeToStr(AddressType address_type) {
49 return address_type == AddressType::kI32 ? "i32" : "i64";
50}
51
52inline std::ostream& operator<<(std::ostream& os, AddressType address_type) {
53 return os << AddressTypeToStr(address_type);
54}
55
// Reference to a string in the wire bytes.
// NOTE(review): the class head (presumably `class WireBytesRef {`) is missing
// from this listing — doc-extraction artifact; members below kept verbatim.
 public:
  constexpr WireBytesRef() = default;
  // Offset 0 encodes the "unset" state (see {is_set}), hence a set, non-empty
  // ref must have a non-zero offset.
  constexpr WireBytesRef(uint32_t offset, uint32_t length)
      : offset_(offset), length_(length) {
    DCHECK_IMPLIES(offset_ == 0, length_ == 0);
    DCHECK_LE(offset_, offset_ + length_);  // no uint32_t overflow.
  }

  uint32_t offset() const { return offset_; }
  uint32_t length() const { return length_; }
  // One past the last byte of the referenced range.
  uint32_t end_offset() const { return offset_ + length_; }
  bool is_empty() const { return length_ == 0; }
  // Whether this ref was explicitly set; offset 0 is the "unset" marker.
  bool is_set() const { return offset_ != 0; }

 private:
  uint32_t offset_ = 0;
  uint32_t length_ = 0;
};
76
// Static representation of a wasm function.
// NOTE(review): the struct head (`struct WasmFunction {`) is missing from
// this listing; fields below are kept verbatim.
  const FunctionSig* sig = nullptr;  // signature of the function.
  uint32_t func_index = 0;           // index into the function table.
  ModuleTypeIndex sig_index{0};      // index into the signature table.
  // TODO(clemensb): Should we add canonical_sig_id and canonical_sig?
  WireBytesRef code = {};  // code of this function.
  bool imported = false;
  bool exported = false;
  // NOTE(review): presumably "declared" as in the declarative element
  // segment / ref.func sense — confirm upstream.
  bool declared = false;
};
88
// Static representation of a wasm global variable.
struct WasmGlobal {
  ValueType type;           // type of the global.
  bool mutability = false;  // {true} if mutable.
  ConstantExpression init = {};  // the initialization expression of the global.
  // Exactly one of the union members is meaningful: {index} for imported
  // mutable globals, {offset} otherwise (see member comments).
  union {
    // Index of imported mutable global.
    uint32_t index;
    // Offset into global memory (if not imported & mutable). Expressed in bytes
    // for value-typed globals, and in tagged words for reference-typed globals.
    uint32_t offset;
  };
  bool shared = false;
  bool imported = false;
  bool exported = false;
};
105
// Note: An exception tag signature only uses the params portion of a function
// signature.
// NOTE(review): the `WasmTagSig` alias definition referenced below is missing
// from this listing (extraction artifact).

// Static representation of a wasm tag type.
struct WasmTag {
  // NOTE(review): the constructor and the signature-index member appear to be
  // missing from this listing.
  // The tag's signature viewed as a function signature (params only, see
  // comment above).
  const FunctionSig* ToFunctionSig() const { return sig; }

  const WasmTagSig* sig;  // type signature of the tag.
};
119
125
// Strategy for out-of-bounds detection on wasm memory accesses.
// NOTE(review): the enumerator lines and the closing brace are missing from
// this listing — only the per-enumerator comments survived. Per those
// comments, the enumerators are: a trap-handler mode, an explicit-checks
// mode, and a no-checks mode (testing only). Verify against upstream.
enum BoundsCheckStrategy : int8_t {
  // Emit protected instructions, use the trap handler for OOB detection.
  // Emit explicit bounds checks.
  // Emit no bounds checks at all (for testing only).

// Static representation of a wasm memory.
// NOTE(review): this listing is missing the struct head (presumably
// `struct WasmMemory {`) plus a few member lines, including the
// {address_type} field read by {is_memory64} below and the bounds-check
// strategy field written by {UpdateComputedInformation}.
  // Index into the memory table.
  uint32_t index = 0;
  // Initial size of the memory in 64k pages.
  uint32_t initial_pages = 0;
  // Maximum declared size of the memory in 64k pages. The actual memory size at
  // runtime is capped at {kV8MaxWasmMemory32Pages} / {kV8MaxWasmMemory64Pages}.
  uint64_t maximum_pages = 0;
  bool is_shared = false;
  bool has_maximum_pages = false;
  bool imported = false;
  bool exported = false;

  // Computed information, cached here for faster compilation.
  // Updated via {UpdateComputedInformation}.
  // Smallest size this memory can have at runtime, in bytes.
  uintptr_t min_memory_size = 0;
  // Largest size this memory can have at runtime (via declared maximum and
  // engine limits), in bytes.
  uintptr_t max_memory_size = 0;

  bool is_memory64() const { return address_type == AddressType::kI64; }
};
162
// Computes the cached byte sizes ({min_memory_size}/{max_memory_size}) and
// selects a {BoundsCheckStrategy} for {memory}, based on platform limits and
// v8 flags.
// NOTE(review): the signature line is missing from this listing — presumably
// `inline void UpdateComputedInformation(WasmMemory* memory, ModuleOrigin
// origin) {`. The `* kWasmPageSize` multiplier lines of the two size
// computations and one `else if` line (trap-handler-supported case) are
// missing as well; visible code is kept verbatim.
  const uintptr_t platform_max_pages =
      memory->is_memory64() ? wasm::max_mem64_pages() : wasm::max_mem32_pages();
  // Clamp the declared initial/maximum page counts to the platform limit
  // before converting pages to bytes.
  memory->min_memory_size = static_cast<uintptr_t>(std::min<uint64_t>(
                                platform_max_pages, memory->initial_pages)) *
  memory->max_memory_size = static_cast<uintptr_t>(std::min<uint64_t>(
                                platform_max_pages, memory->maximum_pages)) *

  if (!v8_flags.wasm_bounds_checks) {
    memory->bounds_checks = kNoBoundsChecks;
  } else if (v8_flags.wasm_enforce_bounds_checks) {
    // Explicit bounds checks requested via flag (for testing).
    memory->bounds_checks = kExplicitBoundsChecks;
  } else if (origin != kWasmOrigin) {
    // Asm.js modules can't use trap handling.
    memory->bounds_checks = kExplicitBoundsChecks;
  } else if (memory->is_memory64() && !v8_flags.wasm_memory64_trap_handling) {
    memory->bounds_checks = kExplicitBoundsChecks;
    // NOTE(review): an `else if` line is missing here in the listing; the
    // two lines below belong to a separate (trap-handler) branch.
    if constexpr (kSystemPointerSize == 4) UNREACHABLE();
    memory->bounds_checks = kTrapHandler;
  } else {
    // If the trap handler is not enabled, fall back to explicit bounds checks.
    memory->bounds_checks = kExplicitBoundsChecks;
  }
}
191
// Static representation of a wasm literal stringref.
// NOTE(review): the struct head (`struct WasmStringRefLiteral {`) is missing
// from this listing.
  explicit WasmStringRefLiteral(const WireBytesRef& source) : source(source) {}
  WireBytesRef source;  // start offset in the module bytes.
};
197
// Static representation of a wasm data segment.
// NOTE(review): this listing is missing the struct head, part of the
// constructor parameter list, the initializers for {memory_index} and
// {dest_addr}, and the signature of the passive-segment factory (presumably
// `static WasmDataSegment PassiveForTesting()`); visible code kept verbatim.
  explicit WasmDataSegment(bool is_active, bool is_shared,
                           WireBytesRef source)
      : active(is_active),
        shared(is_shared),
        source(source) {}

  // Factory body for an inactive (passive) segment; signature line missing
  // from this listing.
    return WasmDataSegment{false, false, 0, {}, {}};
  }

  bool active = true;   // true if copied automatically during instantiation.
  bool shared = false;  // true if shared.
  uint32_t memory_index;         // memory index (if active).
  ConstantExpression dest_addr;  // destination memory address (if active).
  WireBytesRef source;           // start offset in the module bytes.
};
219
// Static representation of wasm element segment (table initializer).
// NOTE(review): this listing is missing several lines — the struct head, the
// active-segment constructor, parts of the other constructors' parameter
// lists and member initializers, the copy/move constructor declarations, and
// some members ({element_count} and others referenced below); visible code
// kept verbatim.
  enum Status {
    kStatusActive,      // copied automatically during instantiation.
    kStatusPassive,     // copied explicitly after instantiation.
    kStatusDeclarative  // purely declarative and never copied.
  };

  // Construct an active segment.

  // Construct a passive or declarative segment, which has no table index or
  // offset.
  WasmElemSegment(Status status, bool shared, ValueType type,
      : status(status),
        shared(shared),
        type(type),
        table_index(0),
    DCHECK_NE(status, kStatusActive);
  }

  // Default constructor. Constructs an invalid segment.
      : status(kStatusActive),
        shared(false),
        type(kWasmBottom),
        table_index(0),
        element_count(0),

  WasmElemSegment& operator=(const WasmElemSegment&) = delete;

  Status status;
  bool shared;
  uint32_t table_index;
};
281
// Static representation of a wasm import.
// NOTE(review): the struct head (`struct WasmImport {`) is missing from this
// listing.
  WireBytesRef module_name;   // module name.
  WireBytesRef field_name;    // import name.
  ImportExportKindCode kind;  // kind of the import.
  uint32_t index = 0;         // index into the respective space.
};
289
// Static representation of a wasm export.
// NOTE(review): the struct head (`struct WasmExport {`) is missing from this
// listing.
  WireBytesRef name;          // exported name.
  ImportExportKindCode kind;  // kind of the export.
  uint32_t index = 0;         // index into the respective space.
};
296
// Compilation strategy that a compilation hint can request for a function.
// NOTE(review): one enumerator line (value 3) is missing from this listing.
enum class WasmCompilationHintStrategy : uint8_t {
  kDefault = 0,
  kLazy = 1,
  kEager = 2,
};
303
// Compilation tier that a compilation hint can request.
enum class WasmCompilationHintTier : uint8_t {
  kDefault = 0,
  kBaseline = 1,
  kOptimized = 2,
};
309
// Static representation of a wasm compilation hint
// NOTE(review): the `WasmCompilationHint` struct body is missing from this
// listing (extraction artifact).

// Selects the "wasm"- or "asm"-suffixed counter accessor depending on
// {origin}; {prefix}/{suffix} are pasted into the accessor name.
#define SELECT_WASM_COUNTER(counters, origin, prefix, suffix)     \
  ((origin) == kWasmOrigin ? (counters)->prefix##_wasm_##suffix() \
                           : (counters)->prefix##_asm_##suffix())
320
// Uses a map as backing storage when sparsely, or a vector when densely
// populated. Requires {Value} to implement `bool is_set()` to identify
// uninitialized objects.
// NOTE(review): this listing is missing the class head (presumably
// `class AdaptiveMap {`), the default constructor, the deleted copy
// assignment, and the move-assignment signature line; visible code kept
// verbatim.
template <class Value>
 public:
  // The technical limitation here is that index+1 must not overflow. Since
  // we have significantly lower maximums on anything that can be named,
  // we can have a tighter limit here to reject useless entries early.
  static constexpr uint32_t kMaxKey = 10'000'000;
  static_assert(kMaxKey < std::numeric_limits<uint32_t>::max());

  explicit AdaptiveMap(const AdaptiveMap&) = delete;

  AdaptiveMap(AdaptiveMap&& other) V8_NOEXCEPT { *this = std::move(other); }

  // Move-assignment body; signature line missing from this listing.
    mode_ = other.mode_;
    vector_.swap(other.vector_);
    map_.swap(other.map_);
    return *this;
  }

  // Freezes the map: switches from the map-backed {kInitializing} mode into
  // the final dense/sparse representation.
  void FinishInitialization();

  // Whether initialization has finished (mode is no longer {kInitializing}).
  bool is_set() const { return mode_ != kInitializing; }

  // Insertion is only allowed during initialization.
  void Put(uint32_t key, const Value& value) {
    DCHECK(mode_ == kInitializing);
    DCHECK_LE(key, kMaxKey);
    map_->insert(std::make_pair(key, value));
  }

  void Put(uint32_t key, Value&& value) {
    DCHECK(mode_ == kInitializing);
    DCHECK_LE(key, kMaxKey);
    map_->insert(std::make_pair(key, std::move(value)));
  }

  // Returns nullptr if {key} has no (set) entry.
  const Value* Get(uint32_t key) const {
    if (mode_ == kDense) {
      if (key >= vector_.size()) return nullptr;
      if (!vector_[key].is_set()) return nullptr;
      return &vector_[key];
    } else {
      DCHECK(mode_ == kSparse || mode_ == kInitializing);
      auto it = map_->find(key);
      if (it == map_->end()) return nullptr;
      return &it->second;
    }
  }

  bool Has(uint32_t key) const {
    if (mode_ == kDense) {
      return key < vector_.size() && vector_[key].is_set();
    } else {
      DCHECK(mode_ == kSparse || mode_ == kInitializing);
      return map_->find(key) != map_->end();
    }
  }

  size_t EstimateCurrentMemoryConsumption() const;

 private:
  static constexpr uint32_t kLoadFactor = 4;
  using MapType = std::map<uint32_t, Value>;
  enum Mode { kDense, kSparse, kInitializing };

  Mode mode_{kInitializing};
  std::vector<Value> vector_;
  std::unique_ptr<MapType> map_;
};
398
struct ModuleWireBytes;

// Function-name lookup backed by the wire bytes.
// NOTE(review): this listing is missing the class head (presumably
// `class V8_EXPORT_PRIVATE LazilyGeneratedNames {`) and two private members
// (a mutex and the name storage); visible code kept verbatim.
 public:
  // Returns the name ref for {function_index}, resolved against
  // {wire_bytes}.
  WireBytesRef LookupFunctionName(ModuleWireBytes wire_bytes,
                                  uint32_t function_index);

  void AddForTesting(int function_index, WireBytesRef name);
  bool Has(uint32_t function_index);

  size_t EstimateCurrentMemoryConsumption() const;

 private:
  // Lazy loading must guard against concurrent modifications from multiple
  // {WasmModuleObject}s.
  bool has_functions_{false};
};
418
// Lazily decoded asm.js source-position offset information.
// NOTE(review): this listing is missing the class head, the destructor
// declaration, and two private members (the mutex mentioned below and the
// encoded-offsets storage); visible code kept verbatim.
 public:
  explicit AsmJsOffsetInformation(base::Vector<const uint8_t> encoded_offsets);

  // Destructor defined in wasm-module.cc, where the definition of
  // {AsmJsOffsets} is available.

  int GetSourcePosition(int func_index, int byte_offset,
                        bool is_at_number_conversion);

  std::pair<int, int> GetFunctionOffsets(int func_index);

 private:
  // Decodes the offset table on first use.
  void EnsureDecodedOffsets();

  // The offset information table is decoded lazily, hence needs to be
  // protected against concurrent accesses.
  // Exactly one of the two fields below will be set at a time.

  // Holds the encoded offset table bytes.

  // Holds the decoded offset table.
  std::unique_ptr<AsmJsOffsets> decoded_offsets_;
};
446
// Used as the supertype for a type at the top of the type hierarchy.
// NOTE(review): this listing is missing several {TypeDefinition} lines: the
// constant this comment describes, the struct head, the trailing
// `is_shared(is_shared) {}` line of each constructor, the kCont comparison
// line in operator==, the non-function union members ({struct_type},
// {array_type}, {cont_type}), and the {supertype}/{descriptor}/{describes}
// fields referenced by the methods below; visible code kept verbatim.

  // Tag mirroring {RefTypeKind}, identifying which union member is active.
  enum Kind : int8_t {
    kFunction = static_cast<int8_t>(RefTypeKind::kFunction),
    kStruct = static_cast<int8_t>(RefTypeKind::kStruct),
    kArray = static_cast<int8_t>(RefTypeKind::kArray),
    kCont = static_cast<int8_t>(RefTypeKind::kCont),
  };

  constexpr TypeDefinition(const FunctionSig* sig, ModuleTypeIndex supertype,
                           bool is_final, bool is_shared)
      : function_sig(sig),
        supertype{supertype},
        kind(kFunction),
        is_final(is_final),

  constexpr TypeDefinition(const StructType* type, ModuleTypeIndex supertype,
                           bool is_final, bool is_shared)
      : struct_type(type),
        supertype{supertype},
        kind(kStruct),
        is_final(is_final),

  constexpr TypeDefinition(const ArrayType* type, ModuleTypeIndex supertype,
                           bool is_final, bool is_shared)
      : array_type(type),
        supertype{supertype},
        kind(kArray),
        is_final(is_final),

  constexpr TypeDefinition(const ContType* type, ModuleTypeIndex supertype,
                           bool is_final, bool is_shared)
      : cont_type(type),
        supertype{supertype},
        kind(kCont),
        is_final(is_final),

  constexpr TypeDefinition() = default;

  // Deep equality: compares flags/indices first, then the contents of the
  // kind-specific payload.
  bool operator==(const TypeDefinition& other) const {
    if (supertype != other.supertype) return false;
    if (kind != other.kind) return false;
    if (is_final != other.is_final) return false;
    if (is_shared != other.is_shared) return false;
    if (descriptor != other.descriptor) return false;
    if (describes != other.describes) return false;
    if (kind == kFunction) return *function_sig == *other.function_sig;
    if (kind == kStruct) return *struct_type == *other.struct_type;
    return *array_type == *other.array_type;
  }

  bool has_descriptor() const { return descriptor.valid(); }
  bool is_descriptor() const { return describes.valid(); }

  union {
    const FunctionSig* function_sig = nullptr;
  };
  Kind kind = kFunction;
  bool is_final = false;
  bool is_shared = false;
  uint8_t subtyping_depth = 0;
};
523
// Debug symbols attached to a wasm module (source map or DWARF).
// NOTE(review): the struct head and at least one further member are missing
// from this listing.
  // Number of real symbol types (excludes {None}).
  static constexpr int kNumTypes = 3;
  enum Type { SourceMap, EmbeddedDWARF, ExternalDWARF, None };
  Type type = Type::None;
};
530
// Per-call-site type feedback. A value is in exactly one of four states:
// invalid (uninitialized), monomorphic, polymorphic (out-of-line case
// array), or megamorphic — see the private predicates for the encoding.
// NOTE(review): this listing is missing the class head, the
// {PolymorphicCase} helper struct, several signature lines (megamorphic
// factory, copy/move assignment, destructor, one accessor, the
// polymorphic-storage getter), and the two data members
// ({index_or_count_}, {frequency_or_ool_}) manipulated below; visible code
// kept verbatim.
 public:

  // Body of the megamorphic factory; signature line missing from listing.
    CallSiteFeedback feedback;
    feedback.is_megamorphic_ = true;
    DCHECK(!feedback.is_invalid());
    DCHECK(!feedback.is_monomorphic());
    DCHECK(!feedback.is_polymorphic());
    DCHECK(feedback.is_megamorphic());
    return feedback;
  }

  // Regular constructor: uninitialized/unknown, monomorphic, or
  // polymorphic.
  CallSiteFeedback() : index_or_count_(-1), frequency_or_ool_(0) {}
  CallSiteFeedback(int function_index, int call_count)
      : index_or_count_(function_index), frequency_or_ool_(call_count) {}
  // Polymorphic: the negated case count is stored inline; the case array
  // pointer is stashed in {frequency_or_ool_}.
  CallSiteFeedback(PolymorphicCase* polymorphic_cases, int num_cases)
      : index_or_count_(-num_cases),
        frequency_or_ool_(reinterpret_cast<intptr_t>(polymorphic_cases)) {}

  // Copying and assignment: prefer moving, as it's cheaper.
  // The code below makes sure external polymorphic storage is copied and/or
  // freed as appropriate.
  CallSiteFeedback(const CallSiteFeedback& other) V8_NOEXCEPT { *this = other; }
  // Copy-assignment body (deep-copies polymorphic storage); signature line
  // missing from this listing.
    index_or_count_ = other.index_or_count_;
    if (other.is_polymorphic()) {
      int num_cases = other.num_cases();
      PolymorphicCase* polymorphic = new PolymorphicCase[num_cases];
      for (int i = 0; i < num_cases; i++) {
        polymorphic[i].function_index = other.function_index(i);
        polymorphic[i].absolute_call_frequency = other.call_count(i);
      }
      frequency_or_ool_ = reinterpret_cast<intptr_t>(polymorphic);
    } else {
      frequency_or_ool_ = other.frequency_or_ool_;
    }
    has_non_inlineable_targets_ = other.has_non_inlineable_targets_;
    is_megamorphic_ = other.is_megamorphic_;
    return *this;
  }
  // Move-assignment body (steals polymorphic storage by zeroing the
  // source's pointer); signature line missing from this listing.
    if (this != &other) {
      index_or_count_ = other.index_or_count_;
      frequency_or_ool_ = other.frequency_or_ool_;
      other.frequency_or_ool_ = 0;
    }
    has_non_inlineable_targets_ = other.has_non_inlineable_targets_;
    is_megamorphic_ = other.is_megamorphic_;
    return *this;
  }

  // Destructor body (frees out-of-line storage); signature line missing.
    if (is_polymorphic()) delete[] polymorphic_storage();
  }

  int num_cases() const {
    if (is_monomorphic()) return 1;
    if (is_invalid() || is_megamorphic()) return 0;
    return -index_or_count_;
  }
  int function_index(int i) const {
    DCHECK(!is_invalid() && !is_megamorphic());
    if (is_monomorphic()) return index_or_count_;
    return polymorphic_storage()[i].function_index;
  }
  int call_count(int i) const {
    DCHECK(!is_invalid() && !is_megamorphic());
    if (is_monomorphic()) return static_cast<int>(frequency_or_ool_);
    return polymorphic_storage()[i].absolute_call_frequency;
  }
  // Getter body; signature line missing from this listing.
    return has_non_inlineable_targets_;
  }
  void set_has_non_inlineable_targets(bool has_non_inlineable_targets) {
    has_non_inlineable_targets_ = has_non_inlineable_targets;
  }

  bool is_megamorphic() const { return is_megamorphic_; }

 private:
  // Encoding of {index_or_count_}: >= 0 -> monomorphic (function index);
  // -1 -> invalid (unless the megamorphic flag is set); <= -2 -> polymorphic
  // (negated case count).
  bool is_monomorphic() const { return index_or_count_ >= 0; }
  bool is_polymorphic() const { return index_or_count_ <= -2; }
  bool is_invalid() const { return index_or_count_ == -1 && !is_megamorphic_; }
  // Getter body for the out-of-line case array; signature line missing.
    DCHECK(is_polymorphic());
    return reinterpret_cast<PolymorphicCase*>(frequency_or_ool_);
  }

  bool has_non_inlineable_targets_ = false;
  bool is_megamorphic_ = false;
};
632
// Type feedback collected for a single function.
// NOTE(review): this listing is missing the struct head and the declarations
// of {feedback_vector}, {call_targets}, and the reprocessing flag described
// below; visible code kept verbatim.
  // {feedback_vector} is computed from {call_targets} and the instance-specific
  // feedback vector by {TransitiveTypeFeedbackProcessor}.

  // {call_targets} has one entry per "call", "call_indirect", and "call_ref" in
  // the function.
  // For "call", it holds the index of the called function, for "call_indirect"
  // and "call_ref" the value will be a sentinel {kCallIndirect} / {kCallRef}.

  // {tierup_priority} is updated and used when triggering tier-up.
  // TODO(clemensb): This does not belong here; find a better place.
  int tierup_priority = 0;

  static constexpr uint32_t kUninitializedLiftoffFrameSize = 1;
  // The size of the stack frame in liftoff in bytes.
  uint32_t liftoff_frame_size : 31 = kUninitializedLiftoffFrameSize;
  // Flag whether the cached {feedback_vector} has to be reprocessed as the data
  // is outdated (signaled by a deopt).
  // This is set by the deoptimizer, so that the next tierup trigger performs
  // the reprocessing. The deoptimizer can't update the cached data, as the new
  // feedback (which caused the deopt) hasn't been processed yet and processing
  // it can trigger allocations. After returning to liftoff, the feedback is
  // updated (which is guaranteed to happen before the next tierup trigger).

  // Sentinel values for {call_targets} entries (see comment above).
  static constexpr uint32_t kCallRef = 0xFFFFFFFF;
  static constexpr uint32_t kCallIndirect = kCallRef - 1;
  static_assert(kV8MaxWasmTotalFunctions < kCallIndirect);
};
664
// Module-wide storage for per-function type feedback.
// NOTE(review): this listing is missing the struct head, the mutex
// declaration that the long comment below documents, and one further member;
// visible code kept verbatim.
  std::unordered_map<uint32_t, FunctionTypeFeedback> feedback_for_function;
  std::unordered_map<uint32_t, uint32_t> deopt_count_for_function;
  // Accesses to {feedback_for_function} and {deopt_count_for_function} are
  // guarded by this mutex. Multiple reads are allowed (shared lock), but only
  // exclusive writes. Currently known users of the mutex are:
  // - LiftoffCompiler: writes {call_targets}.
  // - TransitiveTypeFeedbackProcessor: reads {call_targets},
  //   writes {feedback_vector}, reads {feedback_vector.size()}.
  // - TriggerTierUp: increments {tierup_priority}.
  // - WasmGraphBuilder: reads {feedback_vector}.
  // - Feedback vector allocation: reads {call_targets.size()}.
  // - PGO ProfileGenerator: reads everything.
  // - PGO deserializer: writes everything, currently not locked, relies on
  //   being called before multi-threading enters the picture.
  // - Deoptimizer: sets needs_reprocessing_after_deopt.

  size_t EstimateCurrentMemoryConsumption() const;
};
687
// Static representation of a wasm table.
// NOTE(review): one member line (between the struct head and {initial_size},
// presumably the table's element type) is missing from this listing.
struct WasmTable {
  uint32_t initial_size = 0;
  // The declared maximum size; at runtime the actual size is limited to a
  // 32-bit value (kV8MaxWasmTableSize).
  uint64_t maximum_size = 0;
  bool has_maximum_size = false;
  AddressType address_type = AddressType::kI32;
  bool shared = false;
  bool imported = false;
  bool exported = false;
  ConstantExpression initial_value = {};

  bool is_table64() const { return address_type == AddressType::kI64; }
};
703
// Static representation of a module.
// NOTE(review): this listing (a doc-page extraction) is missing many lines,
// including the struct head (presumably `struct V8_EXPORT_PRIVATE WasmModule
// {`), several member declarations (among them the signature zone, the type
// feedback storage, and the {origin} field read below), and the first line
// of many method definitions. Gaps are marked
// `// [missing line(s) in listing]`; visible code is kept verbatim.
// [missing line(s) in listing]
  // ================ Fields ===================================================
  // The signature zone is also used to store the signatures of C++ functions
  // called with the V8 fast API. These signatures are added during
  // instantiation, so the `signature_zone` may be changed even when the
  // `WasmModule` is already `const`.
  // [missing line(s) in listing]
  int start_function_index = -1;  // start function, >= 0 if any

  // Size of the buffer required for all globals that are not imported and
  // mutable.
  uint32_t untagged_globals_buffer_size = 0;
  uint32_t tagged_globals_buffer_size = 0;
  uint32_t num_imported_globals = 0;
  uint32_t num_imported_mutable_globals = 0;
  uint32_t num_imported_functions = 0;
  uint32_t num_imported_tables = 0;
  uint32_t num_imported_tags = 0;
  uint32_t num_declared_functions = 0;  // excluding imported
  // This field is updated when decoding the functions. At this point in time
  // with streaming compilation there can already be background threads running
  // turbofan compilations which will read this to decide on inlining budgets.
  // This can only happen with eager compilation as code execution only starts
  // after the module has been fully decoded and therefore it does not affect
  // production configurations.
  std::atomic<uint32_t> num_small_functions = 0;
  uint32_t num_exported_functions = 0;
  uint32_t num_declared_data_segments = 0;  // From the DataCount section.
  // Position and size of the code section (payload only, i.e. without section
  // ID and length).
  WireBytesRef code = {0, 0};
  WireBytesRef name = {0, 0};
  // Position and size of the name section (payload only, i.e. without section
  // ID and length).
  WireBytesRef name_section = {0, 0};
  // Set by the singleton TypeNamesProvider to avoid duplicate work.
  mutable std::atomic<bool> canonical_typenames_decoded = false;
  // Set to true if this module has wasm-gc types in its type section.
  bool is_wasm_gc = false;
  // Set to true if this module has any shared elements other than memories.
  bool has_shared_part = false;

  std::vector<TypeDefinition> types;  // by type index
  // Maps each type index to its global (cross-module) canonical index as per
  // isorecursive type canonicalization.
  std::vector<CanonicalTypeIndex> isorecursive_canonical_type_ids;
  std::vector<WasmFunction> functions;
  std::vector<WasmGlobal> globals;
  std::vector<WasmDataSegment> data_segments;
  std::vector<WasmTable> tables;
  std::vector<WasmMemory> memories;
  std::vector<WasmImport> import_table;
  std::vector<WasmExport> export_table;
  std::vector<WasmTag> tags;
  std::vector<WasmStringRefLiteral> stringref_literals;
  std::vector<WasmElemSegment> elem_segments;
  std::vector<WasmCompilationHint> compilation_hints;
  // [missing line(s) in listing]
  // Pairs of module offsets and mark id.
  std::vector<std::pair<uint32_t, uint32_t>> inst_traces;

  // This is the only member of {WasmModule} where we store dynamic information
  // that's not a decoded representation of the wire bytes.
  // TODO(jkummerow): Rename.
  // [missing line(s) in listing]

  // [missing line(s) in listing]
  std::array<WasmDebugSymbols, WasmDebugSymbols::kNumTypes> debug_symbols{};
  // [missing line(s) in listing]

  // Asm.js source position information. Only available for modules compiled
  // from asm.js.
  std::unique_ptr<AsmJsOffsetInformation> asm_js_offset_information;

  // {validated_functions} is atomically updated when functions get validated
  // (during compilation, streaming decoding, or via explicit validation).
  static_assert(sizeof(std::atomic<uint8_t>) == 1);
  static_assert(alignof(std::atomic<uint8_t>) == 1);
  mutable std::unique_ptr<std::atomic<uint8_t>[]> validated_functions;

  // ================ Constructors =============================================
  explicit WasmModule(ModuleOrigin = kWasmOrigin);
  WasmModule(const WasmModule&) = delete;
  WasmModule& operator=(const WasmModule&) = delete;

  // ================ Interface for tests ======================================
  // Tests sometimes add types iteratively instead of all at once via module
  // decoding.
  // [missing line(s) in listing: signature, presumably
  // `void AddTypeForTesting(TypeDefinition type) {`]
    types.push_back(type);
    if (type.supertype.valid()) {
      // Set the subtyping depth. Outside of unit tests this is done by the
      // module decoder.
      DCHECK_GT(types.size(), 0);
      DCHECK_LT(type.supertype.index, types.size() - 1);
      types.back().subtyping_depth =
          this->type(type.supertype).subtyping_depth + 1;
    }
    // Isorecursive canonical type will be computed later.
    isorecursive_canonical_type_ids.push_back(CanonicalTypeIndex{kNoSuperType});
  }

  // [missing line(s) in listing: first line of signature]
                             bool is_final, bool is_shared) {
  // [missing line(s) in listing]
    AddTypeForTesting(TypeDefinition(sig, supertype, is_final, is_shared));
  }

  // [missing line(s) in listing: first line of signature]
                               ModuleTypeIndex supertype, bool is_final,
                               bool is_shared) {
    DCHECK_NOT_NULL(type);
    AddTypeForTesting(TypeDefinition(type, supertype, is_final, is_shared));
  }

  // [missing line(s) in listing: first line of signature]
                              bool is_final, bool is_shared) {
    DCHECK_NOT_NULL(type);
    AddTypeForTesting(TypeDefinition(type, supertype, is_final, is_shared));
  }

  void AddContTypeForTesting(const ContType* type, ModuleTypeIndex supertype,
                             bool is_final, bool is_shared) {
    DCHECK_NOT_NULL(type);
    AddTypeForTesting(TypeDefinition(type, supertype, is_final, is_shared));
  }

  // ================ Accessors ================================================
  bool has_type(ModuleTypeIndex index) const {
    return index.index < types.size();
  }

  const TypeDefinition& type(ModuleTypeIndex index) const {
    size_t num_types = types.size();
    V8_ASSUME(index.index < num_types);
    return types[index.index];
  }

  // [missing line(s) in listing: signature]
    const TypeDefinition& t = type(index);
    return HeapType::Index(index, t.is_shared,
                           static_cast<RefTypeKind>(t.kind));
  }

  // [missing line(s) in listing: signature]
    size_t num_types = isorecursive_canonical_type_ids.size();
    DCHECK_EQ(num_types, types.size());
    V8_ASSUME(index.index < num_types);
    return isorecursive_canonical_type_ids[index.index];
  }

  // [missing line(s) in listing: signature]
    if (!type.has_index()) {
      return CanonicalValueType{type};
    }
    return type.Canonicalize(canonical_type_id(type.ref_index()));
  }

  bool has_signature(ModuleTypeIndex index) const {
    return index.index < types.size() &&
           types[index.index].kind == TypeDefinition::kFunction;
  }
  // [missing line(s) in listing: signature]
    DCHECK(has_signature(index));
    size_t num_types = types.size();
    V8_ASSUME(index.index < num_types);
    return types[index.index].function_sig;
  }

  bool has_cont_type(ModuleTypeIndex index) const {
    return index.index < types.size() &&
           types[index.index].kind == TypeDefinition::kCont;
  }

  const ContType* cont_type(ModuleTypeIndex index) const {
    DCHECK(has_cont_type(index));
    size_t num_types = types.size();
    V8_ASSUME(index.index < num_types);
    return types[index.index].cont_type;
  }

  // [missing line(s) in listing: signature]
    DCHECK(has_signature(index));
    size_t num_types = isorecursive_canonical_type_ids.size();
    V8_ASSUME(index.index < num_types);
    return isorecursive_canonical_type_ids[index.index];
  }

  uint64_t signature_hash(const TypeCanonicalizer*,
                          uint32_t function_index) const;

  bool has_struct(ModuleTypeIndex index) const {
    return index.index < types.size() &&
           types[index.index].kind == TypeDefinition::kStruct;
  }

  // [missing line(s) in listing: signature]
    DCHECK(has_struct(index));
    size_t num_types = types.size();
    V8_ASSUME(index.index < num_types);
    return types[index.index].struct_type;
  }

  bool has_array(ModuleTypeIndex index) const {
    return index.index < types.size() &&
           types[index.index].kind == TypeDefinition::kArray;
  }
  // [missing line(s) in listing: signature]
    DCHECK(has_array(index));
    size_t num_types = types.size();
    V8_ASSUME(index.index < num_types);
    return types[index.index].array_type;
  }

  // [missing line(s) in listing: signature]
    size_t num_types = types.size();
    V8_ASSUME(index.index < num_types);
    return types[index.index].supertype;
  }
  bool has_supertype(ModuleTypeIndex index) const {
    return supertype(index).valid();
  }

  // Linear search. Returns CanonicalTypeIndex::Invalid() if types are empty.
  // [missing line(s) in listing: signature]
    if (isorecursive_canonical_type_ids.empty()) {
      return CanonicalTypeIndex::Invalid();
    }
    return *std::max_element(isorecursive_canonical_type_ids.begin(),
                             isorecursive_canonical_type_ids.end());
  }

  bool function_is_shared(int func_index) const {
    return type(functions[func_index].sig_index).is_shared;
  }

  // Reads the validation bit for {func_index} from the {validated_functions}
  // bitmap (one bit per declared function).
  bool function_was_validated(int func_index) const {
    DCHECK_NOT_NULL(validated_functions);
    static_assert(sizeof(validated_functions[0]) == 1);
    DCHECK_LE(num_imported_functions, func_index);
    int pos = func_index - num_imported_functions;
    DCHECK_LE(pos, num_declared_functions);
    uint8_t byte =
        validated_functions[pos >> 3].load(std::memory_order_relaxed);
    DCHECK_IMPLIES(origin != kWasmOrigin, byte == 0xff);
    return byte & (1 << (pos & 7));
  }

  // Atomically sets the validation bit for {func_index}; safe against
  // concurrent setters via a CAS loop.
  void set_function_validated(int func_index) const {
    DCHECK_EQ(kWasmOrigin, origin);
    DCHECK_NOT_NULL(validated_functions);
    DCHECK_LE(num_imported_functions, func_index);
    int pos = func_index - num_imported_functions;
    DCHECK_LE(pos, num_declared_functions);
    std::atomic<uint8_t>* atomic_byte = &validated_functions[pos >> 3];
    uint8_t old_byte = atomic_byte->load(std::memory_order_relaxed);
    uint8_t new_bit = 1 << (pos & 7);
    while ((old_byte & new_bit) == 0 &&
           !atomic_byte->compare_exchange_weak(old_byte, old_byte | new_bit,
                                               std::memory_order_relaxed)) {
      // Retry with updated {old_byte}.
    }
  }

  // [missing line(s) in listing: signature] — marks all declared functions
  // as validated by filling the bitmap with 0xff.
    DCHECK_EQ(kWasmOrigin, origin);
    if (num_declared_functions == 0) return;
    DCHECK_NOT_NULL(validated_functions);
    size_t num_words = (num_declared_functions + 7) / 8;
    for (size_t i = 0; i < num_words; ++i) {
      validated_functions[i].store(0xff, std::memory_order_relaxed);
    }
  }

  // [missing line(s) in listing: signature] — returns the view of {functions}
  // starting after the imported ones.
    return base::VectorOf(functions) + num_imported_functions;
  }

#if V8_ENABLE_DRUMBRAKE
  void SetWasmInterpreter(
      std::shared_ptr<WasmInterpreterRuntime> interpreter) const {
    base::MutexGuard lock(&interpreter_mutex_);
    interpreter_ = interpreter;
  }
  mutable std::weak_ptr<WasmInterpreterRuntime> interpreter_;
  mutable base::Mutex interpreter_mutex_;
#endif  // V8_ENABLE_DRUMBRAKE

  size_t EstimateStoredSize() const;                // No tracing.
  size_t EstimateCurrentMemoryConsumption() const;  // With tracing.
};
997
998inline bool is_asmjs_module(const WasmModule* module) {
999 return module->origin != kWasmOrigin;
1000}
1001
// Return the byte offset of the function identified by the given index.
// The offset will be relative to the start of the module bytes.
// Returns -1 if the function index is invalid.
int GetWasmFunctionOffset(const WasmModule* module, uint32_t func_index);

// Returns the function containing the given byte offset.
// Returns -1 if the byte offset is not contained in any
// function of this module.
int GetContainingWasmFunction(const WasmModule* module, uint32_t byte_offset);

// Returns the function containing the given byte offset.
// Will return preceding function if the byte offset is not
// contained within a function.
int GetNearestWasmFunction(const WasmModule* module, uint32_t byte_offset);

// Gets the explicitly defined subtyping depth for the given type.
// Returns 0 if the type has no explicit supertype.
// The result is capped to {kV8MaxRttSubtypingDepth + 1}.
// Invalid cyclic hierarchies will return -1.
// NOTE(review): the first line of this declaration (return type, function
// name, and first parameter) is missing from this listing.
                             ModuleTypeIndex type_index);
1023
1024// Interface to the storage (wire bytes) of a wasm module.
1025// It is illegal for anyone receiving a ModuleWireBytes to store pointers based
1026// on module_bytes, as this storage is only guaranteed to be alive as long as
1027// this struct is alive.
1028// As {ModuleWireBytes} is just a wrapper around a {base::Vector<const
1029// uint8_t>}, it should generally be passed by value.
1032 : module_bytes_(module_bytes) {}
1033 constexpr ModuleWireBytes(const uint8_t* start, const uint8_t* end)
1034 : module_bytes_(start, static_cast<int>(end - start)) {
1036 }
1037
1038 bool operator==(const ModuleWireBytes& other) const = default;
1039
1040 // Get a string stored in the module bytes representing a name.
1041 WasmName GetNameOrNull(WireBytesRef ref) const;
1042
1043 // Get a string stored in the module bytes representing a function name.
1044 WasmName GetNameOrNull(int func_index, const WasmModule* module) const;
1045
1046 // Checks the given reference is contained within the module bytes.
 // Deliberately written as {offset <= size && length <= size - offset}
 // instead of {offset + length <= size}: the subtraction cannot underflow
 // once {offset <= size} holds, whereas the addition could overflow
 // uint32_t for an arbitrary (untrusted) {ref}.
1047 bool BoundsCheck(WireBytesRef ref) const {
1048 uint32_t size = static_cast<uint32_t>(module_bytes_.length());
1049 return ref.offset() <= size && ref.length() <= size - ref.offset();
1050 }
1051
1053 const WasmFunction* function) const {
1054 return module_bytes_.SubVector(function->code.offset(),
1055 function->code.end_offset());
1056 }
1057
 // Accessors for the wrapped wire bytes. All return non-owning views /
 // pointers into {module_bytes_} (see the lifetime note on this struct).
1058 base::Vector<const uint8_t> module_bytes() const { return module_bytes_; }
1059 const uint8_t* start() const { return module_bytes_.begin(); }
1060 const uint8_t* end() const { return module_bytes_.end(); }
1061 size_t length() const { return module_bytes_.length(); }
1062
1063 private:
1065};
1067
1068// A helper for printing out the names of functions.
1070 WasmFunctionName(int func_index, WasmName name)
1071 : func_index_(func_index), name_(name) {}
1072
1073 const int func_index_;
1075};
1076
1077V8_EXPORT_PRIVATE std::ostream& operator<<(std::ostream& os,
1078 const WasmFunctionName& name);
1079
1081 Isolate* isolate, DirectHandle<NativeContext> context);
1083 Isolate* isolate, DirectHandle<Context> context);
1084
1086 const FunctionSig* sig,
1087 bool for_exception = false);
1088DirectHandle<JSObject> GetTypeForGlobal(Isolate* isolate, bool is_mutable,
1089 ValueType type);
1090DirectHandle<JSObject> GetTypeForMemory(Isolate* isolate, uint32_t min_size,
1091 std::optional<uint64_t> max_size,
1092 bool shared, AddressType address_type);
1094 uint32_t min_size,
1095 std::optional<uint64_t> max_size,
1096 AddressType address_type);
1104 ErrorThrower* thrower);
1105
1106// Get the source position from a given function index and byte offset,
1107// for either asm.js or pure Wasm modules.
1108int GetSourcePosition(const WasmModule*, uint32_t func_index,
1109 uint32_t byte_offset, bool is_at_number_conversion);
1110
1111// Translate function index to the index relative to the first declared (i.e.
1112// non-imported) function.
1113inline int declared_function_index(const WasmModule* module, int func_index) {
1114 DCHECK_LE(module->num_imported_functions, func_index);
1115 int declared_idx = func_index - module->num_imported_functions;
1116 DCHECK_GT(module->num_declared_functions, declared_idx);
1117 return declared_idx;
1118}
1119
1120// Translate from function index to jump table offset.
1121int JumpTableOffset(const WasmModule* module, int func_index);
1122
1123// TruncatedUserString makes it easy to output names up to a certain length, and
1124// output a truncation followed by '...' if they exceed a limit.
1125// Use like this:
1126// TruncatedUserString<> name (pc, len);
1127// printf("... %.*s ...", name.length(), name.start())
1128template <int kMaxLen = 50>
1130 static_assert(kMaxLen >= 4, "minimum length is 4 (length of '...' plus one)");
1131
1132 public:
1133 template <typename T>
1135 : TruncatedUserString(name.begin(), name.length()) {}
1136
1137 TruncatedUserString(const uint8_t* start, size_t len)
1138 : TruncatedUserString(reinterpret_cast<const char*>(start), len) {}
1139
 // Stores at most {kMaxLen} characters: {length_} is clamped to {kMaxLen},
 // and if {len} exceeds it, the first {kMaxLen - 3} characters are copied
 // into {buffer_} with the final three replaced by "...", and {start_} is
 // redirected to the internal buffer.
1140 TruncatedUserString(const char* start, size_t len)
1141 : start_(start), length_(std::min(kMaxLen, static_cast<int>(len))) {
1142 if (len > static_cast<size_t>(kMaxLen)) {
1143 memcpy(buffer_, start, kMaxLen - 3);
1144 memset(buffer_ + kMaxLen - 3, '.', 3);
1145 start_ = buffer_;
1146 }
1147 }
1148
1149 const char* start() const { return start_; }
1150
1151 int length() const { return length_; }
1152
1153 private:
1154 const char* start_;
1155 const int length_;
1156 char buffer_[kMaxLen];
1157};
1158
1159// Print the signature into the given {buffer}, using {delimiter} as separator
1160// between parameter types and return types. If {buffer} is non-empty, it will
1161// be null-terminated, even if the signature is cut off. Returns the number of
1162// characters written, excluding the terminating null-byte.
1164 char delimiter = ':');
1165
1166V8_EXPORT_PRIVATE size_t
1168
1169// Get the required number of feedback slots for a function.
1170int NumFeedbackSlots(const WasmModule* module, int func_index);
1171
1172} // namespace v8::internal::wasm
1173
1174#endif // V8_WASM_WASM_MODULE_H_
Builtins::Kind kind
Definition builtins.cc:40
SourcePosition pos
Vector< T > SubVector(size_t from, size_t to) const
Definition vector.h:41
AdaptiveMap & operator=(AdaptiveMap &&other) V8_NOEXCEPT
std::map< uint32_t, Value > MapType
void Put(uint32_t key, const Value &value)
AdaptiveMap(AdaptiveMap &&other) V8_NOEXCEPT
const Value * Get(uint32_t key) const
std::unique_ptr< MapType > map_
bool Has(uint32_t key) const
AdaptiveMap & operator=(const AdaptiveMap &)=delete
void Put(uint32_t key, Value &&value)
AdaptiveMap(const AdaptiveMap &)=delete
std::vector< Value > vector_
base::OwnedVector< const uint8_t > encoded_offsets_
std::unique_ptr< AsmJsOffsets > decoded_offsets_
CallSiteFeedback(const CallSiteFeedback &other) V8_NOEXCEPT
CallSiteFeedback & operator=(CallSiteFeedback &&other) V8_NOEXCEPT
static CallSiteFeedback CreateMegamorphic()
CallSiteFeedback(PolymorphicCase *polymorphic_cases, int num_cases)
const PolymorphicCase * polymorphic_storage() const
CallSiteFeedback(CallSiteFeedback &&other) V8_NOEXCEPT
CallSiteFeedback(int function_index, int call_count)
void set_has_non_inlineable_targets(bool has_non_inlineable_targets)
CallSiteFeedback & operator=(const CallSiteFeedback &other) V8_NOEXCEPT
TruncatedUserString(const char *start, size_t len)
TruncatedUserString(const uint8_t *start, size_t len)
TruncatedUserString(base::Vector< T > name)
constexpr WireBytesRef(uint32_t offset, uint32_t length)
Definition wasm-module.h:60
constexpr WireBytesRef()=default
RecordWriteMode const mode_
base::OwnedVector< uint8_t > buffer_
Definition assembler.cc:111
uint8_t *const start_
Definition assembler.cc:131
const MapRef map_
std::vector< T > vector_
Definition sweeper.cc:212
int start
int end
const int func_index_
const char * name_
const int length_
Definition mul-fft.cc:473
STL namespace.
DirectHandle< JSObject > GetTypeForTable(Isolate *isolate, ValueType type, uint32_t min_size, std::optional< uint64_t > max_size, AddressType address_type)
size_t PrintSignature(base::Vector< char > buffer, const CanonicalSig *sig, char delimiter)
int GetSubtypingDepth(const WasmModule *module, ModuleTypeIndex type_index)
uint32_t max_mem32_pages()
DirectHandle< JSObject > GetTypeForMemory(Isolate *isolate, uint32_t min_size, std::optional< uint64_t > max_size, bool shared, AddressType address_type)
uint32_t max_mem64_pages()
int GetWasmFunctionOffset(const WasmModule *module, uint32_t func_index)
int JumpTableOffset(const WasmModule *module, int func_index)
int GetContainingWasmFunction(const WasmModule *module, uint32_t byte_offset)
bool is_asmjs_module(const WasmModule *module)
size_t GetWireBytesHash(base::Vector< const uint8_t > wire_bytes)
DirectHandle< JSArray > GetImports(Isolate *isolate, DirectHandle< WasmModuleObject > module_object)
constexpr const char * AddressTypeToStr(AddressType address_type)
Definition wasm-module.h:48
int GetSourcePosition(const WasmModule *module, uint32_t func_index, uint32_t byte_offset, bool is_at_number_conversion)
base::Vector< const char > WasmName
Definition code-events.h:30
constexpr ModuleTypeIndex kNoType
constexpr ModuleTypeIndex kNoSuperType
int GetNearestWasmFunction(const WasmModule *module, uint32_t byte_offset)
constexpr IndependentHeapType kWasmVoid
int declared_function_index(const WasmModule *module, int func_index)
DirectHandle< JSArray > GetExports(Isolate *isolate, DirectHandle< WasmModuleObject > module_object)
constexpr size_t kWasmPageSize
void UpdateComputedInformation(WasmMemory *memory, ModuleOrigin origin)
DirectHandle< JSArray > GetCustomSections(Isolate *isolate, DirectHandle< WasmModuleObject > module_object, DirectHandle< String > name, ErrorThrower *thrower)
constexpr IndependentHeapType kWasmBottom
std::ostream & operator<<(std::ostream &os, LiftoffVarState slot)
DirectHandle< JSObject > GetTypeForFunction(Isolate *isolate, const FunctionSig *sig, bool for_exception)
constexpr size_t kV8MaxWasmTotalFunctions
Definition wasm-limits.h:88
std::unordered_map< uint32_t, BranchHintMap > BranchHintInfo
Signature< ValueType > FunctionSig
DirectHandle< String > ErrorStringForCodegen(Isolate *isolate, DirectHandle< Context > context)
bool IsWasmCodegenAllowed(Isolate *isolate, DirectHandle< NativeContext > context)
int NumFeedbackSlots(const WasmModule *module, int func_index)
DirectHandle< JSObject > GetTypeForGlobal(Isolate *isolate, bool is_mutable, ValueType type)
kWasmInternalFunctionIndirectPointerTag kProtectedInstanceDataOffset sig
constexpr int kSystemPointerSize
Definition globals.h:410
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr int kMaxInt
Definition globals.h:374
JSArrayBuffer::IsDetachableBit is_shared
#define V8_NOEXCEPT
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define DCHECK_GT(v1, v2)
Definition logging.h:487
#define ASSERT_TRIVIALLY_COPYABLE(T)
Definition macros.h:267
#define V8_EXPORT_PRIVATE
Definition macros.h:460
base::OwnedVector< uint32_t > call_targets
base::OwnedVector< CallSiteFeedback > feedback_vector
static constexpr ModuleTypeIndex Invalid()
Definition value-type.h:73
bool BoundsCheck(WireBytesRef ref) const
base::Vector< const uint8_t > module_bytes_
bool operator==(const ModuleWireBytes &other) const =default
base::Vector< const uint8_t > GetFunctionBytes(const WasmFunction *function) const
constexpr ModuleWireBytes(const uint8_t *start, const uint8_t *end)
ModuleWireBytes(base::Vector< const uint8_t > module_bytes)
const uint8_t * start() const
base::Vector< const uint8_t > module_bytes() const
constexpr TypeDefinition(const StructType *type, ModuleTypeIndex supertype, bool is_final, bool is_shared)
constexpr TypeDefinition()=default
bool operator==(const TypeDefinition &other) const
constexpr TypeDefinition(const FunctionSig *sig, ModuleTypeIndex supertype, bool is_final, bool is_shared)
constexpr TypeDefinition(const ArrayType *type, ModuleTypeIndex supertype, bool is_final, bool is_shared)
constexpr TypeDefinition(const ContType *type, ModuleTypeIndex supertype, bool is_final, bool is_shared)
std::unordered_map< uint32_t, FunctionTypeFeedback > feedback_for_function
std::unordered_map< uint32_t, uint32_t > deopt_count_for_function
WasmCompilationHintStrategy strategy
WasmCompilationHintTier baseline_tier
WasmDataSegment(bool is_active, bool is_shared, uint32_t memory_index, ConstantExpression dest_addr, WireBytesRef source)
static WasmDataSegment PassiveForTesting()
WasmElemSegment(Status status, bool shared, ValueType type, ElementType element_type, uint32_t element_count, uint32_t elements_wire_bytes_offset)
WasmElemSegment(WasmElemSegment &&) V8_NOEXCEPT=default
WasmElemSegment(const WasmElemSegment &)=delete
WasmElemSegment(bool shared, ValueType type, uint32_t table_index, ConstantExpression offset, ElementType element_type, uint32_t element_count, uint32_t elements_wire_bytes_offset)
ImportExportKindCode kind
WasmFunctionName(int func_index, WasmName name)
ImportExportKindCode kind
BoundsCheckStrategy bounds_checks
const StructType * struct_type(ModuleTypeIndex index) const
base::Vector< const WasmFunction > declared_functions() const
LazilyGeneratedNames lazily_generated_names
void AddContTypeForTesting(const ContType *type, ModuleTypeIndex supertype, bool is_final, bool is_shared)
std::vector< TypeDefinition > types
WasmModule(const WasmModule &)=delete
void set_function_validated(int func_index) const
std::vector< CanonicalTypeIndex > isorecursive_canonical_type_ids
CanonicalTypeIndex canonical_type_id(ModuleTypeIndex index) const
const ContType * cont_type(ModuleTypeIndex index) const
std::vector< WasmMemory > memories
std::unique_ptr< AsmJsOffsetInformation > asm_js_offset_information
bool has_cont_type(ModuleTypeIndex index) const
void AddTypeForTesting(TypeDefinition type)
const ArrayType * array_type(ModuleTypeIndex index) const
std::unique_ptr< std::atomic< uint8_t >[]> validated_functions
bool has_signature(ModuleTypeIndex index) const
bool has_array(ModuleTypeIndex index) const
std::vector< WasmExport > export_table
std::vector< WasmElemSegment > elem_segments
std::vector< WasmImport > import_table
std::vector< WasmStringRefLiteral > stringref_literals
HeapType heap_type(ModuleTypeIndex index) const
void AddSignatureForTesting(const FunctionSig *sig, ModuleTypeIndex supertype, bool is_final, bool is_shared)
CanonicalValueType canonical_type(ValueType type) const
bool function_is_shared(int func_index) const
std::vector< WasmFunction > functions
bool has_supertype(ModuleTypeIndex index) const
CanonicalTypeIndex canonical_sig_id(ModuleTypeIndex index) const
std::vector< WasmCompilationHint > compilation_hints
const FunctionSig * signature(ModuleTypeIndex index) const
bool has_struct(ModuleTypeIndex index) const
std::vector< std::pair< uint32_t, uint32_t > > inst_traces
std::vector< WasmTag > tags
void AddStructTypeForTesting(const StructType *type, ModuleTypeIndex supertype, bool is_final, bool is_shared)
WasmModule & operator=(const WasmModule &)=delete
std::vector< WasmGlobal > globals
TypeFeedbackStorage type_feedback
bool function_was_validated(int func_index) const
void AddArrayTypeForTesting(const ArrayType *type, ModuleTypeIndex supertype, bool is_final, bool is_shared)
const TypeDefinition & type(ModuleTypeIndex index) const
std::vector< WasmDataSegment > data_segments
ModuleTypeIndex supertype(ModuleTypeIndex index) const
bool has_type(ModuleTypeIndex index) const
void set_all_functions_validated() const
std::vector< WasmTable > tables
CanonicalTypeIndex MaxCanonicalTypeIndex() const
WasmStringRefLiteral(const WireBytesRef &source)
WasmTag(const WasmTagSig *sig, ModuleTypeIndex sig_index)
const WasmTagSig * sig
const FunctionSig * ToFunctionSig() const
#define V8_ASSUME
Definition v8config.h:533
std::unique_ptr< ValueMirror > key
wasm::ValueType type