#ifndef V8_WASM_WASM_CODE_MANAGER_H_
#define V8_WASM_WASM_CODE_MANAGER_H_

#if !V8_ENABLE_WEBASSEMBLY
#error This header should only be included if WebAssembly is enabled.
#endif  // !V8_ENABLE_WEBASSEMBLY

#include "absl/container/flat_hash_map.h"
class InstructionStream;

class AssumptionsJournal;
struct WasmCompilationResult;
class WasmImportWrapperCache;
// A set of sorted, disjoint, and non-touching address regions.
class V8_EXPORT_PRIVATE DisjointAllocationPool final {
 public:
  MOVE_ONLY_WITH_DEFAULT_CONSTRUCTORS(DisjointAllocationPool);
  explicit DisjointAllocationPool(base::AddressRegion region)
      : regions_({region}) {}

  bool IsEmpty() const { return regions_.empty(); }

  const auto& regions() const { return regions_; }

 private:
  std::set<base::AddressRegion, base::AddressRegion::StartAddressLess>
      regions_;
};
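// Illustrative usage sketch (not part of the original header); the region
// values below are made up:
//
//   DisjointAllocationPool pool{base::AddressRegion{0x10000, 0x1000}};
//   DCHECK(!pool.IsEmpty());
//   DCHECK_EQ(1u, pool.regions().size());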
// A {WasmCode} object represents one piece of generated code (a Wasm
// function, a wrapper, or a jump table), owned by a {NativeModule}.
class V8_EXPORT_PRIVATE WasmCode final {
 public:
#if V8_ENABLE_DRUMBRAKE
  // ... (DrumBrake-interpreter-specific declarations elided) ...
#endif  // V8_ENABLE_DRUMBRAKE

  static constexpr Builtin GetRecordWriteBuiltin(SaveFPRegsMode fp_mode) {
    switch (fp_mode) {
      case SaveFPRegsMode::kIgnore:
        return Builtin::kRecordWriteIgnoreFP;
      case SaveFPRegsMode::kSave:
        return Builtin::kRecordWriteSaveFP;
    }
  }
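  // Being constexpr, the mapping above can be checked at compile time, e.g.:
  //   static_assert(GetRecordWriteBuiltin(SaveFPRegsMode::kSave) ==
  //                 Builtin::kRecordWriteSaveFP);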
#ifdef V8_IS_TSAN
  static Builtin GetTSANStoreBuiltin(SaveFPRegsMode fp_mode, int size,
                                     std::memory_order order) {
    if (order == std::memory_order_relaxed) {
      if (size == kInt8Size) {
        return fp_mode == SaveFPRegsMode::kIgnore
                   ? Builtin::kTSANRelaxedStore8IgnoreFP
                   : Builtin::kTSANRelaxedStore8SaveFP;
      } else if (size == kInt16Size) {
        return fp_mode == SaveFPRegsMode::kIgnore
                   ? Builtin::kTSANRelaxedStore16IgnoreFP
                   : Builtin::kTSANRelaxedStore16SaveFP;
      } else if (size == kInt32Size) {
        return fp_mode == SaveFPRegsMode::kIgnore
                   ? Builtin::kTSANRelaxedStore32IgnoreFP
                   : Builtin::kTSANRelaxedStore32SaveFP;
      } else {
        CHECK_EQ(size, kInt64Size);
        return fp_mode == SaveFPRegsMode::kIgnore
                   ? Builtin::kTSANRelaxedStore64IgnoreFP
                   : Builtin::kTSANRelaxedStore64SaveFP;
      }
    } else {
      DCHECK_EQ(order, std::memory_order_seq_cst);
      if (size == kInt8Size) {
        return fp_mode == SaveFPRegsMode::kIgnore
                   ? Builtin::kTSANSeqCstStore8IgnoreFP
                   : Builtin::kTSANSeqCstStore8SaveFP;
      } else if (size == kInt16Size) {
        return fp_mode == SaveFPRegsMode::kIgnore
                   ? Builtin::kTSANSeqCstStore16IgnoreFP
                   : Builtin::kTSANSeqCstStore16SaveFP;
      } else if (size == kInt32Size) {
        return fp_mode == SaveFPRegsMode::kIgnore
                   ? Builtin::kTSANSeqCstStore32IgnoreFP
                   : Builtin::kTSANSeqCstStore32SaveFP;
      } else {
        CHECK_EQ(size, kInt64Size);
        return fp_mode == SaveFPRegsMode::kIgnore
                   ? Builtin::kTSANSeqCstStore64IgnoreFP
                   : Builtin::kTSANSeqCstStore64SaveFP;
      }
    }
  }
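  // Illustrative: a relaxed 4-byte store with FP registers preserved maps to
  //   GetTSANStoreBuiltin(SaveFPRegsMode::kSave, kInt32Size,
  //                       std::memory_order_relaxed)
  //       == Builtin::kTSANRelaxedStore32SaveFP.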
  static Builtin GetTSANRelaxedLoadBuiltin(SaveFPRegsMode fp_mode, int size) {
    if (size == kInt32Size) {
      return fp_mode == SaveFPRegsMode::kIgnore
                 ? Builtin::kTSANRelaxedLoad32IgnoreFP
                 : Builtin::kTSANRelaxedLoad32SaveFP;
    } else {
      CHECK_EQ(size, kInt64Size);
      return fp_mode == SaveFPRegsMode::kIgnore
                 ? Builtin::kTSANRelaxedLoad64IgnoreFP
                 : Builtin::kTSANRelaxedLoad64SaveFP;
    }
  }
#endif  // V8_IS_TSAN
  base::Vector<uint8_t> instructions() const {
    return base::VectorOf(instructions_,
                          static_cast<size_t>(instructions_size_));
  }
  Address instruction_start() const {
    return reinterpret_cast<Address>(instructions_);
  }
  size_t instructions_size() const {
    return static_cast<size_t>(instructions_size_);
  }
  base::Vector<const uint8_t> reloc_info() const {
    return {protected_instructions_data().end(),
            static_cast<size_t>(reloc_info_size_)};
  }
  base::Vector<const uint8_t> source_positions() const {
    return {reloc_info().end(), static_cast<size_t>(source_positions_size_)};
  }
  base::Vector<const uint8_t> inlining_positions() const {
    return {source_positions().end(),
            static_cast<size_t>(inlining_positions_size_)};
  }
  base::Vector<const uint8_t> deopt_data() const {
    return {inlining_positions().end(),
            static_cast<size_t>(deopt_data_size_)};
  }
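  // Illustrative layout of the single {meta_data_} allocation backing the
  // accessors above (each part's length is the corresponding {*_size_}
  // field):
  //
  //   [ protected instructions | reloc info | source positions
  //     | inlining positions | deopt data ]
  //
  // Each accessor simply starts at the {end()} of the previous block.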
  Address handler_table() const;
  int handler_table_size() const;
  Address code_comments() const;
  int code_comments_size() const;
  // The first tagged parameter slot is packed into the upper 16 bits of
  // {tagged_parameter_slots_}, the number of such slots into the lower 16.
  uint16_t first_tagged_parameter_slot() const {
    return tagged_parameter_slots_ >> 16;
  }
  uint16_t num_tagged_parameter_slots() const {
    return tagged_parameter_slots_ & 0xFFFF;
  }
  uint32_t raw_tagged_parameter_slots_for_serialization() const {
    return tagged_parameter_slots_;
  }
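  // Illustrative: for a frame whose tagged parameter section starts at slot 3
  // and spans 2 slots, the packed value is (3 << 16) | 2 == 0x0003'0002, so
  // first_tagged_parameter_slot() == 3 and num_tagged_parameter_slots() == 2.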
  bool is_liftoff() const { return tier() == ExecutionTier::kLiftoff; }
  bool is_turbofan() const { return tier() == ExecutionTier::kTurbofan; }
  bool contains(Address pc) const {
    return reinterpret_cast<Address>(instructions_) <= pc &&
           pc < reinterpret_cast<Address>(instructions_ + instructions_size_);
  }
  base::Vector<const uint8_t> protected_instructions_data() const {
    return {meta_data_.get(),
            static_cast<size_t>(protected_instructions_size_)};
  }

  base::Vector<const trap_handler::ProtectedInstructionData>
  protected_instructions() const {
    return base::Vector<const trap_handler::ProtectedInstructionData>::cast(
        protected_instructions_data());
  }

  bool IsProtectedInstruction(Address pc);
  void Validate() const;
  void Print(const char* name = nullptr) const;
  void MaybePrint() const;
  void Disassemble(const char* name, std::ostream& os,
                   Address current_pc = kNullAddress) const;

  static bool ShouldBeLogged(Isolate* isolate);
  void LogCode(Isolate* isolate, const char* source_url, int script_id) const;
  // Add one reference to this code object.
  void IncRef() {
    [[maybe_unused]] int old_val =
        ref_count_.fetch_add(1, std::memory_order_acq_rel);
    DCHECK_LE(1, old_val);
    DCHECK_GT(kMaxInt, old_val);
  }

  // Drop one reference. Returns whether this code becomes dead and needs to
  // be freed.
  V8_WARN_UNUSED_RESULT bool DecRef() {
    int old_count = ref_count_.load(std::memory_order_acquire);
    while (true) {
      DCHECK_LE(1, old_count);
      if (V8_UNLIKELY(old_count == 1)) {
        if (is_dying()) {
          // The code was already on the path to deletion; only temporary
          // C++ references are left. Drop one, and report whether it was
          // the last one.
          return DecRefOnDeadCode();
        }
        // The last user reference is about to go away: re-check the count,
        // then mark the code as potentially dead so the engine can collect
        // it during the next code GC.
        old_count = ref_count_.load(std::memory_order_acquire);
        if (old_count != 1) continue;
        DecRefOnPotentiallyDeadCode();
        return false;
      }
      if (V8_UNLIKELY(is_dying())) {
        // Multiple references are left, so the code is clearly still in
        // use; clear the pending "dying" mark.
        undo_mark_as_dying();
      }
      if (ref_count_.compare_exchange_weak(old_count, old_count - 1,
                                           std::memory_order_acq_rel)) {
        return false;
      }
    }
  }

  // Drop one reference on code that is known to stay alive (the count cannot
  // drop to zero here).
  void DecRefOnLiveCode() {
    [[maybe_unused]] int old_count =
        ref_count_.fetch_sub(1, std::memory_order_acq_rel);
    DCHECK_LE(2, old_count);
  }

  // Drop one reference on code that is known to be dead; returns whether
  // this was the last reference.
  V8_WARN_UNUSED_RESULT bool DecRefOnDeadCode() {
    return ref_count_.fetch_sub(1, std::memory_order_acq_rel) == 1;
  }

  void DcheckRefCountIsOne() {
    DCHECK_EQ(1, ref_count_.load(std::memory_order_acquire));
  }
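  // Illustrative usage (not part of the original header): C++ code that
  // holds on to a {WasmCode} object takes a reference and must drop it
  // later, typically via a {WasmCodeRefScope} rather than manually:
  //
  //   code->IncRef();
  //   ... use code ...
  //   if (code->DecRef()) { /* last reference gone: free the code */ }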
  // Returns the last source position before {code_offset}.
  int GetSourceOffsetBefore(int code_offset);

  std::tuple<int, bool, SourcePosition> GetInliningPosition(
      int inlining_id) const;
  ForDebugging for_debugging() const {
    return ForDebuggingField::decode(flags_);
  }

  bool is_dying() const { return dying_.load(std::memory_order_acquire); }

  bool frame_has_feedback_slot() const {
    return FrameHasFeedbackSlotField::decode(flags_);
  }

  enum FlushICache : bool { kFlushICache = true, kNoFlushICache = false };
  size_t EstimateCurrentMemoryConsumption() const;

  std::string DebugName() const;
 private:
  friend class NativeModule;

  WasmCode(NativeModule* native_module, int index,
           base::Vector<uint8_t> instructions, int stack_slots, int ool_spills,
           uint32_t tagged_parameter_slots, int safepoint_table_offset,
           int handler_table_offset, int constant_pool_offset,
           int code_comments_offset, int unpadded_binary_size,
           base::Vector<const uint8_t> protected_instructions_data,
           base::Vector<const uint8_t> reloc_info,
           base::Vector<const uint8_t> source_position_table,
           base::Vector<const uint8_t> inlining_positions,
           base::Vector<const uint8_t> deopt_data, Kind kind,
           ExecutionTier tier, ForDebugging for_debugging,
           uint64_t signature_hash, bool frame_has_feedback_slot = false)
      : native_module_(native_module),
        instructions_(instructions.begin()),
        signature_hash_(signature_hash),
        meta_data_(ConcatenateBytes({protected_instructions_data, reloc_info,
                                     source_position_table, inlining_positions,
                                     deopt_data})),
        instructions_size_(instructions.length()),
        reloc_info_size_(reloc_info.length()),
        source_positions_size_(source_position_table.length()),
        inlining_positions_size_(inlining_positions.length()),
        deopt_data_size_(deopt_data.length()),
        protected_instructions_size_(protected_instructions_data.length()),
        index_(index),
        constant_pool_offset_(constant_pool_offset),
        stack_slots_(stack_slots),
        ool_spills_(ool_spills),
        tagged_parameter_slots_(tagged_parameter_slots),
        safepoint_table_offset_(safepoint_table_offset),
        handler_table_offset_(handler_table_offset),
        code_comments_offset_(code_comments_offset),
        unpadded_binary_size_(unpadded_binary_size),
        flags_(KindField::encode(kind) | ExecutionTierField::encode(tier) |
               ForDebuggingField::encode(for_debugging) |
               FrameHasFeedbackSlotField::encode(frame_has_feedback_slot)) {
    DCHECK_LE(safepoint_table_offset, unpadded_binary_size);
    DCHECK_LE(handler_table_offset, unpadded_binary_size);
    DCHECK_LE(code_comments_offset, unpadded_binary_size);
    DCHECK_LE(constant_pool_offset, unpadded_binary_size);
  }
  std::unique_ptr<const uint8_t[]> ConcatenateBytes(
      std::initializer_list<base::Vector<const uint8_t>>);
  // Code objects registered with the global trap handler carry an index into
  // its data structures.
  int trap_handler_index() const {
    CHECK(has_trap_handler_index());
    return trap_handler_index_;
  }
  void set_trap_handler_index(int value) {
    CHECK(!has_trap_handler_index());
    trap_handler_index_ = value;
  }
  bool has_trap_handler_index() const { return trap_handler_index_ >= 0; }

  // Register protected instruction information with the trap handler; sets
  // {trap_handler_index_}.
  void RegisterTrapHandlerData();

  int trap_handler_index_ = -1;
#if !V8_ENABLE_DRUMBRAKE
  // ... (elided) ...
#endif  // !V8_ENABLE_DRUMBRAKE

  // Set when the code object starts being garbage-collected.
  std::atomic<bool> dying_{false};

  // A new {WasmCode} object starts with one reference.
  std::atomic<int> ref_count_{1};
};
// Compiled code that is not yet published, together with the assumptions it
// was compiled under.
struct UnpublishedWasmCode {
  std::unique_ptr<WasmCode> code;
  std::unique_ptr<AssumptionsJournal> assumptions;

  static constexpr AssumptionsJournal* kNoAssumptions = nullptr;
};
// A {NativeModule} owns all generated code of a single Wasm module.
class V8_EXPORT_PRIVATE NativeModule final {
 public:
#if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_ARM64 || \
    V8_TARGET_ARCH_PPC64 || V8_TARGET_ARCH_LOONG64 ||                     \
    V8_TARGET_ARCH_RISCV64 || V8_TARGET_ARCH_MIPS64
  static constexpr bool kNeedsFarJumpsBetweenCodeSpaces = true;
#else
  static constexpr bool kNeedsFarJumpsBetweenCodeSpaces = false;
#endif
  // {AddCode} is thread safe w.r.t. other calls to {AddCode} or other methods
  // adding code; it is protected by the module's allocation mutex.
  std::unique_ptr<WasmCode> AddCode(
      int index, const CodeDesc& desc, int stack_slots, int ool_spills,
      uint32_t tagged_parameter_slots,
      base::Vector<const uint8_t> protected_instructions_data,
      base::Vector<const uint8_t> source_position_table,
      base::Vector<const uint8_t> inlining_positions,
      base::Vector<const uint8_t> deopt_data, WasmCode::Kind kind,
      ExecutionTier tier, ForDebugging for_debugging);
  // Addresses of the jump tables to use for a particular code region.
  struct JumpTablesRef {
    Address jump_table_start = kNullAddress;
    Address far_jump_table_start = kNullAddress;

    bool is_valid() const { return far_jump_table_start != kNullAddress; }
  };
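  // Illustrative: callers obtain a {JumpTablesRef} for the code region they
  // are generating code in, and must check validity before use:
  //
  //   JumpTablesRef tables = ...;  // looked up for a specific code region
  //   if (tables.is_valid()) {
  //     Address target = GetNearCallTargetForFunction(func_index, tables);
  //   }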
  std::pair<base::Vector<uint8_t>, JumpTablesRef> AllocateForDeserializedCode(
      size_t total_code_size);

  std::unique_ptr<WasmCode> AddDeserializedCode(
      int index, base::Vector<uint8_t> instructions, int stack_slots,
      int ool_spills, uint32_t tagged_parameter_slots,
      int safepoint_table_offset, int handler_table_offset,
      int constant_pool_offset, int code_comments_offset,
      int unpadded_binary_size,
      base::Vector<const uint8_t> protected_instructions_data,
      base::Vector<const uint8_t> reloc_info,
      base::Vector<const uint8_t> source_position_table,
      base::Vector<const uint8_t> inlining_positions,
      base::Vector<const uint8_t> deopt_data, WasmCode::Kind kind,
      ExecutionTier tier);
  // Initialize the jump table with jumps to the lazy-compile stub, and set up
  // code pointer table entries for all declared functions.
  void InitializeJumpTableForLazyCompilation(uint32_t num_wasm_functions);
  void InitializeCodePointerTableHandles(uint32_t num_wasm_functions);
  void FreeCodePointerTableHandles();

  // Switch a single function to lazy compilation (the allocation mutex must
  // be held, per the {...Locked} naming convention).
  void UseLazyStubLocked(uint32_t func_index);
  // Creates a snapshot of the current state of the code table, along with the
  // well-known import statuses. This yields a consistent view of the table
  // (e.g. for the serializer).
  std::pair<std::vector<WasmCode*>, std::vector<WellKnownImport>>
  SnapshotCodeTable() const;
  // Creates a snapshot of all code owned by this module.
  std::vector<WasmCode*> SnapshotAllOwnedCode() const;

  WasmCode* GetCode(uint32_t index) const;
  bool HasCode(uint32_t index) const;
  bool HasCodeWithTier(uint32_t index, ExecutionTier tier) const;
  void SetWasmSourceMap(std::unique_ptr<WasmModuleSourceMap> source_map);
  WasmModuleSourceMap* GetWasmSourceMap() const;

  Address jump_table_start() const {
    return main_jump_table_ ? main_jump_table_->instruction_start()
                            : kNullAddress;
  }
  // Get the call target in the jump table for the given function, using the
  // jump tables previously looked up for the caller's code region.
  Address GetNearCallTargetForFunction(uint32_t func_index,
                                       const JumpTablesRef&) const;

  // Get the slot in the far jump table that jumps to the given builtin.
  Address GetJumpTableEntryForBuiltin(Builtin builtin,
                                      const JumpTablesRef&) const;

  // Reverse lookup from a given call target (which must be a jump table slot)
  // to the corresponding function index.
  uint32_t GetFunctionIndexFromJumpTableSlot(Address slot_address) const;
  CompilationState* compilation_state() const {
    return compilation_state_.get();
  }

  uint32_t num_functions() const {
    return module_->num_declared_functions + module_->num_imported_functions;
  }
  uint32_t num_imported_functions() const {
    return module_->num_imported_functions;
  }
  uint32_t num_declared_functions() const {
    return module_->num_declared_functions;
  }
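  // Illustrative: for a module with 3 imported and 5 declared functions,
  // num_imported_functions() == 3, num_declared_functions() == 5, and
  // num_functions() == 8; function indices 0..2 are the imports.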
  base::Vector<const uint8_t> wire_bytes() const {
    return std::atomic_load(&wire_bytes_)->as_vector();
  }
  size_t committed_code_space() const {
    return code_allocator_.committed_code_space();
  }
  size_t generated_code_size() const {
    return code_allocator_.generated_code_size();
  }
  size_t liftoff_bailout_count() const {
    return liftoff_bailout_count_.load(std::memory_order_relaxed);
  }
  size_t liftoff_code_size() const {
    return liftoff_code_size_.load(std::memory_order_relaxed);
  }
  size_t turbofan_code_size() const {
    return turbofan_code_size_.load(std::memory_order_relaxed);
  }
  void AddLazyCompilationTimeSample(int64_t sample);

  int num_lazy_compilations() const {
    return num_lazy_compilations_.load(std::memory_order_relaxed);
  }

  int64_t sum_lazy_compilation_time_in_ms() const {
    return sum_lazy_compilation_time_in_micro_sec_.load(
               std::memory_order_relaxed) /
           1000;
  }

  int64_t max_lazy_compilation_time_in_ms() const {
    return max_lazy_compilation_time_in_micro_sec_.load(
               std::memory_order_relaxed) /
           1000;
  }
  // To avoid double-reporting, only the first call returns true.
  bool ShouldLazyCompilationMetricsBeReported() {
    return should_metrics_be_reported_.exchange(false,
                                                std::memory_order_relaxed);
  }

  // Whether the cached PGO data should be written out; only the first call
  // returns true.
  bool ShouldPgoDataBeWritten() {
    return should_pgo_data_be_written_.exchange(false,
                                                std::memory_order_relaxed);
  }
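  // Illustrative: the atomic exchange makes these one-shot flags. The first
  // caller observes true and atomically flips the flag, so every later call
  // (from any thread) observes false.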
  bool HasWireBytes() const {
    auto wire_bytes = std::atomic_load(&wire_bytes_);
    return wire_bytes && !wire_bytes->empty();
  }

  void AddLiftoffBailout() {
    liftoff_bailout_count_.fetch_add(1, std::memory_order_relaxed);
  }
  Builtin GetBuiltinInJumptableSlot(Address target) const;

  // Sample the current code size of this native module via the given
  // counters.
  void SampleCodeSize(Counters*) const;
  // Remove all compiled code matching {filter} from this {NativeModule},
  // replace it with lazy-compile stubs, and return the sizes of the removed
  // code and metadata.
  std::pair<size_t, size_t> RemoveCompiledCode(RemoveFilter filter);

  // Returns the code size of all Liftoff-compiled functions.
  size_t SumLiftoffCodeSizeForTesting() const;

  size_t GetNumberOfCodeSpacesForTesting() const;
  // Whether a {DebugInfo} exists for this module.
  bool HasDebugInfo() const;

  std::atomic<uint32_t>* tiering_budget_array() const {
    return tiering_budgets_.get();
  }
  size_t EstimateCurrentMemoryConsumption() const;
  void PrintCurrentMemoryConsumptionEstimate() const;
  bool log_code() const { return log_code_.load(std::memory_order_relaxed); }

  void DisableCodeLogging() {
    log_code_.store(false, std::memory_order_relaxed);
  }
  // Stores the fast API call target of the function at {func_index}. The
  // first target set wins; only the same target may be set again later.
  bool TrySetFastApiCallTarget(int func_index, Address target) {
    Address old_val =
        fast_api_targets_[func_index].load(std::memory_order_relaxed);
    if (old_val == target) {
      return true;
    }
    if (old_val != kNullAddress) {
      // A different target was already set; reject the new one.
      return false;
    }
    if (fast_api_targets_[func_index].compare_exchange_strong(
            old_val, target, std::memory_order_relaxed)) {
      return true;
    }
    return false;
  }
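  // Note on the CAS above: if two threads race to set different targets,
  // only one compare_exchange_strong succeeds; the loser observes a
  // non-null, different target and returns false, so a slot is never
  // silently repointed.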
  std::atomic<Address>* fast_api_targets() const {
    return fast_api_targets_.get();
  }

  void set_fast_api_signature(int func_index, const MachineSignature* sig) {
    fast_api_signatures_[func_index] = sig;
  }

  bool has_fast_api_signature(int index) {
    return fast_api_signatures_[index] != nullptr;
  }

  std::atomic<const MachineSignature*>* fast_api_signatures() const {
    return fast_api_signatures_.get();
  }
 private:
  friend class WasmCodeManager;

  // Private constructor; {NativeModule}s are created through
  // {WasmCodeManager::NewNativeModule()}. Leading parameters are elided in
  // this excerpt.
  NativeModule(/* leading parameters elided */
               std::shared_ptr<const WasmModule> module,
               std::shared_ptr<Counters> async_counters,
               std::shared_ptr<NativeModule>* shared_this);
  std::unique_ptr<WasmCode> AddCodeWithCodeSpace(
      int index, const CodeDesc& desc, int stack_slots, int ool_spills,
      uint32_t tagged_parameter_slots /* , further parameters elided */);
  WasmCode* CreateEmptyJumpTableInRegionLocked(
      int jump_table_size, base::AddressRegion
      /* , further parameters elided */);

  // Hold {allocation_mutex_} when calling the patching methods below.
  void PatchJumpTablesLocked(uint32_t slot_index, Address target,
                             Address code_pointer_table_target,
                             uint64_t signature_hash);
  void PatchJumpTableLocked(WritableJumpTablePair& jump_table_pair
                            /* , further parameters elided */);
  // Transfer owned code from {new_owned_code_} to {owned_code_}.
  void TransferNewOwnedCodeLocked() const;

  // Returns whether {new_code} should replace {prior_code} in the code table
  // (e.g. because it was compiled with a higher tier).
  bool should_update_code_table(WasmCode* new_code,
                                WasmCode* prior_code) const;
  size_t code_pointer_handles_size_ = 0;

  bool lazy_compile_frozen_ = false;
  std::atomic<size_t> liftoff_bailout_count_{0};
  std::atomic<size_t> liftoff_code_size_{0};
  std::atomic<size_t> turbofan_code_size_{0};
  std::atomic<int> num_lazy_compilations_{0};
  std::atomic<int64_t> sum_lazy_compilation_time_in_micro_sec_{0};
  std::atomic<int64_t> max_lazy_compilation_time_in_micro_sec_{0};
  std::atomic<bool> should_metrics_be_reported_{true};
  std::atomic<bool> should_pgo_data_be_written_{true};
  std::atomic<bool> log_code_{false};
};
// Manages the code reservations and allocations for all Wasm modules in the
// process.
class V8_EXPORT_PRIVATE WasmCodeManager final {
 public:
#if defined(V8_OS_WIN64)
  static bool CanRegisterUnwindInfoForNonABICompliantCodeRange();
#endif  // V8_OS_WIN64

  std::pair<WasmCode*, SafepointEntry> LookupCodeAndSafepoint(Isolate* isolate,
                                                              Address pc);
  void FlushCodeLookupCache(Isolate* isolate);

  size_t committed_code_space() const {
    return total_committed_code_space_.load();
  }
  // Estimate the needed code space for a Liftoff function based on the size
  // of its function body.
  static size_t EstimateLiftoffCodeSize(int body_size);
  // Estimate the needed code space from a fully decoded module.
  static size_t EstimateNativeModuleCodeSize(const WasmModule*);
  // Estimate the needed code space from the number of functions and the total
  // code section length.
  static size_t EstimateNativeModuleCodeSize(int num_functions,
                                             int code_section_length);
  // Estimate the size of metadata needed for the {NativeModule}, excluding
  // generated code.
  static size_t EstimateNativeModuleMetaDataSize(const WasmModule*);
  // Whether the hardware supports memory protection keys (PKU).
  static bool HasMemoryProtectionKeySupport();
  // Whether PKU should be used for code protection.
  static bool MemoryProtectionKeysEnabled();
  // Whether the memory protection key is write-enabled for the current
  // thread.
  static bool MemoryProtectionKeyWritable();
  std::shared_ptr<NativeModule> NewNativeModule(
      Isolate* isolate, WasmEnabledFeatures enabled_features
      /* , further parameters elided */,
      std::shared_ptr<const WasmModule> module);

 private:
  void FreeNativeModule(base::Vector<VirtualMemory> owned_code_space,
                        size_t committed_size);

  std::atomic<size_t> total_committed_code_space_{0};
};
#endif  // V8_WASM_WASM_CODE_MANAGER_H_