v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
wasm-interpreter.h
Go to the documentation of this file.
1// Copyright 2024 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_WASM_INTERPRETER_WASM_INTERPRETER_H_
6#define V8_WASM_INTERPRETER_WASM_INTERPRETER_H_
7
8#if !V8_ENABLE_WEBASSEMBLY
9#error This header should only be included if WebAssembly is enabled.
10#endif // !V8_ENABLE_WEBASSEMBLY
11
12#include <atomic>
13#include <memory>
14#include <vector>
15
19#include "src/base/vector.h"
20#include "src/common/simd128.h"
25#include "src/wasm/wasm-value.h"
26
28//
29// DrumBrake: An interpreter for WebAssembly.
30//
32
33// Uncomment to enable profiling.
34// #define DRUMBRAKE_ENABLE_PROFILING true
35//
36
37#ifdef V8_HOST_ARCH_ARM64
38#define VECTORCALL
39#elif !defined(__clang__) // GCC or MSVC
40#define VECTORCALL
41#elif defined(V8_DRUMBRAKE_BOUNDS_CHECKS)
42#define VECTORCALL
43#else
44#define VECTORCALL __vectorcall
45#endif // V8_HOST_ARCH_ARM64
46
47#define INSTRUCTION_HANDLER_FUNC \
48 __attribute__((noinline)) static DISABLE_CFI_ICALL void VECTORCALL
49
50namespace v8 {
51
52namespace internal {
53class Cell;
54class FixedArray;
55class WasmInstanceObject;
56
57namespace wasm {
58
59// Forward declarations.
60class Decoder;
61struct InterpreterCode;
62class InterpreterHandle;
63struct ModuleWireBytes;
64class WasmBytecode;
65class WasmBytecodeGenerator;
66class WasmCode;
67struct WasmFunction;
68struct WasmModule;
69class WasmInterpreterRuntime;
70class WasmInterpreterThread;
71
72using pc_t = size_t;
75
76// We are using sizeof(WasmRef) and kSystemPointerSize interchangeably in the
77// interpreter code.
78static_assert(sizeof(WasmRef) == kSystemPointerSize);
79
80// Code and metadata needed to execute a function.
83 const uint8_t* start, const uint8_t* end)
84 : function(function), locals(locals), start(start), end(end) {}
85
86 const uint8_t* at(pc_t pc) { return start + pc; }
87
88 const WasmFunction* function; // wasm function
89 BodyLocalDecls locals; // local declarations
90 const uint8_t* start; // start of code
91 const uint8_t* end; // end of code
92 std::unique_ptr<WasmBytecode> bytecode;
93};
94
95struct FrameState {
97 : current_function_(nullptr),
98 previous_frame_(nullptr),
99 current_bytecode_(nullptr),
100 current_sp_(nullptr),
101 thread_(nullptr),
103 handle_scope_(nullptr)
104#ifdef V8_ENABLE_DRUMBRAKE_TRACING
105 ,
106 current_stack_height_(0),
107 current_stack_start_args_(0),
108 current_stack_start_locals_(0),
109 current_stack_start_stack_(0)
110#endif // V8_ENABLE_DRUMBRAKE_TRACING
111 {
112 }
113
116 const uint8_t* current_bytecode_;
117 uint8_t* current_sp_;
121
122 // Maintains a reference to the exceptions caught by each catch handler.
123 void SetCaughtException(Isolate* isolate, uint32_t catch_block_index,
124 DirectHandle<Object> exception);
126 uint32_t catch_block_index) const;
129
130 inline void ResetHandleScope(Isolate* isolate);
131
132#ifdef V8_ENABLE_DRUMBRAKE_TRACING
133 uint32_t current_stack_height_;
134 uint32_t current_stack_start_args_;
135 uint32_t current_stack_start_locals_;
136 uint32_t current_stack_start_stack_;
137#endif // V8_ENABLE_DRUMBRAKE_TRACING
138};
139
140// Manages the calculations of the
141// V8.WasmInterpreterExecutionInTenSecondsPercentage histogram, which measures
142// the percentage of time spent executing in the Wasm interpreter in a
143// 10-second window. It is useful to detect applications that are CPU-bound
144// and that could be visibly slowed down by the interpreter. Only about one
145// sample per minute is generated.
185
187 public:
188 WasmInterpreterThread* GetCurrentInterpreterThread(Isolate* isolate);
189
190 void NotifyIsolateDisposal(Isolate* isolate);
191
192 private:
193 typedef std::unordered_map<int, std::unique_ptr<WasmInterpreterThread>>
197};
198
199// Representation of a thread in the interpreter.
201 public:
202 // State machine for a WasmInterpreterThread:
203 //
204 // STOPPED
205 // |
206 // Run()|
207 // V
208 // RUNNING <-----------------------------------+
209 // | |
210 // | |
211 // +-------------+---------------+---------------+ |
212 // |Stop() |Trap() |Finish() | |
213 // V V V V |
214 // STOPPED <---- TRAPPED FINISHED EH_UNWINDING --+
215 // ^ |
216 // +---------------------------------------------+
217 //
218 // In more detail:
219 // - For each loaded instance, an InterpreterHandler is created that owns a
220 // WasmInterpreter that owns a WasmInterpreterRuntime object.
221 //
222 // - The WasmInterpreterThread is created in STOPPED state.
223 //
224 // - InterpreterHandle::Execute(func_index, ...) executes Wasm code in
225 // the interpreter:
226 // - WasmInterpreter::BeginExecution ->
227 // WasmInterpreterRuntime::BeginExecution ->
228 // WasmInterpreterThread::StartActivation() -> Run() -> RUNNING
229 // state.
230 // - WasmInterpreter::ContinueExecution ->
231 // WasmInterpreterRuntime::ContinueExecution ->
232 // WasmInterpreterRuntime::ExecuteFunction
233 //
234 // WasmInterpreterRuntime::ExecuteFunction(..., func_index, ...) executes a
235 // specific Wasm function.
236 // If 'func_index' indicates an imported function, and the call fails ->
237 // Stop() -> STOPPED state.
238 //   If 'func_index' indicates a non-imported function, we start executing a
239 // sequence of instruction handlers. One of these handlers can cause a
240 // Trap() -> TRAPPED state.
241 // From these instructions sequence we can make several kinds of direct or
242 // indirect wasm calls to:
243 // . An external JS function ->
244 // WasmInterpreterRuntime::CallExternalJSFunction() ->
245 // If the call fails -> Stop() -> STOPPED state.
246 // . A Wasm function in the same module instance, recursively calling
247 // WasmInterpreterRuntime::ExecuteFunction().
248 // . A Wasm function in a different module instance. In this case we
249 //       recursively call InterpreterHandle::Execute with the
250 // InterpreterHandle of that different instance. If the call fails ->
251 // Stop() -> STOPPED state.
252 //
253 // After WasmInterpreterRuntime::ExecuteFunction() completes, if we ended up
254 // in the TRAPPED state we raise a JS exception -> RaiseException() ->
255 // Stop() -> STOPPED state.
256 //
257 // If an exception can be handled by Wasm code, according to the Wasm
258 // Exception Handling proposal, the thread can go to the EH_UNWINDING state
259 // while looking for a Wasm function in the call stack that has a {catch}
260 // instruction that can handle that exception. If no such catch handler is
261 // found, the thread goes to STOPPED.
262 //
263 // If we are running the WasmInterpreter of instance A and we can call
264 // from a function of a different instance B (via
265 // InterpreterHandle::Execute()) the execution of instance A "suspends"
266 // waiting for the execution in the WasmInterpreter of instance B to
267 // complete. Instance B can call back into instance A, and so on... This
268 // means that in the call stack we might have a sequence of stack frames for
269 // the WasmInterpreter A followed by frames for instance B followed by
270 // more frames of instance A.
271 // To manage this case WasmInterpreterThread maintains a stack of
272 // Activations, which represents the set of StackFrames for a given module
273 // instance. Only when the last active Activation terminates we call
274 // Finish() -> FINISHED state.
275
276 enum State { STOPPED, RUNNING, FINISHED, TRAPPED, EH_UNWINDING };
277
278 enum ExceptionHandlingResult { HANDLED, UNWOUND };
279
280 struct TrapStatus {
281 // bool has_trapped;
284 };
285
287 public:
289 WasmInterpreterRuntime* wasm_runtime, Address frame_pointer,
290 uint8_t* start_fp, const FrameState& callee_frame_state)
291 : thread_(thread),
292 wasm_runtime_(wasm_runtime),
293 frame_pointer_(frame_pointer),
294 current_frame_size_(0),
295 current_ref_stack_fp_(0),
296 current_ref_stack_frame_size_(0),
297 current_fp_(start_fp),
298 current_frame_state_(callee_frame_state)
299#ifdef V8_ENABLE_DRUMBRAKE_TRACING
300 ,
301 current_stack_start_(callee_frame_state.current_stack_start_args_ +
302 thread->CurrentStackFrameSize()),
303 current_stack_size_(0)
304#endif // V8_ENABLE_DRUMBRAKE_TRACING
305 {
306 }
307
308 WasmInterpreterThread* thread() const { return thread_; }
309
310 inline Isolate* GetIsolate() const;
311
312 Address GetFramePointer() const { return frame_pointer_; }
313
314 void SetCurrentFrame(const FrameState& frame_state) {
315 current_frame_state_ = frame_state;
316 }
317 const FrameState& GetCurrentFrame() const { return current_frame_state_; }
318
319 void SetCurrentActivationFrame(uint8_t* current_fp,
320 uint32_t current_frame_size,
321 uint32_t current_stack_size,
322 uint32_t current_ref_stack_fp,
323 uint32_t current_ref_stack_frame_size) {
324 current_fp_ = current_fp;
325 current_frame_size_ = current_frame_size;
326 current_ref_stack_fp_ = current_ref_stack_fp;
327 current_ref_stack_frame_size_ = current_ref_stack_frame_size;
328
329#ifdef V8_ENABLE_DRUMBRAKE_TRACING
330 current_stack_size_ = current_stack_size;
331#endif // V8_ENABLE_DRUMBRAKE_TRACING
332 }
333
334 uint8_t* NextFrameAddress() const {
335 return current_fp_ + current_frame_size_;
336 }
337
338 uint32_t NextRefStackOffset() const {
339 return current_ref_stack_fp_ + current_ref_stack_frame_size_;
340 }
341
342 void SetTrapped(int trap_function_index, int trap_pc) {
343 // Capture the call stack at the moment of the trap and store it to be
344 // retrieved later. This works because, once an Activation has trapped,
345 // execution will never resume in it, given that Wasm EH is not
346 // supported yet.
347 TrapStatus trap_status{trap_function_index, trap_pc};
348 trap_stack_trace_ =
349 std::make_unique<std::vector<WasmInterpreterStackEntry>>(
350 CaptureStackTrace(&trap_status));
351 }
352
353 std::vector<WasmInterpreterStackEntry> GetStackTrace() {
354 if (trap_stack_trace_) {
355 return *trap_stack_trace_;
356 }
357
358 // If the Activation has not trapped, it is still executing so we need
359 // to capture the current call stack.
360 return CaptureStackTrace();
361 }
362
363 int GetFunctionIndex(int index) const;
364
366 return wasm_runtime_;
367 }
368
369#ifdef V8_ENABLE_DRUMBRAKE_TRACING
370 uint32_t CurrentStackFrameStart() const { return current_stack_start_; }
371 uint32_t CurrentStackFrameSize() const { return current_stack_size_; }
372#endif // V8_ENABLE_DRUMBRAKE_TRACING
373
374 private:
375 std::vector<WasmInterpreterStackEntry> CaptureStackTrace(
376 const TrapStatus* trap_status = nullptr) const;
377
384 uint8_t* current_fp_;
386 std::unique_ptr<std::vector<WasmInterpreterStackEntry>> trap_stack_trace_;
387#ifdef V8_ENABLE_DRUMBRAKE_TRACING
388 uint32_t current_stack_start_;
389 uint32_t current_stack_size_;
390#endif // V8_ENABLE_DRUMBRAKE_TRACING
391 };
392
393 explicit WasmInterpreterThread(Isolate* isolate);
395
396 Handle<FixedArray> reference_stack() const { return reference_stack_; }
397
398 bool ExpandStack(size_t additional_required_size) {
399 if (current_stack_size_ + additional_required_size > kMaxStackSize) {
400 return false;
401 }
402
403 uint32_t new_size = current_stack_size_;
404 while (new_size < current_stack_size_ + additional_required_size) {
405 new_size = std::min(new_size + kStackSizeIncrement, kMaxStackSize);
406 }
407
408 if (SetPermissions(GetPlatformPageAllocator(), stack_mem_, new_size,
409 PageAllocator::Permission::kReadWrite)) {
410 current_stack_size_ = new_size;
411 return true;
412 }
413 return false;
414 }
415
416 static void Initialize() {
417 // This function can be called multiple times by fuzzers.
418 if (thread_interpreter_map_s) return;
419 thread_interpreter_map_s = new WasmInterpreterThreadMap();
420 }
421
422 static void Terminate() {
423 delete thread_interpreter_map_s;
424 thread_interpreter_map_s = nullptr;
425 }
426
427 static void NotifyIsolateDisposal(Isolate* isolate) {
428 thread_interpreter_map_s->NotifyIsolateDisposal(isolate);
429 }
430
432 DCHECK_NOT_NULL(thread_interpreter_map_s);
433 return thread_interpreter_map_s->GetCurrentInterpreterThread(isolate);
434 }
435
436 const Isolate* GetIsolate() const { return isolate_; }
437
438 State state() const { return state_; }
439
440 void Run() {
441 if (!trap_handler::IsThreadInWasm()) {
442 trap_handler::SetThreadInWasm();
443 }
444 state_ = State::RUNNING;
445 }
446 void Stop() { state_ = State::STOPPED; }
447
448 void Trap(TrapReason trap_reason, int trap_function_index, int trap_pc,
449 const FrameState& current_frame) {
450 state_ = State::TRAPPED;
451 trap_reason_ = trap_reason;
452
453 DCHECK(!activations_.empty());
454 activations_.back()->SetCurrentFrame(current_frame);
455 activations_.back()->SetTrapped(trap_function_index, trap_pc);
456 }
457 TrapReason GetTrapReason() const { return trap_reason_; }
458
459 void Unwinding() { state_ = State::EH_UNWINDING; }
460
461 inline WasmInterpreterThread::Activation* StartActivation(
462 WasmInterpreterRuntime* wasm_runtime, Address frame_pointer,
463 uint8_t* interpreter_fp, const FrameState& frame_state);
464 inline void FinishActivation();
465 inline const FrameState* GetCurrentActivationFor(
467
468 inline void SetCurrentFrame(const FrameState& frame_state) {
469 DCHECK(!activations_.empty());
470 activations_.back()->SetCurrentFrame(frame_state);
471 }
472
473 inline void SetCurrentActivationFrame(uint32_t* fp,
474 uint32_t current_frame_size,
475 uint32_t current_stack_size,
476 uint32_t current_ref_stack_fp,
477 uint32_t current_ref_stack_frame_size) {
478 DCHECK(!activations_.empty());
479 activations_.back()->SetCurrentActivationFrame(
480 reinterpret_cast<uint8_t*>(fp), current_frame_size, current_stack_size,
481 current_ref_stack_fp, current_ref_stack_frame_size);
482 }
483
485 Address frame_pointer) const {
486 for (size_t i = 0; i < activations_.size(); i++) {
487 if (activations_[i]->GetFramePointer() == frame_pointer) {
488 return activations_[i].get();
489 }
490 }
491 return nullptr;
492 }
493
494 uint8_t* NextFrameAddress() const {
495 if (activations_.empty()) {
496 return stack_mem();
497 } else {
498 return activations_.back()->NextFrameAddress();
499 }
500 }
501
502 uint32_t NextRefStackOffset() const {
503 if (activations_.empty()) {
504 return 0;
505 } else {
506 return activations_.back()->NextRefStackOffset();
507 }
508 }
509 const uint8_t* StackLimitAddress() const {
510 return stack_mem() + current_stack_size_;
511 }
512
513 void EnsureRefStackSpace(size_t new_size);
514 void ClearRefStackValues(size_t index, size_t count);
515
516 void StartExecutionTimer();
517 void StopExecutionTimer();
518 void TerminateExecutionTimers();
519
520 static void SetRuntimeLastWasmError(Isolate* isolate,
521 MessageTemplate message);
522 static TrapReason GetRuntimeLastWasmError(Isolate* isolate);
523
524#ifdef V8_ENABLE_DRUMBRAKE_TRACING
525 uint32_t CurrentStackFrameStart() const {
526 if (activations_.empty()) {
527 return 0;
528 } else {
529 return activations_.back()->CurrentStackFrameStart();
530 }
531 }
532
533 uint32_t CurrentStackFrameSize() const {
534 if (activations_.empty()) {
535 return 0;
536 } else {
537 return activations_.back()->CurrentStackFrameSize();
538 }
539 }
540#endif // V8_ENABLE_DRUMBRAKE_TRACING
541
542 void RaiseException(Isolate* isolate, MessageTemplate message);
543
544 private:
545 void Finish() { state_ = State::FINISHED; }
546
547 inline uint8_t* stack_mem() const {
548 return reinterpret_cast<uint8_t*>(stack_mem_);
549 }
550
552
556
557 static constexpr uint32_t kInitialStackSize = 1 * MB;
558 static constexpr uint32_t kStackSizeIncrement = 1 * MB;
559 static constexpr uint32_t kMaxStackSize = 32 * MB;
562
563 std::vector<std::unique_ptr<Activation>> activations_;
564
565 // References are kept on an on-heap stack. It would not be any good to store
566 // reference object pointers into stack slots because the pointers obviously
567 // could be invalidated if the object moves in a GC. Furthermore we need to
568 // make sure that the reference objects in the Wasm stack are marked as alive
569 // for GC. This is why in each Wasm thread we instantiate a FixedArray that
570 // contains all the reference objects present in the execution stack.
571 // Only while calling JS functions or Wasm functions in a separate instance we
572 // need to store temporarily the reference objects pointers into stack slots,
573 // and in this case we need to make sure to temporarily disallow GC and avoid
574 // object allocation while the reference arguments are being passed to the
575 // callee and while the reference return values are being passed back to the
576 // caller.
579
581};
582
583// The interpreter interface.
585 public:
586 // The main storage for interpreter code. It maps {WasmFunction} to the
587 // metadata needed to execute each function.
588 class CodeMap {
589 public:
590 CodeMap(Isolate* isolate, const WasmModule* module,
591 const uint8_t* module_start, Zone* zone);
592
593 const WasmModule* module() const { return module_; }
594
595 inline InterpreterCode* GetCode(uint32_t function_index);
596
597 inline WasmBytecode* GetFunctionBytecode(uint32_t func_index);
598
599 inline void AddFunction(const WasmFunction* function,
600 const uint8_t* code_start, const uint8_t* code_end);
601
603 return generated_code_size_.load(std::memory_order_relaxed);
604 }
605
606 private:
607 void Preprocess(uint32_t function_index);
608
613
615 std::atomic<size_t> generated_code_size_;
616 };
617
618 WasmInterpreter(Isolate* isolate, const WasmModule* module,
619 const ModuleWireBytes& wire_bytes,
621
622 static void InitializeOncePerProcess();
623 static void GlobalTearDown();
624 static void NotifyIsolateDisposal(Isolate* isolate);
625
626 inline void BeginExecution(WasmInterpreterThread* thread,
627 uint32_t function_index, Address frame_pointer,
628 uint8_t* interpreter_fp, uint32_t ref_stack_offset,
629 const std::vector<WasmValue>& argument_values);
630 inline void BeginExecution(WasmInterpreterThread* thread,
631 uint32_t function_index, Address frame_pointer,
632 uint8_t* interpreter_fp);
633
634 WasmInterpreterThread::State ContinueExecution(WasmInterpreterThread* thread,
635 bool called_from_js);
636
637 inline WasmValue GetReturnValue(int index) const;
638
639 inline std::vector<WasmInterpreterStackEntry> GetInterpretedStack(
640 Address frame_pointer);
641
642 inline int GetFunctionIndex(Address frame_pointer, int index) const;
643
644 inline void SetTrapFunctionIndex(int32_t func_index);
645
647 return wasm_runtime_.get();
648 }
649
650 private:
651 // This {Zone} has the lifespan of this {WasmInterpreter}, which should
652 // have the lifespan of the corresponding {WasmInstanceObject}.
653 // The zone is used to allocate the {module_bytes_} vector below and the
654 // {InterpreterCode} vector in the {CodeMap}. It is also passed to
655 // {WasmDecoder} used to parse the 'locals' in a Wasm function.
658
659 // Create a copy of the module bytes for the interpreter, since the passed
660 // pointer might be invalidated after constructing the interpreter.
662
664
665 // DrumBrake
666 std::shared_ptr<WasmInterpreterRuntime> wasm_runtime_;
667
670};
671
672typedef void(VECTORCALL PWasmOp)(const uint8_t* code, uint32_t* sp,
674 int64_t r0, double fp0);
675#ifdef __clang__
676#define MUSTTAIL [[clang::musttail]]
677#else
678#define MUSTTAIL
679#endif // __clang__
680
681extern PWasmOp* kInstructionTable[];
682
683#ifdef V8_ENABLE_DRUMBRAKE_TRACING
684extern char const* kInstructionHandlerNames[];
685#endif // V8_ENABLE_DRUMBRAKE_TRACING
686
// struct handler_traits defines the integer types used by instruction
// handlers to encode handler ids, stack-slot offsets and memory offsets.
// The compact variant (handler_traits<true>) uses narrower types, which
// makes the encoded bytecode smaller.
template <bool compact_handler>
struct handler_traits;

template <>
struct handler_traits<true> {
  using handler_id_t = uint16_t;
  using slot_offset_t = uint16_t;
  using memory_offset32_t = uint32_t;
  using memory_offset64_t = uint32_t;
};

template <>
struct handler_traits<false> {
  using handler_id_t = uint16_t;
  using slot_offset_t = uint32_t;
  using memory_offset32_t = uint64_t;
  using memory_offset64_t = uint64_t;
};
706
707// {OperatorMode}s are used for the
708// v8_flags.drumbrake_register_optimization. The prototype of instruction
709// handlers contains two arguments int64_t r0 and double fp0 that can be used to
710// pass in an integer or floating-point register the value that is at the top
711// of the Wasm execution stack.
712//
713// For this reason, whenever possible we define four different versions of each
714// instruction handler, all identified by the following prefixes:
715//
716// - r2r_*: Wasm instruction handlers called when the stack top value is in a
717// register and that put the result in a register.
718// - r2s_*: Wasm instruction handlers called when the stack top value is in a
719// register and that push the result on the stack.
720// - s2r_*: Wasm instruction handlers called when the stack top value is not in
721// a register and that put the result in a register.
722// - s2s_*: Wasm instruction handlers called when the stack top value is not in
723// a register and that push the result on the stack.
724//
726static const size_t kOperatorModeCount = 4;
727
728// {RegMode} and {RegModeTransform} specifies how an instruction handler can
729// leverage the --drumbrake-register-optimization.
730//
731// {RegModeTransform} defines a pair of {RegMode}s, that specify whether an
732// instruction handler can take its input or provide its output from the stack
733// or from registers.
734//
735// For example:
736// {kF32Reg, kI32Reg}, // 0x5b F32Eq
737// declares that the F32Eq instruction handler can read the stack top value from
738// a floating point register as a F32 and pass the result to the next handler in
739// an integer register as an I32, so saving one stack pop and one stack push
740// operations.
// {RegMode} describes how an instruction handler interacts with the two
// "register" arguments (int64_t r0, double fp0) of the handler prototype.
enum class RegMode {
  kNoReg,  // The instruction handler only gets inputs from stack slots or
           // provides the result into a stack slot.

  kI32Reg,  // The instruction handler can be optimized to work with the
  kI64Reg,  // integer register 'r0'.

  kF32Reg,  // The instruction handler can be optimized to work with the
  kF64Reg,  // floating point register 'fp0'.

  kAnyReg,  // The instruction handler can be optimized to work either with
            // the integer or fp register; the specific register depends on
            // the type of the value at the top of the stack. This is used
            // for instructions like 'drop', 'select' and 'local.set'.
};
756
758 switch (kind) {
759 case kI32:
760 return RegMode::kI32Reg;
761 case kI64:
762 return RegMode::kI64Reg;
763 case kF32:
764 return RegMode::kF32Reg;
765 case kF64:
766 return RegMode::kF64Reg;
767 default:
768 UNREACHABLE();
769 }
770}
771
776
777static const RegModeTransform kRegModes[256] = {
778 {RegMode::kNoReg, RegMode::kNoReg}, // 0x00 Unreachable
779 {RegMode::kNoReg, RegMode::kNoReg}, // 0x01 Nop
780 {RegMode::kNoReg, RegMode::kNoReg}, // 0x02 Block
781 {RegMode::kNoReg, RegMode::kNoReg}, // 0x03 Loop
782 {RegMode::kI32Reg, RegMode::kNoReg}, // 0x04 If
783 {RegMode::kNoReg, RegMode::kNoReg}, // 0x05 Else
784 {RegMode::kNoReg, RegMode::kNoReg}, // 0x06 Try - eh_prototype
785 {RegMode::kNoReg, RegMode::kNoReg}, // 0x07 Catch - eh_prototype
786 {RegMode::kNoReg, RegMode::kNoReg}, // 0x08 Throw - eh_prototype
787 {RegMode::kNoReg, RegMode::kNoReg}, // 0x09 Rethrow - eh_prototype
788 {RegMode::kNoReg, RegMode::kNoReg}, // 0x0a (reserved)
789 {RegMode::kNoReg, RegMode::kNoReg}, // 0x0b End
790 {RegMode::kNoReg, RegMode::kNoReg}, // 0x0c Br
791 {RegMode::kI32Reg, RegMode::kNoReg}, // 0x0d BrIf
792 {RegMode::kI32Reg, RegMode::kNoReg}, // 0x0e BrTable
793 {RegMode::kNoReg, RegMode::kNoReg}, // 0x0f Return
794 {RegMode::kNoReg, RegMode::kNoReg}, // 0x10 CallFunction
795 {RegMode::kNoReg, RegMode::kNoReg}, // 0x11 CallIndirect
796 {RegMode::kNoReg, RegMode::kNoReg}, // 0x12 ReturnCall
797 {RegMode::kNoReg, RegMode::kNoReg}, // 0x13 ReturnCallIndirect
798
800 RegMode::kNoReg}, // 0x14 CallRef - typed_funcref prototype - NOTIMPL
801 {RegMode::kNoReg, RegMode::kNoReg}, // 0x15 ReturnCallRef - typed_funcref
802 // prototype - NOTIMPL
803 {RegMode::kNoReg, RegMode::kNoReg}, // 0x16 (reserved)
804 {RegMode::kNoReg, RegMode::kNoReg}, // 0x17 (reserved)
805 {RegMode::kNoReg, RegMode::kNoReg}, // 0x18 Delegate - eh_prototype
806 {RegMode::kNoReg, RegMode::kNoReg}, // 0x19 CatchAll - eh_prototype
807
808 {RegMode::kAnyReg, RegMode::kNoReg}, // 0x1a Drop
809 {RegMode::kI32Reg, RegMode::kAnyReg}, // 0x1b Select
810 {RegMode::kI32Reg, RegMode::kAnyReg}, // 0x1c SelectWithType
811
812 {RegMode::kNoReg, RegMode::kNoReg}, // 0x1d (reserved)
813 {RegMode::kNoReg, RegMode::kNoReg}, // 0x1e (reserved)
814 {RegMode::kNoReg, RegMode::kNoReg}, // 0x1f (reserved)
815 {RegMode::kNoReg, RegMode::kNoReg}, // 0x20 LocalGet
816 {RegMode::kAnyReg, RegMode::kNoReg}, // 0x21 LocalSet
817 {RegMode::kNoReg, RegMode::kNoReg}, // 0x22 LocalTee
818 {RegMode::kNoReg, RegMode::kAnyReg}, // 0x23 GlobalGet
819 {RegMode::kAnyReg, RegMode::kNoReg}, // 0x24 GlobalSet
820 {RegMode::kNoReg, RegMode::kNoReg}, // 0x25 TableGet
821 {RegMode::kNoReg, RegMode::kNoReg}, // 0x26 TableSet
822 {RegMode::kNoReg, RegMode::kNoReg}, // 0x27 (reserved)
823 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x28 I32LoadMem
824 {RegMode::kI32Reg, RegMode::kI64Reg}, // 0x29 I64LoadMem
825 {RegMode::kI32Reg, RegMode::kF32Reg}, // 0x2a F32LoadMem
826 {RegMode::kI32Reg, RegMode::kF64Reg}, // 0x2b F64LoadMem
827 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x2c I32LoadMem8S
828 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x2d I32LoadMem8U
829 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x2e I32LoadMem16S
830 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x2f I32LoadMem16U
831 {RegMode::kI32Reg, RegMode::kI64Reg}, // 0x30 I64LoadMem8S
832 {RegMode::kI32Reg, RegMode::kI64Reg}, // 0x31 I64LoadMem8U
833 {RegMode::kI32Reg, RegMode::kI64Reg}, // 0x32 I64LoadMem16S
834 {RegMode::kI32Reg, RegMode::kI64Reg}, // 0x33 I64LoadMem16U
835 {RegMode::kI32Reg, RegMode::kI64Reg}, // 0x34 I64LoadMem32S
836 {RegMode::kI32Reg, RegMode::kI64Reg}, // 0x35 I64LoadMem32U
837 {RegMode::kI32Reg, RegMode::kNoReg}, // 0x36 I32StoreMem
838 {RegMode::kI64Reg, RegMode::kNoReg}, // 0x37 I64StoreMem
839 {RegMode::kF32Reg, RegMode::kNoReg}, // 0x38 F32StoreMem
840 {RegMode::kF64Reg, RegMode::kNoReg}, // 0x39 F64StoreMem
841 {RegMode::kI32Reg, RegMode::kNoReg}, // 0x3a I32StoreMem8
842 {RegMode::kI32Reg, RegMode::kNoReg}, // 0x3b I32StoreMem16
843 {RegMode::kI64Reg, RegMode::kNoReg}, // 0x3c I64StoreMem8
844 {RegMode::kI64Reg, RegMode::kNoReg}, // 0x3d I64StoreMem16
845 {RegMode::kI64Reg, RegMode::kNoReg}, // 0x3e I64StoreMem32
846 {RegMode::kNoReg, RegMode::kNoReg}, // 0x3f MemorySize
847 {RegMode::kNoReg, RegMode::kNoReg}, // 0x40 MemoryGrow
848
849 {RegMode::kNoReg, RegMode::kNoReg}, // 0x41 I32Const
850 {RegMode::kNoReg, RegMode::kNoReg}, // 0x42 I64Const
851 {RegMode::kNoReg, RegMode::kNoReg}, // 0x43 F32Const
852 {RegMode::kNoReg, RegMode::kNoReg}, // 0x44 F64Const
853
854 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x45 I32Eqz
855 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x46 I32Eq
856 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x47 I32Ne
857 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x48 I32LtS
858 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x49 I32LtU
859 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x4a I32GtS
860 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x4b I32GtU
861 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x4c I32LeS
862 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x4d I32LeU
863 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x4e I32GeS
864 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x4f I32GeU
865 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x50 I64Eqz
866 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x51 I64Eq
867 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x52 I64Ne
868 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x53 I64LtS
869 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x54 I64LtU
870 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x55 I64GtS
871 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x56 I64GtU
872 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x57 I64LeS
873 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x58 I64LeU
874 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x59 I64GeS
875 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x5a I64GeU
876 {RegMode::kF32Reg, RegMode::kI32Reg}, // 0x5b F32Eq
877 {RegMode::kF32Reg, RegMode::kI32Reg}, // 0x5c F32Ne
878 {RegMode::kF32Reg, RegMode::kI32Reg}, // 0x5d F32Lt
879 {RegMode::kF32Reg, RegMode::kI32Reg}, // 0x5e F32Gt
880 {RegMode::kF32Reg, RegMode::kI32Reg}, // 0x5f F32Le
881 {RegMode::kF32Reg, RegMode::kI32Reg}, // 0x60 F32Ge
882 {RegMode::kF64Reg, RegMode::kI32Reg}, // 0x61 F64Eq
883 {RegMode::kF64Reg, RegMode::kI32Reg}, // 0x62 F64Ne
884 {RegMode::kF64Reg, RegMode::kI32Reg}, // 0x63 F64Lt
885 {RegMode::kF64Reg, RegMode::kI32Reg}, // 0x64 F64Gt
886 {RegMode::kF64Reg, RegMode::kI32Reg}, // 0x65 F64Le
887 {RegMode::kF64Reg, RegMode::kI32Reg}, // 0x66 F64Ge
888
889 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x67 I32Clz
890 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x68 I32Ctz
891 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x69 I32Popcnt
892 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x6a I32Add
893 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x6b I32Sub
894 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x6c I32Mul
895 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x6d I32DivS
896 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x6e I32DivU
897 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x6f I32RemS
898 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x70 I32RemU
899 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x71 I32And
900 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x72 I32Ior
901 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x73 I32Xor
902 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x74 I32Shl
903 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x75 I32ShrS
904 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x76 I32ShrU
905 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x77 I32Rol
906 {RegMode::kI32Reg, RegMode::kI32Reg}, // 0x78 I32Ror
907
908 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x79 I64Clz
909 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x7a I64Ctz
910 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0x7b I64Popcnt
911 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x7c I64Add
912 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x7d I64Sub
913 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x7e I64Mul
914 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x7f I64DivS
915 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x80 I64DivU
916 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x81 I64RemS
917 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x82 I64RemU
918 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x83 I64And
919 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x84 I64Ior
920 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x85 I64Xor
921 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x86 I64Shl
922 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x87 I64ShrS
923 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x88 I64ShrU
924 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x89 I64Rol
925 {RegMode::kI64Reg, RegMode::kI64Reg}, // 0x8a I64Ror
926
927 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x8b F32Abs
928 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x8c F32Neg
929 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x8d F32Ceil
930 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x8e F32Floor
931 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x8f F32Trunc
932 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x90 F32NearestInt
933 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x91 F32Sqrt
934 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x92 F32Add
935 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x93 F32Sub
936 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x94 F32Mul
937 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x95 F32Div
938 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x96 F32Min
939 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x97 F32Max
940 {RegMode::kF32Reg, RegMode::kF32Reg}, // 0x98 F32CopySign
941
942 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0x99 F64Abs
943 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0x9a F64Neg
944 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0x9b F64Ceil
945 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0x9c F64Floor
946 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0x9d F64Trunc
947 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0x9e F64NearestInt
948 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0x9f F64Sqrt
949 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0xa0 F64Add
950 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0xa1 F64Sub
951 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0xa2 F64Mul
952 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0xa3 F64Div
953 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0xa4 F64Min
954 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0xa5 F64Max
955 {RegMode::kF64Reg, RegMode::kF64Reg}, // 0xa6 F64CopySign
956
957 {RegMode::kI64Reg, RegMode::kI32Reg}, // 0xa7 I32ConvertI64
958 {RegMode::kF32Reg, RegMode::kI32Reg}, // 0xa8 I32SConvertF32
959 {RegMode::kF32Reg, RegMode::kI32Reg}, // 0xa9 I32UConvertF32
960 {RegMode::kF64Reg, RegMode::kI32Reg}, // 0xaa I32SConvertF64
961 {RegMode::kF64Reg, RegMode::kI32Reg}, // 0xab I32UConvertF64
962 {RegMode::kI32Reg, RegMode::kI64Reg}, // 0xac I64SConvertI32
963 {RegMode::kI32Reg, RegMode::kI64Reg}, // 0xad I64UConvertI32
964 {RegMode::kF32Reg, RegMode::kI64Reg}, // 0xae I64SConvertF32
965 {RegMode::kF32Reg, RegMode::kI64Reg}, // 0xaf I64UConvertF32
966 {RegMode::kF64Reg, RegMode::kI64Reg}, // 0xb0 I64SConvertF64
967 {RegMode::kF64Reg, RegMode::kI64Reg}, // 0xb1 I64UConvertF64
968 {RegMode::kI32Reg, RegMode::kF32Reg}, // 0xb2 F32SConvertI32
969 {RegMode::kI32Reg, RegMode::kF32Reg}, // 0xb3 F32UConvertI32
970 {RegMode::kI64Reg, RegMode::kF32Reg}, // 0xb4 F32SConvertI64
971 {RegMode::kI64Reg, RegMode::kF32Reg}, // 0xb5 F32UConvertI64
972 {RegMode::kF64Reg, RegMode::kF32Reg}, // 0xb6 F32ConvertF64
973 {RegMode::kI32Reg, RegMode::kF64Reg}, // 0xb7 F64SConvertI32
974 {RegMode::kI32Reg, RegMode::kF64Reg}, // 0xb8 F64UConvertI32
975 {RegMode::kI64Reg, RegMode::kF64Reg}, // 0xb9 F64SConvertI64
976 {RegMode::kI64Reg, RegMode::kF64Reg}, // 0xba F64UConvertI64
977 {RegMode::kF32Reg, RegMode::kF64Reg}, // 0xbb F64ConvertF32
978 {RegMode::kF32Reg, RegMode::kI32Reg}, // 0xbc I32ReinterpretF32
979 {RegMode::kF64Reg, RegMode::kI64Reg}, // 0xbd I64ReinterpretF64
980 {RegMode::kI32Reg, RegMode::kF32Reg}, // 0xbe F32ReinterpretI32
981 {RegMode::kI64Reg, RegMode::kF64Reg}, // 0xbf F64ReinterpretI64
982
983 {RegMode::kNoReg, RegMode::kNoReg}, // 0xc0 I32SExtendI8
984 {RegMode::kNoReg, RegMode::kNoReg}, // 0xc1 I32SExtendI16
985 {RegMode::kNoReg, RegMode::kNoReg}, // 0xc2 I64SExtendI8
986 {RegMode::kNoReg, RegMode::kNoReg}, // 0xc3 I64SExtendI16
987 {RegMode::kNoReg, RegMode::kNoReg}, // 0xc4 I64SExtendI32
988
989 {RegMode::kNoReg, RegMode::kNoReg}, // 0xc5 (reserved)
990 {RegMode::kNoReg, RegMode::kNoReg}, // 0xc6 (reserved)
991 {RegMode::kNoReg, RegMode::kNoReg}, // 0xc7 (reserved)
992 {RegMode::kNoReg, RegMode::kNoReg}, // 0xc8 (reserved)
993 {RegMode::kNoReg, RegMode::kNoReg}, // 0xc9 (reserved)
994 {RegMode::kNoReg, RegMode::kNoReg}, // 0xca (reserved)
995 {RegMode::kNoReg, RegMode::kNoReg}, // 0xcb (reserved)
996 {RegMode::kNoReg, RegMode::kNoReg}, // 0xcc (reserved)
997 {RegMode::kNoReg, RegMode::kNoReg}, // 0xcd (reserved)
998 {RegMode::kNoReg, RegMode::kNoReg}, // 0xce (reserved)
999 {RegMode::kNoReg, RegMode::kNoReg}, // 0xcf (reserved)
1000
1001 {RegMode::kNoReg, RegMode::kNoReg}, // 0xd0 RefNull - ref
1002 {RegMode::kNoReg, RegMode::kNoReg}, // 0xd1 RefIsNull - ref
1003 {RegMode::kNoReg, RegMode::kNoReg}, // 0xd2 RefFunc - ref
1004 {RegMode::kNoReg, RegMode::kNoReg}, // 0xd3 RefEq - ref
1005 {RegMode::kNoReg, RegMode::kNoReg}, // 0xd4 RefAsNonNull
1006 {RegMode::kNoReg, RegMode::kNoReg}, // 0xd5 BrOnNull
1007 {RegMode::kNoReg, RegMode::kNoReg}, // 0xd6 BrOnNonNull
1008 {RegMode::kNoReg, RegMode::kNoReg}, // 0xd7 (reserved)
1009 {RegMode::kNoReg, RegMode::kNoReg}, // 0xd8 (reserved)
1010 {RegMode::kNoReg, RegMode::kNoReg}, // 0xd9 (reserved)
1011 {RegMode::kNoReg, RegMode::kNoReg}, // 0xda (reserved)
1012 {RegMode::kNoReg, RegMode::kNoReg}, // 0xdb (reserved)
1013 {RegMode::kNoReg, RegMode::kNoReg}, // 0xdc (reserved)
1014 {RegMode::kNoReg, RegMode::kNoReg}, // 0xdd (reserved)
1015 {RegMode::kNoReg, RegMode::kNoReg}, // 0xde (reserved)
1016 {RegMode::kNoReg, RegMode::kNoReg}, // 0xdf (reserved)
1017 {RegMode::kNoReg, RegMode::kNoReg}, // 0xe0 (reserved)
1018 {RegMode::kNoReg, RegMode::kNoReg}, // 0xe1 (reserved)
1019 {RegMode::kNoReg, RegMode::kNoReg}, // 0xe2 (reserved)
1020 {RegMode::kNoReg, RegMode::kNoReg}, // 0xe3 (reserved)
1021 {RegMode::kNoReg, RegMode::kNoReg}, // 0xe4 (reserved)
1022 {RegMode::kNoReg, RegMode::kNoReg}, // 0xe5 (reserved)
1023 {RegMode::kNoReg, RegMode::kNoReg}, // 0xe6 (reserved)
1024 {RegMode::kNoReg, RegMode::kNoReg}, // 0xe7 (reserved)
1025 {RegMode::kNoReg, RegMode::kNoReg}, // 0xe8 (reserved)
1026 {RegMode::kNoReg, RegMode::kNoReg}, // 0xe9 (reserved)
1027 {RegMode::kNoReg, RegMode::kNoReg}, // 0xea (reserved)
1028 {RegMode::kNoReg, RegMode::kNoReg}, // 0xeb (reserved)
1029 {RegMode::kNoReg, RegMode::kNoReg}, // 0xec (reserved)
1030 {RegMode::kNoReg, RegMode::kNoReg}, // 0xed (reserved)
1031 {RegMode::kNoReg, RegMode::kNoReg}, // 0xee (reserved)
1032 {RegMode::kNoReg, RegMode::kNoReg}, // 0xef (reserved)
1033 {RegMode::kNoReg, RegMode::kNoReg}, // 0xf0 (reserved)
1034 {RegMode::kNoReg, RegMode::kNoReg}, // 0xf1 (reserved)
1035 {RegMode::kNoReg, RegMode::kNoReg}, // 0xf2 (reserved)
1036 {RegMode::kNoReg, RegMode::kNoReg}, // 0xf3 (reserved)
1037 {RegMode::kNoReg, RegMode::kNoReg}, // 0xf4 (reserved)
1038 {RegMode::kNoReg, RegMode::kNoReg}, // 0xf5 (reserved)
1039 {RegMode::kNoReg, RegMode::kNoReg}, // 0xf6 (reserved)
1040 {RegMode::kNoReg, RegMode::kNoReg}, // 0xf7 (reserved)
1041 {RegMode::kNoReg, RegMode::kNoReg}, // 0xf8 (reserved)
1042 {RegMode::kNoReg, RegMode::kNoReg}, // 0xf9 (reserved)
1043 {RegMode::kNoReg, RegMode::kNoReg}, // 0xfa (reserved)
1044 {RegMode::kNoReg, RegMode::kNoReg}, // 0xfb - GC prefix
1045 {RegMode::kNoReg, RegMode::kNoReg}, // 0xfc - Numeric prefix
1046 {RegMode::kNoReg, RegMode::kNoReg}, // 0xfd - Simd prefix
1047 {RegMode::kNoReg, RegMode::kNoReg}, // 0xfe - Atomic prefix
1048 {RegMode::kNoReg, RegMode::kNoReg}, // 0xff (reserved)
1049};
1050
1051static const size_t kSlotSize = sizeof(int32_t);
1052static const ptrdiff_t kCodeOffsetSize = sizeof(int32_t);
1053
1055 // The function was executed and returned normally.
1057 // The function was executed, threw an exception.
1060
1063 uint32_t label_depth;
1064 uint32_t src_is_null : 1; // BrOnCastFlags
1065 uint32_t res_is_null : 1; // BrOnCastFlags
1067 : kBranchOnCastDataTargetTypeBitSize; // HeapType bit_fields
1068};
1069
1071 union Optional {
1072 uint32_t index; // global/local/label/memory/table index
1073 int32_t i32;
1074 int64_t i64;
1075 float f32;
1076 double f64;
1077 uint64_t offset;
1078 uint32_t depth;
1087 struct Block {
1089 uint32_t value_type_bitfield; // return type or kVoid if no return type
1090 // or kBottom if sig_index is valid.
1091 constexpr bool is_bottom() const { return value_type().is_bottom(); }
1092 constexpr ValueType value_type() const {
1094 }
1104 uint8_t simd_lane : 4;
1106 uint8_t lane : 4;
1107 uint8_t : 0;
1108 uint64_t offset : 48;
1140 };
1141
1143 : orig(0x00), opcode(kExprUnreachable), length(0), pc(0), optional({}) {}
1144 WasmInstruction(uint8_t orig, WasmOpcode opcode, int length, uint32_t pc,
1146 : orig(orig),
1147 opcode(opcode),
1148 length(length),
1149 pc(pc),
1150 optional(optional) {}
1151
1152 operator bool() const { return length > 0; }
1153
1154 RegMode InputRegMode() const { return kRegModes[orig].from; }
1155 bool SupportsToRegister() const {
1156 return kRegModes[orig].to != RegMode::kNoReg;
1157 }
1158 uint8_t orig;
1160 uint32_t length;
1161 uint32_t pc;
1163};
1164
1165struct Slot {
1167 uint32_t slot_offset;
1169
1170 constexpr ValueKind kind() const { return value_type.kind(); }
1171};
1172
1173template <typename T>
1174INSTRUCTION_HANDLER_FUNC trace_PushSlot(const uint8_t* code, uint32_t* sp,
1176 int64_t r0, double fp0);
1177
// Maps a C++ representation type T to the matching Wasm ValueType.
// Only the explicit specializations that follow are meaningful; instantiating
// the primary template is a programming error and traps via UNREACHABLE().
template <typename T>
static inline ValueType value_type() {
  UNREACHABLE();
}
1182template <>
1184 return kWasmI32;
1185}
1186template <>
1188 return kWasmI32;
1189}
1190template <>
1192 return kWasmI64;
1193}
1194template <>
1196 return kWasmI64;
1197}
1198template <>
1200 return kWasmF32;
1201}
1202template <>
1204 return kWasmF64;
1205}
1206template <>
1208 return kWasmS128;
1209}
1210template <>
1212 return kWasmAnyRef; // TODO(paolosev@microsoft.com)
1213}
1214
// Size of the interpreter's instruction-handler dispatch table. Must stay a
// power of two so kInstructionTableMask can wrap indices with a bitwise AND.
static constexpr uint32_t kInstructionTableSize = 4096;
static constexpr uint32_t kInstructionTableMask = kInstructionTableSize - 1;
1217
1218#define DEFINE_INSTR_HANDLER(name) k_##name,
1219enum InstructionHandler : uint16_t {
1221#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1222 FOREACH_TRACE_INSTR_HANDLER(DEFINE_INSTR_HANDLER)
1223#endif // V8_ENABLE_DRUMBRAKE_TRACING
1226#undef DEFINE_INSTR_HANDLER
1227
1228inline InstructionHandler ReadFnId(const uint8_t*& code) {
1230 reinterpret_cast<Address>(code));
1231#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1232 if (v8_flags.trace_drumbrake_compact_bytecode) {
1233 printf("* ReadFnId %04x %s%s\n", result,
1234 kInstructionHandlerNames[result % kInstructionCount],
1235 result >= kInstructionCount ? " (large)" : "");
1236 }
1237#endif // V8_ENABLE_DRUMBRAKE_TRACING
1238 code += sizeof(InstructionHandler);
1239 return result;
1240}
1241
1243
1245 public:
1246 static const int kCatchAllTagIndex = -1;
1247
1248 // Zero is always the id of a function main block, so it cannot identify a
1249 // try block.
1250 static const int kDelegateToCallerIndex = 0;
1251
1252 typedef int BlockIndex;
1253
1259
1260 struct TryBlock {
1267
1268 void SetDelegated(BlockIndex delegate_try_idx) {
1269 this->delegate_try_index = delegate_try_idx;
1270 }
1271 bool IsTryDelegate() const { return delegate_try_index >= 0; }
1272
1273 // The index of the first TryBlock that is a direct ancestor of this
1274 // TryBlock.
1276
1277 // If this TryBlock is contained in a CatchBlock, this is the matching
1278 // TryBlock index of the CatchBlock. Otherwise it matches
1279 // ancestor_try_index.
1281
1283 std::vector<CatchHandler> catch_handlers;
1285 };
1286
1292
1293 const TryBlock* GetTryBlock(CodeOffset code_offset) const;
1294 const TryBlock* GetParentTryBlock(const TryBlock* try_block) const;
1295 const TryBlock* GetDelegateTryBlock(const TryBlock* try_block) const;
1296
1297 size_t GetEndInstructionOffsetFor(BlockIndex catch_block_index) const;
1298
1304 BlockIndex catch_block_index) const;
1305
1306 void SetCaughtException(Isolate* isolate, BlockIndex catch_block_index,
1307 DirectHandle<Object> exception);
1309 BlockIndex catch_block_index) const;
1310
1311 protected:
1312 BlockIndex GetTryBranchOf(BlockIndex catch_block_index) const;
1313
1314 std::unordered_map<CodeOffset, BlockIndex> code_trycatch_map_;
1315 std::unordered_map<BlockIndex, TryBlock> try_blocks_;
1316 std::unordered_map<BlockIndex, CatchBlock> catch_blocks_;
1317};
1318
1320 public:
1322
1323 void AddTryBlock(BlockIndex try_block_index,
1324 BlockIndex parent_or_matching_try_block_index,
1325 BlockIndex ancestor_try_block_index);
1326 void AddCatchBlock(BlockIndex catch_block_index, int tag_index,
1327 uint32_t first_param_slot_offset,
1328 uint32_t first_param_ref_stack_index,
1329 CodeOffset code_offset);
1330 void AddDelegatedBlock(BlockIndex delegated_try_block_index);
1331 BlockIndex EndTryCatchBlocks(BlockIndex block_index, CodeOffset code_offset);
1333 CodeOffset code_offset);
1334
1338
1339 private:
1341};
1342
1344 public:
1345 WasmBytecode(int func_index, const uint8_t* code_data, size_t code_length,
1346 uint32_t stack_frame_size, const FunctionSig* signature,
1347 const CanonicalSig* canonical_signature,
1348 const InterpreterCode* interpreter_code, size_t blocks_count,
1349 const uint8_t* const_slots_data, size_t const_slots_length,
1350 uint32_t ref_slots_count, const WasmEHData&& eh_data,
1351 const std::map<CodeOffset, pc_t>&& code_pc_map);
1352
1353 inline const uint8_t* GetCode() const { return code_bytes_; }
1354 inline size_t GetCodeSize() const { return code_.size(); }
1355
1356 inline bool InitializeSlots(uint8_t* sp, size_t stack_space) const;
1357
1358 pc_t GetPcFromTrapCode(const uint8_t* current_code) const;
1359
1360 inline int GetFunctionIndex() const { return func_index_; }
1361
1362 inline uint32_t GetBlocksCount() const { return blocks_count_; }
1363
1364 inline const FunctionSig* GetFunctionSignature() const { return signature_; }
1366 return canonical_signature_;
1367 }
1368 inline ValueType return_type(size_t index) const;
1369 inline ValueType arg_type(size_t index) const;
1370 inline ValueType local_type(size_t index) const;
1371
1372 inline uint32_t args_count() const { return args_count_; }
1373 inline uint32_t args_slots_size() const { return args_slots_size_; }
1374 inline uint32_t return_count() const { return return_count_; }
1375 inline uint32_t rets_slots_size() const { return rets_slots_size_; }
1376 inline uint32_t locals_count() const { return locals_count_; }
1377 inline uint32_t locals_slots_size() const { return locals_slots_size_; }
1378 inline uint32_t const_slots_size_in_bytes() const {
1379 return static_cast<uint32_t>(const_slots_values_.size());
1380 }
1381
1382 inline uint32_t ref_args_count() const { return ref_args_count_; }
1383 inline uint32_t ref_rets_count() const { return ref_rets_count_; }
1384 inline uint32_t ref_locals_count() const { return ref_locals_count_; }
1385 inline uint32_t ref_slots_count() const { return ref_slots_count_; }
1386 inline uint32_t internal_ref_slots_count() const {
1387 // Ref slots for arguments and return value are allocated by the caller and
1388 // not counted in internal_ref_slots_count().
1390 }
1391
1392 inline uint32_t frame_size() { return total_frame_size_in_bytes_; }
1393
1394 static inline uint32_t ArgsSizeInSlots(const FunctionSig* sig);
1395 static inline uint32_t RetsSizeInSlots(const FunctionSig* sig);
1396 static inline uint32_t RefArgsCount(const FunctionSig* sig);
1397 static inline uint32_t RefRetsCount(const FunctionSig* sig);
1398 static inline bool ContainsSimd(const FunctionSig* sig);
1399 static inline bool HasRefOrSimdArgs(const FunctionSig* sig);
1400 static inline uint32_t JSToWasmWrapperPackedArraySize(const FunctionSig* sig);
1401 static inline uint32_t RefLocalsCount(const InterpreterCode* wasm_code);
1402 static inline uint32_t LocalsSizeInSlots(const InterpreterCode* wasm_code);
1403
1404 const WasmEHData::TryBlock* GetTryBlock(CodeOffset code_offset) const {
1405 return eh_data_.GetTryBlock(code_offset);
1406 }
1408 const WasmEHData::TryBlock* try_block) const {
1409 return eh_data_.GetParentTryBlock(try_block);
1410 }
1416 uint32_t catch_block_index) const {
1417 return eh_data_.GetCaughtException(isolate, catch_block_index);
1418 }
1419
1420 private:
1421 std::vector<uint8_t> code_;
1422 const uint8_t* code_bytes_;
1426 std::vector<uint8_t> const_slots_values_;
1427
1430 uint32_t args_count_;
1441
1443
1444 // TODO(paolosev@microsoft.com) slow! Use std::unordered_map ?
1445 std::map<CodeOffset, pc_t> code_pc_map_;
1446};
1447
1449 Large = 0, // false
1450 Small = 1 // true
1452
1454 public:
1457
1459 const WasmModule* module);
1460
1461 std::unique_ptr<WasmBytecode> GenerateBytecode();
1462
1463 static void PrintBytecodeCompressionStats();
1464
1465 private:
1466 struct BlockData {
1467 BlockData(WasmOpcode opcode, uint32_t begin_code_offset,
1468 int32_t parent_block_index, uint32_t stack_size,
1470 uint32_t first_block_index, uint32_t rets_slots_count,
1471 uint32_t params_slots_count, int32_t parent_try_block_index)
1472 : opcode_(opcode),
1473 stack_size_(stack_size),
1474 begin_code_offset_(begin_code_offset),
1476 parent_block_index_(parent_block_index),
1478 signature_(signature),
1479 first_block_index_(first_block_index),
1480 rets_slots_count_(rets_slots_count),
1481 params_slots_count_(params_slots_count),
1482 parent_try_block_index_(parent_try_block_index),
1483 is_unreachable_(false) {}
1484
1485 bool IsRootBlock() const { return parent_block_index_ < 0; }
1486 bool IsBlock() const { return opcode_ == kExprBlock; }
1487 bool IsLoop() const { return opcode_ == kExprLoop; }
1488 bool IsIf() const { return opcode_ == kExprIf; }
1489 bool IsElse() const { return opcode_ == kExprElse; }
1490 bool HasElseBranch() const { return if_else_block_index_ > 0; }
1491 bool IsTry() const { return opcode_ == kExprTry; }
1492 bool IsCatch() const { return opcode_ == kExprCatch; }
1493 bool IsCatchAll() const { return opcode_ == kExprCatchAll; }
1494
1495 void SaveParams(uint32_t* from, size_t params_count) {
1496 DCHECK(IsIf());
1498 for (size_t i = 0; i < params_count; i++) {
1499 if_block_params_[i] = from[i];
1500 }
1501 }
1502 uint32_t GetParam(size_t index) const {
1503 DCHECK(IsIf());
1505 return if_block_params_[index];
1506 }
1507
1509 uint32_t stack_size_;
1522 };
1523
1524 uint32_t const_slots_start() const {
1526 }
1527
  // Returns the stack frame size computed so far for the function being
  // compiled. NOTE(review): slot_offset_ is advanced in kSlotSize units in
  // CreateSlot, so this value appears to be measured in slots, not bytes —
  // confirm against the WasmBytecode constructor's expectations.
  inline uint32_t GetStackFrameSize() const { return slot_offset_; }

  // Offset in the bytecode buffer at which the next emission will land.
  uint32_t CurrentCodePos() const {
    return static_cast<uint32_t>(code_.size());
  }
1533
1535 void DecodeGCOp(WasmOpcode opcode, WasmInstruction::Optional* optional,
1536 Decoder* decoder, InterpreterCode* code, pc_t pc,
1537 int* const len);
1539 Decoder* decoder, InterpreterCode* code, pc_t pc,
1540 int* const len);
1542 Decoder* decoder, InterpreterCode* code, pc_t pc,
1543 int* const len);
1544 bool DecodeSimdOp(WasmOpcode opcode, WasmInstruction::Optional* optional,
1545 Decoder* decoder, InterpreterCode* code, pc_t pc,
1546 int* const len);
1547
1548 inline bool ToRegisterIsAllowed(const WasmInstruction& instr);
1550 RegMode next_reg_mode);
1552 RegMode curr_reg_mode, RegMode next_reg_mode);
1553
1554 bool EncodeSuperInstruction(RegMode& reg_mode,
1555 const WasmInstruction& curr_instr,
1556 const WasmInstruction& next_instr);
1557 bool DoEncodeSuperInstruction(RegMode& reg_mode,
1558 const WasmInstruction& curr_instr,
1559 const WasmInstruction& next_instr);
1560
1561 uint32_t ScanConstInstructions() const;
1562
1563 void Emit(const void* buff, size_t len) {
1564 code_.insert(code_.end(), static_cast<const uint8_t*>(buff),
1565 static_cast<const uint8_t*>(buff) + len);
1566 }
1567
1568 inline void I32Push(bool emit = true);
1569 inline void I64Push(bool emit = true);
1570 inline void MemIndexPush(bool emit = true) { (this->*int_mem_push_)(emit); }
1571 inline void ITableIndexPush(bool is_table64, bool emit = true);
1572 inline void F32Push(bool emit = true);
1573 inline void F64Push(bool emit = true);
1574 inline void S128Push(bool emit = true);
1575 inline void RefPush(ValueType type, bool emit = true);
1576 inline void Push(ValueType type);
1577
  // Typed helpers that pop the top entry of the simulated value stack,
  // optionally emitting its slot offset into the bytecode. MemIndexPop
  // dispatches through the int_mem_pop_ member pointer — presumably selected
  // according to the memory's index width (memory32 vs memory64); verify
  // against where int_mem_pop_ is initialized.
  inline void I32Pop(bool emit = true) { Pop(kI32, emit); }
  inline void I64Pop(bool emit = true) { Pop(kI64, emit); }
  inline void MemIndexPop(bool emit = true) { (this->*int_mem_pop_)(emit); }
  inline void F32Pop(bool emit = true) { Pop(kF32, emit); }
  inline void F64Pop(bool emit = true) { Pop(kF64, emit); }
  inline void S128Pop(bool emit = true) { Pop(kS128, emit); }
1584
1585 inline ValueType RefPop(bool emit = true) {
1587 uint32_t ref_index = slots_[stack_.back()].ref_stack_index;
1588 ValueType value_type = slots_[stack_.back()].value_type;
1590 PopSlot();
1591 if (emit) EmitRefStackIndex(ref_index);
1592 return value_type;
1593 }
1594
#ifdef DEBUG
  // Debug-only consistency check: does {value_kind} match the kind recorded
  // for a stack slot? References only need to agree on being references, and
  // packed kinds (i8/i16) are stored widened into i32 slots.
  bool CheckEqualKind(ValueKind value_kind, ValueKind stack_slot_kind) {
    if (is_reference(value_kind)) return is_reference(stack_slot_kind);
    const bool is_packed = value_kind == kI8 || value_kind == kI16;
    if (is_packed) return stack_slot_kind == kI32;
    return value_kind == stack_slot_kind;
  }
#endif  // DEBUG
1606
1607 inline void Pop(ValueKind kind, bool emit = true) {
1608 if (kind == kRefNull || kind == kRef) {
1609 RefPop(emit);
1610 return;
1611 }
1612 DCHECK(CheckEqualKind(kind, slots_[stack_.back()].kind()));
1613 uint32_t slot_offset = PopSlot();
1614 if (emit) EmitSlotOffset(slot_offset);
1615 }
1616
  // Emit an immediate constant operand, verbatim, into the bytecode stream.
  inline void EmitI16Const(int16_t value) { Emit(&value, sizeof(value)); }
  inline void EmitI32Const(int32_t value) { Emit(&value, sizeof(value)); }
  inline void EmitF32Const(float value) { Emit(&value, sizeof(value)); }
  inline void EmitF64Const(double value) { Emit(&value, sizeof(value)); }
1621
1622 inline void EmitSlotOffset(uint32_t value) {
1623#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1624 if (v8_flags.trace_drumbrake_compact_bytecode) {
1625 printf("EmitSlotOffset %d\n", value);
1626 }
1627#endif // V8_ENABLE_DRUMBRAKE_TRACING
1628
1630 if (V8_UNLIKELY(value > 0xffff)) {
1632 } else {
1633 uint16_t u16 = static_cast<uint16_t>(value);
1634 Emit(&u16, sizeof(u16));
1636 }
1637 } else {
1639 Emit(&value, sizeof(value));
1640 }
1641 }
1642 inline void EmitMemoryOffset(uint64_t value) {
1643#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1644 if (v8_flags.trace_drumbrake_compact_bytecode) {
1645 printf("EmitMemoryOffset %llu\n", value);
1646 }
1647#endif // V8_ENABLE_DRUMBRAKE_TRACING
1648
1650 if (V8_UNLIKELY(value > 0xffffffff)) {
1652 } else {
1653 uint32_t u32 = static_cast<uint32_t>(value);
1654 Emit(&u32, sizeof(u32));
1656 }
1657 } else {
1659 Emit(&value, sizeof(value));
1660 }
1661 }
1662
  // Emit 32-bit operands into the bytecode stream. These are thin wrappers
  // around Emit(); the distinct names document what kind of operand the
  // corresponding instruction handler will read back.
  inline void EmitStackIndex(int32_t value) { Emit(&value, sizeof(value)); }
  inline void EmitRefStackIndex(int32_t value) { Emit(&value, sizeof(value)); }
  inline void EmitRefValueType(int32_t value) { Emit(&value, sizeof(value)); }
  inline void EmitStructFieldOffset(int32_t value) {
    Emit(&value, sizeof(value));
  }
1669
1670 inline void EmitFnId(InstructionHandler func_id, uint32_t pc = UINT_MAX) {
1671 // If possible, compacts two consecutive CopySlot32 or CopySlot64
1672 // instructions into a single instruction, to save one dispatch.
1673 if (TryCompactInstructionHandler(func_id)) return;
1674
1675 if (pc != UINT_MAX) {
1676 code_pc_map_[code_.size()] = pc;
1677 }
1678
1680
1681 int16_t id = func_id;
1683 id += kInstructionCount;
1684 } else {
1686 }
1687 Emit(&id, sizeof(id));
1688
1689#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1690 if (v8_flags.trace_drumbrake_compact_bytecode) {
1691 printf("* EmitFnId %04x %s%s\n", id, kInstructionHandlerNames[func_id],
1692 id >= kInstructionCount ? " (large)" : "");
1693 }
1694#endif // V8_ENABLE_DRUMBRAKE_TRACING
1695 }
1696
1697 void EmitCopySlot(ValueType value_type, uint32_t from_slot_index,
1698 uint32_t to_slot_index, bool copy_from_reg = false);
1699
1700 inline bool IsMemory64() const;
1701 inline bool IsMultiMemory() const;
1702
1703 inline ValueKind GetGlobalType(uint32_t index) const;
1704 inline void EmitGlobalIndex(uint32_t index);
1705
1706 uint32_t ReserveBlockSlots(uint8_t opcode,
1707 const WasmInstruction::Optional::Block& block_data,
1708 size_t* rets_slots_count,
1709 size_t* params_slots_count);
1710 void StoreBlockParamsIntoSlots(uint32_t target_block_index,
1711 bool update_stack);
1712 void StoreBlockParamsAndResultsIntoSlots(uint32_t target_block_index,
1713 WasmOpcode opcode);
1714
1715 inline bool HasVoidSignature(
1716 const WasmBytecodeGenerator::BlockData& block_data) const;
1717 inline uint32_t ParamsCount(
1718 const WasmBytecodeGenerator::BlockData& block_data) const;
1719 inline ValueType GetParamType(
1720 const WasmBytecodeGenerator::BlockData& block_data, size_t index) const;
1721 inline uint32_t ReturnsCount(
1722 const WasmBytecodeGenerator::BlockData& block_data) const;
1723 inline ValueType GetReturnType(
1724 const WasmBytecodeGenerator::BlockData& block_data, size_t index) const;
1725
1726 void PreserveArgsAndLocals();
1727
1728 int32_t BeginBlock(WasmOpcode opcode,
1729 const WasmInstruction::Optional::Block signature);
1730 inline void BeginElseBlock(uint32_t if_block_index, bool dummy);
1731 int32_t EndBlock(WasmOpcode opcode);
1732
1733 void Return();
1734 inline void EmitBranchOffset(uint32_t delta);
1735 inline void EmitIfElseBranchOffset();
1736 inline void EmitTryCatchBranchOffset();
1737 inline void EmitBranchTableOffset(uint32_t delta, uint32_t code_pos);
1738 inline uint32_t GetCurrentBranchDepth() const;
1739 inline int32_t GetTargetBranch(uint32_t delta) const;
1740 int GetCurrentTryBlockIndex(bool return_matching_try_for_catch_blocks) const;
1741 void PatchBranchOffsets();
1743 void RestoreIfElseParams(uint32_t if_block_index);
1744
1745 bool HasSharedSlot(uint32_t stack_index) const;
1746 bool FindSharedSlot(uint32_t stack_index, uint32_t* new_slot_index);
1747
1748 inline const FunctionSig* GetFunctionSignature(uint32_t function_index) const;
1749
1750 inline ValueKind GetTopStackType(RegMode reg_mode) const;
1751
1752 inline uint32_t function_index() const { return function_index_; }
1753
1754 std::vector<Slot> slots_;
1755
1757 switch (value_type.kind()) {
1758 case kI32:
1760 case kI64:
1762 case kF32:
1764 case kF64:
1766 case kS128:
1768 case kRef:
1769 case kRefNull:
1771 default:
1772 UNREACHABLE();
1773 }
1774 }
1775
1776 template <typename T>
1778 // A gcc bug causes "error: explicit specialization in non-namespace scope"
1779 // with explicit specializations here:
1780 // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=85282
1781 if constexpr (std::is_same_v<T, WasmRef>) {
1783 }
1784 uint32_t slot_index = static_cast<uint32_t>(slots_.size());
1785 slots_.push_back({value_type, slot_offset_, 0});
1786 slot_offset_ += sizeof(T) / kSlotSize;
1787 return slot_index;
1788 }
1790 uint32_t slot_index = static_cast<uint32_t>(slots_.size());
1792 slot_offset_ += sizeof(WasmRef) / kSlotSize;
1794 return slot_index;
1795 }
1796
1797 template <typename T>
1798 inline uint32_t GetConstSlot(T value) {
1799 if constexpr (std::is_same_v<T, int32_t>) {
1800 return GetI32ConstSlot(value);
1801 }
1802 if constexpr (std::is_same_v<T, int64_t>) {
1803 return GetI64ConstSlot(value);
1804 }
1805 if constexpr (std::is_same_v<T, float>) {
1806 return GetF32ConstSlot(value);
1807 }
1808 if constexpr (std::is_same_v<T, double>) {
1809 return GetF64ConstSlot(value);
1810 }
1811 if constexpr (std::is_same_v<T, Simd128>) {
1812 return GetS128ConstSlot(value);
1813 }
1814 UNREACHABLE();
1815 }
1816 inline uint32_t GetI32ConstSlot(int32_t value) {
1817 auto it = i32_const_cache_.find(value);
1818 if (it != i32_const_cache_.end()) {
1819 return it->second;
1820 }
1821 return UINT_MAX;
1822 }
1823 inline uint32_t GetI64ConstSlot(int64_t value) {
1824 auto it = i64_const_cache_.find(value);
1825 if (it != i64_const_cache_.end()) {
1826 return it->second;
1827 }
1828 return UINT_MAX;
1829 }
1830 inline uint32_t GetF32ConstSlot(float value) {
1831 auto it = f32_const_cache_.find(value);
1832 if (it != f32_const_cache_.end()) {
1833 return it->second;
1834 }
1835 return UINT_MAX;
1836 }
1837 inline uint32_t GetF64ConstSlot(double value) {
1838 auto it = f64_const_cache_.find(value);
1839 if (it != f64_const_cache_.end()) {
1840 return it->second;
1841 }
1842 return UINT_MAX;
1843 }
1844 inline uint32_t GetS128ConstSlot(Simd128 value) {
1845 auto it = s128_const_cache_.find(reinterpret_cast<Simd128&>(value));
1846 if (it != s128_const_cache_.end()) {
1847 return it->second;
1848 }
1849 return UINT_MAX;
1850 }
1851
1852 template <typename T>
1853 inline uint32_t CreateConstSlot(T value) {
1854 if constexpr (std::is_same_v<T, WasmRef>) {
1855 UNREACHABLE();
1856 }
1857 uint32_t slot_index = GetConstSlot(value);
1858 if (slot_index == UINT_MAX) {
1859 uint32_t offset = const_slot_offset_ * sizeof(uint32_t);
1860 DCHECK_LE(offset + sizeof(T), const_slots_values_.size());
1861
1862 slot_index = static_cast<uint32_t>(slots_.size());
1863 slots_.push_back(
1866 reinterpret_cast<Address>(const_slots_values_.data() + offset),
1867 value);
1868 const_slot_offset_ += sizeof(T) / kSlotSize;
1869 }
1870 return slot_index;
1871 }
1872
1873 template <typename T>
1874 inline uint32_t PushConstSlot(T value) {
1875 uint32_t new_slot_index = CreateConstSlot(value);
1876 PushConstSlot(new_slot_index);
1877 return new_slot_index;
1878 }
1879 inline void PushConstSlot(uint32_t slot_index);
1880#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1881 void TracePushConstSlot(uint32_t slot_index);
1882#endif // V8_ENABLE_DRUMBRAKE_TRACING
1883
  // Pushes {slot_index} onto the simulated value stack used during bytecode
  // generation (this is a compile-time model, not the runtime stack).
  inline void PushSlot(uint32_t slot_index) {
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
    if (v8_flags.trace_drumbrake_bytecode_generator &&
        v8_flags.trace_drumbrake_execution_verbose) {
      printf(" push - slot[%d] = %d\n", stack_size(), slot_index);
    }
#endif  // V8_ENABLE_DRUMBRAKE_TRACING

    stack_.push_back(slot_index);
  }
1894
1895 inline uint32_t _PushSlot(ValueType value_type) {
1896 PushSlot(static_cast<uint32_t>(slots_.size()));
1897 return CreateSlot(value_type);
1898 }
1899
1900 inline void PushCopySlot(uint32_t from_stack_index);
1901#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1902 void TracePushCopySlot(uint32_t from_stack_index);
1903#endif // V8_ENABLE_DRUMBRAKE_TRACING
1904
  // Pops the top entry of the simulated value stack and returns the byte
  // offset of the slot it referred to (read before popping).
  inline uint32_t PopSlot() {
    // TODO(paolosev@microsoft.com) - We should try to mark as 'invalid' and
    // later reuse slots in the stack once we are sure they won't be referred
    // again, which should be the case once a slot is popped. This could make
    // the stack frame size smaller, especially for large Wasm functions.
    uint32_t slot_offset = slots_[stack_.back()].slot_offset;

#ifdef V8_ENABLE_DRUMBRAKE_TRACING
    if (v8_flags.trace_drumbrake_bytecode_generator &&
        v8_flags.trace_drumbrake_execution_verbose) {
      printf(" pop - slot[%d] = %d\n", stack_size() - 1, stack_.back());
    }
#endif  // V8_ENABLE_DRUMBRAKE_TRACING

    stack_.pop_back();
    return slot_offset;
  }
1922
1923 void CopyToSlot(ValueType value_type, uint32_t from_slot_index,
1924 uint32_t to_stack_index, bool copy_from_reg);
1925 void CopyToSlotAndPop(ValueType value_type, uint32_t to, bool is_tee,
1926 bool copy_from_reg);
1927
1928 inline void SetSlotType(uint32_t stack_index, ValueType type) {
1929 DCHECK_LT(stack_index, stack_.size());
1930
1931 uint32_t slot_index = stack_[stack_index];
1932 slots_[slot_index].value_type = type;
1933
1934#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1935 TraceSetSlotType(stack_index, type);
1936#endif // V8_ENABLE_DRUMBRAKE_TRACING
1937 }
1938
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
  // Tracing hook invoked by SetSlotType. Parameter renamed "typo" -> "type":
  // the old name was evidently a typo (declaration-only rename, no ABI or
  // caller impact).
  void TraceSetSlotType(uint32_t stack_index, ValueType type);
#endif  // V8_ENABLE_DRUMBRAKE_TRACING
1942
  // Redirects stack entry {index} to refer to slot {slot_index}, leaving the
  // slot's recorded type unchanged.
  inline void UpdateStack(uint32_t index, uint32_t slot_index) {
    DCHECK_LT(index, stack_.size());
    stack_[index] = slot_index;
  }
1947 inline void UpdateStack(uint32_t index, uint32_t slot_index,
1949 DCHECK_LT(index, stack_.size());
1950 stack_[index] = slot_index;
1951 SetSlotType(index, value_type);
1952 }
1953
  // Index of the topmost entry of the simulated value stack; the stack must
  // not be empty.
  inline uint32_t stack_top_index() const {
    DCHECK(!stack_.empty());
    return static_cast<uint32_t>(stack_.size() - 1);
  }
  // Number of entries currently on the simulated value stack.
  inline uint32_t stack_size() const {
    return static_cast<uint32_t>(stack_.size());
  }
1961
1962 inline void SetUnreachableMode() {
1965
1967 blocks_[current_block_index_].is_unreachable_ = true;
1968 }
1969
  // Create slots for arguments and generates run-time commands to initialize
  // their values.
  void InitSlotsForFunctionArgs(const FunctionSig* sig, bool is_indirect_call);

  // Returns true when a cast from `obj_type` to `expected_type` can be
  // statically proven to fail (`null_succeeds` allows null through).
  bool TypeCheckAlwaysFails(ValueType obj_type, HeapType expected_type,
                            bool null_succeeds) const;

#ifdef DEBUG
  static bool HasSideEffects(WasmOpcode opcode);
#endif  // DEBUG

  // Backing storage for constant slots' raw bytes.
  std::vector<uint8_t> const_slots_values_;
  // Per-type caches mapping a constant's value to a uint32_t — presumably
  // the index of the const slot already holding that value, so equal
  // constants share one slot — TODO confirm against the .cc file.
  std::unordered_map<int32_t, uint32_t> i32_const_cache_;
  std::unordered_map<int64_t, uint32_t> i64_const_cache_;
  std::unordered_map<float, uint32_t> f32_const_cache_;
  std::unordered_map<double, uint32_t> f64_const_cache_;

  // NOTE(review): the opening line of the hash-functor struct (embedded line
  // 1995, `struct Simd128Hash {` or similar) is missing from this copy.
    size_t operator()(const Simd128& s128) const;
  };
  std::unordered_map<Simd128, uint32_t, Simd128Hash> s128_const_cache_;

  std::vector<Simd128> simd_immediates_;
  uint32_t slot_offset_;  // TODO(paolosev@microsoft.com): manage holes
  // Stack of slot indices that journals every push/pop into `history_` so a
  // speculative sequence of operations can be undone with rollback().
  // NOTE(review): the class header line (embedded line 2003) is missing from
  // this copy of the file.
 public:
  typedef std::vector<uint32_t> Stack;
  // Pushes `value` and journals the push.
  void push_back(const uint32_t& value) {
    stack_.push_back(value);
    history_.push_back({Push, value});
  }
  // Journals the popped value (so it can be re-pushed on rollback), then pops.
  void pop_back() {
    history_.push_back({Pop, back()});
    stack_.pop_back();
  }
  Stack::reference back() { return stack_.back(); }
  Stack::const_reference back() const { return stack_.back(); }
  Stack::reference operator[](Stack::size_type pos) { return stack_[pos]; }
  Stack::const_reference operator[](Stack::size_type pos) const {
    return stack_[pos];
  }
  size_t size() const { return stack_.size(); }
  bool empty() const { return stack_.empty(); }
  void reserve(Stack::size_type new_cap) { stack_.reserve(new_cap); }
  void resize(Stack::size_type count) { stack_.resize(count); }

  // Discards the journal; subsequent rollback() is a no-op.
  void clear_history() { history_.resize(0); }
  // Replays the journal in reverse: undoes each Push by popping and each Pop
  // by re-pushing the journaled value. Empties the journal.
  void rollback() {
    while (!history_.empty()) {
      Entry entry = history_.back();
      history_.pop_back();
      if (entry.kind == EntryKind::Push) {
        stack_.pop_back();
      } else {
        stack_.push_back(entry.value);
      }
    }
  }

 private:
  enum EntryKind { Push, Pop };
  // One journal record. NOTE(review): the `EntryKind kind;` field (embedded
  // line 2042) and the `Stack stack_;` member (embedded line 2045) are
  // missing from this copy — `rollback()` reads `entry.kind`, so the field
  // exists upstream.
  struct Entry {
    uint32_t value;
  };
  std::vector<Entry> history_;
  };
  // NOTE(review): several member declarations from the original header are
  // missing in this copy of the file (gaps in the embedded numbering below);
  // confirm the full member list against upstream V8.

  // Number of parameters of the function being compiled — presumably; the
  // initializing constructor is not visible here.
  uint32_t args_count_;

  // The bytecode emitted so far.
  std::vector<uint8_t> code_;

  // One entry per (possibly nested) block encountered during generation.
  std::vector<BlockData> blocks_;

#ifdef DEBUG
  bool was_current_instruction_reachable_;
#endif  // DEBUG

  // TODO(paolosev@microsoft.com) - Using a map is relatively slow because of
  // all the insertions that cause a ~10% performance hit in the generation of
  // the interpreter bytecode. The bytecode generation time is not a huge factor
  // when we run in purely jitless mode, because it is almost always dwarfed by
  // the interpreter execution time. It could be an important factor, however,
  // if we implemented a multi-tier strategy with the interpreter as a first
  // tier. It would probably be better to replace this with a plain vector and
  // use binary search for lookups.
  std::map<CodeOffset, pc_t> code_pc_map_;

  // Manages bytecode compaction.
  // Process-wide counters (shared across all generator instances).
  static std::atomic<size_t> total_bytecode_size_;
  static std::atomic<size_t> emitted_short_slot_offset_count_;
  static std::atomic<size_t> emitted_short_memory_offset_count_;

};
2102
// TODO(paolosev@microsoft.com) Duplicated from src/runtime/runtime-wasm.cc
// RAII scope that presumably clears the thread-in-wasm flag for its lifetime
// — confirm against the duplicate in src/runtime/runtime-wasm.cc.
// NOTE(review): the class header, destructor declaration and data member
// (embedded lines 2104/2107/2110) are missing from this copy of the file.
 public:
  explicit ClearThreadInWasmScope(Isolate* isolate);

 private:
};
2112
#ifdef V8_ENABLE_DRUMBRAKE_TRACING
// Writes DrumBrake execution traces either to stdout or, when the
// --redirect-drumbrake-traces flag is set, to a rotating set of
// "trace-<pid>[-<isolate>]-<chunk>.dbt" files capped at kMaxFileSize each.
class InterpreterTracer final : public Malloced {
 public:
  // Parses --trace-drumbrake-filter (a comma-separated list of function
  // indices, or "*" for all functions) and opens the trace destination.
  explicit InterpreterTracer(int isolate_id)
      : isolate_id_(isolate_id),
        file_(nullptr),
        current_chunk_index_(0),
        write_count_(0) {
    if (0 != strcmp(v8_flags.trace_drumbrake_filter.value(), "*")) {
      std::stringstream s(v8_flags.trace_drumbrake_filter.value());
      for (int i; s >> i;) {
        traced_functions_.insert(i);
        if (s.peek() == ',') s.ignore();
      }
    }

    OpenFile();
  }

  ~InterpreterTracer() { CloseFile(); }

  // Selects the trace destination: stdout when not redirecting, otherwise
  // the current chunk file (truncated via WriteChars, then reopened).
  void OpenFile() {
    if (!ShouldRedirect()) {
      file_ = stdout;
      return;
    }

    if (isolate_id_ >= 0) {
      base::SNPrintF(filename_, "trace-%d-%d-%d.dbt",
                     base::OS::GetCurrentProcessId(), isolate_id_,
                     current_chunk_index_);
    } else {
      base::SNPrintF(filename_, "trace-%d-%d.dbt",
                     base::OS::GetCurrentProcessId(), current_chunk_index_);
    }
    // Truncate/create the chunk file (writes zero bytes).
    WriteChars(filename_.begin(), "", 0, false);

    if (file_ == nullptr) {
      file_ = base::OS::FOpen(filename_.begin(), "w");
      CHECK_WITH_MSG(file_ != nullptr, "could not open file.");
    }
  }

  // Closes the owned trace file; a no-op when tracing to stdout (which is
  // not owned and must not be fclose'd).
  void CloseFile() {
    if (!ShouldRedirect()) {
      return;
    }

    DCHECK_NOT_NULL(file_);
    base::Fclose(file_);
    file_ = nullptr;
  }

  // True when `function_index` passes the filter (empty filter = trace all).
  bool ShouldTraceFunction(int function_index) const {
    return traced_functions_.empty() ||
           traced_functions_.find(function_index) != traced_functions_.end();
  }

  void PrintF(const char* format, ...);

  // Flushes, and every kWriteCountCheckInterval writes checks the file size;
  // when it exceeds kMaxFileSize, rotates to the next of kFileChunksCount
  // chunk files (older chunks are overwritten in round-robin order).
  void CheckFileSize() {
    if (!ShouldRedirect()) {
      return;
    }

    ::fflush(file_);
    if (++write_count_ >= kWriteCountCheckInterval) {
      write_count_ = 0;
      ::fseek(file_, 0L, SEEK_END);
      if (::ftell(file_) > kMaxFileSize) {
        CloseFile();
        current_chunk_index_ = (current_chunk_index_ + 1) % kFileChunksCount;
        OpenFile();
      }
    }
  }

  FILE* file() const { return file_; }

 private:
  static bool ShouldRedirect() { return v8_flags.redirect_drumbrake_traces; }

  int isolate_id_;
  base::EmbeddedVector<char, 128> filename_;
  FILE* file_;  // stdout (not owned) or the owned chunk file.
  std::unordered_set<int> traced_functions_;
  int current_chunk_index_;
  int64_t write_count_;

  static const int64_t kWriteCountCheckInterval = 1000;
  static const int kFileChunksCount = 10;
  static const int64_t kMaxFileSize = 100 * MB;
};
2206
2207class ShadowStack {
2208 public:
2209 void TracePop() { stack_.pop_back(); }
2210
2211 void TraceSetSlotType(uint32_t index, uint32_t type) {
2212 if (stack_.size() <= index) stack_.resize(index + 1);
2213 stack_[index].type_ = ValueType::FromRawBitField(type);
2214 }
2215
2216 template <typename T>
2217 void TracePush(uint32_t slot_offset) {
2218 stack_.push_back({value_type<T>(), slot_offset});
2219 }
2220
2221 void TracePushCopy(uint32_t index) { stack_.push_back(stack_[index]); }
2222
2223 void TraceUpdate(uint32_t stack_index, uint32_t slot_offset) {
2224 if (stack_.size() <= stack_index) stack_.resize(stack_index + 1);
2225 stack_[stack_index].slot_offset_ = slot_offset;
2226 }
2227
2228 void Print(WasmInterpreterRuntime* wasm_runtime, const uint32_t* sp,
2229 size_t start_params, size_t start_locals, size_t start_stack,
2230 RegMode reg_mode, int64_t r0, double fp0) const;
2231
2232 struct Slot {
2233 static void Print(WasmInterpreterRuntime* wasm_runtime, ValueType type,
2234 size_t index, char kind, const uint8_t* addr);
2235 void Print(WasmInterpreterRuntime* wasm_runtime, size_t index, char kind,
2236 const uint8_t* addr) const {
2237 return Print(wasm_runtime, type_, index, kind, addr);
2238 }
2239
2240 ValueType type_;
2241 uint32_t slot_offset_;
2242 };
2243
2244 private:
2245 std::vector<Slot> stack_;
2246};
2247#endif // V8_ENABLE_DRUMBRAKE_TRACING
2248
2249} // namespace wasm
2250} // namespace internal
2251} // namespace v8
2252
2253#endif // V8_WASM_INTERPRETER_WASM_INTERPRETER_H_
Isolate * isolate_
#define T
Builtins::Kind kind
Definition builtins.cc:40
SourcePosition pos
size_t size() const
static constexpr HeapType FromBits(uint32_t bits)
Definition value-type.h:721
constexpr bool is_bottom() const
Definition value-type.h:426
constexpr ValueKind kind() const
Definition value-type.h:631
constexpr bool is_object_reference() const
Definition value-type.h:601
static constexpr ValueType FromRawBitField(uint32_t bits)
Definition value-type.h:913
Stack::const_reference operator[](Stack::size_type pos) const
static std::atomic< size_t > emitted_short_memory_offset_count_
ValueType GetParamType(const WasmBytecodeGenerator::BlockData &block_data, size_t index) const
void BeginElseBlock(uint32_t if_block_index, bool dummy)
void Pop(ValueKind kind, bool emit=true)
void EmitFnId(InstructionHandler func_id, uint32_t pc=UINT_MAX)
void CopyToSlotAndPop(ValueType value_type, uint32_t to, bool is_tee, bool copy_from_reg)
void(WasmBytecodeGenerator::*) MemIndexPopFunc(bool emit)
void EmitBranchTableOffset(uint32_t delta, uint32_t code_pos)
void EmitCopySlot(ValueType value_type, uint32_t from_slot_index, uint32_t to_slot_index, bool copy_from_reg=false)
uint32_t _PushSlot(ValueType value_type)
void SetSlotType(uint32_t stack_index, ValueType type)
base::SmallVector< uint32_t, 8 > br_table_labels_
bool TypeCheckAlwaysSucceeds(ValueType obj_type, HeapType type) const
const FunctionSig * GetFunctionSignature(uint32_t function_index) const
void ITableIndexPush(bool is_table64, bool emit=true)
bool HasVoidSignature(const WasmBytecodeGenerator::BlockData &block_data) const
std::unordered_map< float, uint32_t > f32_const_cache_
WasmBytecodeGenerator(const WasmBytecodeGenerator &)=delete
void(WasmBytecodeGenerator::*) MemIndexPushFunc(bool emit)
uint32_t CreateWasmRefSlot(ValueType value_type)
uint32_t CreateSlot(ValueType value_type)
void DecodeAtomicOp(WasmOpcode opcode, WasmInstruction::Optional *optional, Decoder *decoder, InterpreterCode *code, pc_t pc, int *const len)
base::SmallVector< uint32_t, 16 > loop_begin_code_offsets_
ValueKind GetTopStackType(RegMode reg_mode) const
RegMode DoEncodeInstruction(const WasmInstruction &instr, RegMode curr_reg_mode, RegMode next_reg_mode)
void DecodeGCOp(WasmOpcode opcode, WasmInstruction::Optional *optional, Decoder *decoder, InterpreterCode *code, pc_t pc, int *const len)
void PushCopySlot(uint32_t from_stack_index)
std::map< CodeOffset, pc_t > code_pc_map_
ValueType GetReturnType(const WasmBytecodeGenerator::BlockData &block_data, size_t index) const
std::unordered_map< Simd128, uint32_t, Simd128Hash > s128_const_cache_
void StoreBlockParamsAndResultsIntoSlots(uint32_t target_block_index, WasmOpcode opcode)
std::unordered_map< double, uint32_t > f64_const_cache_
static std::atomic< size_t > total_bytecode_size_
uint32_t ReturnsCount(const WasmBytecodeGenerator::BlockData &block_data) const
WasmBytecodeGenerator(uint32_t function_index, InterpreterCode *wasm_code, const WasmModule *module)
uint32_t ParamsCount(const WasmBytecodeGenerator::BlockData &block_data) const
std::unordered_map< int64_t, uint32_t > i64_const_cache_
bool DoEncodeSuperInstruction(RegMode &reg_mode, const WasmInstruction &curr_instr, const WasmInstruction &next_instr)
int GetCurrentTryBlockIndex(bool return_matching_try_for_catch_blocks) const
void RestoreIfElseParams(uint32_t if_block_index)
WasmInstruction DecodeInstruction(pc_t pc, Decoder &decoder)
std::unordered_map< int32_t, uint32_t > i32_const_cache_
void StoreBlockParamsIntoSlots(uint32_t target_block_index, bool update_stack)
void UpdateStack(uint32_t index, uint32_t slot_index)
bool EncodeSuperInstruction(RegMode &reg_mode, const WasmInstruction &curr_instr, const WasmInstruction &next_instr)
bool HasSharedSlot(uint32_t stack_index) const
ValueKind GetGlobalType(uint32_t index) const
void InitSlotsForFunctionArgs(const FunctionSig *sig, bool is_indirect_call)
uint32_t ReserveBlockSlots(uint8_t opcode, const WasmInstruction::Optional::Block &block_data, size_t *rets_slots_count, size_t *params_slots_count)
bool TypeCheckAlwaysFails(ValueType obj_type, HeapType expected_type, bool null_succeeds) const
int32_t BeginBlock(WasmOpcode opcode, const WasmInstruction::Optional::Block signature)
bool DecodeSimdOp(WasmOpcode opcode, WasmInstruction::Optional *optional, Decoder *decoder, InterpreterCode *code, pc_t pc, int *const len)
void RefPush(ValueType type, bool emit=true)
uint32_t CreateSlot(ValueType value_type)
bool TryCompactInstructionHandler(InstructionHandler func_addr)
RegMode EncodeInstruction(const WasmInstruction &instr, RegMode curr_reg_mode, RegMode next_reg_mode)
void DecodeNumericOp(WasmOpcode opcode, WasmInstruction::Optional *optional, Decoder *decoder, InterpreterCode *code, pc_t pc, int *const len)
int32_t GetTargetBranch(uint32_t delta) const
void UpdateStack(uint32_t index, uint32_t slot_index, ValueType value_type)
bool FindSharedSlot(uint32_t stack_index, uint32_t *new_slot_index)
static std::atomic< size_t > emitted_short_slot_offset_count_
void Emit(const void *buff, size_t len)
bool ToRegisterIsAllowed(const WasmInstruction &instr)
WasmBytecodeGenerator & operator=(const WasmBytecodeGenerator &)=delete
void CopyToSlot(ValueType value_type, uint32_t from_slot_index, uint32_t to_stack_index, bool copy_from_reg)
std::unique_ptr< WasmBytecode > GenerateBytecode()
static bool HasRefOrSimdArgs(const FunctionSig *sig)
ValueType return_type(size_t index) const
static uint32_t RefLocalsCount(const InterpreterCode *wasm_code)
const FunctionSig * GetFunctionSignature() const
static uint32_t JSToWasmWrapperPackedArraySize(const FunctionSig *sig)
WasmEHData::ExceptionPayloadSlotOffsets GetExceptionPayloadStartSlotOffsets(WasmEHData::BlockIndex catch_block_index) const
bool InitializeSlots(uint8_t *sp, size_t stack_space) const
WasmBytecode(int func_index, const uint8_t *code_data, size_t code_length, uint32_t stack_frame_size, const FunctionSig *signature, const CanonicalSig *canonical_signature, const InterpreterCode *interpreter_code, size_t blocks_count, const uint8_t *const_slots_data, size_t const_slots_length, uint32_t ref_slots_count, const WasmEHData &&eh_data, const std::map< CodeOffset, pc_t > &&code_pc_map)
static uint32_t RetsSizeInSlots(const FunctionSig *sig)
pc_t GetPcFromTrapCode(const uint8_t *current_code) const
const InterpreterCode * interpreter_code_
DirectHandle< Object > GetCaughtException(Isolate *isolate, uint32_t catch_block_index) const
static uint32_t ArgsSizeInSlots(const FunctionSig *sig)
ValueType local_type(size_t index) const
const WasmEHData::TryBlock * GetParentTryBlock(const WasmEHData::TryBlock *try_block) const
static bool ContainsSimd(const FunctionSig *sig)
static uint32_t LocalsSizeInSlots(const InterpreterCode *wasm_code)
ValueType arg_type(size_t index) const
const CanonicalSig * canonical_signature_
std::map< CodeOffset, pc_t > code_pc_map_
std::vector< uint8_t > const_slots_values_
const CanonicalSig * GetCanonicalFunctionSignature() const
static uint32_t RefArgsCount(const FunctionSig *sig)
static uint32_t RefRetsCount(const FunctionSig *sig)
const WasmEHData::TryBlock * GetTryBlock(CodeOffset code_offset) const
void AddCatchBlock(BlockIndex catch_block_index, int tag_index, uint32_t first_param_slot_offset, uint32_t first_param_ref_stack_index, CodeOffset code_offset)
void AddTryBlock(BlockIndex try_block_index, BlockIndex parent_or_matching_try_block_index, BlockIndex ancestor_try_block_index)
void AddDelegatedBlock(BlockIndex delegated_try_block_index)
void RecordPotentialExceptionThrowingInstruction(WasmOpcode opcode, CodeOffset code_offset)
BlockIndex EndTryCatchBlocks(BlockIndex block_index, CodeOffset code_offset)
ExceptionPayloadSlotOffsets GetExceptionPayloadStartSlotOffsets(BlockIndex catch_block_index) const
std::unordered_map< BlockIndex, CatchBlock > catch_blocks_
void SetCaughtException(Isolate *isolate, BlockIndex catch_block_index, DirectHandle< Object > exception)
DirectHandle< Object > GetCaughtException(Isolate *isolate, BlockIndex catch_block_index) const
std::unordered_map< CodeOffset, BlockIndex > code_trycatch_map_
const TryBlock * GetTryBlock(CodeOffset code_offset) const
std::unordered_map< BlockIndex, TryBlock > try_blocks_
size_t GetEndInstructionOffsetFor(BlockIndex catch_block_index) const
BlockIndex GetTryBranchOf(BlockIndex catch_block_index) const
const TryBlock * GetParentTryBlock(const TryBlock *try_block) const
const TryBlock * GetDelegateTryBlock(const TryBlock *try_block) const
WasmExecutionTimer(Isolate *isolate, bool track_jitless_wasm)
std::unordered_map< int, std::unique_ptr< WasmInterpreterThread > > ThreadInterpreterMap
void SetCurrentActivationFrame(uint8_t *current_fp, uint32_t current_frame_size, uint32_t current_stack_size, uint32_t current_ref_stack_fp, uint32_t current_ref_stack_frame_size)
void SetCurrentFrame(const FrameState &frame_state)
std::unique_ptr< std::vector< WasmInterpreterStackEntry > > trap_stack_trace_
Activation(WasmInterpreterThread *thread, WasmInterpreterRuntime *wasm_runtime, Address frame_pointer, uint8_t *start_fp, const FrameState &callee_frame_state)
void SetTrapped(int trap_function_index, int trap_pc)
std::vector< WasmInterpreterStackEntry > GetStackTrace()
const WasmInterpreterRuntime * GetWasmRuntime() const
void Trap(TrapReason trap_reason, int trap_function_index, int trap_pc, const FrameState &current_frame)
bool ExpandStack(size_t additional_required_size)
static void NotifyIsolateDisposal(Isolate *isolate)
void SetCurrentActivationFrame(uint32_t *fp, uint32_t current_frame_size, uint32_t current_stack_size, uint32_t current_ref_stack_fp, uint32_t current_ref_stack_frame_size)
static WasmInterpreterThreadMap * thread_interpreter_map_s
WasmInterpreterThread::Activation * GetActivation(Address frame_pointer) const
Handle< FixedArray > reference_stack() const
std::vector< std::unique_ptr< Activation > > activations_
static WasmInterpreterThread * GetCurrentInterpreterThread(Isolate *isolate)
void SetCurrentFrame(const FrameState &frame_state)
ZoneVector< InterpreterCode > interpreter_code_
WasmInterpreter & operator=(const WasmInterpreter &)=delete
WasmInterpreterRuntime * GetWasmRuntime()
std::shared_ptr< WasmInterpreterRuntime > wasm_runtime_
IndirectHandle< WasmInstanceObject > instance_object_
const ZoneVector< uint8_t > module_bytes_
WasmInterpreter(const WasmInterpreter &)=delete
Handle< Code > code
const ObjectRef type_
enum v8::internal::@1270::DeoptimizableCodeIterator::@67 state_
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in allocation gc speed threshold for starting incremental marking via a task in percent of available threshold for starting incremental marking immediately in percent of available Use a single schedule for determining a marking schedule between JS and C objects schedules the minor GC task with kUserVisible priority max worker number of concurrent for NumberOfWorkerThreads start background threads that allocate memory 
concurrent_array_buffer_sweeping use parallel threads to clear weak refs in the atomic pause trace progress of the incremental marking trace object counts and memory usage * MB
#define FOREACH_INSTR_HANDLER(V)
OptionalOpIndex index
int32_t offset
Instruction * instr
ZoneVector< RpoNumber > & result
bool null_succeeds
ValueType obj_type
int s
Definition mul-fft.cc:297
static V ReadUnalignedValue(Address p)
Definition memory.h:28
static void WriteUnalignedValue(Address p, V value)
Definition memory.h:41
RegMode GetRegMode(ValueKind kind)
ValueType value_type< int64_t >()
static ValueType value_type()
static const size_t kSlotSize
ValueType value_type< uint64_t >()
PWasmOp * kInstructionTable[kInstructionTableSize]
static const RegModeTransform kRegModes[256]
constexpr uint32_t kBranchOnCastDataTargetTypeBitSize
static const size_t kOperatorModeCount
InstructionHandler ReadFnId(const uint8_t *&code)
ValueType value_type< Simd128 >()
uint32_t WasmInterpreterRuntime int64_t r0
ValueType value_type< double >()
constexpr IndependentValueType kWasmF32
constexpr IndependentHeapType kWasmAnyRef
uint32_t WasmInterpreterRuntime * wasm_runtime
constexpr IndependentValueType kWasmI32
ValueType value_type< uint32_t >()
uint32_t WasmInterpreterRuntime int64_t double fp0
DirectHandle< Object > WasmRef
INSTRUCTION_HANDLER_FUNC trace_PushSlot(const uint8_t *code, uint32_t *sp, WasmInterpreterRuntime *wasm_runtime, int64_t r0, double fp0)
static const ptrdiff_t kCodeOffsetSize
static constexpr uint32_t kInstructionTableMask
InstructionHandler s_unwind_code
ValueType value_type< WasmRef >()
ValueType value_type< int32_t >()
typedef void(VECTORCALL PWasmOp)(const uint8_t *code
static constexpr uint32_t kInstructionTableSize
constexpr IndependentValueType kWasmS128
constexpr IndependentValueType kWasmF64
constexpr bool is_reference(ValueKind kind)
constexpr IndependentValueType kWasmI64
ValueType value_type< float >()
bool SetPermissions(v8::PageAllocator *page_allocator, void *address, size_t size, PageAllocator::Permission access)
v8::PageAllocator * GetPlatformPageAllocator()
Definition allocation.cc:66
void PrintF(const char *format,...)
Definition utils.cc:39
wasm::WasmModule WasmModule
kWasmInternalFunctionIndirectPointerTag kProtectedInstanceDataOffset sig
constexpr int kSystemPointerSize
Definition globals.h:410
int WriteChars(const char *filename, const char *str, int size, bool verbose)
Definition utils.cc:188
V8_EXPORT_PRIVATE FlagValues v8_flags
return value
Definition map-inl.h:893
wasm::WasmFunction WasmFunction
Definition c-api.cc:87
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define CHECK_GE(lhs, rhs)
#define CHECK_WITH_MSG(condition, message)
Definition logging.h:118
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define V8_EXPORT_PRIVATE
Definition macros.h:460
WasmInterpreterThread * thread_
DirectHandle< Object > GetCaughtException(Isolate *isolate, uint32_t catch_block_index) const
void ResetHandleScope(Isolate *isolate)
void SetCaughtException(Isolate *isolate, uint32_t catch_block_index, DirectHandle< Object > exception)
const WasmBytecode * current_function_
Handle< FixedArray > caught_exceptions_
void DisposeCaughtExceptionsArray(Isolate *isolate)
std::unique_ptr< WasmBytecode > bytecode
InterpreterCode(const WasmFunction *function, BodyLocalDecls locals, const uint8_t *start, const uint8_t *end)
constexpr ValueKind kind() const
BlockData(WasmOpcode opcode, uint32_t begin_code_offset, int32_t parent_block_index, uint32_t stack_size, WasmInstruction::Optional::Block signature, uint32_t first_block_index, uint32_t rets_slots_count, uint32_t params_slots_count, int32_t parent_try_block_index)
void SaveParams(uint32_t *from, size_t params_count)
TryBlock(BlockIndex parent_or_matching_try_block, BlockIndex ancestor_try_index)
void SetDelegated(BlockIndex delegate_try_idx)
std::vector< CatchHandler > catch_handlers
WasmInstruction(uint8_t orig, WasmOpcode opcode, int length, uint32_t pc, Optional optional)
Symbol file
struct v8::internal::wasm::WasmInstruction::Optional::Block block
struct v8::internal::wasm::WasmInstruction::Optional::BrTable br_table
struct v8::internal::wasm::WasmInstruction::Optional::SimdLaneLoad simd_loadstore_lane
struct v8::internal::wasm::WasmInstruction::Optional::TableInit table_init
struct v8::internal::wasm::WasmInstruction::Optional::GC_ArrayNewFixed gc_array_new_fixed
struct v8::internal::wasm::WasmInstruction::Optional::IndirectCall indirect_call
struct v8::internal::wasm::WasmInstruction::Optional::GC_FieldImmediate gc_field_immediate
struct v8::internal::wasm::WasmInstruction::Optional::GC_ArrayCopy gc_array_copy
struct v8::internal::wasm::WasmInstruction::Optional::GC_MemoryImmediate gc_memory_immediate
struct v8::internal::wasm::WasmInstruction::Optional::GC_HeapTypeImmediate gc_heap_type_immediate
struct v8::internal::wasm::WasmInstruction::Optional::TableCopy table_copy
struct v8::internal::wasm::WasmInstruction::Optional::GC_ArrayNewOrInitData gc_array_new_or_init_data
#define V8_INLINE
Definition v8config.h:500
#define V8_UNLIKELY(condition)
Definition v8config.h:660
#define V8_NODISCARD
Definition v8config.h:693
wasm::ValueType type
const wasm::WasmModule * module_
#define VECTORCALL
#define DEFINE_INSTR_HANDLER(name)
#define INSTRUCTION_HANDLER_FUNC