v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
wasm-interpreter-runtime.cc
1// Copyright 2024 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
7#include <optional>
8
20
21namespace v8 {
22namespace internal {
23
24namespace wasm {
25
 26 class ValueTypes {
 27 public:
28 static inline int ElementSizeInBytes(ValueType type) {
29 switch (type.kind()) {
30 case kI32:
31 case kF32:
32 return 4;
33 case kI64:
34 case kF64:
35 return 8;
36 case kS128:
37 return 16;
38 case kRef:
39 case kRefNull:
40 return kSystemPointerSize;
 41 default:
 42 UNREACHABLE();
 43 }
44 }
45};
46
47} // namespace wasm
48
49namespace {
50
51// Find the frame pointer of the interpreter frame on the stack.
52Address FindInterpreterEntryFramePointer(Isolate* isolate) {
53 StackFrameIterator it(isolate, isolate->thread_local_top());
54 // On top: C entry stub.
55 DCHECK_EQ(StackFrame::EXIT, it.frame()->type());
56 it.Advance();
57 // Next: the wasm interpreter entry.
58 DCHECK_EQ(StackFrame::WASM_INTERPRETER_ENTRY, it.frame()->type());
59 return it.frame()->fp();
60}
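// Note: this runtime function is reached through a C entry stub, so the
// topmost frame is always an EXIT frame, and the frame right below it is the
// WASM_INTERPRETER_ENTRY stub frame. Its frame pointer is returned here and
// later passed to the interpreter, which uses it to identify this activation.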
61
62} // namespace
63
64RUNTIME_FUNCTION(Runtime_WasmRunInterpreter) {
65 DCHECK_EQ(3, args.length());
66 HandleScope scope(isolate);
 67 DirectHandle<WasmInstanceObject> instance = args.at<WasmInstanceObject>(0);
 68 DirectHandle<WasmTrustedInstanceData> trusted_data(
 69 instance->trusted_data(isolate), isolate);
70 int32_t func_index = NumberToInt32(args[1]);
71 DirectHandle<Object> arg_buffer_obj = args.at(2);
72
73 // The arg buffer is the raw pointer to the caller's stack. It looks like a
 75 // Smi (lowest bit not set, as checked by IsSmi), but is not a valid Smi. We just
75 // cast it back to the raw pointer.
76 CHECK(!IsHeapObject(*arg_buffer_obj));
77 CHECK(IsSmi(*arg_buffer_obj));
78 Address arg_buffer = (*arg_buffer_obj).ptr();
79
80 // Reserve buffers for argument and return values.
81 DCHECK_GT(trusted_data->module()->functions.size(), func_index);
82 const wasm::FunctionSig* sig =
83 trusted_data->module()->functions[func_index].sig;
84 DCHECK_GE(kMaxInt, sig->parameter_count());
85 int num_params = static_cast<int>(sig->parameter_count());
86 std::vector<wasm::WasmValue> wasm_args(num_params);
87 DCHECK_GE(kMaxInt, sig->return_count());
88 int num_returns = static_cast<int>(sig->return_count());
89 std::vector<wasm::WasmValue> wasm_rets(num_returns);
90
91 // Set the current isolate's context.
92 isolate->set_context(trusted_data->native_context());
93
94 // Make sure the WasmInterpreterObject and InterpreterHandle for this instance
95 // exist.
96 DirectHandle<Tuple2> interpreter_object =
97 WasmTrustedInstanceData::GetOrCreateInterpreterObject(instance);
98 wasm::InterpreterHandle* interpreter_handle =
99 wasm::GetOrCreateInterpreterHandle(isolate, interpreter_object);
100
101 if (wasm::WasmBytecode::ContainsSimd(sig)) {
102 wasm::ClearThreadInWasmScope clear_wasm_flag(isolate);
103
104 interpreter_handle->SetTrapFunctionIndex(func_index);
105 isolate->Throw(*isolate->factory()->NewTypeError(
106 MessageTemplate::kWasmTrapJSTypeError));
107 return ReadOnlyRoots(isolate).exception();
108 }
109
110 Address frame_pointer = FindInterpreterEntryFramePointer(isolate);
111
112 // If there are Ref arguments or return values, we store their pointers into
113 // an array of bytes so we need to disable GC until they are unpacked by the
114 // callee.
115 {
117
118 // Copy the arguments for the {arg_buffer} into a vector of {WasmValue}.
119 // This also boxes reference types into handles, which needs to happen
 120 // before any methods that could trigger a GC are called.
121 Address arg_buf_ptr = arg_buffer;
122 for (int i = 0; i < num_params; ++i) {
123#define CASE_ARG_TYPE(type, ctype) \
124 case wasm::type: \
125 DCHECK_EQ(wasm::ValueTypes::ElementSizeInBytes(sig->GetParam(i)), \
126 sizeof(ctype)); \
127 wasm_args[i] = \
128 wasm::WasmValue(base::ReadUnalignedValue<ctype>(arg_buf_ptr)); \
129 arg_buf_ptr += sizeof(ctype); \
130 break;
131
132 wasm::ValueType value_type = sig->GetParam(i);
133 wasm::ValueKind kind = value_type.kind();
134 switch (kind) {
135 CASE_ARG_TYPE(kWasmI32.kind(), uint32_t)
136 CASE_ARG_TYPE(kWasmI64.kind(), uint64_t)
137 CASE_ARG_TYPE(kWasmF32.kind(), float)
138 CASE_ARG_TYPE(kWasmF64.kind(), double)
139#undef CASE_ARG_TYPE
140 case wasm::kRef:
141 case wasm::kRefNull: {
144 // MarkCompactCollector::RootMarkingVisitor requires ref slots to be
145 // 64-bit aligned.
146 arg_buf_ptr += (arg_buf_ptr & 0x04);
147
 148 DirectHandle<Object> ref(
 149 base::ReadUnalignedValue<Tagged<Object>>(arg_buf_ptr), isolate);
150
151 const wasm::WasmInterpreterRuntime* wasm_runtime =
152 interpreter_handle->interpreter()->GetWasmRuntime();
153 ref = wasm_runtime->JSToWasmObject(ref, value_type);
154 if (isolate->has_exception()) {
155 interpreter_handle->SetTrapFunctionIndex(func_index);
156 return ReadOnlyRoots(isolate).exception();
157 }
158
159 if ((value_type != wasm::kWasmExternRef &&
160 value_type != wasm::kWasmNullExternRef) &&
161 IsNull(*ref, isolate)) {
162 ref = isolate->factory()->wasm_null();
163 }
164
165 wasm_args[i] = wasm::WasmValue(ref, wasm::kWasmAnyRef);
166 arg_buf_ptr += kSystemPointerSize;
167 break;
168 }
169 case wasm::kWasmS128.kind():
170 default:
171 UNREACHABLE();
172 }
173 }
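    // Illustrative layout (hypothetical signature, assuming {arg_buffer}
    // itself is 8-byte aligned): for (i32, f64, externref) the caller packs
    //   [0..3]    the i32 value
    //   [4..11]   the f64 value (read unaligned, no padding inserted)
    //   [12..15]  padding added by `arg_buf_ptr += (arg_buf_ptr & 0x04)`
    //   [16..16+kSystemPointerSize)  the tagged pointer for the externref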
174
175 // Run the function in the interpreter. Note that neither the
176 // {WasmInterpreterObject} nor the {InterpreterHandle} have to exist,
177 // because interpretation might have been triggered by another Isolate
178 // sharing the same WasmEngine.
 179 bool success = WasmInterpreterObject::RunInterpreter(
 180 isolate, frame_pointer, instance, func_index, wasm_args, wasm_rets);
181
182 // Early return on failure.
183 if (!success) {
184 DCHECK(isolate->has_exception());
185 return ReadOnlyRoots(isolate).exception();
186 }
187
188 // Copy return values from the vector of {WasmValue} into {arg_buffer}. This
189 // also un-boxes reference types from handles into raw pointers.
190 arg_buf_ptr = arg_buffer;
191
192 for (int i = 0; i < num_returns; ++i) {
193#define CASE_RET_TYPE(type, ctype) \
194 case wasm::type: \
195 DCHECK_EQ(wasm::ValueTypes::ElementSizeInBytes(sig->GetReturn(i)), \
196 sizeof(ctype)); \
197 base::WriteUnalignedValue<ctype>(arg_buf_ptr, wasm_rets[i].to<ctype>()); \
198 arg_buf_ptr += sizeof(ctype); \
199 break;
200
201 switch (sig->GetReturn(i).kind()) {
202 CASE_RET_TYPE(kWasmI32.kind(), uint32_t)
203 CASE_RET_TYPE(kWasmI64.kind(), uint64_t)
204 CASE_RET_TYPE(kWasmF32.kind(), float)
205 CASE_RET_TYPE(kWasmF64.kind(), double)
206#undef CASE_RET_TYPE
207 case wasm::kRef:
208 case wasm::kRefNull: {
211 DirectHandle<Object> ref = wasm_rets[i].to_ref();
212 // Note: WasmToJSObject(ref) already called in ContinueExecution or
213 // CallExternalJSFunction.
214
215 // Make sure ref slots are 64-bit aligned.
216 arg_buf_ptr += (arg_buf_ptr & 0x04);
 217 base::WriteUnalignedValue<Tagged<Object>>(arg_buf_ptr, *ref);
 218 arg_buf_ptr += kSystemPointerSize;
219 break;
220 }
221 case wasm::kWasmS128.kind():
222 default:
223 UNREACHABLE();
224 }
225 }
226
227 return ReadOnlyRoots(isolate).undefined_value();
228 }
229}
230
231namespace wasm {
232
234 Isolate* isolate, DirectHandle<Tuple2> interpreter_object) {
237 isolate);
238 CHECK(!IsUndefined(*handle, isolate));
240}
241
243 Isolate* isolate, DirectHandle<Tuple2> interpreter_object) {
246 isolate);
247 if (IsUndefined(*handle, isolate)) {
248 // Use the maximum stack size to estimate the maximum size of the
249 // interpreter. The interpreter keeps its own stack internally, and the size
250 // of the stack should dominate the overall size of the interpreter. We
251 // multiply by '2' to account for the growing strategy for the backing store
252 // of the stack.
253 size_t interpreter_size = v8_flags.stack_size * KB * 2;
255 isolate, interpreter_size,
256 std::make_shared<InterpreterHandle>(isolate, interpreter_object));
258 }
259
261}
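// For illustration: assuming the common default of --stack-size=984 (in KB),
// the estimate above is 984 * 1024 * 2 bytes, i.e. roughly 2 MB, presumably
// used as the external memory estimate for the Managed<InterpreterHandle>
// created here.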
262
263// A helper for an entry in an indirect function table (IFT).
264// The underlying storage in the instance is used by generated code to
265// call functions indirectly at runtime.
266// Each entry has the following fields:
267// - implicit_arg = A WasmTrustedInstanceData or a WasmImportData.
268// - sig_id = signature id of function.
269// - target = entrypoint to Wasm code or import wrapper code.
270// - function_index = function index, if a Wasm function, or
271// WasmDispatchTable::kInvalidFunctionIndex otherwise.
273 public:
275 int table_index, int entry_index);
276
278 return table_->implicit_arg(index_);
279 }
280 inline wasm::CanonicalTypeIndex sig_id() const { return table_->sig(index_); }
281 inline WasmCodePointer target() const { return table_->target(index_); }
282 inline uint32_t function_index() const {
283 return table_->function_index(index_);
284 }
285
286 private:
288 int const index_;
289};
290
292 DirectHandle<WasmInstanceObject> instance, int table_index, int entry_index)
293 : table_(
294 table_index != 0
296 instance->trusted_data(instance->GetIsolate())
297 ->dispatch_tables()
298 ->get(table_index)),
299 instance->GetIsolate())
301 instance->trusted_data(instance->GetIsolate())
302 ->dispatch_table0()),
303 instance->GetIsolate())),
304 index_(entry_index) {
305 DCHECK_GE(entry_index, 0);
306 DCHECK_LT(entry_index, table_->length());
307}
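// Table 0 is the common case: its dispatch table is stored directly in the
// trusted instance data ({dispatch_table0}), while any other table is looked
// up by index in the {dispatch_tables} list.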
308
310 const WasmModule* module, Isolate* isolate,
313 : isolate_(isolate),
314 module_(module),
315 instance_object_(instance_object),
316 codemap_(codemap),
317 start_function_index_(UINT_MAX),
318 trap_function_index_(-1),
319 trap_pc_(0),
320 current_thread_(nullptr),
321 fuzzer_start_time_(base::TimeTicks::Now()),
322 memory_start_(nullptr),
323 instruction_table_(kInstructionTable),
324 generic_wasm_to_js_interpreter_wrapper_fn_(
325 GeneratedCode<WasmToJSCallSig>::FromAddress(isolate, {}))
326#ifdef V8_ENABLE_DRUMBRAKE_TRACING
327 ,
328 shadow_stack_(nullptr)
329#endif // V8_ENABLE_DRUMBRAKE_TRACING
330{
331 DCHECK(v8_flags.wasm_jitless);
332
333 InitGlobalAddressCache();
334 InitMemoryAddresses();
335 InitIndirectFunctionTables();
336
337 // Initialize address of GenericWasmToJSInterpreterWrapper builtin.
338 Address wasm_to_js_code_addr_addr =
339 isolate->isolate_root() +
340 IsolateData::BuiltinEntrySlotOffset(Builtin::kWasmInterpreterCWasmEntry);
341 Address wasm_to_js_code_addr =
342 *reinterpret_cast<Address*>(wasm_to_js_code_addr_addr);
343 generic_wasm_to_js_interpreter_wrapper_fn_ =
345 wasm_to_js_code_addr);
346}
347
349 global_addresses_.resize(module_->globals.size());
350 for (size_t index = 0; index < module_->globals.size(); index++) {
351 const WasmGlobal& global = module_->globals[index];
352 if (!global.type.is_reference()) {
354 wasm_trusted_instance_data()->GetGlobalStorage(global);
355 }
356 }
357}
358
359// static
362 Isolate* isolate = instance->GetIsolate();
363 DirectHandle<Tuple2> interpreter_object =
364 WasmTrustedInstanceData::GetOrCreateInterpreterObject(instance);
366 GetOrCreateInterpreterHandle(isolate, interpreter_object);
368 handle->interpreter()->GetWasmRuntime();
370}
371
372int32_t WasmInterpreterRuntime::MemoryGrow(uint32_t delta_pages) {
373 HandleScope handle_scope(isolate_); // Avoid leaking handles.
374 // TODO(paolosev@microsoft.com): Support multiple memories.
375 uint32_t memory_index = 0;
377 wasm_trusted_instance_data()->memory_object(memory_index), isolate_);
378 int32_t result = WasmMemoryObject::Grow(isolate_, memory, delta_pages);
380 return result;
381}
382
384 int table_count = static_cast<int>(module_->tables.size());
385 indirect_call_tables_.resize(table_count);
386 for (int table_index = 0; table_index < table_count; ++table_index) {
387 PurgeIndirectCallCache(table_index);
388 }
389}
390
391bool WasmInterpreterRuntime::TableGet(const uint8_t*& current_code,
392 uint32_t table_index,
393 uint32_t entry_index,
395 // This function assumes that it is executed in a HandleScope.
396
397 auto table = direct_handle(
399 wasm_trusted_instance_data()->tables()->get(table_index)),
400 isolate_);
401 uint32_t table_size = table->current_length();
402 if (entry_index >= table_size) {
403 SetTrap(TrapReason::kTrapTableOutOfBounds, current_code);
404 return false;
405 }
406
407 *result = WasmTableObject::Get(isolate_, table, entry_index);
408 return true;
409}
410
411void WasmInterpreterRuntime::TableSet(const uint8_t*& current_code,
412 uint32_t table_index,
413 uint32_t entry_index,
415 // This function assumes that it is executed in a HandleScope.
416
417 auto table = direct_handle(
419 wasm_trusted_instance_data()->tables()->get(table_index)),
420 isolate_);
421 uint32_t table_size = table->current_length();
422 if (entry_index >= table_size) {
423 SetTrap(TrapReason::kTrapTableOutOfBounds, current_code);
424 } else {
425 WasmTableObject::Set(isolate_, table, entry_index, ref);
426 }
427}
428
429void WasmInterpreterRuntime::TableInit(const uint8_t*& current_code,
430 uint32_t table_index,
431 uint32_t element_segment_index,
432 uint32_t dst, uint32_t src,
433 uint32_t size) {
434 HandleScope scope(isolate_); // Avoid leaking handles.
435
438 auto table = direct_handle(
439 Cast<WasmTableObject>(trusted_data->tables()->get(table_index)),
440 isolate_);
441 if (IsSubtypeOf(table->type(module_), kWasmFuncRef, module_)) {
442 PurgeIndirectCallCache(table_index);
443 }
444
445 std::optional<MessageTemplate> msg_template =
448 table_index, element_segment_index, dst, src, size);
449 // See WasmInstanceObject::InitTableEntries.
450 if (msg_template == MessageTemplate::kWasmTrapTableOutOfBounds) {
451 SetTrap(TrapReason::kTrapTableOutOfBounds, current_code);
452 } else if (msg_template ==
453 MessageTemplate::kWasmTrapElementSegmentOutOfBounds) {
454 SetTrap(TrapReason::kTrapElementSegmentOutOfBounds, current_code);
455 }
456}
457
458void WasmInterpreterRuntime::TableCopy(const uint8_t*& current_code,
459 uint32_t dst_table_index,
460 uint32_t src_table_index, uint32_t dst,
461 uint32_t src, uint32_t size) {
462 HandleScope scope(isolate_); // Avoid leaking handles.
463
466 auto table_dst = direct_handle(
467 Cast<WasmTableObject>(trusted_data->tables()->get(dst_table_index)),
468 isolate_);
469 if (IsSubtypeOf(table_dst->type(module_), kWasmFuncRef, module_)) {
470 PurgeIndirectCallCache(dst_table_index);
471 }
472
474 isolate_, trusted_data, dst_table_index, src_table_index, dst, src,
475 size)) {
476 SetTrap(TrapReason::kTrapTableOutOfBounds, current_code);
477 }
478}
479
480uint32_t WasmInterpreterRuntime::TableGrow(uint32_t table_index, uint32_t delta,
481 DirectHandle<Object> value) {
482 // This function assumes that it is executed in a HandleScope.
483
484 auto table = direct_handle(
486 wasm_trusted_instance_data()->tables()->get(table_index)),
487 isolate_);
488 return WasmTableObject::Grow(isolate_, table, delta, value);
489}
490
491uint32_t WasmInterpreterRuntime::TableSize(uint32_t table_index) {
492 HandleScope handle_scope(isolate_); // Avoid leaking handles.
493 auto table = direct_handle(
495 wasm_trusted_instance_data()->tables()->get(table_index)),
496 isolate_);
497 return table->current_length();
498}
499
500void WasmInterpreterRuntime::TableFill(const uint8_t*& current_code,
501 uint32_t table_index, uint32_t count,
503 uint32_t start) {
504 // This function assumes that it is executed in a HandleScope.
505
506 auto table = direct_handle(
508 wasm_trusted_instance_data()->tables()->get(table_index)),
509 isolate_);
510 uint32_t table_size = table->current_length();
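  // The unsigned comparison below also catches wrap-around: e.g. with
  // start = 0xFFFFFFF0 and count = 0x20, start + count wraps to 0x10, which is
  // smaller than start, so the range is rejected as out of bounds.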
511 if (start + count < start || // Check for overflow.
512 start + count > table_size) {
513 SetTrap(TrapReason::kTrapTableOutOfBounds, current_code);
514 return;
515 }
516
517 if (count == 0) {
518 return;
519 }
520
521 WasmTableObject::Fill(isolate_, table, start, value, count);
522}
523
524bool WasmInterpreterRuntime::MemoryInit(const uint8_t*& current_code,
525 uint32_t data_segment_index,
526 uint64_t dst, uint64_t src,
527 uint64_t size) {
530 Address dst_addr;
531 uint64_t src_max =
532 trusted_data->data_segment_sizes()->get(data_segment_index);
533 if (!BoundsCheckMemRange(dst, &size, &dst_addr) ||
534 !base::IsInBounds(src, size, src_max)) {
535 SetTrap(TrapReason::kTrapMemOutOfBounds, current_code);
536 return false;
537 }
538
539 Address src_addr =
540 trusted_data->data_segment_starts()->get(data_segment_index) + src;
541 std::memmove(reinterpret_cast<void*>(dst_addr),
542 reinterpret_cast<void*>(src_addr), size);
543 return true;
544}
545
546bool WasmInterpreterRuntime::MemoryCopy(const uint8_t*& current_code,
547 uint64_t dst, uint64_t src,
548 uint64_t size) {
549 Address dst_addr;
550 Address src_addr;
551 if (!BoundsCheckMemRange(dst, &size, &dst_addr) ||
552 !BoundsCheckMemRange(src, &size, &src_addr)) {
553 SetTrap(TrapReason::kTrapMemOutOfBounds, current_code);
554 return false;
555 }
556
557 std::memmove(reinterpret_cast<void*>(dst_addr),
558 reinterpret_cast<void*>(src_addr), size);
559 return true;
560}
561
562bool WasmInterpreterRuntime::MemoryFill(const uint8_t*& current_code,
563 uint64_t dst, uint32_t value,
564 uint64_t size) {
565 Address dst_addr;
566 if (!BoundsCheckMemRange(dst, &size, &dst_addr)) {
567 SetTrap(TrapReason::kTrapMemOutOfBounds, current_code);
568 return false;
569 }
570
571 std::memset(reinterpret_cast<void*>(dst_addr), value, size);
572 return true;
573}
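// Note: std::memset only uses the lowest byte of {value}, which matches the
// semantics of memory.fill (the fill value is an i32 of which only the low
// byte is written).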
574
575// Unpack the values encoded in the given exception. The exception values are
576// pushed onto the operand stack.
578 uint32_t* sp, const WasmTag& tag, DirectHandle<Object> exception_object,
579 uint32_t first_param_slot_index, uint32_t first_param_ref_stack_index) {
580 DirectHandle<FixedArray> encoded_values =
582 isolate_, Cast<WasmExceptionPackage>(exception_object)));
583 // Decode the exception values from the given exception package and push
584 // them onto the operand stack. This encoding has to be in sync with other
585 // backends so that exceptions can be passed between them.
586 const WasmTagSig* sig = tag.sig;
587 uint32_t encoded_index = 0;
588 uint32_t* p = sp + first_param_slot_index;
589 for (size_t i = 0; i < sig->parameter_count(); ++i) {
590#ifdef V8_ENABLE_DRUMBRAKE_TRACING
591 if (v8_flags.trace_drumbrake_execution) {
592 TracePush(sig->GetParam(i).kind(), static_cast<uint32_t>(p - sp));
593 }
594#endif // V8_ENABLE_DRUMBRAKE_TRACING
595
597 switch (sig->GetParam(i).kind()) {
598 case kI32: {
599 uint32_t u32 = 0;
600 DecodeI32ExceptionValue(encoded_values, &encoded_index, &u32);
601 base::WriteUnalignedValue<uint32_t>(reinterpret_cast<Address>(p), u32);
602 p += sizeof(uint32_t) / kSlotSize;
603 break;
604 }
605 case kF32: {
606 uint32_t f32_bits = 0;
607 DecodeI32ExceptionValue(encoded_values, &encoded_index, &f32_bits);
608 float f32 = Float32::FromBits(f32_bits).get_scalar();
609 base::WriteUnalignedValue<float>(reinterpret_cast<Address>(p), f32);
610 p += sizeof(float) / kSlotSize;
611 break;
612 }
613 case kI64: {
614 uint64_t u64 = 0;
615 DecodeI64ExceptionValue(encoded_values, &encoded_index, &u64);
616 base::WriteUnalignedValue<uint64_t>(reinterpret_cast<Address>(p), u64);
617 p += sizeof(uint64_t) / kSlotSize;
618 break;
619 }
620 case kF64: {
621 uint64_t f64_bits = 0;
622 DecodeI64ExceptionValue(encoded_values, &encoded_index, &f64_bits);
 623 double f64 = Float64::FromBits(f64_bits).get_scalar();
624 base::WriteUnalignedValue<double>(reinterpret_cast<Address>(p), f64);
625 p += sizeof(double) / kSlotSize;
626 break;
627 }
628 case kS128: {
629 int32x4 s128 = {0, 0, 0, 0};
630 uint32_t* vals = reinterpret_cast<uint32_t*>(s128.val);
631 DecodeI32ExceptionValue(encoded_values, &encoded_index, &vals[0]);
632 DecodeI32ExceptionValue(encoded_values, &encoded_index, &vals[1]);
633 DecodeI32ExceptionValue(encoded_values, &encoded_index, &vals[2]);
634 DecodeI32ExceptionValue(encoded_values, &encoded_index, &vals[3]);
635 base::WriteUnalignedValue<Simd128>(reinterpret_cast<Address>(p),
636 Simd128(s128));
637 p += sizeof(Simd128) / kSlotSize;
638 break;
639 }
640 case kRef:
641 case kRefNull: {
642 DirectHandle<Object> ref(encoded_values->get(encoded_index++),
643 isolate_);
644 if (sig->GetParam(i).value_type_code() == wasm::kFuncRefCode &&
645 i::IsNull(*ref, isolate_)) {
646 ref = isolate_->factory()->wasm_null();
647 }
648 StoreWasmRef(first_param_ref_stack_index++, ref);
649 base::WriteUnalignedValue<WasmRef>(reinterpret_cast<Address>(p), ref);
650 p += sizeof(WasmRef) / kSlotSize;
651 break;
652 }
653 default:
654 UNREACHABLE();
655 }
656 }
658}
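// Each decoded value is written into the operand stack at {p}, and {p} is then
// advanced by sizeof(T) / kSlotSize slots; assuming kSlotSize is the size of
// one 32-bit stack slot, an i64/f64 payload occupies two slots and an s128
// payload occupies four.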
659
660namespace {
661void RedirectCodeToUnwindHandler(const uint8_t*& code) {
662 // Resume execution from s2s_Unwind, which unwinds the Wasm stack frames
663 code = reinterpret_cast<uint8_t*>(&s_unwind_code);
664}
665} // namespace
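// Pointing {code} at {s_unwind_code} makes the next dispatch in the
// interpreter loop execute the s2s_Unwind handler (which unwinds the Wasm
// stack frames) instead of the current function's own bytecode.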
666
667// Allocate a new exception.
668DirectHandle<WasmExceptionPackage>
672 DirectHandle<WasmExceptionTag> exception_tag(
673 Cast<WasmExceptionTag>(trusted_data->tags_table()->get(tag_index)),
674 isolate_);
675 const WasmTag& tag = GetWasmTag(tag_index);
676 uint32_t encoded_size = WasmExceptionPackage::GetEncodedSize(&tag);
677 DirectHandle<WasmExceptionPackage> exception_object =
678 WasmExceptionPackage::New(isolate_, exception_tag, encoded_size);
679 return exception_object;
680}
681
682// Throw a Wasm exception.
683void WasmInterpreterRuntime::ThrowException(const uint8_t*& code, uint32_t* sp,
684 Tagged<Object> exception_object) {
685 // Keep track of the code offset of the current instruction, which we'll need
686 // to calculate the stack trace from Isolate::Throw.
687 current_frame_.current_bytecode_ = code;
688
691
692 // Now that the exception is ready, set it as pending.
693 {
694 wasm::ClearThreadInWasmScope clear_wasm_flag(isolate_);
695 isolate_->Throw(exception_object);
697 RedirectCodeToUnwindHandler(code);
698 }
699 }
701}
702
703// Throw a given existing exception caught by the catch block specified.
705 uint32_t* sp,
706 uint32_t catch_block_index) {
707 DirectHandle<Object> exception_object =
708 current_frame_.GetCaughtException(isolate_, catch_block_index);
709 DCHECK(!IsTheHole(*exception_object));
710 ThrowException(code, sp, *exception_object);
711}
712
713// Handle a thrown exception. Returns whether the exception was handled inside
714// of wasm. Unwinds the interpreted stack accordingly.
717 const uint8_t*& current_code) {
718 DCHECK_IMPLIES(current_code, current_frame_.current_function_);
719 DCHECK_IMPLIES(!current_code, !current_frame_.current_function_);
721
722 bool catchable = current_frame_.current_function_ &&
724 if (catchable) {
725 HandleScope scope(isolate_);
729
730 // We might need to allocate a new FixedArray<Object> to store the caught
731 // exception.
732 DCHECK(AllowHeapAllocation::IsAllowed());
733
734 size_t current_code_offset =
735 current_code - current_frame_.current_function_->GetCode();
736 const WasmEHData::TryBlock* try_block =
737 current_frame_.current_function_->GetTryBlock(current_code_offset);
738 while (try_block) {
739 for (const auto& catch_handler : try_block->catch_handlers) {
740 if (catch_handler.tag_index < 0) {
741 // Catch all.
742 current_code = current_frame_.current_function_->GetCode() +
743 catch_handler.code_offset;
744 current_frame_.SetCaughtException(
745 isolate_, catch_handler.catch_block_index, exception);
748 } else if (IsWasmExceptionPackage(*exception, isolate_)) {
749 // The exception was thrown by Wasm code and it's wrapped in a
750 // WasmExceptionPackage.
751 DirectHandle<Object> caught_tag =
754 DirectHandle<Object> expected_tag(
755 trusted_data->tags_table()->get(catch_handler.tag_index),
756 isolate_);
757 DCHECK(IsWasmExceptionTag(*expected_tag));
758 // Determines whether the given exception has a tag matching the
759 // expected tag for the given index within the exception table of the
760 // current instance.
761 if (expected_tag.is_identical_to(caught_tag)) {
762 current_code = current_frame_.current_function_->GetCode() +
763 catch_handler.code_offset;
764 DCHECK_LT(catch_handler.tag_index, module_->tags.size());
765 const WasmTag& tag = module_->tags[catch_handler.tag_index];
766 auto exception_payload_slot_offsets =
767 current_frame_.current_function_
768 ->GetExceptionPayloadStartSlotOffsets(
769 catch_handler.catch_block_index);
771 sp, tag, exception,
772 exception_payload_slot_offsets.first_param_slot_offset,
773 exception_payload_slot_offsets.first_param_ref_stack_index);
774 current_frame_.SetCaughtException(
775 isolate_, catch_handler.catch_block_index, exception);
778 }
779 } else {
780 // Check for the special case where the tag is WebAssembly.JSTag and
781 // the exception is not a WebAssembly.Exception. In this case the
782 // exception is caught and pushed on the operand stack.
783 // Only perform this check if the tag signature is the same as
784 // the JSTag signature, i.e. a single externref, otherwise we know
785 // statically that it cannot be the JSTag.
786 DCHECK_LT(catch_handler.tag_index, module_->tags.size());
787 const WasmTagSig* sig = module_->tags[catch_handler.tag_index].sig;
788 if (sig->return_count() != 0 || sig->parameter_count() != 1 ||
789 (sig->GetParam(0).kind() != kRefNull &&
790 sig->GetParam(0).kind() != kRef)) {
791 continue;
792 }
793
794 DirectHandle<JSObject> js_tag_object(
795 isolate_->native_context()->wasm_js_tag(), isolate_);
796 DirectHandle<WasmTagObject> wasm_tag_object(
797 Cast<WasmTagObject>(*js_tag_object), isolate_);
798 DirectHandle<Object> caught_tag(wasm_tag_object->tag(), isolate_);
799 DirectHandle<Object> expected_tag(
800 trusted_data->tags_table()->get(catch_handler.tag_index),
801 isolate_);
802 if (!expected_tag.is_identical_to(caught_tag)) {
803 continue;
804 }
805
806 current_code = current_frame_.current_function_->GetCode() +
807 catch_handler.code_offset;
808 // Push exception on the operand stack.
809 auto exception_payload_slot_offsets =
810 current_frame_.current_function_
811 ->GetExceptionPayloadStartSlotOffsets(
812 catch_handler.catch_block_index);
814 exception_payload_slot_offsets.first_param_ref_stack_index,
815 exception);
817 reinterpret_cast<Address>(
818 sp + exception_payload_slot_offsets.first_param_slot_offset),
819 exception);
820
821 current_frame_.SetCaughtException(
822 isolate_, catch_handler.catch_block_index, exception);
825 }
826 }
827 try_block =
828 current_frame_.current_function_->GetParentTryBlock(try_block);
829 }
830 }
831
835}
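// Summary of the matching logic above: try blocks are walked from the
// innermost one outwards via GetParentTryBlock(). For each catch handler,
// either (1) a negative tag index means catch-all, (2) a WasmExceptionPackage
// thrown by Wasm is matched by comparing its tag by identity against the
// instance's {tags_table} entry, or (3) a non-Wasm JS exception is caught by a
// handler whose tag is WebAssembly.JSTag and whose signature is exactly one
// (nullable) reference parameter.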
836
838 return !module_->memories.empty() && module_->memories[0].is_shared &&
840}
841
842int32_t WasmInterpreterRuntime::AtomicNotify(uint64_t buffer_offset,
843 int32_t val) {
844 if (module_->memories.empty() || !module_->memories[0].is_shared) {
845 return 0;
846 } else {
847 HandleScope handle_scope(isolate_);
848 // TODO(paolosev@microsoft.com): Support multiple memories.
849 uint32_t memory_index = 0;
851 ->memory_object(memory_index)
852 ->array_buffer(),
853 isolate_);
854 int result = FutexEmulation::Wake(*array_buffer, buffer_offset, val);
855 return result;
856 }
857}
858
859int32_t WasmInterpreterRuntime::I32AtomicWait(uint64_t buffer_offset,
860 int32_t val, int64_t timeout) {
861 // We might handle interrupts (for example from a debugger) here while we are
862 // locked in the futex. We need to make sure that the current activation is
863 // up-to-date, if we need to traverse the call stack.
865
866 HandleScope handle_scope(isolate_);
867 // TODO(paolosev@microsoft.com): Support multiple memories.
868 uint32_t memory_index = 0;
869 DirectHandle<JSArrayBuffer> array_buffer(
870 wasm_trusted_instance_data()->memory_object(memory_index)->array_buffer(),
871 isolate_);
872 auto result = FutexEmulation::WaitWasm32(isolate_, array_buffer,
873 buffer_offset, val, timeout);
874 return result.ToSmi().value();
875}
876
877int32_t WasmInterpreterRuntime::I64AtomicWait(uint64_t buffer_offset,
878 int64_t val, int64_t timeout) {
879 // We might handle interrupts (for example from a debugger) here while we are
880 // locked in the futex. We need to make sure that the current activation is
881 // up-to-date, if we need to traverse the call stack.
883
884 HandleScope handle_scope(isolate_);
885 // TODO(paolosev@microsoft.com): Support multiple memories.
886 uint32_t memory_index = 0;
887 DirectHandle<JSArrayBuffer> array_buffer(
888 wasm_trusted_instance_data()->memory_object(memory_index)->array_buffer(),
889 isolate_);
890 auto result = FutexEmulation::WaitWasm64(isolate_, array_buffer,
891 buffer_offset, val, timeout);
892 return result.ToSmi().value();
893}
894
896 WasmInterpreterThread* thread, uint32_t func_index, Address frame_pointer,
897 uint8_t* interpreter_fp, uint32_t ref_stack_offset,
898 const std::vector<WasmValue>* argument_values) {
900 start_function_index_ = func_index;
901
902 thread->StartActivation(this, frame_pointer, interpreter_fp, current_frame_);
903
904 current_frame_.current_function_ = nullptr;
905 current_frame_.previous_frame_ = nullptr;
906 current_frame_.current_bytecode_ = nullptr;
907 current_frame_.current_sp_ = interpreter_fp;
908 current_frame_.ref_array_current_sp_ = ref_stack_offset;
909 current_frame_.thread_ = thread;
910#ifdef V8_ENABLE_DRUMBRAKE_TRACING
911 current_frame_.current_stack_start_args_ = thread->CurrentStackFrameStart();
912#endif // V8_ENABLE_DRUMBRAKE_TRACING
913
914 const FunctionSig* sig = module_->functions[func_index].sig;
915 size_t args_count = 0;
916 uint32_t rets_slots_size = 0;
917 uint32_t ref_rets_count = 0;
918 uint32_t ref_args_count = 0;
919 WasmBytecode* target_function = GetFunctionBytecode(func_index);
920 if (target_function) {
921 args_count = target_function->args_count();
922 rets_slots_size = target_function->rets_slots_size();
923 ref_rets_count = target_function->ref_rets_count();
924 ref_args_count = target_function->ref_args_count();
925 } else {
926 // We begin execution by calling an imported function.
927 args_count = sig->parameter_count();
928 rets_slots_size = WasmBytecode::RetsSizeInSlots(sig);
929 ref_rets_count = WasmBytecode::RefRetsCount(sig);
930 ref_args_count = WasmBytecode::RefArgsCount(sig);
931 }
932
 933 // Here GC is disabled: we cannot "resize" the reference_stack_ FixedArray
934 // before having created Handles for the Ref arguments passed in
935 // argument_values.
936 HandleScope handle_scope(isolate_); // Avoid leaking handles.
937
939 if (ref_args_count > 0) {
940 ref_args.reserve(ref_args_count);
941 }
942
943 uint8_t* p = interpreter_fp + rets_slots_size * kSlotSize;
944
945 // Check stack overflow.
946 const uint8_t* stack_limit = thread->StackLimitAddress();
947 if (V8_UNLIKELY(p + (ref_rets_count + ref_args_count) * sizeof(WasmRef) >=
948 stack_limit)) {
949 size_t additional_required_size =
950 p + (ref_rets_count + ref_args_count) * sizeof(WasmRef) - stack_limit;
951 if (!thread->ExpandStack(additional_required_size)) {
952 // TODO(paolosev@microsoft.com) - Calculate initial function offset.
953 ClearThreadInWasmScope clear_wasm_flag(isolate_);
956 const pc_t trap_pc = 0;
957 SetTrap(TrapReason::kTrapUnreachable, trap_pc);
958 thread->FinishActivation();
959 return;
960 }
961 }
962
963 if (argument_values) {
964 // We are being called from JS, arguments are passed in the
965 // {argument_values} vector.
966 for (size_t i = 0; i < argument_values->size(); i++) {
967 const WasmValue& value = (*argument_values)[i];
968 switch (value.type().kind()) {
969 case kI32:
970 base::WriteUnalignedValue<int32_t>(reinterpret_cast<Address>(p),
971 value.to<int32_t>());
972 p += sizeof(int32_t);
973 break;
974 case kI64:
975 base::WriteUnalignedValue<int64_t>(reinterpret_cast<Address>(p),
976 value.to<int64_t>());
977 p += sizeof(int64_t);
978 break;
979 case kF32:
980 base::WriteUnalignedValue<float>(reinterpret_cast<Address>(p),
981 value.to<float>());
982 p += sizeof(float);
983 break;
984 case kF64:
985 base::WriteUnalignedValue<double>(reinterpret_cast<Address>(p),
986 value.to<double>());
987 p += sizeof(double);
988 break;
989 case kRef:
990 case kRefNull:
991 ref_args.push_back(value.to_ref());
992 base::WriteUnalignedValue<uint64_t>(reinterpret_cast<Address>(p),
994 p += sizeof(WasmRef);
995 break;
996 case kS128:
997 default:
998 UNREACHABLE();
999 }
1000 }
1001 } else {
1002 // We are being called from Wasm, arguments are already in the stack.
1003 for (size_t i = 0; i < args_count; i++) {
1004 switch (sig->GetParam(i).kind()) {
1005 case kI32:
1006 p += sizeof(int32_t);
1007 break;
1008 case kI64:
1009 p += sizeof(int64_t);
1010 break;
1011 case kF32:
1012 p += sizeof(float);
1013 break;
1014 case kF64:
1015 p += sizeof(double);
1016 break;
1017 case kS128:
1018 p += sizeof(Simd128);
1019 break;
1020 case kRef:
1021 case kRefNull: {
1024 reinterpret_cast<Address>(p));
1025 ref_args.push_back(ref);
1026 p += sizeof(WasmRef);
1027 break;
1028 }
1029 default:
1030 UNREACHABLE();
1031 }
1032 }
1033 }
1034
1035 {
 1036 // Once we have read the ref arguments passed on the stack and have stored
1037 // them into the ref_args vector of Handles, we can re-enable the GC.
1038 AllowHeapAllocation allow_gc;
1039
1040 if (ref_rets_count + ref_args_count > 0) {
1041 // Reserve space for reference args and return values in the
1042 // reference_stack_.
1044 current_frame_.ref_array_current_sp_ + ref_rets_count +
1045 ref_args_count);
1046
1047 uint32_t ref_stack_arg_index = ref_rets_count;
1048 for (uint32_t ref_arg_index = 0; ref_arg_index < ref_args_count;
1049 ref_arg_index++) {
1050 StoreWasmRef(ref_stack_arg_index++, ref_args[ref_arg_index]);
1051 }
1052 }
1053 }
1054}
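// At this point the interpreter frame at {interpreter_fp} contains
// {rets_slots_size} slots reserved for the results, immediately followed by
// the argument slots. Reference arguments are additionally mirrored into the
// thread's reference_stack_, right after the {ref_rets_count} slots reserved
// for reference results, so the GC can trace them while the frame is live.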
1055
1057 bool called_from_js) {
1059
1060 uint32_t start_function_index = start_function_index_;
1061 FrameState current_frame = current_frame_;
1062
1063 const uint8_t* code = nullptr;
1064 const FunctionSig* sig = nullptr;
1065 const CanonicalSig* canonicalized_sig = nullptr;
1066 uint32_t return_count = 0;
1068 if (target_function) {
1069 sig = target_function->GetFunctionSignature();
1070 canonicalized_sig = target_function->GetCanonicalFunctionSignature();
1071 return_count = target_function->return_count();
1072 ExecuteFunction(code, start_function_index_, target_function->args_count(),
1073 0, 0, 0);
1074 } else {
1075 sig = module_->functions[start_function_index_].sig;
1076 CanonicalTypeIndex canonical_sig_index = module_->canonical_sig_id(
1077 module_->functions[start_function_index_].sig_index);
1078 canonicalized_sig =
1079 GetTypeCanonicalizer()->LookupFunctionSignature(canonical_sig_index);
1080 return_count = static_cast<uint32_t>(sig->return_count());
1082 static_cast<uint32_t>(sig->parameter_count()), 0, 0,
1083 0);
1084 }
1085
1086 // If there are Ref types in the set of result types defined in the function
1087 // signature, they are located from the first ref_stack_ slot of the current
1088 // Activation.
1089 uint32_t ref_result_slot_index = 0;
1090
1092 if (return_count > 0) {
1093 uint32_t* dst = reinterpret_cast<uint32_t*>(current_frame_.current_sp_);
1094
1095 if (called_from_js) {
1096 // We are returning the results to a JS caller, we need to store them
1097 // into the {function_result_} vector and they will be retrieved via
1098 // {GetReturnValue}.
1099 function_result_.resize(return_count);
1100 for (size_t index = 0; index < return_count; index++) {
1101 CanonicalValueType ret_value_type =
1102 canonicalized_sig->GetReturn(index);
1103 switch (ret_value_type.kind()) {
1104 case kI32:
1107 reinterpret_cast<Address>(dst)));
1108 dst += sizeof(uint32_t) / kSlotSize;
1109 break;
1110 case kI64:
1113 reinterpret_cast<Address>(dst)));
1114 dst += sizeof(uint64_t) / kSlotSize;
1115 break;
1116 case kF32:
1119 reinterpret_cast<Address>(dst)));
1120 dst += sizeof(float) / kSlotSize;
1121 break;
1122 case kF64:
1125 reinterpret_cast<Address>(dst)));
1126 dst += sizeof(double) / kSlotSize;
1127 break;
1128 case kRef:
1129 case kRefNull: {
1131 ExtractWasmRef(ref_result_slot_index++);
1132 ref = WasmToJSObject(ref);
1133 function_result_[index] = WasmValue(ref, ret_value_type);
1134 dst += sizeof(WasmRef) / kSlotSize;
1135 break;
1136 }
1137 case kS128:
1138 default:
1139 UNREACHABLE();
1140 }
1141 }
1142 } else {
1143 // We are returning the results on the stack
1144 for (size_t index = 0; index < return_count; index++) {
1145 switch (sig->GetReturn(index).kind()) {
1146 case kI32:
1147 dst += sizeof(uint32_t) / kSlotSize;
1148 break;
1149 case kI64:
1150 dst += sizeof(uint64_t) / kSlotSize;
1151 break;
1152 case kF32:
1153 dst += sizeof(float) / kSlotSize;
1154 break;
1155 case kF64:
1156 dst += sizeof(double) / kSlotSize;
1157 break;
1158 case kS128:
1159 dst += sizeof(Simd128) / kSlotSize;
1160 break;
1161 case kRef:
1162 case kRefNull: {
 1163 // Make sure the ref result is temporarily stored in a stack
1164 // slot, to be retrieved by the caller.
1166 ExtractWasmRef(ref_result_slot_index++);
1167 base::WriteUnalignedValue<WasmRef>(reinterpret_cast<Address>(dst),
1168 ref);
1169 dst += sizeof(WasmRef) / kSlotSize;
1170 break;
1171 }
1172 default:
1173 UNREACHABLE();
1174 }
1175 }
1176 }
1177 }
1178
1179 if (ref_result_slot_index > 0) {
1180 current_thread_->ClearRefStackValues(current_frame_.ref_array_current_sp_,
1181 ref_result_slot_index);
1182 }
1183
1184 DCHECK(current_frame_.caught_exceptions_.is_null());
1185
1186 start_function_index_ = start_function_index;
1187 current_frame_ = current_frame;
1189 MessageTemplate message_id =
1190 WasmOpcodes::TrapReasonToMessageId(thread->GetTrapReason());
1191 thread->RaiseException(isolate_, message_id);
1193 // Uncaught exception.
1194 thread->Stop();
1195 } else {
1197 }
1198
1199 thread->FinishActivation();
1200 const FrameState* frame_state = thread->GetCurrentActivationFor(this);
1201 current_frame_ = frame_state ? *frame_state : FrameState();
1202}
1203
1204void WasmInterpreterRuntime::StoreWasmRef(uint32_t ref_stack_index,
1205 const WasmRef& ref) {
1206 uint32_t index = ref_stack_index + current_frame_.ref_array_current_sp_;
1207 if (ref.is_null()) {
1208 reference_stack()->set_the_hole(isolate_, index);
1209 } else {
1210 reference_stack()->set(index, *ref);
1211 }
1212}
1213
1215 int index =
1216 static_cast<int>(ref_stack_index) + current_frame_.ref_array_current_sp_;
1217 DirectHandle<Object> ref(reference_stack()->get(index), isolate_);
1218 DCHECK(!IsTheHole(*ref, isolate_));
1219 return WasmRef(ref);
1220}
1221
1222// A tail call should not add an additional stack frame to the interpreter
1223// stack. This is implemented by unwinding the current stack frame just before
1224// the tail call.
1226 uint32_t* sp, uint32_t slot_offset, uint32_t rets_size, uint32_t args_size,
1227 uint32_t rets_refs, uint32_t args_refs, uint32_t ref_stack_fp_offset) {
1228 // At the moment of the call the interpreter stack is as in the diagram below.
1229 // A new interpreter frame for the callee function has been initialized, with
1230 // `R` slots to contain the R return values, followed by {args_size} slots to
1231 // contain the callee arguments.
1232 //
1233 // In order to unwind an interpreter stack frame we just copy the content of
1234 // the slots that contain the callee arguments into the caller stack frame,
1235 // just after the slots of the return values. Note that the return call is
1236 // invalid if the number and types of the return values of the callee function
1237 // do not exactly match the number and types of the return values of the
 1238 // caller function. In contrast, the number and types of the arguments of
 1239 // the caller and callee functions can differ.
1240 //
1241 // The other slots in the caller frame, for const values and locals, will be
1242 // initialized later in ExecuteFunction().
1243 //
1244 // +----------------------+
1245 // | argA-1 | ^ ^
1246 // | ... | | | ->-----+
1247 // | ... | | | |
1248 // | arg0 | callee v |
1249 // | retR-1 | frame |
1250 // | ... | | |
1251 // | ret0 | v | copy
1252 // +----------------------+ (slot_offset) |
1253 // | ... | ^ V
1254 // | <stack slots> | | |
1255 // | <locals slots> | | |
1256 // | <const slots> | | ^ |
1257 // | argN-1 | caller | <------+
1258 // | ... | frame |
1259 // | arg0 | | v
1260 // | retR-1 | |
1261 // | ... | |
1262 // | ret0 | v
1263 // +----------------------+ (0)
1264
1265 uint8_t* next_sp = reinterpret_cast<uint8_t*>(sp);
1266 uint8_t* prev_sp = next_sp + slot_offset;
 1267 // Here {args_size} is the size, in bytes, of the arguments expected by the
 1268 // function we are calling, which can be different from the size of the
 1269 // caller's arguments.
1270 ::memmove(next_sp + rets_size, prev_sp, args_size);
1271
1272 // If some of the argument-slots contain Ref values, we need to move them
1273 // accordingly, in the thread {reference_stack_}.
1274 if (rets_refs) {
1275 current_thread_->ClearRefStackValues(current_frame_.ref_array_current_sp_,
1276 rets_refs);
1277 }
1278 // Here {args_refs} is the number of reference args expected by the function
1279 // we are calling, which can be different from the number of reference args of
1280 // the caller function.
1281 for (uint32_t i = 0; i < args_refs; i++) {
1282 StoreWasmRef(rets_refs + i, ExtractWasmRef(ref_stack_fp_offset + i));
1283 }
1284 if (ref_stack_fp_offset > rets_refs + args_refs) {
1286 current_frame_.ref_array_current_sp_ + rets_refs + args_refs,
1287 ref_stack_fp_offset - rets_refs - args_refs);
1288 }
1289}
1290
1292 uint8_t* sp, uint32_t ref_stack_fp_offset, const FunctionSig* sig) {
1293 // Argument values of type Ref, if present, are already stored in the thread's
1294 // reference_stack_ starting at index ref_stack_fp_offset + RefRetsCount(sig).
 1295 // We want to temporarily copy the pointers to these objects into the stack
 1296 // slots as well, because WasmInterpreter::RunInterpreter() and
 1297 // WasmInterpreter::CallExternalJSFunction() get all arguments from the stack.
1298
1299 // TODO(paolosev@microsoft.com) - Too slow?
1300 ref_stack_fp_offset += WasmBytecode::RefRetsCount(sig);
1301
1302 size_t args_count = sig->parameter_count();
1304 for (size_t i = 0; i < args_count; i++) {
1305 switch (sig->GetParam(i).kind()) {
1306 case kI32:
1307 case kF32:
1308 sp += sizeof(int32_t);
1309 break;
1310 case kI64:
1311 case kF64:
1312 sp += sizeof(int64_t);
1313 break;
1314 case kS128:
1315 sp += sizeof(Simd128);
1316 break;
1317 case kRef:
1318 case kRefNull: {
1319 WasmRef ref = ExtractWasmRef(ref_stack_fp_offset++);
1320 base::WriteUnalignedValue<WasmRef>(reinterpret_cast<Address>(sp), ref);
1321 sp += sizeof(WasmRef);
1322 break;
1323 }
1324 default:
1325 UNREACHABLE();
1326 }
1327 }
1328}
1329
1331 uint8_t* sp, uint32_t ref_stack_fp_offset, const FunctionSig* sig) {
1332 size_t rets_count = sig->return_count();
1333 for (size_t i = 0; i < rets_count; i++) {
1334 switch (sig->GetReturn(i).kind()) {
1335 case kI32:
1336 case kF32:
1337 sp += sizeof(int32_t);
1338 break;
1339 case kI64:
1340 case kF64:
1341 sp += sizeof(int64_t);
1342 break;
1343 case kS128:
1344 sp += sizeof(Simd128);
1345 break;
1346 case kRef:
1347 case kRefNull:
1348 StoreWasmRef(ref_stack_fp_offset++, base::ReadUnalignedValue<WasmRef>(
1349 reinterpret_cast<Address>(sp)));
1352 sp += sizeof(WasmRef);
1353 break;
1354 default:
1355 UNREACHABLE();
1356 }
1357 }
1358}
1359
1361 const uint8_t*& code, uint32_t func_index, uint32_t current_stack_size,
1362 uint32_t ref_stack_fp_offset, uint32_t slot_offset,
1363 uint32_t return_slot_offset, bool is_tail_call) {
1364 WasmInterpreterThread* thread = this->thread();
1365 DCHECK_NOT_NULL(thread);
1366
1367 // Store a pointer to the current FrameState before leaving the current
1368 // Activation.
1369 current_frame_.current_bytecode_ = code;
1370 thread->SetCurrentFrame(current_frame_);
1371 thread->SetCurrentActivationFrame(
1372 reinterpret_cast<uint32_t*>(current_frame_.current_sp_ + slot_offset),
1373 slot_offset, current_stack_size,
1374 current_frame_.ref_array_current_sp_ + ref_stack_fp_offset,
1375 ref_stack_fp_offset);
1376
1378 code, func_index,
1379 reinterpret_cast<uint32_t*>(current_frame_.current_sp_ + slot_offset),
1380 current_stack_size, ref_stack_fp_offset, slot_offset);
1381
1383 WasmInterpreterThread::ExceptionHandlingResult exception_handling_result =
1384 HandleException(reinterpret_cast<uint32_t*>(current_frame_.current_sp_),
1385 code);
1386 if (exception_handling_result ==
1388 // The exception was caught by Wasm EH. Resume execution,
1389 // {HandleException} has already updated {code} to point to the first
1390 // instruction in the catch handler.
1391 thread->Run();
1392 } else {
1394 DCHECK_EQ(exception_handling_result,
1396 if (thread->state() != WasmInterpreterThread::State::EH_UNWINDING) {
1397 thread->Stop();
1398 }
1399 // Resume execution from s2s_Unwind, which unwinds the Wasm stack frames.
1400 RedirectCodeToUnwindHandler(code);
1401 }
1402 }
1403
1404 if (is_tail_call) {
1405 RedirectCodeToUnwindHandler(code);
1406 }
1407}
1408
1410 const uint8_t* code, uint32_t* sp, WasmInterpreterRuntime* wasm_runtime,
1411 int64_t r0, double fp0) {
1413 code, sp, wasm_runtime, r0, fp0);
1414}
1415
1417 const WasmBytecode* target_function) {
1418 if (V8_UNLIKELY(target_function->ref_locals_count() > 0)) {
1419 uint32_t ref_stack_index =
1420 target_function->ref_rets_count() + target_function->ref_args_count();
1421 for (uint32_t i = 0; i < target_function->locals_count(); i++) {
1422 ValueType local_type = target_function->local_type(i);
1423 if (local_type == kWasmExternRef || local_type == kWasmNullExternRef) {
1424 StoreWasmRef(ref_stack_index++,
1425 WasmRef(isolate_->factory()->null_value()));
1426 } else if (local_type.is_reference()) {
1427 StoreWasmRef(ref_stack_index++,
1428 WasmRef(isolate_->factory()->wasm_null()));
1429 }
1430 }
1431 }
1432}
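// Reference locals are initialized to one of the two null representations:
// externref locals get the JS null value, while all other reference locals get
// the Wasm null sentinel (wasm_null).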
1433
1434// Sets up the current interpreter stack frame to start executing a new function
 1435// with a tail call. It does not move the stack pointer of the interpreter
 1436// stack, and it avoids calling WasmInterpreterRuntime::ExecuteFunction(),
 1437// which would add a new C++ stack frame.
1439 uint32_t func_index,
1440 uint32_t current_stack_size,
1441 uint32_t return_slot_offset) {
 1442 // TODO(paolosev@microsoft.com): avoid duplicating code from ExecuteFunction?
1443
1444 WasmBytecode* target_function = GetFunctionBytecode(func_index);
1445 DCHECK_NOT_NULL(target_function);
1446
1447 // Update the current (caller) frame setting current_bytecode_ to the initial
1448 // instruction of the callee.
1449 current_frame_.current_function_ = target_function;
1450 current_frame_.DisposeCaughtExceptionsArray(isolate_);
1451
1452#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1453 current_frame_.current_stack_start_locals_ =
1454 current_frame_.current_stack_start_args_ + target_function->args_count();
1455 current_frame_.current_stack_start_stack_ =
1456 current_frame_.current_stack_start_locals_ +
1457 target_function->locals_count();
1458
1459 if (v8_flags.trace_drumbrake_execution) {
1460 Trace("\nTailCallFunction: %d\n", func_index);
1461 Trace("= > PushFrame #%d(#%d @%d)\n", current_frame_.current_stack_height_,
1462 func_index, 0);
1463 }
1464#endif // V8_ENABLE_DRUMBRAKE_TRACING
1465
1466 if (!WasmStackCheck(target_function->GetCode(), code)) {
1467 return;
1468 }
1469
1470 const uint8_t* stack_limit = current_frame_.thread_->StackLimitAddress();
1471 if (V8_UNLIKELY(stack_limit <= current_frame_.current_sp_ ||
1472 !target_function->InitializeSlots(
1473 current_frame_.current_sp_,
1474 stack_limit - current_frame_.current_sp_))) {
1475 // Try to resize the stack.
1476 size_t additional_required_space =
1477 target_function->frame_size() -
1478 (stack_limit - current_frame_.current_sp_);
1479 // Try again.
1480 if (!current_frame_.thread_->ExpandStack(additional_required_space) ||
1481 !target_function->InitializeSlots(
1482 current_frame_.current_sp_,
1483 (stack_limit = current_frame_.thread_->StackLimitAddress()) -
1484 current_frame_.current_sp_)) {
1485 ClearThreadInWasmScope clear_wasm_flag(isolate_);
1487 SetTrap(TrapReason::kTrapUnreachable, code);
1489 return;
1490 }
1491 }
1492
1493 uint32_t ref_slots_count = target_function->ref_slots_count();
1494 if (V8_UNLIKELY(ref_slots_count > 0)) {
1495 uint32_t ref_array_length =
1496 current_frame_.ref_array_current_sp_ + ref_slots_count;
1497 current_thread_->EnsureRefStackSpace(ref_array_length);
1498 InitializeRefLocalsRefs(target_function);
1499 }
1500
1501#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1502 uint32_t shadow_stack_offset = 0;
1503 if (v8_flags.trace_drumbrake_execution) {
1504 shadow_stack_offset = target_function->rets_slots_size() * kSlotSize;
1505 for (uint32_t i = 0; i < target_function->args_count(); i++) {
1506 shadow_stack_offset +=
1507 TracePush(target_function->arg_type(i).kind(), shadow_stack_offset);
1508 }
1509
1510 // Make room for locals in shadow stack
1511 shadow_stack_offset += target_function->const_slots_size_in_bytes();
1512 for (size_t i = 0; i < target_function->locals_count(); i++) {
1513 shadow_stack_offset +=
1514 TracePush(target_function->local_type(i).kind(), shadow_stack_offset);
1515 }
1516 }
1517#endif // V8_ENABLE_DRUMBRAKE_TRACING
1518
1519 code = target_function->GetCode();
1520}
1521
1523 uint32_t func_index,
1524 uint32_t current_stack_size,
1525 uint32_t ref_stack_fp_offset,
1526 uint32_t slot_offset,
1527 uint32_t return_slot_offset) {
1528 // Execute an internal call.
1529 WasmBytecode* target_function = GetFunctionBytecode(func_index);
1530 DCHECK_NOT_NULL(target_function);
1531
1532#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1533 ShadowStack* prev_shadow_stack = shadow_stack_;
1534 ShadowStack shadow_stack;
1535 if (v8_flags.trace_drumbrake_execution) {
1536 shadow_stack_ = &shadow_stack;
1537 }
1538#endif // V8_ENABLE_DRUMBRAKE_TRACING
1539
1540 // This HandleScope is used for all handles created in instruction handlers.
1541 // We reset it every time we get to a backward jump in a loop.
1542 HandleScope handle_scope(GetIsolate());
1543
1544 // Update the current (caller) frame setting current_bytecode_ to the current
1545 // call instruction.
1546 current_frame_.current_bytecode_ = code;
1547
1548 FrameState prev_frame_state = current_frame_;
1549 current_frame_.current_sp_ += slot_offset;
1550 current_frame_.handle_scope_ = &handle_scope;
1551
1552#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1553 current_frame_.current_stack_start_args_ +=
1554 (current_stack_size - target_function->args_count());
1555#endif // V8_ENABLE_DRUMBRAKE_TRACING
1556
1557 current_frame_.current_function_ = target_function;
1558 current_frame_.previous_frame_ = &prev_frame_state;
1559 current_frame_.caught_exceptions_ = Handle<FixedArray>::null();
1560
1561#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1562 current_frame_.current_stack_height_++;
1563 current_frame_.current_stack_start_locals_ =
1564 current_frame_.current_stack_start_args_ + target_function->args_count();
1565 current_frame_.current_stack_start_stack_ =
1566 current_frame_.current_stack_start_locals_ +
1567 target_function->locals_count();
1568
1569 if (v8_flags.trace_drumbrake_execution) {
1570 Trace("\nCallFunction: %d\n", func_index);
1571 Trace("= > PushFrame #%d(#%d @%d)\n", current_frame_.current_stack_height_,
1572 func_index, 0);
1573 }
1574#endif // V8_ENABLE_DRUMBRAKE_TRACING
1575
1576 if (!WasmStackCheck(target_function->GetCode(), code)) {
1577 return;
1578 }
1579
1580 const uint8_t* stack_limit = current_frame_.thread_->StackLimitAddress();
1581 if (V8_UNLIKELY(stack_limit <= current_frame_.current_sp_ ||
1582 !target_function->InitializeSlots(
1583 current_frame_.current_sp_,
1584 stack_limit - current_frame_.current_sp_))) {
1585 // Try to resize the stack.
1586 size_t additional_required_space =
1587 target_function->frame_size() -
1588 (stack_limit - current_frame_.current_sp_);
1589 // Try again.
1590 if (!current_frame_.thread_->ExpandStack(additional_required_space) ||
1591 !target_function->InitializeSlots(
1592 current_frame_.current_sp_,
1593 (stack_limit = current_frame_.thread_->StackLimitAddress()) -
1594 current_frame_.current_sp_)) {
1595 ClearThreadInWasmScope clear_wasm_flag(isolate_);
1597 SetTrap(TrapReason::kTrapUnreachable, code);
1599 return;
1600 }
1601 }
1602
1603 uint32_t ref_slots_count = target_function->ref_slots_count();
1604 current_frame_.ref_array_current_sp_ += ref_stack_fp_offset;
1605 if (V8_UNLIKELY(ref_slots_count > 0)) {
1606 uint32_t ref_array_length =
1607 current_frame_.ref_array_current_sp_ + ref_slots_count;
1608 current_thread_->EnsureRefStackSpace(ref_array_length);
1609 InitializeRefLocalsRefs(target_function);
1610 }
1611
1612#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1613 uint32_t shadow_stack_offset = 0;
1614 if (v8_flags.trace_drumbrake_execution) {
1615 shadow_stack_offset = target_function->rets_slots_size() * kSlotSize;
1616 for (uint32_t i = 0; i < target_function->args_count(); i++) {
1617 shadow_stack_offset +=
1618 TracePush(target_function->arg_type(i).kind(), shadow_stack_offset);
1619 }
1620
1621 // Make room for locals in shadow stack
1622 shadow_stack_offset += target_function->const_slots_size_in_bytes();
1623 for (size_t i = 0; i < target_function->locals_count(); i++) {
1624 shadow_stack_offset +=
1625 TracePush(target_function->local_type(i).kind(), shadow_stack_offset);
1626 }
1627 }
1628#endif // V8_ENABLE_DRUMBRAKE_TRACING
1629
1630 const uint8_t* callee_code = target_function->GetCode();
1631 int64_t r0 = 0;
1632 double fp0 = .0;
1633
1634 // Execute function
1636 callee_code, reinterpret_cast<uint32_t*>(current_frame_.current_sp_),
1637 this, r0, fp0);
1638
1639 uint32_t ref_slots_to_clear =
1640 ref_slots_count - target_function->ref_rets_count();
1641 if (V8_UNLIKELY(ref_slots_to_clear > 0)) {
1642 current_thread_->ClearRefStackValues(current_frame_.ref_array_current_sp_ +
1643 target_function->ref_rets_count(),
1644 ref_slots_to_clear);
1645 }
1646
1647#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1648 shadow_stack_ = prev_shadow_stack;
1649
1650 if (v8_flags.trace_drumbrake_execution && shadow_stack_ != nullptr &&
1651 prev_frame_state.current_function_) {
1652 for (size_t i = 0; i < target_function->args_count(); i++) {
1653 TracePop();
1654 }
1655
1656 for (size_t i = 0; i < target_function->return_count(); i++) {
1657 return_slot_offset +=
1658 TracePush(target_function->return_type(i).kind(), return_slot_offset);
1659 }
1660 }
1661#endif // V8_ENABLE_DRUMBRAKE_TRACING
1662
1663 current_frame_.handle_scope_ = nullptr;
1664 current_frame_.DisposeCaughtExceptionsArray(isolate_);
1665 current_frame_ = prev_frame_state;
1666
1667 // Check state.
1668 WasmInterpreterThread::State current_state = state();
1669 if (V8_UNLIKELY(current_state != WasmInterpreterThread::State::RUNNING)) {
1670 switch (current_state) {
1673 if (!current_frame_.current_function_) {
1674 // We unwound the whole call stack without finding a catch handler.
1675 current_frame_.thread_->Stop();
1676 RedirectCodeToUnwindHandler(code);
1677 } else if (HandleException(
1678 reinterpret_cast<uint32_t*>(current_frame_.current_sp_),
1680 current_frame_.thread_->Run();
1681 } else {
1682 // UNWOUND
1684 RedirectCodeToUnwindHandler(code);
1685 }
1686 break;
1687
1690 RedirectCodeToUnwindHandler(code);
1691 break;
1692
1693 default:
1694 UNREACHABLE();
1695 }
1696 }
1697 // TODO(paolosev@microsoft.com): StackCheck.
1698}
1699
1701 DCHECK_LT(table_index, indirect_call_tables_.size());
1702 const WasmTable& table = module_->tables[table_index];
1703 if (IsSubtypeOf(table.type, kWasmFuncRef, module_)) {
1704 size_t length =
1706 wasm_trusted_instance_data()->dispatch_tables()->get(table_index))
1707 ->length();
1708 indirect_call_tables_[table_index].resize(length);
1709 for (size_t i = 0; i < length; i++) {
1710 indirect_call_tables_[table_index][i] = {};
1711 }
1712 }
1713}
1714
1715// static
1717 Isolate* isolate, DirectHandle<WasmInstanceObject> instance,
1718 uint32_t table_index, uint32_t entry_index) {
1719 DirectHandle<Tuple2> interpreter_object =
1720 WasmTrustedInstanceData::GetOrCreateInterpreterObject(instance);
1722 GetOrCreateInterpreterHandle(isolate, interpreter_object);
1724 handle->interpreter()->GetWasmRuntime();
1725 DCHECK_LT(table_index, wasm_runtime->indirect_call_tables_.size());
1726 DCHECK_LT(entry_index,
1727 wasm_runtime->indirect_call_tables_[table_index].size());
1728 wasm_runtime->indirect_call_tables_[table_index][entry_index] = {};
1729}
1730
1731// static
1733 Isolate* isolate, DirectHandle<WasmInstanceObject> instance,
1734 uint32_t table_index) {
1735 DirectHandle<Tuple2> interpreter_object =
1736 WasmTrustedInstanceData::GetOrCreateInterpreterObject(instance);
1738 GetOrCreateInterpreterHandle(isolate, interpreter_object);
1740 handle->interpreter()->GetWasmRuntime();
1742}
1743
1745 uint32_t table_index, uint32_t entry_index, uint32_t sig_index) const {
1746 const WasmTable& table = module_->tables[table_index];
1747 bool needs_type_check =
1748 !EquivalentTypes(table.type.AsNonNull(),
1749 ValueType::Ref(ModuleTypeIndex({sig_index}), false,
1751 module_, module_);
1752 bool needs_null_check = table.type.is_nullable();
1753
1754 // Copied from Liftoff.
1755 // We do both the type check and the null check by checking the signature,
1756 // so this shares most code. For the null check we then only check if the
1757 // stored signature is != -1.
1758 if (needs_type_check || needs_null_check) {
1759 const IndirectCallTable& dispatch_table =
1760 indirect_call_tables_[table_index];
1761 const wasm::CanonicalTypeIndex real_sig_id =
1762 dispatch_table[entry_index].sig_index;
1763 wasm::CanonicalTypeIndex canonical_sig_id =
1764 module_->canonical_sig_id(ModuleTypeIndex({sig_index}));
1765 if (!needs_type_check) {
1766 // Only check for -1 (nulled table entry).
1767 if (!real_sig_id.valid()) return false;
1768 } else if (!module_->types[sig_index].is_final) {
1769 if (real_sig_id == canonical_sig_id) return true;
1770 if (needs_null_check && !real_sig_id.valid()) return false;
1771
1773 ->wasm_canonical_rtts()
1774 ->get(real_sig_id.index)
1775 .GetHeapObjectAssumeWeak());
1776 DirectHandle<Map> formal_rtt = RttCanon(sig_index);
1777 return SubtypeCheck(rtt, *formal_rtt, sig_index);
1778 } else {
1779 if (real_sig_id != canonical_sig_id) return false;
1780 }
1781 }
1782
1783 return true;
1784}
1785
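The signature check above (CheckIndirectCallSignature) follows the same scheme as Liftoff: compare the canonicalized signature id stored in the dispatch-table entry against the expected one, treat an invalid (-1) id as a nulled entry, and, for non-final types, fall back to an RTT subtype walk when the ids differ. A minimal standalone sketch of that decision tree, using hypothetical simplified types instead of the real V8 ones:

#include <cstdint>
#include <functional>

// Hypothetical dispatch-table entry; -1 plays the role of the invalid
// (nulled) canonical signature id.
struct SketchEntry { int32_t canonical_sig_id; };

bool SketchSignatureCheck(const SketchEntry& entry, int32_t expected_id,
                          bool needs_type_check, bool needs_null_check,
                          bool type_is_final,
                          const std::function<bool()>& rtt_subtype_check) {
  if (!needs_type_check && !needs_null_check) return true;
  if (!needs_type_check) {
    // Null check only: a nulled entry stores -1.
    return entry.canonical_sig_id != -1;
  }
  if (type_is_final) {
    // A final type admits no subtypes, so only an exact id match passes.
    return entry.canonical_sig_id == expected_id;
  }
  // Non-final type: an exact match passes, a nulled entry fails, anything
  // else falls back to the (slower) RTT subtype walk.
  if (entry.canonical_sig_id == expected_id) return true;
  if (needs_null_check && entry.canonical_sig_id == -1) return false;
  return rtt_subtype_check();
}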
1786void WasmInterpreterRuntime::ExecuteIndirectCall(
1787 const uint8_t*& current_code, uint32_t table_index, uint32_t sig_index,
1788 uint32_t entry_index, uint32_t stack_pos, uint32_t* sp,
1789 uint32_t ref_stack_fp_offset, uint32_t slot_offset,
1790 uint32_t return_slot_offset, bool is_tail_call) {
1791 DCHECK_LT(table_index, indirect_call_tables_.size());
1792
1793 IndirectCallTable& table = indirect_call_tables_[table_index];
1794
1795 // Bounds check against table size.
1796 DCHECK_GE(
1797 table.size(),
1799 wasm_trusted_instance_data()->dispatch_tables()->get(table_index))
1800 ->length());
1801 if (entry_index >= table.size()) {
1802 SetTrap(TrapReason::kTrapTableOutOfBounds, current_code);
1803 return;
1804 }
1805
1806 if (!table[entry_index]) {
1807 HandleScope handle_scope(isolate_); // Avoid leaking handles.
1808
1810 entry_index);
1811 const FunctionSig* signature = module_->signature({sig_index});
1812
1813 DirectHandle<Object> object_implicit_arg(entry.implicit_arg(), isolate_);
1814 if (IsWasmTrustedInstanceData(*object_implicit_arg)) {
1815 Tagged<WasmTrustedInstanceData> trusted_instance_object =
1816 Cast<WasmTrustedInstanceData>(*object_implicit_arg);
1818 Cast<WasmInstanceObject>(trusted_instance_object->instance_object()),
1819 isolate_);
1820 if (instance_object_.is_identical_to(instance_object)) {
1821 // Call to an import.
1822 uint32_t func_index = entry.function_index();
1823 table[entry_index] = IndirectCallValue(func_index, entry.sig_id());
1824 } else {
1825 // Cross-instance call.
1826 table[entry_index] = IndirectCallValue(signature, entry.sig_id());
1827 }
1828 } else {
1829 // Call JS function.
1830 table[entry_index] = IndirectCallValue(signature, entry.sig_id());
1831 }
1832 }
1833
1834 if (!CheckIndirectCallSignature(table_index, entry_index, sig_index)) {
1835 SetTrap(TrapReason::kTrapFuncSigMismatch, current_code);
1836 return;
1837 }
1838
1839 IndirectCallValue indirect_call = table[entry_index];
1840 DCHECK(indirect_call);
1841
1842 if (indirect_call.mode == IndirectCallValue::Mode::kInternalCall) {
1843 if (is_tail_call) {
1844 PrepareTailCall(current_code, indirect_call.func_index, stack_pos,
1845 return_slot_offset);
1846 } else {
1847 ExecuteFunction(current_code, indirect_call.func_index, stack_pos,
1848 ref_stack_fp_offset, slot_offset, return_slot_offset);
1852 RedirectCodeToUnwindHandler(current_code);
1853 }
1854 }
1855 } else {
1856 // ExternalCall
1857 HandleScope handle_scope(isolate_); // Avoid leaking handles.
1858
1859 DCHECK_NOT_NULL(indirect_call.signature);
1860
1861 // Store a pointer to the current FrameState before leaving the current
1862 // Activation.
1863 WasmInterpreterThread* thread = this->thread();
1864 current_frame_.current_bytecode_ = current_code;
1865 thread->SetCurrentFrame(current_frame_);
1866 thread->SetCurrentActivationFrame(
1867 sp, slot_offset, stack_pos,
1868 current_frame_.ref_array_current_sp_ + ref_stack_fp_offset,
1869 ref_stack_fp_offset);
1870
1871 // TODO(paolosev@microsoft.com): Optimize this code.
1873 entry_index);
1874 DirectHandle<Object> object_implicit_arg(entry.implicit_arg(), isolate_);
1875
1876 if (IsWasmTrustedInstanceData(*object_implicit_arg)) {
1877 // Call Wasm function in a different instance.
1878
1879 // Note that tail calls across WebAssembly module boundaries should
1880 // guarantee tail behavior, so this implementation does not conform to the
1881 // spec for a tail call. But it is really difficult to implement
1882 // cross-instance calls in the interpreter without recursively adding C++
1883 // stack frames.
1884 DirectHandle<WasmInstanceObject> target_instance(
1886 Cast<WasmTrustedInstanceData>(*object_implicit_arg)
1887 ->instance_object()),
1888 isolate_);
1889
1890 // Make sure the target WasmInterpreterObject and InterpreterHandle exist.
1891 DirectHandle<Tuple2> interpreter_object =
1892 WasmTrustedInstanceData::GetOrCreateInterpreterObject(
1893 target_instance);
1894 GetOrCreateInterpreterHandle(isolate_, interpreter_object);
1895
1896 Address frame_pointer = FindInterpreterEntryFramePointer(isolate_);
1897
1898 {
1899 // We must not allocate anything on the heap, and we must avoid triggering
1900 // a GC, after we store ref arguments into stack slots.
1902
1903 uint8_t* fp = reinterpret_cast<uint8_t*>(sp) + slot_offset;
1904 StoreRefArgsIntoStackSlots(fp, ref_stack_fp_offset,
1905 indirect_call.signature);
1907 isolate_, frame_pointer, target_instance, entry.function_index(),
1908 fp);
1909 if (success) {
1910 StoreRefResultsIntoRefStack(fp, ref_stack_fp_offset,
1911 indirect_call.signature);
1912
1913#ifdef V8_ENABLE_DRUMBRAKE_TRACING
1914 // Update shadow stack
1915 if (v8_flags.trace_drumbrake_execution && shadow_stack_ != nullptr) {
1916 for (size_t i = 0; i < indirect_call.signature->parameter_count();
1917 i++) {
1918 TracePop();
1919 }
1920
1921 for (size_t i = 0; i < indirect_call.signature->return_count();
1922 i++) {
1923 return_slot_offset +=
1924 TracePush(indirect_call.signature->GetReturn(i).kind(),
1925 return_slot_offset);
1926 }
1927 }
1928#endif // V8_ENABLE_DRUMBRAKE_TRACING
1929 } else {
1930 thread->Stop();
1931 RedirectCodeToUnwindHandler(current_code);
1932 }
1933 }
1934 } else {
1935 // We must not allocate anything on the heap, and we must avoid triggering
1936 // a GC, after we store ref arguments into stack slots.
1938
1939 // Note that tail calls to host functions do not have to guarantee tail
1940 // behavior, so it is OK to recursively allocate C++ stack frames here.
1941 uint8_t* fp = reinterpret_cast<uint8_t*>(sp) + slot_offset;
1942 StoreRefArgsIntoStackSlots(fp, ref_stack_fp_offset,
1943 indirect_call.signature);
1945 current_code, module_, object_implicit_arg, indirect_call.signature,
1946 sp + slot_offset / kSlotSize, slot_offset);
1948 StoreRefResultsIntoRefStack(fp, ref_stack_fp_offset,
1949 indirect_call.signature);
1950 } else { // ExternalCallResult::EXTERNAL_EXCEPTION
1951 AllowHeapAllocation allow_gc;
1952
1953 if (HandleException(sp, current_code) ==
1956 thread->Stop();
1957 RedirectCodeToUnwindHandler(current_code);
1958 } else {
1959 thread->Run();
1960 }
1961 }
1962 }
1963
1964 if (is_tail_call) {
1965 // For tail calls to external functions, we need to unwind the callee
1966 // frame.
1967 RedirectCodeToUnwindHandler(current_code);
1968 }
1969 }
1970}
1971
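ExecuteIndirectCall above caches, per dispatch-table entry, whether the target can be executed through the fast internal path or must go through the external/cross-instance path (IndirectCallValue); ClearIndirectCallCacheEntry and UpdateIndirectCallTable invalidate that cache when the table changes. A rough sketch of the same memoization idea with hypothetical simplified types:

#include <cstddef>
#include <cstdint>
#include <functional>
#include <optional>
#include <vector>

struct CachedTarget {
  enum class Mode { kInternal, kExternal } mode;
  uint32_t func_index;  // Only meaningful for kInternal.
};

class SketchIndirectCallCache {
 public:
  explicit SketchIndirectCallCache(size_t table_size) : entries_(table_size) {}

  // Resolve the target once via the slow path, then reuse it until the
  // corresponding table entry is invalidated.
  const CachedTarget& Resolve(
      uint32_t entry_index,
      const std::function<CachedTarget(uint32_t)>& slow_path) {
    std::optional<CachedTarget>& slot = entries_[entry_index];
    if (!slot) slot = slow_path(entry_index);
    return *slot;
  }

  // Analogous to clearing a single cache entry when a table slot is updated.
  void Invalidate(uint32_t entry_index) { entries_[entry_index].reset(); }

 private:
  std::vector<std::optional<CachedTarget>> entries_;
};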
1972void WasmInterpreterRuntime::ExecuteCallRef(
1973 const uint8_t*& current_code, WasmRef func_ref, uint32_t sig_index,
1974 uint32_t stack_pos, uint32_t* sp, uint32_t ref_stack_fp_offset,
1975 uint32_t slot_offset, uint32_t return_slot_offset, bool is_tail_call) {
1976 if (IsWasmFuncRef(*func_ref)) {
1977 func_ref = direct_handle(Cast<WasmFuncRef>(*func_ref)->internal(isolate_),
1978 isolate_);
1979 }
1980 if (IsWasmInternalFunction(*func_ref)) {
1981 Tagged<WasmInternalFunction> wasm_internal_function =
1982 Cast<WasmInternalFunction>(*func_ref);
1983 Tagged<Object> implicit_arg = wasm_internal_function->implicit_arg();
1984 if (IsWasmImportData(implicit_arg)) {
1985 func_ref = direct_handle(implicit_arg, isolate_);
1986 } else {
1987 DCHECK(IsWasmTrustedInstanceData(implicit_arg));
1989 direct_handle(wasm_internal_function, isolate_));
1990 DCHECK(IsJSFunction(*func_ref) || IsUndefined(*func_ref));
1991 }
1992 }
1993
1994 const FunctionSig* signature = module_->signature({sig_index});
1995
1996 // ExternalCall
1997 HandleScope handle_scope(isolate_); // Avoid leaking handles.
1998
1999 // Store a pointer to the current FrameState before leaving the current
2000 // Activation.
2001 WasmInterpreterThread* thread = this->thread();
2002 current_frame_.current_bytecode_ = current_code;
2003 thread->SetCurrentFrame(current_frame_);
2004 thread->SetCurrentActivationFrame(
2005 sp, slot_offset, stack_pos,
2006 current_frame_.ref_array_current_sp_ + ref_stack_fp_offset,
2007 ref_stack_fp_offset);
2008
2009 // We must not allocate anything on the heap, and we must avoid triggering
2010 // a GC, after we store ref arguments into stack slots.
2012
2013 // Note that tail calls to host functions do not have to guarantee tail
2014 // behavior, so it is OK to recursively allocate C++ stack frames here.
2015 uint8_t* fp = reinterpret_cast<uint8_t*>(sp) + slot_offset;
2016 StoreRefArgsIntoStackSlots(fp, ref_stack_fp_offset, signature);
2018 CallExternalJSFunction(current_code, module_, func_ref, signature,
2019 sp + slot_offset / kSlotSize, return_slot_offset);
2021 StoreRefResultsIntoRefStack(fp, ref_stack_fp_offset, signature);
2022 } else { // ExternalCallResult::EXTERNAL_EXCEPTION
2023 AllowHeapAllocation allow_gc;
2024
2025 if (HandleException(sp, current_code) ==
2028 thread->Stop();
2029 RedirectCodeToUnwindHandler(current_code);
2030 } else {
2031 thread->Run();
2032 }
2033 }
2034
2035 if (is_tail_call) {
2036 RedirectCodeToUnwindHandler(current_code);
2037 }
2038}
2039
2041 const uint8_t*& current_code, uint32_t function_index, uint32_t* sp,
2042 uint32_t current_stack_size, uint32_t ref_stack_fp_offset,
2043 uint32_t current_slot_offset) {
2044 DCHECK_GT(module_->num_imported_functions, function_index);
2045 HandleScope handle_scope(isolate_); // Avoid leaking handles.
2046
2047 const FunctionSig* sig = module_->functions[function_index].sig;
2048
2049 ImportedFunctionEntry entry(wasm_trusted_instance_data(), function_index);
2050 int target_function_index = entry.function_index_in_called_module();
2051 if (target_function_index >= 0) {
2052 // WasmToWasm call.
2053 DCHECK(IsWasmTrustedInstanceData(entry.implicit_arg()));
2054 DirectHandle<WasmInstanceObject> target_instance(
2057 ->instance_object()),
2058 isolate_);
2059
2060 // Make sure the WasmInterpreterObject and InterpreterHandle for this
2061 // instance exist.
2062 DirectHandle<Tuple2> interpreter_object =
2063 WasmTrustedInstanceData::GetOrCreateInterpreterObject(target_instance);
2064 GetOrCreateInterpreterHandle(isolate_, interpreter_object);
2065
2066 Address frame_pointer = FindInterpreterEntryFramePointer(isolate_);
2067
2068 {
2069 // We must not allocate anything on the heap, and we must avoid triggering
2070 // a GC, after we store ref arguments into stack slots.
2072
2073 uint8_t* fp = reinterpret_cast<uint8_t*>(sp);
2074 StoreRefArgsIntoStackSlots(fp, ref_stack_fp_offset, sig);
2075 // Note that tail calls across WebAssembly module boundaries should
2076 // guarantee tail behavior, so this implementation does not conform to the
2077 // spec for a tail call. But it is really difficult to implement
2078 // cross-instance calls in the interpreter without recursively adding C++
2079 // stack frames.
2080
2081 // TODO(paolosev@microsoft.com) - Is it possible to short-circuit this in
2082 // the case where we are calling a function in the same Wasm instance,
2083 // with a simple call to WasmInterpreterRuntime::ExecuteFunction()?
2085 isolate_, frame_pointer, target_instance, target_function_index, fp);
2086 if (success) {
2087 StoreRefResultsIntoRefStack(fp, ref_stack_fp_offset, sig);
2088
2089#ifdef V8_ENABLE_DRUMBRAKE_TRACING
2090 // Update shadow stack
2091 if (v8_flags.trace_drumbrake_execution && shadow_stack_ != nullptr) {
2092 for (size_t i = 0; i < sig->parameter_count(); i++) {
2093 TracePop();
2094 }
2095
2096 for (size_t i = 0; i < sig->return_count(); i++) {
2097 current_slot_offset +=
2098 TracePush(sig->GetReturn(i).kind(), current_slot_offset);
2099 }
2100 }
2101#endif // V8_ENABLE_DRUMBRAKE_TRACING
2103 }
2105 }
2106 } else {
2107 // WasmToJS call.
2108
2109 // Note that tail calls to host functions do not have to guarantee tail
2110 // behavior, so it is OK to recursively allocate C++ stack frames here.
2111
2112 DirectHandle<Object> object_implicit_arg(entry.implicit_arg(), isolate_);
2113
2114 // We must not allocate anything on the heap, and we must avoid triggering a
2115 // GC, after we store ref arguments into stack slots.
2117
2118 uint8_t* fp = reinterpret_cast<uint8_t*>(sp);
2119 StoreRefArgsIntoStackSlots(fp, ref_stack_fp_offset, sig);
2121 CallExternalJSFunction(current_code, module_, object_implicit_arg, sig,
2122 sp, current_slot_offset);
2124 StoreRefResultsIntoRefStack(fp, ref_stack_fp_offset, sig);
2125 }
2126 return result;
2127 }
2128}
2129
2130// static
2134
2135// static
2139
2144
2145void WasmInterpreterRuntime::CallWasmToJSBuiltin(
2146 Isolate* isolate, DirectHandle<Object> object_ref, Address packed_args,
2147 const FunctionSig* sig) {
2149 DirectHandle<Object> callable;
2150 if (IsWasmImportData(*object_ref)) {
2151 callable =
2152 direct_handle(Cast<WasmImportData>(*object_ref)->callable(), isolate);
2153 } else {
2154 callable = object_ref;
2155 DCHECK(!IsUndefined(*callable));
2156 }
2157
2158 // TODO(paolosev@microsoft.com) - Can callable be a JSProxy?
2159 DirectHandle<Object> js_function = callable;
2160 while (IsJSBoundFunction(*js_function, isolate_)) {
2161 if (IsJSBoundFunction(*js_function, isolate_)) {
2162 js_function = direct_handle(
2163 Cast<JSBoundFunction>(js_function)->bound_target_function(), isolate);
2164 }
2165 }
2166
2167 if (IsJSProxy(*js_function, isolate_)) {
2168 do {
2169 Tagged<HeapObject> target = Cast<JSProxy>(js_function)->target(isolate);
2170 js_function = DirectHandle<Object>(target, isolate);
2171 } while (IsJSProxy(*js_function, isolate_));
2172 }
2173
2174 if (!IsJSFunction(*js_function, isolate_)) {
2175 AllowHeapAllocation allow_gc;
2177
2178 isolate->set_exception(*isolate_->factory()->NewTypeError(
2179 MessageTemplate::kWasmTrapJSTypeError));
2180 return;
2181 }
2182
2183 // Save and restore context around invocation and block the
2184 // allocation of handles without explicit handle scopes.
2185 SaveContext save(isolate);
2186 SealHandleScope shs(isolate);
2187
2188 Address saved_c_entry_fp = *isolate->c_entry_fp_address();
2189 Address saved_js_entry_sp = *isolate->js_entry_sp_address();
2190 if (saved_js_entry_sp == kNullAddress) {
2191 *isolate->js_entry_sp_address() = GetCurrentStackPosition();
2192 }
2193 StackHandlerMarker stack_handler;
2194 stack_handler.next = isolate->thread_local_top()->handler_;
2195#ifdef V8_USE_ADDRESS_SANITIZER
2196 stack_handler.padding = GetCurrentStackPosition();
2197#else
2198 stack_handler.padding = 0;
2199#endif
2200 // {saved_c_entry_fp} can be null if we run the interpreter directly from the
2201 // fuzzer, not from JS. In this case, we need to break the handler chain.
2202 isolate->thread_local_top()->handler_ =
2203 saved_c_entry_fp ? reinterpret_cast<Address>(&stack_handler)
2204 : kNullAddress;
2207 }
2208
2209 {
2210 RCS_SCOPE(isolate, RuntimeCallCounterId::kJS_Execution);
2212 (*js_function).ptr(), packed_args, isolate->isolate_root(), sig,
2213 saved_c_entry_fp, (*callable).ptr());
2214 if (result != WasmToJSInterpreterFrameConstants::kSuccess) {
2215 isolate->set_exception(Tagged<Object>(result));
2218 }
2219 } else {
2223 }
2224 }
2225 }
2226
2227 isolate->thread_local_top()->handler_ = stack_handler.next;
2228 if (saved_js_entry_sp == kNullAddress) {
2229 *isolate->js_entry_sp_address() = saved_js_entry_sp;
2230 }
2231 *isolate->c_entry_fp_address() = saved_c_entry_fp;
2232}
2233
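CallWasmToJSBuiltin above manually saves c_entry_fp, js_entry_sp, and the stack-handler chain before calling into JS, and restores them afterwards. The same save/restore discipline can also be expressed as a small RAII guard; an editor's sketch over a hypothetical slot type (the real code restores explicitly, as shown above):

// Editor's sketch, not part of this file.
template <typename T>
class SaveAndRestoreSlot {
 public:
  explicit SaveAndRestoreSlot(T& slot) : slot_(slot), saved_(slot) {}
  ~SaveAndRestoreSlot() { slot_ = saved_; }
  SaveAndRestoreSlot(const SaveAndRestoreSlot&) = delete;
  SaveAndRestoreSlot& operator=(const SaveAndRestoreSlot&) = delete;

 private:
  T& slot_;
  T saved_;
};

// Usage sketch:
//   { SaveAndRestoreSlot<uintptr_t> guard(entry_fp_slot);  // save
//     /* call into JS */
//   }                                                      // restored here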
2235 const uint8_t*& current_code, const WasmModule* module,
2236 DirectHandle<Object> object_ref, const FunctionSig* sig, uint32_t* sp,
2237 uint32_t return_slot_offset) {
2238 // TODO(paolosev@microsoft.com) Cache IsJSCompatibleSignature result?
2240 reinterpret_cast<const wasm::CanonicalSig*>(sig))) {
2241 AllowHeapAllocation allow_gc;
2242 ClearThreadInWasmScope clear_wasm_flag(isolate_);
2243
2244 isolate_->Throw(*isolate_->factory()->NewTypeError(
2245 MessageTemplate::kWasmTrapJSTypeError));
2247 }
2248
2249#ifdef V8_ENABLE_DRUMBRAKE_TRACING
2250 if (v8_flags.trace_drumbrake_execution) {
2251 Trace(" => Calling external function\n");
2252 }
2253#endif // V8_ENABLE_DRUMBRAKE_TRACING
2254
2256 reinterpret_cast<const wasm::CanonicalSig*>(sig)));
2257 uint32_t* p = sp + WasmBytecode::RetsSizeInSlots(sig);
2258 for (size_t i = 0; i < sig->parameter_count(); ++i) {
2259 switch (sig->GetParam(i).kind()) {
2260 case kI32:
2261 packer.Push(
2262 base::ReadUnalignedValue<int32_t>(reinterpret_cast<Address>(p)));
2263 p += sizeof(int32_t) / kSlotSize;
2264 break;
2265 case kI64:
2266 packer.Push(
2267 base::ReadUnalignedValue<int64_t>(reinterpret_cast<Address>(p)));
2268 p += sizeof(int64_t) / kSlotSize;
2269 break;
2270 case kF32:
2271 packer.Push(
2272 base::ReadUnalignedValue<float>(reinterpret_cast<Address>(p)));
2273 p += sizeof(float) / kSlotSize;
2274 break;
2275 case kF64:
2276 packer.Push(
2277 base::ReadUnalignedValue<double>(reinterpret_cast<Address>(p)));
2278 p += sizeof(double) / kSlotSize;
2279 break;
2280 case kRef:
2281 case kRefNull: {
2283 base::ReadUnalignedValue<WasmRef>(reinterpret_cast<Address>(p));
2284 ref = WasmToJSObject(ref);
2285 packer.Push(*ref);
2286 p += sizeof(WasmRef) / kSlotSize;
2287 break;
2288 }
2289 case kS128:
2290 default:
2291 UNREACHABLE();
2292 }
2293 }
2294
2297 {
2298 // If there were Ref values passed as arguments, they have already been
2299 // read in BeginExecution(), so we can re-enable GC.
2300 AllowHeapAllocation allow_gc;
2301
2302 CallWasmToJSBuiltin(isolate_, object_ref, packer.argv(), sig);
2303 }
2305
2306#ifdef V8_ENABLE_DRUMBRAKE_TRACING
2307 if (v8_flags.trace_drumbrake_execution) {
2308 Trace(" => External wasm function returned%s\n",
2309 isolate_->has_exception() ? " with exception" : "");
2310 }
2311#endif // V8_ENABLE_DRUMBRAKE_TRACING
2312
2315 }
2316
2317 // Push return values.
2318 if (sig->return_count() > 0) {
2319 packer.Reset();
2320 for (size_t i = 0; i < sig->return_count(); i++) {
2321 switch (sig->GetReturn(i).kind()) {
2322 case kI32:
2324 packer.Pop<uint32_t>());
2325 sp += sizeof(uint32_t) / kSlotSize;
2326 break;
2327 case kI64:
2329 packer.Pop<uint64_t>());
2330 sp += sizeof(uint64_t) / kSlotSize;
2331 break;
2332 case kF32:
2333 base::WriteUnalignedValue<float>(reinterpret_cast<Address>(sp),
2334 packer.Pop<float>());
2335 sp += sizeof(float) / kSlotSize;
2336 break;
2337 case kF64:
2338 base::WriteUnalignedValue<double>(reinterpret_cast<Address>(sp),
2339 packer.Pop<double>());
2340 sp += sizeof(double) / kSlotSize;
2341 break;
2342 case kRef:
2343 case kRefNull:
2344 // TODO(paolosev@microsoft.com): Handle WasmNull case?
2345#ifdef V8_COMPRESS_POINTERS
2346 {
2347 Address address = packer.Pop<Address>();
2349 if (sig->GetReturn(i).value_type_code() == wasm::kFuncRefCode &&
2350 i::IsNull(*ref, isolate_)) {
2351 ref = isolate_->factory()->wasm_null();
2352 }
2353 ref = JSToWasmObject(ref, sig->GetReturn(i));
2354 if (isolate_->has_exception()) {
2356 }
2358 reinterpret_cast<Address>(sp), ref);
2359 sp += sizeof(WasmRef) / kSlotSize;
2360 }
2361#else
2362 CHECK(false); // Not supported.
2363#endif // V8_COMPRESS_POINTERS
2364 break;
2365 case kS128:
2366 default:
2367 UNREACHABLE();
2368 }
2369 }
2370 }
2371
2372#ifdef V8_ENABLE_DRUMBRAKE_TRACING
2373 if (v8_flags.trace_drumbrake_execution && shadow_stack_ != nullptr) {
2374 for (size_t i = 0; i < sig->parameter_count(); i++) {
2375 TracePop();
2376 }
2377
2378 for (size_t i = 0; i < sig->return_count(); i++) {
2379 return_slot_offset +=
2380 TracePush(sig->GetReturn(i).kind(), return_slot_offset);
2381 }
2382 }
2383#endif // V8_ENABLE_DRUMBRAKE_TRACING
2384
2386}
2387
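In CallExternalJSFunction above, arguments and results live in fixed-size interpreter stack slots: every value advances the slot pointer by sizeof(T) / kSlotSize slots, so 64-bit values span two slots when the slot size is 4 bytes. A small standalone sketch of that slot arithmetic, assuming a hypothetical 4-byte slot (the real kSlotSize is defined elsewhere in the interpreter):

#include <cstddef>
#include <cstdint>
#include <cstring>

// Hypothetical slot size; the real value comes from the interpreter headers.
constexpr size_t kSketchSlotSize = sizeof(uint32_t);

template <typename T>
T ReadSlotAndAdvance(const uint32_t*& slot_ptr) {
  T value;
  std::memcpy(&value, slot_ptr, sizeof(T));  // Unaligned-safe read.
  slot_ptr += sizeof(T) / kSketchSlotSize;   // i64/f64 consume two slots.
  return value;
}

// Example use for an (i32, f64) signature:
//   const uint32_t* p = ...;  // start of the argument slots
//   int32_t a = ReadSlotAndAdvance<int32_t>(p);
//   double b = ReadSlotAndAdvance<double>(p);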
2388DirectHandle<Map> WasmInterpreterRuntime::RttCanon(uint32_t type_index) const {
2389 DirectHandle<Map> rtt{
2390 Cast<Map>(
2391 wasm_trusted_instance_data()->managed_object_maps()->get(type_index)),
2392 isolate_};
2393 return rtt;
2394}
2395
2396std::pair<DirectHandle<WasmStruct>, const StructType*>
2397WasmInterpreterRuntime::StructNewUninitialized(uint32_t index) const {
2398 const StructType* struct_type = module_->struct_type({index});
2399 DirectHandle<Map> rtt = RttCanon(index);
2400 return {isolate_->factory()->NewWasmStructUninitialized(struct_type, rtt),
2401 struct_type};
2402}
2403
2404std::pair<DirectHandle<WasmArray>, const ArrayType*>
2405WasmInterpreterRuntime::ArrayNewUninitialized(uint32_t length,
2406 uint32_t array_index) const {
2407 const ArrayType* array_type = GetArrayType(array_index);
2408 if (V8_UNLIKELY(static_cast<int>(length) < 0 ||
2409 static_cast<int>(length) >
2410 WasmArray::MaxLength(array_type))) {
2411 return {};
2412 }
2413
2414 DirectHandle<Map> rtt = RttCanon(array_index);
2415 return {
2416 {isolate_->factory()->NewWasmArrayUninitialized(length, rtt), isolate_},
2417 array_type};
2418}
2419
2419WasmRef WasmInterpreterRuntime::WasmArrayNewSegment(uint32_t array_index,
2420 uint32_t segment_index,
2422 uint32_t offset,
2423 uint32_t length) {
2424 DirectHandle<Map> rtt = RttCanon(array_index);
2425 // Call runtime function Runtime_WasmArrayNewSegment. Store the arguments in
2426 // reverse order and pass a pointer to the first argument, which is the last
2427 // on the stack.
2428 //
2429 // args[args_length] -> | rtt |
2430 // | length |
2431 // | offset |
2432 // | segment_index |
2433 // first_arg_addr -> | trusted_instance |
2434 //
2435 constexpr size_t kArgsLength = 5;
2436 Address args[kArgsLength] = {rtt->ptr(), IntToSmi(length), IntToSmi(offset),
2437 IntToSmi(segment_index),
2438 wasm_trusted_instance_data()->ptr()};
2439 Address* first_arg_addr = &args[kArgsLength - 1];
2440
2441 // A runtime function can throw, so we need to make sure that the current
2442 // activation is up-to-date, in case we need to traverse the call stack.
2444
2445 Address result =
2446 Runtime_WasmArrayNewSegment(kArgsLength, first_arg_addr, isolate_);
2447 if (isolate_->has_exception()) return {};
2448
2450}
2451
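WasmArrayNewSegment above (and the helpers that follow) call C++ runtime functions directly, using the convention described in the comment: the arguments are stored in reverse order and the callee receives the argument count plus a pointer to the first logical argument, which is the last element of the array. A standalone sketch of that convention with a hypothetical callee in place of a real Runtime_* function:

#include <cstddef>
#include <cstdint>

using SketchAddress = uintptr_t;

// Hypothetical runtime-style callee: first_arg[0] is the first logical
// argument, first_arg[-1] the second, and so on (the argument area is
// indexed downwards).
SketchAddress FakeRuntimeCall(size_t args_length,
                              const SketchAddress* first_arg) {
  SketchAddress sum = 0;
  for (size_t i = 0; i < args_length; ++i) {
    sum += first_arg[-static_cast<ptrdiff_t>(i)];
  }
  return sum;
}

void CallWithReversedArgs() {
  constexpr size_t kArgsLength = 3;
  // Logical order is a, b, c; stored reversed, so &args[kArgsLength - 1]
  // points at 'a'.
  SketchAddress args[kArgsLength] = {/*c*/ 3, /*b*/ 2, /*a*/ 1};
  SketchAddress* first_arg_addr = &args[kArgsLength - 1];
  FakeRuntimeCall(kArgsLength, first_arg_addr);
}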
2453 WasmRef wasm_array,
2454 uint32_t array_offset,
2455 uint32_t segment_offset,
2456 uint32_t length) {
2457 // Call runtime function Runtime_WasmArrayInitSegment. Store the arguments in
2458 // reverse order and pass a pointer to the first argument, which is the last
2459 // on the stack.
2460 //
2461 // args[args_length] -> | length |
2462 // | segment_offset |
2463 // | array_offset |
2464 // | wasm_array |
2465 // | segment_index |
2466 // first_arg_addr -> | trusted_instance |
2467 //
2468 constexpr size_t kArgsLength = 6;
2469 Address args[kArgsLength] = {
2470 IntToSmi(length), IntToSmi(segment_offset),
2471 IntToSmi(array_offset), (*wasm_array).ptr(),
2472 IntToSmi(segment_index), wasm_trusted_instance_data()->ptr()};
2473 Address* first_arg_addr = &args[kArgsLength - 1];
2474
2475 // A runtime function can throw, so we need to make sure that the current
2476 // activation is up-to-date, in case we need to traverse the call stack.
2478
2479 Runtime_WasmArrayInitSegment(kArgsLength, first_arg_addr, isolate_);
2480 return (!isolate_->has_exception());
2481}
2482
2484 uint32_t dest_index,
2485 WasmRef src_wasm_array,
2486 uint32_t src_index,
2487 uint32_t length) {
2488 // Call runtime function Runtime_WasmArrayCopy. Store the arguments in reverse
2489 // order and pass a pointer to the first argument, which is the last on the
2490 // stack.
2491 //
2492 // args[args_length] -> | length |
2493 // | src_index |
2494 // | src_array |
2495 // | dest_index |
2496 // first_arg_addr -> | dest_array |
2497 //
2498 constexpr size_t kArgsLength = 5;
2499 Address args[kArgsLength] = {IntToSmi(length), IntToSmi(src_index),
2500 (*src_wasm_array).ptr(), IntToSmi(dest_index),
2501 (*dest_wasm_array).ptr()};
2502 Address* first_arg_addr = &args[kArgsLength - 1];
2503
2504 // A runtime function can throw, so we need to make sure that the current
2505 // activation is up-to-date, in case we need to traverse the call stack.
2507
2508 Runtime_WasmArrayCopy(kArgsLength, first_arg_addr, isolate_);
2509 return (!isolate_->has_exception());
2510}
2511
2513 WasmRef extern_ref, ValueType value_type, uint32_t canonical_index) const {
2514 // Call runtime function Runtime_WasmJSToWasmObject. Store the arguments in
2515 // reverse order and pass a pointer to the first argument, which is the last
2516 // on the stack.
2517 //
2518 // args[args_length] -> | value_type represent.|
2519 // first_arg_addr -> | extern_ref |
2520 //
2521 constexpr size_t kArgsLength = 2;
2522 Address args[kArgsLength] = {
2523 IntToSmi(value_type.raw_bit_field()), (*extern_ref).ptr()};
2524 Address* first_arg_addr = &args[kArgsLength - 1];
2525
2526 // A runtime function can throw, so we need to make sure that the current
2527 // activation is up-to-date, in case we need to traverse the call stack.
2529
2530 Address result =
2531 Runtime_WasmJSToWasmObject(kArgsLength, first_arg_addr, isolate_);
2532 if (isolate_->has_exception()) return {};
2533
2535}
2536
2538 ValueType type) const {
2540 const char* error_message;
2541 if (wasm::JSToWasmObject(isolate_, module_, extern_ref, type, &error_message)
2542 .ToHandle(&result)) {
2543 return result;
2544 }
2545
2546 {
2547 // Allocation can only happen in the exception case.
2548 AllowHeapAllocation allow_gc;
2549
2550 if (v8_flags.wasm_jitless && trap_handler::IsThreadInWasm()) {
2552 }
2553 Tagged<Object> error = isolate_->Throw(*isolate_->factory()->NewTypeError(
2554 MessageTemplate::kWasmTrapJSTypeError));
2555 return direct_handle(error, isolate_);
2556 }
2557}
2558
2560 if (IsWasmFuncRef(*value)) {
2561 value =
2562 direct_handle(Cast<WasmFuncRef>(*value)->internal(isolate_), isolate_);
2563 }
2564 if (IsWasmInternalFunction(*value)) {
2568 }
2569 if (IsWasmNull(*value)) {
2570 return direct_handle(ReadOnlyRoots(isolate_).null_value(), isolate_);
2571 }
2572 return value;
2573}
2574
2575// Implementation similar to Liftoff's SubtypeCheck in
2576// src/wasm/baseline/liftoff-compiler.cc.
2577bool WasmInterpreterRuntime::SubtypeCheck(Tagged<Map> rtt,
2578 Tagged<Map> formal_rtt,
2579 uint32_t type_index) const {
2580 // Constant-time subtyping check: load exactly one candidate RTT from the
2581 // supertypes list.
2582 // Step 1: load the WasmTypeInfo.
2583 Tagged<WasmTypeInfo> type_info = rtt->wasm_type_info();
2584
2585 // Step 2: check the list's length if needed.
2586 uint32_t rtt_depth =
2588 if (rtt_depth >= kMinimumSupertypeArraySize &&
2589 static_cast<uint32_t>(type_info->supertypes_length()) <= rtt_depth) {
2590 return false;
2591 }
2592
2593 // Step 3: load the candidate list slot and compare it against {formal_rtt}.
2594 Tagged<Object> supertype = type_info->supertypes(rtt_depth);
2595 if (formal_rtt != supertype) return false;
2596 return true;
2597}
2598
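The Map-based SubtypeCheck above is constant-time: each RTT carries a supertypes array indexed by subtyping depth, so after an optional bounds check exactly one slot is loaded and compared with the formal RTT. A sketch of the same idea over hypothetical plain-data RTTs:

#include <cstdint>
#include <vector>

struct SketchRtt {
  // supertypes[d] is this type's ancestor at subtyping depth d (the type
  // itself appears at its own depth).
  std::vector<const SketchRtt*> supertypes;
};

bool SketchIsSubtype(const SketchRtt& object_rtt, const SketchRtt& formal_rtt,
                     uint32_t formal_depth) {
  // The length check is only required when the depth may exceed the minimum
  // array size guaranteed statically; after that it is one load plus one
  // compare, independent of the depth of the type hierarchy.
  if (formal_depth >= object_rtt.supertypes.size()) return false;
  return object_rtt.supertypes[formal_depth] == &formal_rtt;
}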
2599// Implementation similar to Liftoff's SubtypeCheck in
2600// src/wasm/baseline/liftoff-compiler.cc.
2602 const ValueType obj_type,
2603 const DirectHandle<Map> rtt,
2604 const ModuleTypeIndex target_type,
2605 bool null_succeeds) const {
2606 bool is_cast_from_any = obj_type.is_reference_to(HeapType::kAny);
2607
2608 // Skip the null check if casting from any and not {null_succeeds}.
2609 // In that case the instance type check will identify null as not being a
2610 // wasm object and fail.
2611 if (obj_type.is_nullable() && (!is_cast_from_any || null_succeeds)) {
2613 if (i::IsNull(*obj, isolate_)) return null_succeeds;
2614 } else {
2615 if (i::IsWasmNull(*obj, isolate_)) return null_succeeds;
2616 }
2617 }
2618
2619 // Add Smi check if the source type may store a Smi (i31ref or JS Smi).
2620 if (IsSubtypeOf(kWasmRefI31, obj_type, module_) && IsSmi(*obj)) {
2621 return false;
2622 }
2623
2624 if (!IsHeapObject(*obj)) return false;
2625 Tagged<Map> obj_map = Cast<HeapObject>(obj)->map();
2626
2627 if (module_->types[target_type.index].is_final) {
2628 // In this case, simply check for map equality.
2629 if (*obj_map != *rtt) {
2630 return false;
2631 }
2632 } else {
2633 // Check for rtt equality, and if not, check if the rtt is a struct/array
2634 // rtt.
2635 if (*obj_map == *rtt) {
2636 return true;
2637 }
2638
2639 if (is_cast_from_any) {
2640 // Check for map being a map for a wasm object (struct, array, func).
2641 InstanceType obj_type1 = obj_map->instance_type();
2642 if (obj_type1 < FIRST_WASM_OBJECT_TYPE ||
2643 obj_type1 > LAST_WASM_OBJECT_TYPE) {
2644 return false;
2645 }
2646 }
2647
2648 return SubtypeCheck(obj_map, *rtt, target_type.index);
2649 }
2650
2651 return true;
2652}
2653
2654using TypeChecker = bool (*)(const WasmRef obj);
2655
2656template <TypeChecker type_checker>
2657bool AbstractTypeCast(Isolate* isolate, const WasmRef obj,
2658 const ValueType obj_type, bool null_succeeds) {
2661 return true;
2662 }
2663 return type_checker(obj);
2664}
2665
2666static bool EqCheck(const WasmRef obj) {
2667 if (IsSmi(*obj)) {
2668 return true;
2669 }
2670 if (!IsHeapObject(*obj)) return false;
2671 InstanceType instance_type = Cast<HeapObject>(obj)->map()->instance_type();
2672 return instance_type >= FIRST_WASM_OBJECT_TYPE &&
2673 instance_type <= LAST_WASM_OBJECT_TYPE;
2674}
2680
2681static bool I31Check(const WasmRef obj) { return IsSmi(*obj); }
2687
2688static bool StructCheck(const WasmRef obj) {
2689 if (IsSmi(*obj)) {
2690 return false;
2691 }
2692 if (!IsHeapObject(*obj)) return false;
2693 InstanceType instance_type = Cast<HeapObject>(obj)->map()->instance_type();
2694 return instance_type == WASM_STRUCT_TYPE;
2695}
2701
2702static bool ArrayCheck(const WasmRef obj) {
2703 if (IsSmi(*obj)) {
2704 return false;
2705 }
2706 if (!IsHeapObject(*obj)) return false;
2707 InstanceType instance_type = Cast<HeapObject>(obj)->map()->instance_type();
2708 return instance_type == WASM_ARRAY_TYPE;
2709}
2715
2716static bool StringCheck(const WasmRef obj) {
2717 if (IsSmi(*obj)) {
2718 return false;
2719 }
2720 if (!IsHeapObject(*obj)) return false;
2721 InstanceType instance_type = Cast<HeapObject>(obj)->map()->instance_type();
2722 return instance_type < FIRST_NONSTRING_TYPE;
2723}
2729
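The checkers above (EqCheck, I31Check, StructCheck, ArrayCheck, StringCheck) are meant to be plugged into the AbstractTypeCast template; the RefIsEq/RefIsI31/RefIsStruct/RefIsArray/RefIsString wrappers that instantiate it, as well as the null handling inside AbstractTypeCast itself, are not visible in this listing. A sketch of the assumed shape, with the V8-specific pieces reduced to plain values:

#include <functional>

// Editor's sketch, not part of this file; the exact null handling in
// AbstractTypeCast is an assumption here.
bool SketchAbstractTypeCast(bool obj_is_null, bool null_succeeds,
                            const std::function<bool()>& type_checker) {
  // Assumed: a null reference short-circuits to {null_succeeds}.
  if (obj_is_null) return null_succeeds;
  return type_checker();
}

// Assumed forwarding wrapper, analogous to the declared RefIsEq():
//   return AbstractTypeCast<&EqCheck>(isolate_, obj, obj_type, null_succeeds);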
2732 current_frame_.current_function_
2733 ? current_frame_.current_function_->GetFunctionIndex()
2734 : 0;
2736 DCHECK_LT(trap_function_index_, module_->functions.size());
2737
2738 trap_pc_ = trap_pc;
2739 thread()->Trap(trap_reason, trap_function_index_, static_cast<int>(trap_pc_),
2741}
2742
2744 const uint8_t*& code) {
2745 SetTrap(trap_reason,
2746 current_frame_.current_function_
2747 ? current_frame_.current_function_->GetPcFromTrapCode(code)
2748 : 0);
2749 RedirectCodeToUnwindHandler(code);
2750}
2751
2755
2756std::vector<WasmInterpreterStackEntry>
2758 // The current thread can be nullptr if we throw an exception before calling
2759 // {BeginExecution}.
2760 if (current_thread_) {
2762 current_thread_->GetActivation(frame_pointer);
2763 if (activation) {
2764 return activation->GetStackTrace();
2765 }
2766
2767 if (trap_function_index_ < 0) {
2768 // It is possible to have a trap before starting to execute Wasm code.
2769 // This can happen if the JSToWasmObject fails for a type mismatch in the
2770 // JSToWasmInterpreterWrapper. In this case we know that we were going to
2771 // execute the initial function {start_function_index_}.
2773 }
2774
2775 return {{trap_function_index_, static_cast<int>(trap_pc_)}};
2776 }
2777
2778 // It is possible to throw before entering a Wasm function, while converting
2779 // the args from JS to Wasm, with JSToWasmObject.
2780 return {{0, 0}};
2781}
2782
2784 int index) const {
2785 if (current_thread_) {
2787 current_thread_->GetActivation(frame_pointer);
2788 if (activation) {
2789 return activation->GetFunctionIndex(index);
2790 }
2791 }
2792 return -1;
2793}
2794
2796 trap_function_index_ = func_index;
2797 trap_pc_ = 0;
2798}
2799
2801 int64_t r0, double fp0) {
2802#ifdef V8_ENABLE_DRUMBRAKE_TRACING
2803 if (tracer_ && tracer_->ShouldTraceFunction(
2804 current_frame_.current_function_->GetFunctionIndex())) {
2805 shadow_stack_->Print(this, sp, current_frame_.current_stack_start_args_,
2806 current_frame_.current_stack_start_locals_,
2807 current_frame_.current_stack_start_stack_, reg_mode,
2808 r0, fp0);
2809 }
2810#endif // V8_ENABLE_DRUMBRAKE_TRACING
2811}
2812
2813#ifdef V8_ENABLE_DRUMBRAKE_TRACING
2814InterpreterTracer* WasmInterpreterRuntime::GetTracer() {
2815 if (tracer_ == nullptr) tracer_.reset(new InterpreterTracer(-1));
2816 return tracer_.get();
2817}
2818
2819void WasmInterpreterRuntime::Trace(const char* format, ...) {
2820 if (!current_frame_.current_function_) {
2821 // This can happen when the entry function is an imported JS function.
2822 return;
2823 }
2824 InterpreterTracer* tracer = GetTracer();
2825 if (tracer->ShouldTraceFunction(
2826 current_frame_.current_function_->GetFunctionIndex())) {
2827 va_list arguments;
2828 va_start(arguments, format);
2829 base::OS::VFPrint(tracer->file(), format, arguments);
2830 va_end(arguments);
2831 tracer->CheckFileSize();
2832 }
2833}
2834#endif // V8_ENABLE_DRUMBRAKE_TRACING
2835
2836// static
2838 Tagged<WasmInstanceObject> wasm_instance =
2839 WasmInterpreterObject::get_wasm_instance(interpreter_object);
2840 NativeModule* native_module = wasm_instance->module_object()->native_module();
2841 return ModuleWireBytes{native_module->wire_bytes()};
2842}
2843
2845 DirectHandle<Tuple2> interpreter_object)
2846 : isolate_(isolate),
2847 module_(WasmInterpreterObject::get_wasm_instance(*interpreter_object)
2848 ->module_object()
2849 ->module()),
2850 interpreter_(isolate, module_, GetBytes(*interpreter_object),
2851 direct_handle(WasmInterpreterObject::get_wasm_instance(
2852 *interpreter_object),
2853 isolate)) {}
2854
2856 WasmInterpreterThread* thread, bool called_from_js) {
2857 // If there were Ref values passed as arguments, they have already been
2858 // read in BeginExecution(), so we can re-enable GC.
2859 AllowHeapAllocation allow_gc;
2860
2861 bool finished = false;
2862 WasmInterpreterThread::State state = thread->state();
2864 return state;
2865 }
2866
2867 while (!finished) {
2868 state = ContinueExecution(thread, called_from_js);
2869 switch (state) {
2872 // Perfect, just break the switch and exit the loop.
2873 finished = true;
2874 break;
2876 if (!isolate_->has_exception()) {
2877 // An exception handler was found; keep running the loop.
2880 }
2881 break;
2882 }
2883 thread->Stop();
2884 [[fallthrough]];
2885 }
2887 // An exception happened, and the current activation was unwound
2888 // without hitting a local exception handler. All that remains to be
2889 // done is finish the activation and let the exception propagate.
2891 return state; // Either STOPPED or TRAPPED.
2893 thread->Stop();
2895 }
2896 }
2897 }
2898 return state;
2899}
2900
2902 WasmInterpreterThread* thread, Address frame_pointer, uint32_t func_index,
2903 const std::vector<WasmValue>& argument_values,
2904 std::vector<WasmValue>& return_values) {
2905 DCHECK_GT(module()->functions.size(), func_index);
2906 const FunctionSig* sig = module()->functions[func_index].sig;
2907 DCHECK_EQ(sig->parameter_count(), argument_values.size());
2908 DCHECK_EQ(sig->return_count(), return_values.size());
2909
2910 thread->StartExecutionTimer();
2911 interpreter_.BeginExecution(thread, func_index, frame_pointer,
2912 thread->NextFrameAddress(),
2913 thread->NextRefStackOffset(), argument_values);
2914
2915 WasmInterpreterThread::State state = RunExecutionLoop(thread, true);
2916 thread->StopExecutionTimer();
2917
2918 switch (state) {
2921 for (unsigned i = 0; i < sig->return_count(); ++i) {
2922 return_values[i] = interpreter_.GetReturnValue(i);
2923 }
2924 return true;
2925
2927 for (unsigned i = 0; i < sig->return_count(); ++i) {
2928 return_values[i] = WasmValue(0xDEADBEEF);
2929 }
2930 return false;
2931
2933 return false;
2934
2936 UNREACHABLE();
2937 }
2938}
2939
2941 Address frame_pointer, uint32_t func_index,
2942 uint8_t* interpreter_fp) {
2943 DCHECK_GT(module()->functions.size(), func_index);
2944
2945 interpreter_.BeginExecution(thread, func_index, frame_pointer,
2946 interpreter_fp);
2947 WasmInterpreterThread::State state = RunExecutionLoop(thread, false);
2948 return (state == WasmInterpreterThread::RUNNING ||
2950}
2951
2954 WasmInterpreterEntryFrame* frame =
2955 WasmInterpreterEntryFrame::cast(it.frame());
2956 DirectHandle<WasmInstanceObject> instance_obj(frame->wasm_instance(),
2957 isolate_);
2958 // Check that this is indeed the instance which is connected to this
2959 // interpreter.
2960 DCHECK_EQ(this,
2963 instance_obj->trusted_data(isolate_)->interpreter_object()))
2964 ->raw());
2965 return instance_obj;
2966}
2967
2968std::vector<WasmInterpreterStackEntry> InterpreterHandle::GetInterpretedStack(
2969 Address frame_pointer) {
2970 return interpreter_.GetInterpretedStack(frame_pointer);
2971}
2972
2974 int index) const {
2975 return interpreter_.GetFunctionIndex(frame_pointer, index);
2976}
2977
2980}
2981
2982} // namespace wasm
2983} // namespace internal
2984} // namespace v8