v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
wasm-objects.cc
Go to the documentation of this file.
1// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#if V8_TARGET_OS_LINUX
6#include <sys/mman.h>
7#include <sys/stat.h>
8// `sys/mman.h` defines `MAP_TYPE`, but `MAP_TYPE` also gets defined within V8.
9// Since we don't need `sys/mman.h`'s `MAP_TYPE`, we undefine it immediately
10// after the `#include`.
11#undef MAP_TYPE
12#endif // V8_TARGET_OS_LINUX
13
15
16#include <optional>
17
18#include "src/base/iterator.h"
19#include "src/base/vector.h"
22#include "src/debug/debug.h"
26#include "src/objects/oddball.h"
28#include "src/roots/roots-inl.h"
29#include "src/utils/utils.h"
35#include "src/wasm/stacks.h"
36#include "src/wasm/value-type.h"
44#include "src/wasm/wasm-value.h"
45
46#if V8_ENABLE_DRUMBRAKE
49#endif // V8_ENABLE_DRUMBRAKE
50
51// Needs to be last so macros do not get undefined.
53
54#define TRACE_IFT(...) \
55 do { \
56 if (false) PrintF(__VA_ARGS__); \
57 } while (false)
58
59namespace v8 {
60namespace internal {
61
62namespace {
63
64// Utilities for handling "uses" lists. We reserve one slot for the
65// used length, then store pairs of (instance, table_index).
66static constexpr int kReservedSlotOffset = 1;
67void SetUsedLength(Tagged<ProtectedWeakFixedArray> uses, int length) {
68 // {set} includes a DCHECK for sufficient capacity.
69 uses->set(0, Smi::FromInt(length));
70}
71int GetUsedLength(Tagged<ProtectedWeakFixedArray> uses) {
72 if (uses->length() == 0) return 0;
73 return Cast<Smi>(uses->get(0)).value();
74}
75void SetEntry(Tagged<ProtectedWeakFixedArray> uses, int slot_index,
76 Tagged<WasmTrustedInstanceData> user, int table_index) {
77 DCHECK(slot_index & 1);
78 uses->set(slot_index, MakeWeak(user));
79 uses->set(slot_index + 1, Smi::FromInt(table_index));
80}
81// These are two separate functions because GCMole produces bogus warnings
82// when we return a std::pair<A, B> and call it as `auto [a, b] = ...`.
84 Tagged<ProtectedWeakFixedArray> uses, int slot_index) {
85 DCHECK(slot_index & 1);
87 uses->get(slot_index).GetHeapObjectAssumeWeak());
88}
89int GetTableIndex(Tagged<ProtectedWeakFixedArray> uses, int slot_index) {
90 DCHECK(slot_index & 1);
91 return Cast<Smi>(uses->get(slot_index + 1)).value();
92}
93void CopyEntry(Tagged<ProtectedWeakFixedArray> dst, int dst_index,
94 Tagged<ProtectedWeakFixedArray> src, int src_index) {
95 DCHECK(dst_index & 1);
96 DCHECK(src_index & 1);
97 // There shouldn't be a reason to copy cleared entries.
98 DCHECK(
99 IsWasmTrustedInstanceData(src->get(src_index).GetHeapObjectAssumeWeak()));
100 DCHECK(IsSmi(src->get(src_index + 1)));
101 dst->set(dst_index, src->get(src_index));
102 dst->set(dst_index + 1, src->get(src_index + 1));
103}
104
105} // namespace
106
107// Import a few often used types from the wasm namespace.
110
111// static
113 Isolate* isolate, std::shared_ptr<wasm::NativeModule> native_module,
114 DirectHandle<Script> script) {
115 DirectHandle<Managed<wasm::NativeModule>> managed_native_module;
116 if (script->type() == Script::Type::kWasm) {
117 managed_native_module = direct_handle(
118 Cast<Managed<wasm::NativeModule>>(script->wasm_managed_native_module()),
119 isolate);
120 } else {
122 size_t memory_estimate =
125 managed_native_module = Managed<wasm::NativeModule>::From(
126 isolate, memory_estimate, std::move(native_module));
127 }
129 isolate->factory()->NewJSObject(isolate->wasm_module_constructor()));
130 module_object->set_managed_native_module(*managed_native_module);
131 module_object->set_script(*script);
132 return module_object;
133}
134
136 Isolate* isolate, DirectHandle<WasmModuleObject> module_object,
137 wasm::WireBytesRef ref, InternalizeString internalize) {
138 base::Vector<const uint8_t> wire_bytes =
139 module_object->native_module()->wire_bytes();
140 return ExtractUtf8StringFromModuleBytes(isolate, wire_bytes, ref,
141 internalize);
142}
143
145 Isolate* isolate, base::Vector<const uint8_t> wire_bytes,
146 wasm::WireBytesRef ref, InternalizeString internalize) {
148 wire_bytes.SubVector(ref.offset(), ref.end_offset());
149 // UTF8 validation happens at decode time.
150 DCHECK(unibrow::Utf8::ValidateEncoding(name_vec.begin(), name_vec.length()));
151 auto* factory = isolate->factory();
152 return internalize
153 ? factory->InternalizeUtf8String(
155 : factory
156 ->NewStringFromUtf8(base::Vector<const char>::cast(name_vec))
157 .ToHandleChecked();
158}
159
161 Isolate* isolate, DirectHandle<WasmModuleObject> module_object) {
162 const WasmModule* module = module_object->module();
163 if (!module->name.is_set()) return {};
164 return ExtractUtf8StringFromModuleBytes(isolate, module_object, module->name,
166}
167
169 Isolate* isolate, DirectHandle<WasmModuleObject> module_object,
170 uint32_t func_index) {
171 DCHECK_LT(func_index, module_object->module()->functions.size());
172 wasm::WireBytesRef name =
173 module_object->module()->lazily_generated_names.LookupFunctionName(
174 wasm::ModuleWireBytes(module_object->native_module()->wire_bytes()),
175 func_index);
176 if (!name.is_set()) return {};
177 return ExtractUtf8StringFromModuleBytes(isolate, module_object, name,
179}
180
182 int func_index) {
183 if (func_index == wasm::kAnonymousFuncIndex) {
184 return base::Vector<const uint8_t>({nullptr, 0});
185 }
186 DCHECK_GT(module()->functions.size(), func_index);
187 wasm::ModuleWireBytes wire_bytes(native_module()->wire_bytes());
188 wasm::WireBytesRef name_ref =
189 module()->lazily_generated_names.LookupFunctionName(wire_bytes,
190 func_index);
191 wasm::WasmName name = wire_bytes.GetNameOrNull(name_ref);
193}
194
197 wasm::ValueType type, wasm::CanonicalValueType canonical_type,
198 uint32_t initial, bool has_maximum, uint64_t maximum,
199 DirectHandle<Object> initial_value, wasm::AddressType address_type,
200 DirectHandle<WasmDispatchTable>* out_dispatch_table) {
201 CHECK(type.is_object_reference());
202
204 DirectHandle<FixedArray> entries = isolate->factory()->NewFixedArray(initial);
205 for (int i = 0; i < static_cast<int>(initial); ++i) {
206 entries->set(i, *initial_value);
207 }
208 bool is_function_table = canonical_type.IsFunctionType();
209 DirectHandle<WasmDispatchTable> dispatch_table =
210 is_function_table
211 ? isolate->factory()->NewWasmDispatchTable(initial, canonical_type)
213
215 isolate->factory()->undefined_value();
216 if (has_maximum) {
217 if (address_type == wasm::AddressType::kI32) {
218 DCHECK_GE(kMaxUInt32, maximum);
219 max = isolate->factory()->NewNumber(maximum);
220 } else {
221 max = BigInt::FromUint64(isolate, maximum);
222 }
223 }
224
225 DirectHandle<JSFunction> table_ctor(
226 isolate->native_context()->wasm_table_constructor(), isolate);
227 auto table_obj =
228 Cast<WasmTableObject>(isolate->factory()->NewJSObject(table_ctor));
230
231 if (!trusted_data.is_null()) {
232 table_obj->set_trusted_data(*trusted_data);
233 } else {
234 table_obj->clear_trusted_data();
235 }
236 table_obj->set_entries(*entries);
237 table_obj->set_current_length(initial);
238 table_obj->set_maximum_length(*max);
239 table_obj->set_raw_type(static_cast<int>(type.raw_bit_field()));
240 table_obj->set_address_type(address_type);
241 table_obj->set_padding_for_address_type_0(0);
242 table_obj->set_padding_for_address_type_1(0);
243#if TAGGED_SIZE_8_BYTES
244 table_obj->set_padding_for_address_type_2(0);
245#endif
246
247 if (is_function_table) {
248 DCHECK_EQ(table_obj->current_length(), dispatch_table->length());
249 table_obj->set_trusted_dispatch_table(*dispatch_table);
250 if (out_dispatch_table) *out_dispatch_table = dispatch_table;
251 } else {
252 table_obj->clear_trusted_dispatch_table();
253 }
254 return table_obj;
255}
256
258 uint32_t count, DirectHandle<Object> init_value) {
259 uint32_t old_size = table->current_length();
260 if (count == 0) return old_size; // Degenerate case: nothing to do.
261
262 // Check if growing by {count} is valid.
263 static_assert(wasm::kV8MaxWasmTableSize <= kMaxUInt32);
264 uint64_t static_max_size = wasm::max_table_size();
265 uint32_t max_size = static_cast<uint32_t>(std::min(
266 static_max_size, table->maximum_length_u64().value_or(static_max_size)));
267 DCHECK_LE(old_size, max_size);
268 if (count > max_size - old_size) return -1;
269
270 uint32_t new_size = old_size + count;
271 // Even with 2x over-allocation, there should not be an integer overflow.
272 static_assert(wasm::kV8MaxWasmTableSize <= kMaxInt / 2);
273 DCHECK_GE(kMaxInt, new_size);
274 int old_capacity = table->entries()->length();
275 if (new_size > static_cast<uint32_t>(old_capacity)) {
276 int grow = static_cast<int>(new_size) - old_capacity;
277 // Grow at least by the old capacity, to implement exponential growing.
278 grow = std::max(grow, old_capacity);
279 // Never grow larger than the max size.
280 grow = std::min(grow, static_cast<int>(max_size - old_capacity));
281 auto new_store = isolate->factory()->CopyFixedArrayAndGrow(
282 direct_handle(table->entries(), isolate), grow);
283 table->set_entries(*new_store, WriteBarrierMode::UPDATE_WRITE_BARRIER);
284 }
285
286 if (table->has_trusted_dispatch_table()) {
287 DirectHandle<WasmDispatchTable> dispatch_table(
288 table->trusted_dispatch_table(isolate), isolate);
289 DCHECK_EQ(old_size, dispatch_table->length());
290 DirectHandle<WasmDispatchTable> new_dispatch_table =
291 WasmDispatchTable::Grow(isolate, dispatch_table, new_size);
292 if (!dispatch_table.is_identical_to(new_dispatch_table)) {
293 table->set_trusted_dispatch_table(*new_dispatch_table);
294 }
295 DCHECK_EQ(new_size, table->trusted_dispatch_table(isolate)->length());
296
297#if V8_ENABLE_DRUMBRAKE
298 if (v8_flags.wasm_jitless) {
299 Tagged<ProtectedWeakFixedArray> uses = dispatch_table->protected_uses();
300 int used_length = GetUsedLength(uses);
301 for (int i = kReservedSlotOffset; i < used_length; i += 2) {
302 if (uses->get(i).IsCleared()) continue;
303 Tagged<WasmTrustedInstanceData> instance = GetInstance(uses, i);
304 if (instance->has_interpreter_object()) {
305 int table_index = GetTableIndex(uses, i);
307 isolate, direct_handle(instance->instance_object(), isolate),
308 table_index);
309 }
310 }
311 }
312#endif // V8_ENABLE_DRUMBRAKE
313 }
314
315 // Only update the current length after all allocations of sub-objects
316 // (in particular: the new dispatch table) are done, so that heap verification
317 // can assert that the dispatch table's length matches the table's length.
318 table->set_current_length(new_size);
319
320 for (uint32_t entry = old_size; entry < new_size; ++entry) {
321 WasmTableObject::Set(isolate, table, entry, init_value);
322 }
323 return old_size;
324}
325
328 DirectHandle<Object> entry, const char** error_message) {
329 const WasmModule* module = !table->has_trusted_data()
330 ? nullptr
331 : table->trusted_data(isolate)->module();
332 return wasm::JSToWasmObject(isolate, module, entry, table->type(module),
333 error_message);
334}
335
338 int entry_index,
339 DirectHandle<Object> entry) {
340 if (IsWasmNull(*entry, isolate)) {
341 table->ClearDispatchTable(entry_index); // Degenerate case.
342 table->entries()->set(entry_index, ReadOnlyRoots(isolate).wasm_null());
343 return;
344 }
345 DCHECK(IsWasmFuncRef(*entry));
347 direct_handle(Cast<WasmFuncRef>(*entry)->internal(isolate), isolate));
348
350 auto exported_function = Cast<WasmExportedFunction>(external);
351 auto func_data = exported_function->shared()->wasm_exported_function_data();
352 DirectHandle<WasmTrustedInstanceData> target_instance_data(
353 func_data->instance_data(), isolate);
354 int func_index = func_data->function_index();
355 const WasmModule* module = target_instance_data->module();
356 SBXCHECK_BOUNDS(func_index, module->functions.size());
357 auto* wasm_function = module->functions.data() + func_index;
358 UpdateDispatchTable(isolate, table, entry_index, wasm_function,
359 target_instance_data
360#if V8_ENABLE_DRUMBRAKE
361 ,
362 func_index
363#endif // V8_ENABLE_DRUMBRAKE
364 );
365 } else if (WasmJSFunction::IsWasmJSFunction(*external)) {
366 UpdateDispatchTable(isolate, table, entry_index,
367 Cast<WasmJSFunction>(external));
368 } else {
370 UpdateDispatchTable(isolate, table, entry_index,
371 Cast<WasmCapiFunction>(external));
372 }
373 table->entries()->set(entry_index, *entry);
374}
375
376// Note: This needs to be handlified because it can call {NewWasmImportData}.
378 uint32_t index, DirectHandle<Object> entry) {
379 // Callers need to perform bounds checks, type check, and error handling.
380 DCHECK(table->is_in_bounds(index));
381
382 DirectHandle<FixedArray> entries(table->entries(), isolate);
383 // The FixedArray is addressed with int's.
384 int entry_index = static_cast<int>(index);
385
386 wasm::ValueType unsafe_type = table->unsafe_type();
387 if (unsafe_type.has_index()) {
388 DCHECK(table->has_trusted_data());
389 const wasm::WasmModule* module = table->trusted_data(isolate)->module();
390 if (module->has_signature(table->type(module).ref_index())) {
391 SetFunctionTableEntry(isolate, table, entry_index, entry);
392 return;
393 }
394 entries->set(entry_index, *entry);
395 return;
396 }
397 switch (unsafe_type.generic_kind()) {
398 case wasm::GenericKind::kExtern:
399 case wasm::GenericKind::kString:
400 case wasm::GenericKind::kStringViewWtf8:
401 case wasm::GenericKind::kStringViewWtf16:
402 case wasm::GenericKind::kStringViewIter:
403 case wasm::GenericKind::kEq:
404 case wasm::GenericKind::kStruct:
405 case wasm::GenericKind::kArray:
406 case wasm::GenericKind::kAny:
407 case wasm::GenericKind::kI31:
408 case wasm::GenericKind::kNone:
409 case wasm::GenericKind::kNoFunc:
410 case wasm::GenericKind::kNoExtern:
411 case wasm::GenericKind::kExn:
412 case wasm::GenericKind::kNoExn:
413 case wasm::GenericKind::kCont:
414 case wasm::GenericKind::kNoCont:
415 entries->set(entry_index, *entry);
416 return;
417 case wasm::GenericKind::kFunc:
418 SetFunctionTableEntry(isolate, table, entry_index, entry);
419 return;
420 case wasm::GenericKind::kBottom:
421 case wasm::GenericKind::kTop:
422 case wasm::GenericKind::kVoid:
423 case wasm::GenericKind::kExternString:
424 break;
425 }
426 UNREACHABLE();
427}
428
431 uint32_t index) {
432 DirectHandle<FixedArray> entries(table->entries(), isolate);
433 // Callers need to perform bounds checks and error handling.
434 DCHECK(table->is_in_bounds(index));
435
436 // The FixedArray is addressed with int's.
437 int entry_index = static_cast<int>(index);
438
439 DirectHandle<Object> entry(entries->get(entry_index), isolate);
440
441 if (IsWasmNull(*entry, isolate)) return entry;
442 if (IsWasmFuncRef(*entry)) return entry;
443
444 wasm::ValueType unsafe_type = table->unsafe_type();
445 if (unsafe_type.has_index()) {
446 DCHECK(table->has_trusted_data());
447 const WasmModule* module = table->trusted_data(isolate)->module();
448 wasm::ModuleTypeIndex element_type = table->type(module).ref_index();
449 if (module->has_array(element_type) || module->has_struct(element_type)) {
450 return entry;
451 }
452 DCHECK(module->has_signature(element_type));
453 // Fall through.
454 } else {
455 switch (unsafe_type.generic_kind()) {
456 case wasm::GenericKind::kStringViewWtf8:
457 case wasm::GenericKind::kStringViewWtf16:
458 case wasm::GenericKind::kStringViewIter:
459 case wasm::GenericKind::kExtern:
460 case wasm::GenericKind::kString:
461 case wasm::GenericKind::kEq:
462 case wasm::GenericKind::kI31:
463 case wasm::GenericKind::kStruct:
464 case wasm::GenericKind::kArray:
465 case wasm::GenericKind::kAny:
466 case wasm::GenericKind::kNone:
467 case wasm::GenericKind::kNoFunc:
468 case wasm::GenericKind::kNoExtern:
469 case wasm::GenericKind::kExn:
470 case wasm::GenericKind::kNoExn:
471 case wasm::GenericKind::kCont:
472 case wasm::GenericKind::kNoCont:
473 return entry;
474 case wasm::GenericKind::kFunc:
475 // Placeholder; handled below.
476 break;
477 case wasm::GenericKind::kBottom:
478 case wasm::GenericKind::kTop:
479 case wasm::GenericKind::kVoid:
480 case wasm::GenericKind::kExternString:
481 UNREACHABLE();
482 }
483 }
484
485 // {entry} is not a valid entry in the table. It has to be a placeholder
486 // for lazy initialization.
487 DirectHandle<Tuple2> tuple = Cast<Tuple2>(entry);
488 auto trusted_instance_data = direct_handle(
489 Cast<WasmInstanceObject>(tuple->value1())->trusted_data(isolate),
490 isolate);
491 int function_index = Cast<Smi>(tuple->value2()).value();
492
493 // Create a WasmInternalFunction and WasmFuncRef for the function if it does
494 // not exist yet, and store it in the table.
497 isolate, trusted_instance_data, function_index);
498 entries->set(entry_index, *func_ref);
499 return func_ref;
500}
501
503 DirectHandle<WasmTableObject> table, uint32_t start,
504 DirectHandle<Object> entry, uint32_t count) {
505 // Bounds checks must be done by the caller.
506 DCHECK_LE(start, table->current_length());
507 DCHECK_LE(count, table->current_length());
508 DCHECK_LE(start + count, table->current_length());
509
510 for (uint32_t i = 0; i < count; i++) {
511 WasmTableObject::Set(isolate, table, start + i, entry);
512 }
513}
514
515#if V8_ENABLE_SANDBOX || DEBUG
516bool FunctionSigMatchesTable(wasm::CanonicalTypeIndex sig_id,
517 wasm::CanonicalValueType table_type) {
518 DCHECK(table_type.is_object_reference());
519 // When in-sandbox data is corrupted, we can't trust the statically
520 // checked types; to prevent sandbox escapes, we have to verify actual
521 // types before installing the dispatch table entry. There are three
522 // alternative success conditions:
523 // (1) Generic "funcref" tables can hold any function entry.
524 if (!table_type.has_index() &&
525 table_type.generic_kind() == wasm::GenericKind::kFunc) {
526 return true;
527 }
528 // (2) Most function types are expected to be final, so they can be compared
529 // cheaply by canonicalized index equality.
530 wasm::CanonicalTypeIndex canonical_table_type = table_type.ref_index();
531 if (V8_LIKELY(sig_id == canonical_table_type)) return true;
532 // (3) In the remaining cases, perform the full subtype check.
534 sig_id, canonical_table_type);
535}
536#endif // V8_ENABLE_SANDBOX || DEBUG
537
538// static
540 Isolate* isolate, DirectHandle<WasmTableObject> table, int entry_index,
541 const wasm::WasmFunction* func,
542 DirectHandle<WasmTrustedInstanceData> target_instance_data
543#if V8_ENABLE_DRUMBRAKE
544 ,
545 int target_func_index
546#endif // V8_ENABLE_DRUMBRAKE
547) {
548 DirectHandle<TrustedObject> implicit_arg =
549 func->imported
550 // The function in the target instance was imported. Use its imports
551 // table to look up the ref.
553 target_instance_data->dispatch_table_for_imports()
554 ->implicit_arg(func->func_index)),
555 isolate)
556 // For wasm functions, just pass the target instance data.
557 : target_instance_data;
558 WasmCodePointer call_target =
559 target_instance_data->GetCallTarget(func->func_index);
560
561#if V8_ENABLE_DRUMBRAKE
562 if (target_func_index <
563 static_cast<int>(
564 target_instance_data->module()->num_imported_functions)) {
565 target_func_index = target_instance_data->imported_function_indices()->get(
566 target_func_index);
567 }
568#endif // V8_ENABLE_DRUMBRAKE
569
570 const WasmModule* target_module = target_instance_data->module();
572 target_module->canonical_sig_id(func->sig_index);
573 DirectHandle<WasmDispatchTable> dispatch_table(
574 table->trusted_dispatch_table(isolate), isolate);
575 SBXCHECK(FunctionSigMatchesTable(sig_id, dispatch_table->table_type()));
576
577 if (v8_flags.wasm_generic_wrapper && IsWasmImportData(*implicit_arg)) {
578 auto import_data = Cast<WasmImportData>(implicit_arg);
579 DirectHandle<WasmImportData> new_import_data =
580 isolate->factory()->NewWasmImportData(import_data);
581 new_import_data->set_call_origin(*dispatch_table);
582 new_import_data->set_table_slot(entry_index);
583 implicit_arg = new_import_data;
584 }
585 if (target_instance_data->dispatch_table_for_imports()->IsAWrapper(
586 func->func_index)) {
587 wasm::WasmCodeRefScope code_ref_scope;
588 uint64_t signature_hash = wasm::GetTypeCanonicalizer()
590 ->signature_hash();
591 dispatch_table->SetForWrapper(
592 entry_index, *implicit_arg,
594 call_target, signature_hash),
595 sig_id, signature_hash,
596#if V8_ENABLE_DRUMBRAKE
597 target_func_index,
598#endif
599 wasm::GetWasmImportWrapperCache()->FindWrapper(call_target),
601 } else {
602 dispatch_table->SetForNonWrapper(entry_index, *implicit_arg, call_target,
603 sig_id,
604#if V8_ENABLE_DRUMBRAKE
605 target_func_index,
606#endif
608 }
609
610#if V8_ENABLE_DRUMBRAKE
611 if (v8_flags.wasm_jitless) {
612 Tagged<ProtectedWeakFixedArray> uses = dispatch_table->protected_uses();
613 int used_length = GetUsedLength(uses);
614 for (int i = kReservedSlotOffset; i < used_length; i += 2) {
615 if (uses->get(i).IsCleared()) continue;
616 Tagged<WasmTrustedInstanceData> instance = GetInstance(uses, i);
617 if (instance->has_interpreter_object()) {
618 int table_index = GetTableIndex(uses, i);
620 isolate, direct_handle(instance->instance_object(), isolate),
621 table_index);
622 }
623 }
624 }
625#endif // V8_ENABLE_DRUMBRAKE
626}
627
628// static
630 Isolate* isolate, DirectHandle<WasmTableObject> table, int entry_index,
632 Tagged<WasmJSFunctionData> function_data =
633 function->shared()->wasm_js_function_data();
634 wasm::CanonicalTypeIndex sig_id = function_data->sig_index();
635
636 wasm::WasmCodeRefScope code_ref_scope;
637
638 DirectHandle<WasmDispatchTable> dispatch_table(
639 table->trusted_dispatch_table(isolate), isolate);
640 SBXCHECK(FunctionSigMatchesTable(sig_id, dispatch_table->table_type()));
641
643 Cast<WasmImportData>(function_data->internal()->implicit_arg()), isolate);
644 WasmCodePointer code_pointer = function_data->internal()->call_target();
646 wasm::WasmCode* wasm_code = cache->FindWrapper(code_pointer);
649 uint64_t signature_hash;
650 if (wasm_code) {
651 DCHECK_EQ(wasm_code->instruction_start(), call_target);
652 signature_hash = wasm_code->signature_hash();
653 } else {
654 // The function's code_pointer is not a compiled wrapper.
655 // Opportunistically check if a matching wrapper has already been
656 // compiled, but otherwise don't eagerly compile it now.
657 const wasm::CanonicalSig* sig =
659 sig_id);
660 signature_hash = sig->signature_hash();
661 wasm::ResolvedWasmImport resolved({}, -1, function, sig, sig_id,
663 wasm::ImportCallKind kind = resolved.kind();
664 DirectHandle<JSReceiver> callable = resolved.callable();
666 int expected_arity = static_cast<int>(sig->parameter_count());
668 expected_arity = Cast<JSFunction>(callable)
669 ->shared()
670 ->internal_formal_parameter_count_without_receiver();
671 }
672 wasm::Suspend suspend = function_data->GetSuspend();
673 wasm_code = cache->MaybeGet(kind, sig_id, expected_arity, suspend);
674 if (wasm_code) {
675 call_target = wasm_code->instruction_start();
676 DCHECK_EQ(sig->signature_hash(), wasm_code->signature_hash());
677 } else {
678 // We still don't have a compiled wrapper. Allocate a new import_data
679 // so we can store the proper call_origin for later wrapper tier-up.
680 DCHECK(call_target ==
681 Builtins::EntryOf(Builtin::kWasmToJsWrapperAsm, isolate) ||
682 call_target == Builtins::EntryOf(
683 Builtin::kWasmToJsWrapperInvalidSig, isolate));
684 import_data = isolate->factory()->NewWasmImportData(
685 callable, suspend, MaybeDirectHandle<WasmTrustedInstanceData>{}, sig);
686 import_data->SetIndexInTableAsCallOrigin(*dispatch_table, entry_index);
687 }
688 }
689
690 DCHECK(wasm_code ||
691 call_target ==
692 Builtins::EntryOf(Builtin::kWasmToJsWrapperAsm, isolate) ||
693 call_target ==
694 Builtins::EntryOf(Builtin::kWasmToJsWrapperInvalidSig, isolate));
695 dispatch_table->SetForWrapper(entry_index, *import_data, call_target, sig_id,
696 signature_hash,
697#if V8_ENABLE_DRUMBRAKE
698 WasmDispatchTable::kInvalidFunctionIndex,
699#endif // V8_ENABLE_DRUMBRAKE
701}
702
703// static
705 Isolate* isolate, DirectHandle<WasmTableObject> table, int entry_index,
706 DirectHandle<WasmCapiFunction> capi_function) {
708 capi_function->shared()->wasm_capi_function_data(), isolate);
709 const wasm::CanonicalSig* sig = func_data->sig();
711 wasm::CanonicalTypeIndex sig_index = func_data->sig_index();
712
713 wasm::WasmCodeRefScope code_ref_scope;
716 int param_count = static_cast<int>(sig->parameter_count());
717 wasm::WasmCode* wasm_code =
718 cache->MaybeGet(kind, sig_index, param_count, wasm::kNoSuspend);
719 if (wasm_code == nullptr) {
722 {
724 wasm::WasmImportWrapperCache::CacheKey key(kind, sig_index, param_count,
726 wasm_code = cache_scope.AddWrapper(
728 sig->signature_hash());
729 }
730 // To avoid lock order inversion, code printing must happen after the
731 // end of the {cache_scope}.
732 wasm_code->MaybePrint();
733 isolate->counters()->wasm_generated_code_size()->Increment(
734 wasm_code->instructions().length());
735 isolate->counters()->wasm_reloc_size()->Increment(
736 wasm_code->reloc_info().length());
737 }
738 Tagged<HeapObject> implicit_arg = func_data->internal()->implicit_arg();
739 Address call_target = wasm_code->instruction_start();
740 Tagged<WasmDispatchTable> dispatch_table =
741 table->trusted_dispatch_table(isolate);
742 SBXCHECK(FunctionSigMatchesTable(sig_index, dispatch_table->table_type()));
743 dispatch_table->SetForWrapper(entry_index, implicit_arg, call_target,
744 sig_index, wasm_code->signature_hash(),
745#if V8_ENABLE_DRUMBRAKE
746 WasmDispatchTable::kInvalidFunctionIndex,
747#endif // V8_ENABLE_DRUMBRAKE
749}
750
753 Isolate* isolate = Isolate::Current();
754 Tagged<WasmDispatchTable> dispatch_table = trusted_dispatch_table(isolate);
755 dispatch_table->Clear(index, WasmDispatchTable::kExistingEntry);
756#if V8_ENABLE_DRUMBRAKE
757 if (v8_flags.wasm_jitless) {
758 Tagged<ProtectedWeakFixedArray> uses = dispatch_table->protected_uses();
759 int used_length = GetUsedLength(uses);
760 for (int i = kReservedSlotOffset; i < used_length; i += 2) {
761 if (uses->get(i).IsCleared()) continue;
762 Tagged<WasmTrustedInstanceData> non_shared_instance_data =
763 GetInstance(uses, i);
764 if (non_shared_instance_data->has_interpreter_object()) {
765 int table_index = GetTableIndex(uses, i);
766 DirectHandle<WasmInstanceObject> instance_handle(
767 non_shared_instance_data->instance_object(), isolate);
769 isolate, instance_handle, table_index, index);
770 }
771 }
772 }
773#endif // V8_ENABLE_DRUMBRAKE
774}
775
776// static
778 Isolate* isolate, DirectHandle<WasmTableObject> table, int entry_index,
779 DirectHandle<WasmTrustedInstanceData> trusted_instance_data,
780 int func_index) {
781 // Put (instance, func_index) as a Tuple2 into the entry_index.
782 // The {WasmExportedFunction} will be created lazily.
783 // Allocate directly in old space as the tuples are typically long-lived, and
784 // we create many of them, which would result in lots of GC when initializing
785 // large tables.
786 // TODO(42204563): Avoid crashing if the instance object is not available.
787 CHECK(trusted_instance_data->has_instance_object());
788 DirectHandle<Tuple2> tuple = isolate->factory()->NewTuple2(
789 direct_handle(trusted_instance_data->instance_object(), isolate),
790 direct_handle(Smi::FromInt(func_index), isolate), AllocationType::kOld);
791 table->entries()->set(entry_index, *tuple);
792}
793
794// static
796 Isolate* isolate, DirectHandle<WasmTableObject> table, int entry_index,
797 bool* is_valid, bool* is_null,
799 int* function_index, MaybeDirectHandle<WasmJSFunction>* maybe_js_function) {
800#if DEBUG
801 if (table->has_trusted_data()) {
802 const wasm::WasmModule* module = table->trusted_data(isolate)->module();
803 DCHECK(wasm::IsSubtypeOf(table->type(module), wasm::kWasmFuncRef, module));
804 } else {
805 // A function table defined outside a module may only have type exactly
806 // {funcref}.
807 DCHECK(table->unsafe_type() == wasm::kWasmFuncRef);
808 }
809 DCHECK_LT(entry_index, table->current_length());
810#endif
811 // We initialize {is_valid} with {true}. We may change it later.
812 *is_valid = true;
813 DirectHandle<Object> element(table->entries()->get(entry_index), isolate);
814
815 *is_null = IsWasmNull(*element, isolate);
816 if (*is_null) return;
817
818 if (IsWasmFuncRef(*element)) {
820 Cast<WasmFuncRef>(*element)->internal(isolate), isolate};
822 }
824 auto target_func = Cast<WasmExportedFunction>(element);
825 auto func_data = Cast<WasmExportedFunctionData>(
826 target_func->shared()->wasm_exported_function_data());
827 *instance_data = direct_handle(func_data->instance_data(), isolate);
828 *function_index = func_data->function_index();
829 *maybe_js_function = MaybeDirectHandle<WasmJSFunction>();
830 return;
831 }
832 if (WasmJSFunction::IsWasmJSFunction(*element)) {
834 *maybe_js_function = Cast<WasmJSFunction>(element);
835 return;
836 }
837 if (IsTuple2(*element)) {
838 auto tuple = Cast<Tuple2>(element);
840 Cast<WasmInstanceObject>(tuple->value1())->trusted_data(isolate),
841 isolate);
842 *function_index = Cast<Smi>(tuple->value2()).value();
843 *maybe_js_function = MaybeDirectHandle<WasmJSFunction>();
844 return;
845 }
846 *is_valid = false;
847}
848
850 Isolate* isolate, DirectHandle<JSReceiver> callable) {
851 DirectHandle<JSFunction> suspending_ctor(
852 isolate->native_context()->wasm_suspending_constructor(), isolate);
853 auto suspending_obj = Cast<WasmSuspendingObject>(
854 isolate->factory()->NewJSObject(suspending_ctor));
855 suspending_obj->set_callable(*callable);
856 return suspending_obj;
857}
858
859namespace {
860
861void SetInstanceMemory(Tagged<WasmTrustedInstanceData> trusted_instance_data,
862 Tagged<JSArrayBuffer> buffer, int memory_index) {
864 const WasmModule* module = trusted_instance_data->module();
865 const wasm::WasmMemory& memory = module->memories[memory_index];
866
867 bool is_wasm_module = module->origin == wasm::kWasmOrigin;
868 bool use_trap_handler = memory.bounds_checks == wasm::kTrapHandler;
869 // Asm.js does not use trap handling.
870 CHECK_IMPLIES(use_trap_handler, is_wasm_module);
871 // ArrayBuffers allocated for Wasm do always have a BackingStore.
872 std::shared_ptr<BackingStore> backing_store = buffer->GetBackingStore();
873 CHECK_IMPLIES(is_wasm_module, backing_store);
874 CHECK_IMPLIES(is_wasm_module, backing_store->is_wasm_memory());
875 // Wasm modules compiled to use the trap handler don't have bounds checks,
876 // so they must have a memory that has guard regions.
877 // Note: This CHECK can fail when in-sandbox corruption modified a
878 // WasmMemoryObject. We currently believe that this would at worst
879 // corrupt the contents of other Wasm memories or ArrayBuffers, but having
880 // this CHECK in release mode is nice as an additional layer of defense.
881 CHECK_IMPLIES(use_trap_handler, backing_store->has_guard_regions());
882 // We checked this before, but a malicious worker thread with an in-sandbox
883 // corruption primitive could have modified it since then.
884 size_t byte_length = buffer->GetByteLength();
885 SBXCHECK_GE(byte_length, memory.min_memory_size);
886
887 trusted_instance_data->SetRawMemory(
888 memory_index, reinterpret_cast<uint8_t*>(buffer->backing_store()),
889 byte_length);
890
891#if V8_ENABLE_DRUMBRAKE
892 if (v8_flags.wasm_jitless &&
893 trusted_instance_data->has_interpreter_object()) {
894 AllowHeapAllocation allow_heap;
895 Isolate* isolate = Isolate::Current();
896 HandleScope scope(isolate);
898 direct_handle(trusted_instance_data->instance_object(), isolate));
899 }
900#endif // V8_ENABLE_DRUMBRAKE
901}
902
903} // namespace
904
// Factory: wraps an existing JSArrayBuffer in a new WasmMemoryObject and
// links the buffer back to its owner via a private symbol property.
// NOTE(review): the declarator line was lost in extraction; presumably
// `DirectHandle<WasmMemoryObject> WasmMemoryObject::New(` — confirm upstream.
    Isolate* isolate, DirectHandle<JSArrayBuffer> buffer, int maximum,
    wasm::AddressType address_type) {
  DirectHandle<JSFunction> memory_ctor(
      isolate->native_context()->wasm_memory_constructor(), isolate);

  // Memory objects are long-lived; allocate in old space.
  auto memory_object = Cast<WasmMemoryObject>(
      isolate->factory()->NewJSObject(memory_ctor, AllocationType::kOld));
  memory_object->set_array_buffer(*buffer);
  memory_object->set_maximum_pages(maximum);
  memory_object->set_address_type(address_type);
  // Zero the padding fields explicitly for deterministic object contents.
  memory_object->set_padding_for_address_type_0(0);
  memory_object->set_padding_for_address_type_1(0);
#if TAGGED_SIZE_8_BYTES
  memory_object->set_padding_for_address_type_2(0);
#endif
  memory_object->set_instances(ReadOnlyRoots{isolate}.empty_weak_array_list());

  if (buffer->is_resizable_by_js()) {
    memory_object->FixUpResizableArrayBuffer(*buffer);
  }

  std::shared_ptr<BackingStore> backing_store = buffer->GetBackingStore();
  if (buffer->is_shared()) {
    // Only Wasm memory can be shared (in contrast to asm.js memory).
    CHECK(backing_store && backing_store->is_wasm_memory());
    backing_store->AttachSharedWasmMemoryObject(isolate, memory_object);
  } else if (backing_store) {
    CHECK(!backing_store->is_shared());
  }

  // Memorize a link from the JSArrayBuffer to its owning WasmMemoryObject
  // instance.
  DirectHandle<Symbol> symbol =
      isolate->factory()->array_buffer_wasm_memory_symbol();
  Object::SetProperty(isolate, buffer, symbol, memory_object).Check();

  return memory_object;
}
944
// Allocates a fresh Wasm backing store of {initial} pages and wraps it in a
// WasmMemoryObject. Returns an empty handle if the allocation fails or
// {initial} exceeds the engine limit.
// NOTE(review): the declarator line was lost in extraction; presumably
// `MaybeDirectHandle<WasmMemoryObject> WasmMemoryObject::New(` — confirm.
    Isolate* isolate, int initial, int maximum, SharedFlag shared,
    wasm::AddressType address_type) {
  bool has_maximum = maximum != kNoMaximum;

  // Engine-wide page limit; depends on 32- vs 64-bit address type.
  int engine_maximum = address_type == wasm::AddressType::kI64
                           ? static_cast<int>(wasm::max_mem64_pages())
                           : static_cast<int>(wasm::max_mem32_pages());

  if (initial > engine_maximum) return {};

#ifdef V8_TARGET_ARCH_32_BIT
  // On 32-bit platforms we need an heuristic here to balance overall memory
  // and address space consumption.
  constexpr int kGBPages = 1024 * 1024 * 1024 / wasm::kWasmPageSize;
  // We allocate the smallest of the following sizes, but at least the initial
  // size:
  // 1) the module-defined maximum;
  // 2) 1GB;
  // 3) the engine maximum;
  int allocation_maximum = std::min(kGBPages, engine_maximum);
  int heuristic_maximum;
  if (initial > kGBPages) {
    // We always allocate at least the initial size.
    heuristic_maximum = initial;
  } else if (has_maximum) {
    // We try to reserve the maximum, but at most the allocation_maximum to
    // avoid OOMs.
    heuristic_maximum = std::min(maximum, allocation_maximum);
  } else if (shared == SharedFlag::kShared) {
    // If shared memory has no maximum, we use the allocation_maximum as an
    // implicit maximum.
    heuristic_maximum = allocation_maximum;
  } else {
    // If non-shared memory has no maximum, we only allocate the initial size
    // and then grow with realloc.
    heuristic_maximum = initial;
  }
#else
  int heuristic_maximum =
      has_maximum ? std::min(engine_maximum, maximum) : engine_maximum;
#endif

  std::unique_ptr<BackingStore> backing_store =
      BackingStore::AllocateWasmMemory(isolate, initial, heuristic_maximum,
                                       address_type == wasm::AddressType::kI32
  // NOTE(review): two lines lost in extraction here (the memory-flag operands
  // of this ternary) — confirm against upstream wasm-objects.cc.
                                       shared);

  if (!backing_store) return {};

  // NOTE(review): the `DirectHandle<JSArrayBuffer> buffer =` line was lost in
  // extraction here — confirm against upstream.
      shared == SharedFlag::kShared
          ? isolate->factory()->NewJSSharedArrayBuffer(std::move(backing_store))
          : isolate->factory()->NewJSArrayBuffer(std::move(backing_store));

  return New(isolate, buffer, maximum, address_type);
}
1004
// Registers {memory} as memory {memory_index_in_instance} of the given
// instance (and its shared part, if present), and records the instance in the
// memory's weak instance list so later grows can update it.
// NOTE(review): the declarator line was lost in extraction; presumably
// `void WasmMemoryObject::UseInInstance(` — confirm upstream.
    Isolate* isolate, DirectHandle<WasmMemoryObject> memory,
    DirectHandle<WasmTrustedInstanceData> trusted_instance_data,
    DirectHandle<WasmTrustedInstanceData> shared_trusted_instance_data,
    int memory_index_in_instance) {
  SetInstanceMemory(*trusted_instance_data, memory->array_buffer(),
                    memory_index_in_instance);
  if (!shared_trusted_instance_data.is_null()) {
    SetInstanceMemory(*shared_trusted_instance_data, memory->array_buffer(),
                      memory_index_in_instance);
  }
  // Remember the instance weakly, so UpdateInstances() can find it later
  // without keeping it alive.
  DirectHandle<WeakArrayList> instances{memory->instances(), isolate};
  auto weak_instance_object = MaybeObjectDirectHandle::Weak(
      trusted_instance_data->instance_object(), isolate);
  instances = WeakArrayList::Append(isolate, instances, weak_instance_object);
  memory->set_instances(*instances);
}
1022
// Installs {new_buffer} as this memory's ArrayBuffer and, when the data
// pointer or length may have changed, propagates the change to all instances.
// NOTE(review): the declarator line was lost in extraction; presumably
// `void WasmMemoryObject::SetNewBuffer(Isolate* isolate,` — confirm upstream.
    Tagged<JSArrayBuffer> new_buffer) {
  // NOTE(review): one line lost in extraction here (likely
  // `DisallowGarbageCollection no_gc;`) — confirm against upstream.
  const bool new_buffer_is_resizable_by_js = new_buffer->is_resizable_by_js();
  if (new_buffer_is_resizable_by_js) {
    FixUpResizableArrayBuffer(*new_buffer);
  }
  Tagged<JSArrayBuffer> old_buffer = array_buffer();
  set_array_buffer(new_buffer);
  // Iterating and updating all instances is a slow operation, and is required
  // when the pointer to memory or size of memory changes.
  //
  // When refreshing the buffer for changing the resizability of the JS-exposed
  // (S)AB, both the data pointer and size stay the same, only the JS object
  // changes.
  //
  // When refreshing the buffer for growing a memory exposing a fixed-length
  // (S)AB (i.e. both the old and new buffers are !is_resizable_by_js()), the
  // size is changing, and updating the instances is needed.
  //
  // This function is never called in a way such that both the old and new
  // (S)ABs are resizable. Once a Wasm memory exposes a resizable (S)AB,
  // changing the size does not refresh the buffer.
  DCHECK(!old_buffer->is_resizable_by_js() ||
         !new_buffer->is_resizable_by_js());
  if (!old_buffer->is_resizable_by_js() && !new_buffer_is_resizable_by_js) {
    UpdateInstances(isolate);
  }
}
1052
// Pushes this memory's current buffer (base pointer and size) into every live
// instance that uses it.
// NOTE(review): the declarator was lost in extraction; presumably
// `void WasmMemoryObject::UpdateInstances(Isolate* isolate) {` — confirm.
  Tagged<WeakArrayList> instances = this->instances();
  for (int i = 0, len = instances->length(); i < len; ++i) {
    Tagged<MaybeObject> elem = instances->Get(i);
    // Skip instances that have been garbage-collected.
    if (elem.IsCleared()) continue;
  // NOTE(review): lines lost in extraction here (extracting the
  // WasmInstanceObject and declaring `trusted_data` /
  // `memory_objects`) — confirm against upstream.
        instance_object->trusted_data(isolate);
    // TODO(clemens): Avoid the iteration by also remembering the memory index
    // if we ever see larger numbers of memories.
    int num_memories = memory_objects->length();
    for (int mem_idx = 0; mem_idx < num_memories; ++mem_idx) {
      if (memory_objects->get(mem_idx) == *this) {
        SetInstanceMemory(trusted_data, array_buffer(), mem_idx);
      }
    }
  }
}
1074
// Adjusts {new_buffer} so it can be exposed to JS as a resizable (S)AB:
// sets its max_byte_length from this memory's maximum_pages(), clamping on
// 32-bit targets where 65536 pages would not be representable.
// NOTE(review): the declarator was lost in extraction; presumably
// `void WasmMemoryObject::FixUpResizableArrayBuffer(` — confirm upstream.
    Tagged<JSArrayBuffer> new_buffer) {
  // NOTE(review): one line lost in extraction here — confirm upstream.
  DCHECK(new_buffer->is_resizable_by_js());
  // NOTE(review): one line lost in extraction here (likely a comment
  // explaining the shared case) — confirm upstream.
  if (new_buffer->is_shared()) new_buffer->set_byte_length(0);
  // Unlike JS-created resizable buffers, Wasm memories' backing store maximum
  // may differ from the exposed maximum.
  uintptr_t max_byte_length;
  if constexpr (kSystemPointerSize == 4) {
    // The spec says the maximum number of pages for 32-bit memories is 65536,
    // which means the maximum byte size is 65536 * 65536 (= 2^32), which is
    // UINT32_MAX+1. BackingStores, ArrayBuffers, and TypedArrays represent byte
    // lengths as uintptr_t, and UINT32_MAX+1 is not representable on 32bit.
    //
    // As a willful violation and gross hack, if we're exposing a Wasm memory
    // with an unrepresentable maximum, subtract one page size.
    uint64_t max_byte_length64 =
        static_cast<uint64_t>(maximum_pages()) * wasm::kWasmPageSize;
    if (max_byte_length64 > std::numeric_limits<uintptr_t>::max()) {
      max_byte_length64 =
          std::numeric_limits<uintptr_t>::max() - wasm::kWasmPageSize;
      CHECK(new_buffer->GetBackingStore()->max_byte_length() <=
            max_byte_length64);
    }
    max_byte_length = static_cast<uintptr_t>(max_byte_length64);
  } else {
    max_byte_length = maximum_pages() * wasm::kWasmPageSize;
  }
  new_buffer->set_max_byte_length(max_byte_length);
}
1106
// static
// Detaches the memory's current JSArrayBuffer and installs a fresh one around
// {new_backing_store}; returns the new buffer.
// NOTE(review): the declarator was lost in extraction; presumably
// `DirectHandle<JSArrayBuffer> WasmMemoryObject::RefreshBuffer(` — confirm.
    Isolate* isolate, DirectHandle<WasmMemoryObject> memory_object,
    std::shared_ptr<BackingStore> new_backing_store) {
  // Detach old and create a new one with the new backing store.
  DirectHandle<JSArrayBuffer> old_buffer(memory_object->array_buffer(),
                                         isolate);
#ifdef DEBUG
  void* old_data_pointer = old_buffer->backing_store();
  size_t old_byte_length = old_buffer->byte_length();
#endif
  JSArrayBuffer::Detach(old_buffer, true).Check();
  DirectHandle<JSArrayBuffer> new_buffer =
      isolate->factory()->NewJSArrayBuffer(std::move(new_backing_store));
#ifdef DEBUG
  bool data_pointer_unchanged = new_buffer->backing_store() == old_data_pointer;
  bool byte_length_unchanged = new_buffer->byte_length() == old_byte_length;
  bool resizability_changed =
      old_buffer->is_resizable_by_js() != new_buffer->is_resizable_by_js();
  // SetNewBuffer only calls UpdateInstances if resizability didn't change,
  // which depends on resizability changing not changing the data pointer or
  // byte length. See comment in SetNewBuffer.
  DCHECK_IMPLIES(resizability_changed,
                 data_pointer_unchanged && byte_length_unchanged);
#endif
  memory_object->SetNewBuffer(isolate, *new_buffer);
  // Memorize a link from the JSArrayBuffer to its owning WasmMemoryObject
  // instance.
  DirectHandle<Symbol> symbol =
      isolate->factory()->array_buffer_wasm_memory_symbol();
  Object::SetProperty(isolate, new_buffer, symbol, memory_object).Check();
  return new_buffer;
}
1140
// static
// Wraps the memory's existing shared backing store in a new
// JSSharedArrayBuffer (marked growable when requested) and installs it.
// The backing store itself is reused, never reallocated.
// NOTE(review): the declarator was lost in extraction; presumably
// `DirectHandle<JSArrayBuffer> WasmMemoryObject::RefreshSharedBuffer(`.
    Isolate* isolate, DirectHandle<WasmMemoryObject> memory_object,
    ResizableFlag resizable_by_js) {
  DirectHandle<JSArrayBuffer> old_buffer(memory_object->array_buffer(),
                                         isolate);
  std::shared_ptr<BackingStore> backing_store = old_buffer->GetBackingStore();
  // Wasm memory always has a BackingStore.
  CHECK_NOT_NULL(backing_store);
  CHECK(backing_store->is_wasm_memory());
  CHECK(backing_store->is_shared());

  // Keep a raw pointer to the backing store for a CHECK later one. Make it
  // {void*} so we do not accidentally try to use it for anything else.
  void* expected_backing_store = backing_store.get();

  DirectHandle<JSArrayBuffer> new_buffer =
      isolate->factory()->NewJSSharedArrayBuffer(std::move(backing_store));
  CHECK_EQ(expected_backing_store, new_buffer->GetBackingStore().get());
  if (resizable_by_js == ResizableFlag::kResizable) {
    new_buffer->set_is_resizable_by_js(true);
  }
  memory_object->SetNewBuffer(isolate, *new_buffer);
  // Memorize a link from the JSArrayBuffer to its owning WasmMemoryObject
  // instance.
  DirectHandle<Symbol> symbol =
      isolate->factory()->array_buffer_wasm_memory_symbol();
  Object::SetProperty(isolate, new_buffer, symbol, memory_object).Check();
  return new_buffer;
}
1171
// static
// Grows the memory by {pages} wasm pages. Returns the previous size in pages
// on success and -1 on failure. Prefers growing in place (mandatory for
// shared, guarded, or JS-resizable memories); otherwise copies into a larger
// backing store.
// NOTE(review): the declarator was lost in extraction; presumably
// `int32_t WasmMemoryObject::Grow(Isolate* isolate,` — confirm upstream.
    DirectHandle<WasmMemoryObject> memory_object,
    uint32_t pages) {
  TRACE_EVENT0("v8.wasm", "wasm.GrowMemory");
  DirectHandle<JSArrayBuffer> old_buffer(memory_object->array_buffer(),
                                         isolate);

  std::shared_ptr<BackingStore> backing_store = old_buffer->GetBackingStore();
  // Wasm memory can grow, and Wasm memory always has a backing store.
  DCHECK_NOT_NULL(backing_store);

  // Check for maximum memory size.
  // Note: The {wasm::max_mem_pages()} limit is already checked in
  // {BackingStore::CopyWasmMemory}, and is irrelevant for
  // {GrowWasmMemoryInPlace} because memory is never allocated with more
  // capacity than that limit.
  size_t old_size = old_buffer->GetByteLength();
  DCHECK_EQ(0, old_size % wasm::kWasmPageSize);
  size_t old_pages = old_size / wasm::kWasmPageSize;
  size_t max_pages = memory_object->is_memory64() ? wasm::max_mem64_pages()
  // NOTE(review): line lost in extraction here (the mem32 alternative of this
  // ternary, likely `: wasm::max_mem32_pages();`) — confirm upstream.
  if (memory_object->has_maximum_pages()) {
    max_pages = std::min(max_pages,
                         static_cast<size_t>(memory_object->maximum_pages()));
  }
  DCHECK_GE(max_pages, old_pages);
  if (pages > max_pages - old_pages) return -1;

  // Shared, guarded, or JS-resizable memories must never move.
  const bool must_grow_in_place = old_buffer->is_shared() ||
                                  backing_store->has_guard_regions() ||
                                  backing_store->is_resizable_by_js();
  const bool try_grow_in_place =
      must_grow_in_place || !v8_flags.stress_wasm_memory_moving;

  std::optional<size_t> result_inplace =
      try_grow_in_place
          ? backing_store->GrowWasmMemoryInPlace(isolate, pages, max_pages)
          : std::nullopt;
  if (must_grow_in_place && !result_inplace.has_value()) {
    // There are different limits per platform, thus crash if the correctness
    // fuzzer is running.
    if (v8_flags.correctness_fuzzer_suppressions) {
      FATAL("could not grow wasm memory");
    }
    return -1;
  }

  // Handle shared memory first.
  if (old_buffer->is_shared()) {
    DCHECK(result_inplace.has_value());
    backing_store->BroadcastSharedWasmMemoryGrow(isolate);
    if (!old_buffer->is_resizable_by_js()) {
      // Broadcasting the update should update this memory object too.
      CHECK_NE(*old_buffer, memory_object->array_buffer());
    }
    size_t new_pages = result_inplace.value() + pages;
    // If the allocation succeeded, then this can't possibly overflow:
    size_t new_byte_length = new_pages * wasm::kWasmPageSize;
    // This is a less than check, as it is not guaranteed that the SAB
    // length here will be equal to the stashed length above as calls to
    // grow the same memory object can come in from different workers.
    // It is also possible that a call to Grow was in progress when
    // handling this call.
    CHECK_LE(new_byte_length, memory_object->array_buffer()->GetByteLength());
    // As {old_pages} was read racefully, we return here the synchronized
    // value provided by {GrowWasmMemoryInPlace}, to provide the atomic
    // read-modify-write behavior required by the spec.
    return static_cast<int32_t>(result_inplace.value());  // success
  }

  size_t new_pages = old_pages + pages;
  // Check for overflow (should be excluded via {max_pages} above).
  DCHECK_LE(old_pages, new_pages);

  // Check if the non-shared memory could grow in-place.
  if (result_inplace.has_value()) {
    if (memory_object->array_buffer()->is_resizable_by_js()) {
      memory_object->array_buffer()->set_byte_length(new_pages *
  // NOTE(review): line lost in extraction here (likely
  // `wasm::kWasmPageSize);`) — confirm upstream.
      memory_object->UpdateInstances(isolate);
    } else {
      RefreshBuffer(isolate, memory_object, std::move(backing_store));
    }
    DCHECK_EQ(result_inplace.value(), old_pages);
    return static_cast<int32_t>(result_inplace.value());  // success
  }
  DCHECK(!memory_object->array_buffer()->is_resizable_by_js());

  // Trying to grow in-place without actually growing must always succeed.
  DCHECK_IMPLIES(try_grow_in_place, old_pages < new_pages);

  // Try allocating a new backing store and copying.
  // To avoid overall quadratic complexity of many small grow operations, we
  // grow by at least 0.5 MB + 12.5% of the existing memory size.
  // These numbers are kept small because we must be careful about address
  // space consumption on 32-bit platforms.
  size_t min_growth = old_pages + 8 + (old_pages >> 3);
  // First apply {min_growth}, then {max_pages}. The order is important, because
  // {min_growth} can be bigger than {max_pages}, and in that case we want to
  // cap to {max_pages}.
  size_t new_capacity = std::min(max_pages, std::max(new_pages, min_growth));
  DCHECK_LE(new_pages, new_capacity);
  std::unique_ptr<BackingStore> new_backing_store =
      backing_store->CopyWasmMemory(isolate, new_pages, new_capacity,
                                    memory_object->is_memory64()
  // NOTE(review): two lines lost in extraction here (the memory-flag
  // operands of this ternary) — confirm upstream.
  if (!new_backing_store) {
    // Crash on out-of-memory if the correctness fuzzer is running.
    if (v8_flags.correctness_fuzzer_suppressions) {
      FATAL("could not grow wasm memory");
    }
    // Set the non-determinism flag in the WasmEngine.
  // NOTE(review): line lost in extraction here (the call that sets the
  // non-determinism flag) — confirm upstream.
    return -1;
  }

  RefreshBuffer(isolate, memory_object, std::move(new_backing_store));

  return static_cast<int32_t>(old_pages);  // success
}
1294
// static
// Re-exposes this memory's currently-resizable (S)AB as a fixed-length one;
// returns the new buffer. The backing store is reused.
// NOTE(review): the declarator was lost in extraction; presumably
// `DirectHandle<JSArrayBuffer> WasmMemoryObject::ToFixedLengthBuffer(`.
    Isolate* isolate, DirectHandle<WasmMemoryObject> memory_object) {
  DirectHandle<JSArrayBuffer> old_buffer(memory_object->array_buffer(),
                                         isolate);
  DCHECK(old_buffer->is_resizable_by_js());
  if (old_buffer->is_shared()) {
    return RefreshSharedBuffer(isolate, memory_object,
  // NOTE(review): line lost in extraction here (likely
  // `ResizableFlag::kNotResizable);`) — confirm upstream.
  }
  std::shared_ptr<BackingStore> backing_store = old_buffer->GetBackingStore();
  DCHECK_NOT_NULL(backing_store);
  backing_store->MakeWasmMemoryResizableByJS(false);
  return RefreshBuffer(isolate, memory_object, std::move(backing_store));
}
1310
// static
// Re-exposes this memory's currently fixed-length (S)AB as a resizable one;
// returns the new buffer. The backing store is reused.
// NOTE(review): the declarator was lost in extraction; presumably
// `DirectHandle<JSArrayBuffer> WasmMemoryObject::ToResizableBuffer(`.
    Isolate* isolate, DirectHandle<WasmMemoryObject> memory_object) {
  // Resizable ArrayBuffers require a maximum size during creation. Mirror the
  // requirement when reflecting Wasm memory as a resizable buffer.
  DCHECK(memory_object->has_maximum_pages());
  DirectHandle<JSArrayBuffer> old_buffer(memory_object->array_buffer(),
                                         isolate);
  DCHECK(!old_buffer->is_resizable_by_js());
  if (old_buffer->is_shared()) {
    return RefreshSharedBuffer(isolate, memory_object,
  // NOTE(review): line lost in extraction here (likely
  // `ResizableFlag::kResizable);`) — confirm upstream.
  }
  std::shared_ptr<BackingStore> backing_store = old_buffer->GetBackingStore();
  DCHECK_NOT_NULL(backing_store);
  backing_store->MakeWasmMemoryResizableByJS(true);
  return RefreshBuffer(isolate, memory_object, std::move(backing_store));
}
1329
1332#if V8_TARGET_OS_LINUX
1333 CHECK(v8_flags.experimental_wasm_memory_control);
1334 DirectHandle<JSFunction> descriptor_ctor(
1335 isolate->native_context()->wasm_memory_map_descriptor_constructor(),
1336 isolate);
1337
1338 int file_descriptor = memfd_create("wasm_memory_map_descriptor", MFD_CLOEXEC);
1339 if (file_descriptor == -1) {
1340 return {};
1341 }
1342 int ret_val = ftruncate(file_descriptor, length);
1343 if (ret_val == -1) {
1344 return {};
1345 }
1346
1347 return NewFromFileDescriptor(isolate, file_descriptor);
1348#else // V8_TARGET_OS_LINUX
1349 return {};
1350#endif // V8_TARGET_OS_LINUX
1351}
1352
// Wraps an existing file descriptor in a new WasmMemoryMapDescriptor object
// with empty mapping state (no memory, zero offset/size).
// NOTE(review): the declarator lines were lost in extraction; presumably
// `DirectHandle<WasmMemoryMapDescriptor>
//  WasmMemoryMapDescriptor::NewFromFileDescriptor(Isolate* isolate,`.
    int file_descriptor) {
  CHECK(v8_flags.experimental_wasm_memory_control);
  DirectHandle<JSFunction> descriptor_ctor(
      isolate->native_context()->wasm_memory_map_descriptor_constructor(),
      isolate);

  auto descriptor_object = Cast<WasmMemoryMapDescriptor>(
      isolate->factory()->NewJSObject(descriptor_ctor, AllocationType::kOld));

  // Initialize to "not mapped into any memory yet".
  descriptor_object->set_file_descriptor(file_descriptor);
  descriptor_object->set_memory(ClearedValue(isolate));
  descriptor_object->set_offset(0);
  descriptor_object->set_size(0);

  return descriptor_object;
}
1371
// Maps this descriptor's file over {memory}'s pages at byte {offset} using
// mmap(MAP_FIXED). Returns the mapped size in bytes, or 0 when mapping is
// unsupported (shared memory, memory64, non-Linux) or fails validation.
// NOTE(review): the declarator was lost in extraction; presumably
// `size_t WasmMemoryMapDescriptor::MapDescriptor(` — confirm upstream.
    DirectHandle<WasmMemoryObject> memory, size_t offset) {
#if V8_TARGET_OS_LINUX
  CHECK(v8_flags.experimental_wasm_memory_control);
  if (memory->array_buffer()->is_shared()) {
    // TODO(ahaas): Handle concurrent calls to `MapDescriptor`. To prevent
    // concurrency issues, we disable `MapDescriptor` for shared wasm memories
    // so far.
    return 0;
  }
  if (memory->is_memory64()) {
    // TODO(ahaas): Handle memory64. So far the offset in the
    // MemoryMapDescriptor is only an uint32. Either the offset has to be
    // interpreted as a wasm memory page, or be extended to an uint64.
    return 0;
  }

  uint8_t* target =
      reinterpret_cast<uint8_t*>(memory->array_buffer()->backing_store()) +
      offset;

  struct stat stat_for_size;
  if (fstat(this->file_descriptor(), &stat_for_size) == -1) {
    // Could not determine file size.
    return 0;
  }
  size_t size = RoundUp(stat_for_size.st_size,
  // NOTE(review): line lost in extraction here (the rounding granularity
  // argument) — confirm upstream.

  if (size + offset < size) {
    // Overflow
    return 0;
  }
  if (size + offset > memory->array_buffer()->GetByteLength()) {
    return 0;
  }

  void* ret_val = mmap(target, size, PROT_READ | PROT_WRITE,
                       MAP_FIXED | MAP_SHARED, this->file_descriptor(), 0);
  CHECK_NE(ret_val, MAP_FAILED);
  CHECK_EQ(ret_val, target);
  return size;
#else
  return 0;
#endif
}
1418
// Unmaps this descriptor's file from its wasm memory by mapping fresh
// anonymous pages over the affected range. Returns true on success (or when
// the memory has already died); false on non-Linux targets.
// NOTE(review): the declarator was lost in extraction; presumably
// `bool WasmMemoryMapDescriptor::UnmapDescriptor() {` — confirm upstream.
#if V8_TARGET_OS_LINUX
  CHECK(v8_flags.experimental_wasm_memory_control);
  // NOTE(review): line lost in extraction here — confirm upstream.

  // NOTE(review): line lost in extraction here (the `memory` declaration
  // this cast initializes) — confirm upstream.
      Cast<i::WasmMemoryObject>(MakeStrong(this->memory()));
  if (memory.is_null()) {
    return true;
  }
  uint32_t offset = this->offset();
  uint32_t size = this->size();

  // The following checks already passed during `MapDescriptor`, and they should
  // still pass.
  CHECK(!memory->is_memory64());
  CHECK(!memory->array_buffer()->is_shared());
  // NOTE(review): line lost in extraction here — confirm upstream.
  CHECK_GE(size + offset, size);
  CHECK_LE(size + offset, memory->array_buffer()->byte_length());

  uint8_t* target =
      reinterpret_cast<uint8_t*>(memory->array_buffer()->backing_store()) +
      offset;

  // Replace the file mapping with anonymous zero-filled pages in place.
  void* ret_val = mmap(target, size, PROT_READ | PROT_WRITE,
                       MAP_FIXED | MAP_SHARED | MAP_ANONYMOUS, -1, 0);

  CHECK_NE(ret_val, MAP_FAILED);
  CHECK_EQ(ret_val, target);
  return true;
#else
  return false;
#endif
}
1454
// static
// Creates a WebAssembly.Global object. Reference-typed globals store their
// value in a tagged FixedArray buffer; numeric globals store it in an
// untagged JSArrayBuffer at {offset}. Returns an empty handle (with a pending
// RangeError) if allocating the untagged buffer fails.
// NOTE(review): the declarator lines were lost in extraction; presumably
// `MaybeDirectHandle<WasmGlobalObject> WasmGlobalObject::New(
//  Isolate* isolate, DirectHandle<WasmTrustedInstanceData> trusted_data,`.
    MaybeDirectHandle<JSArrayBuffer> maybe_untagged_buffer,
    MaybeDirectHandle<FixedArray> maybe_tagged_buffer, wasm::ValueType type,
    int32_t offset, bool is_mutable) {
  DirectHandle<JSFunction> global_ctor(
      isolate->native_context()->wasm_global_constructor(), isolate);
  auto global_obj =
      Cast<WasmGlobalObject>(isolate->factory()->NewJSObject(global_ctor));
  {
    // Disallow GC until all fields have acceptable types.
  // NOTE(review): line lost in extraction here (likely
  // `DisallowGarbageCollection no_gc;`) — confirm upstream.
    if (!trusted_data.is_null()) {
      global_obj->set_trusted_data(*trusted_data);
    } else {
      global_obj->clear_trusted_data();
    }
    global_obj->set_type(type);
    global_obj->set_offset(offset);
    global_obj->set_is_mutable(is_mutable);
  }

  if (type.is_reference()) {
    DCHECK(maybe_untagged_buffer.is_null());
  // NOTE(review): line lost in extraction here (the `tagged_buffer`
  // declaration) — confirm upstream.
    if (!maybe_tagged_buffer.ToHandle(&tagged_buffer)) {
      // If no buffer was provided, create one.
  // NOTE(review): line lost in extraction here (the assignment to
  // `tagged_buffer`) — confirm upstream.
          isolate->factory()->NewFixedArray(1, AllocationType::kOld);
      CHECK_EQ(offset, 0);
    }
    global_obj->set_tagged_buffer(*tagged_buffer);
  } else {
    DCHECK(maybe_tagged_buffer.is_null());
    uint32_t type_size = type.value_kind_size();

    DirectHandle<JSArrayBuffer> untagged_buffer;
    if (!maybe_untagged_buffer.ToHandle(&untagged_buffer)) {
  // NOTE(review): line lost in extraction here (the `result` declaration).
          isolate->factory()->NewJSArrayBufferAndBackingStore(
  // NOTE(review): line lost in extraction here (allocation arguments).

      if (!result.ToHandle(&untagged_buffer)) {
        isolate->Throw(*isolate->factory()->NewRangeError(
            MessageTemplate::kOutOfMemory,
            isolate->factory()->NewStringFromAsciiChecked(
                "WebAssembly.Global")));
        return {};
      }
    }

    // Check that the offset is in bounds.
    CHECK_LE(offset + type_size, untagged_buffer->GetByteLength());

    global_obj->set_untagged_buffer(*untagged_buffer);
  }

  return global_obj;
}
1515
// Resolves the call target and implicit argument for function
// {target_func_index} of {target_instance_data}. For imported functions the
// implicit arg comes from the import dispatch table; otherwise it is the
// instance data itself.
// NOTE(review): the declarator was lost in extraction; presumably
// `FunctionTargetAndImplicitArg::FunctionTargetAndImplicitArg(` — confirm.
    Isolate* isolate,
    DirectHandle<WasmTrustedInstanceData> target_instance_data,
    int target_func_index) {
  implicit_arg_ = target_instance_data;
  if (target_func_index <
      static_cast<int>(
          target_instance_data->module()->num_imported_functions)) {
    // The function in the target instance was imported. Load the ref from the
    // dispatch table for imports.
  // NOTE(review): lines lost in extraction here (the reassignment of
  // {implicit_arg_} that these arguments belong to) — confirm upstream.
            target_instance_data->dispatch_table_for_imports()->implicit_arg(
                target_func_index)),
        isolate);
#if V8_ENABLE_DRUMBRAKE
    target_func_index_ = target_instance_data->imported_function_indices()->get(
        target_func_index);
#endif  // V8_ENABLE_DRUMBRAKE
  } else {
    // The function in the target instance was not imported.
#if V8_ENABLE_DRUMBRAKE
    target_func_index_ = target_func_index;
#endif  // V8_ENABLE_DRUMBRAKE
  }
  call_target_ = target_instance_data->GetCallTarget(target_func_index);
}
1543
// Installs a generic (builtin-based) wasm-to-JS wrapper for import {index_},
// routing through kWasmToJsWrapperAsm or kWasmToJsWrapperInvalidSig.
// NOTE(review): the declarator was lost in extraction; presumably
// `void ImportedFunctionEntry::SetGenericWasmToJs(` — confirm upstream.
    Isolate* isolate, DirectHandle<JSReceiver> callable, wasm::Suspend suspend,
  // NOTE(review): line lost in extraction here (remaining parameters,
  // presumably `sig` and `sig_id`) — confirm upstream.
  Address wrapper_entry;
  // NOTE(review): line lost in extraction here (the `if` condition selecting
  // the valid-signature wrapper) — confirm upstream.
  DCHECK(
  // NOTE(review): line lost in extraction here (the DCHECK operand).
    wrapper_entry = Builtins::EntryOf(Builtin::kWasmToJsWrapperAsm, isolate);
  } else {
    wrapper_entry =
        Builtins::EntryOf(Builtin::kWasmToJsWrapperInvalidSig, isolate);
  }
  TRACE_IFT("Import callable 0x%" PRIxPTR "[%d] = {callable=0x%" PRIxPTR
            ", target=0x%" PRIxPTR "}\n",
            instance_data_->ptr(), index_, callable->ptr(), wrapper_entry);
  DirectHandle<WasmImportData> import_data =
      isolate->factory()->NewWasmImportData(callable, suspend, instance_data_,
                                            sig);
  import_data->SetIndexInTableAsCallOrigin(
      instance_data_->dispatch_table_for_imports(), index_);
  // NOTE(review): line lost in extraction here — confirm upstream.

  instance_data_->dispatch_table_for_imports()->SetForWrapper(
      index_, *import_data, wrapper_entry, sig_id, sig->signature_hash(),
#if V8_ENABLE_DRUMBRAKE
      WasmDispatchTable::kInvalidFunctionIndex,
#endif  // V8_ENABLE_DRUMBRAKE
  // NOTE(review): line lost in extraction here (trailing SetForWrapper
  // arguments) — confirm upstream.
#if V8_ENABLE_DRUMBRAKE
  // -1 marks "no function index in the called module" for JS imports.
  instance_data_->imported_function_indices()->set(index_, -1);
#endif  // V8_ENABLE_DRUMBRAKE
}
1576
1578 Isolate* isolate, DirectHandle<JSReceiver> callable,
1579 wasm::WasmCode* wasm_to_js_wrapper, wasm::Suspend suspend,
1581 TRACE_IFT("Import callable 0x%" PRIxPTR "[%d] = {callable=0x%" PRIxPTR
1582 ", target=%p}\n",
1583 instance_data_->ptr(), index_, callable->ptr(),
1584 wasm_to_js_wrapper ? nullptr
1585 : wasm_to_js_wrapper->instructions().begin());
1586 DCHECK(v8_flags.wasm_jitless ||
1587 wasm_to_js_wrapper->kind() == wasm::WasmCode::kWasmToJsWrapper ||
1588 wasm_to_js_wrapper->kind() == wasm::WasmCode::kWasmToCapiWrapper);
1589 DirectHandle<WasmImportData> import_data =
1590 isolate->factory()->NewWasmImportData(callable, suspend, instance_data_,
1591 sig);
1593 Tagged<WasmDispatchTable> dispatch_table =
1594 instance_data_->dispatch_table_for_imports();
1595 DCHECK_EQ(v8_flags.wasm_jitless, wasm_to_js_wrapper == nullptr);
1596 DCHECK_IMPLIES(wasm_to_js_wrapper != nullptr,
1597 wasm_to_js_wrapper->signature_hash() == sig->signature_hash());
1598
1599 dispatch_table->SetForWrapper(
1600 index_, *import_data,
1601 v8_flags.wasm_jitless ? Address{}
1602 : wasm_to_js_wrapper->instruction_start(),
1603 sig_id, sig->signature_hash(),
1604#if V8_ENABLE_DRUMBRAKE
1605 WasmDispatchTable::kInvalidFunctionIndex,
1606#endif // V8_ENABLE_DRUMBRAKE
1607 wasm_to_js_wrapper, WasmDispatchTable::kNewEntry);
1608
1609#if V8_ENABLE_DRUMBRAKE
1610 instance_data_->imported_function_indices()->set(index_, -1);
1611#endif // V8_ENABLE_DRUMBRAKE
1612}
1613
// Installs a direct wasm-to-wasm call for import {index_}: the entry calls
// {call_target} with {target_instance_data} as implicit argument.
// NOTE(review): the declarator was lost in extraction; presumably
// `void ImportedFunctionEntry::SetWasmToWasm(` — confirm upstream.
    Tagged<WasmTrustedInstanceData> target_instance_data,
    WasmCodePointer call_target, wasm::CanonicalTypeIndex sig_id
#if V8_ENABLE_DRUMBRAKE
    ,
    int exported_function_index
#endif  // V8_ENABLE_DRUMBRAKE
) {
  TRACE_IFT("Import Wasm 0x%" PRIxPTR "[%d] = {instance_data=0x%" PRIxPTR
            ", target=0x%" PRIxPTR "}\n",
            instance_data_->ptr(), index_, target_instance_data.ptr(),
  // NOTE(review): line lost in extraction here (the code-pointer-table
  // expression this call chains from) — confirm upstream.
                ->GetEntrypointWithoutSignatureCheck(call_target));
  // NOTE(review): line lost in extraction here — confirm upstream.
  Tagged<WasmDispatchTable> dispatch_table =
      instance_data_->dispatch_table_for_imports();
  dispatch_table->SetForNonWrapper(index_, target_instance_data, call_target,
                                   sig_id,
#if V8_ENABLE_DRUMBRAKE
                                   WasmDispatchTable::kInvalidFunctionIndex,
#endif  // V8_ENABLE_DRUMBRAKE
  // NOTE(review): line lost in extraction here (trailing argument of
  // SetForNonWrapper) — confirm upstream.

#if V8_ENABLE_DRUMBRAKE
  // DrumBrake remembers the index of the function in the called module.
  instance_data_->imported_function_indices()->set(index_,
                                                   exported_function_index);
#endif  // V8_ENABLE_DRUMBRAKE
}
1642
// Returns an empty Tagged<Object>() if no callable is available, a JSReceiver
// otherwise.
// NOTE(review): the declarators and several statement lines of the small
// accessors below (maybe_callable, callable, implicit_arg, target) were lost
// in extraction — confirm against upstream wasm-objects.cc.
  if (!IsWasmImportData(data)) return Tagged<Object>();
}


  // Implicit argument stored in the import dispatch table for this entry.
  return instance_data_->dispatch_table_for_imports()->implicit_arg(index_);
}

  // Code target stored in the import dispatch table for this entry.
  return instance_data_->dispatch_table_for_imports()->target(index_);
}

#if V8_ENABLE_DRUMBRAKE
// DrumBrake only: index of the imported function inside the called module.
int ImportedFunctionEntry::function_index_in_called_module() {
  return instance_data_->imported_function_indices()->get(index_);
}
#endif  // V8_ENABLE_DRUMBRAKE
1668
// static
// Out-of-line definitions of the static constexpr member arrays (field
// offsets and debug names), required by pre-C++17 static-constexpr rules.
// NOTE(review): the lines carrying the member names/initializers were lost in
// extraction — confirm against upstream.
constexpr std::array<uint16_t, WasmTrustedInstanceData::kTaggedFieldsCount>
// static
constexpr std::array<const char*, WasmTrustedInstanceData::kTaggedFieldsCount>
// static
constexpr std::array<uint16_t, 6>
// static
constexpr std::array<const char*, 6>
// Records {mem_start}/{mem_size} as base and size of memory {memory_index} in
// the trusted instance data; memory 0 is mirrored into fast-access fields.
void WasmTrustedInstanceData::SetRawMemory(int memory_index, uint8_t* mem_start,
                                           size_t mem_size) {
  CHECK_LT(memory_index, module()->memories.size());

  CHECK_LE(mem_size, module()->memories[memory_index].is_memory64()
  // NOTE(review): two lines lost in extraction here (the per-address-type
  // byte-size limits of this ternary) — confirm upstream.
  // All memory bases and sizes are stored in a TrustedFixedAddressArray.
  Tagged<TrustedFixedAddressArray> bases_and_sizes = memory_bases_and_sizes();
  bases_and_sizes->set(memory_index * 2, reinterpret_cast<Address>(mem_start));
  bases_and_sizes->set(memory_index * 2 + 1, mem_size);
  // Memory 0 has fast-access fields.
  if (memory_index == 0) {
    set_memory0_start(mem_start);
    set_memory0_size(mem_size);
  }
}
1699
#if V8_ENABLE_DRUMBRAKE
// Returns the interpreter object for {instance}, creating it on first use.
DirectHandle<Tuple2> WasmTrustedInstanceData::GetOrCreateInterpreterObject(
  // NOTE(review): the parameter line was lost in extraction (presumably
  // `DirectHandle<WasmInstanceObject> instance) {`) — confirm upstream.
  DCHECK(v8_flags.wasm_jitless);
  Isolate* isolate = Isolate::Current();
  // NOTE(review): line lost in extraction here (the `trusted_data`
  // declaration these arguments belong to) — confirm upstream.
      instance->trusted_data(isolate), isolate);
  if (trusted_data->has_interpreter_object()) {
    return direct_handle(trusted_data->interpreter_object(), isolate);
  }
  // WasmInterpreterObject::New also installs the object on the instance, as
  // the DCHECK below relies on.
  DirectHandle<Tuple2> new_interpreter = WasmInterpreterObject::New(instance);
  DCHECK(trusted_data->has_interpreter_object());
  return new_interpreter;
}

// Returns the interpreter object for {instance}; it must already exist.
DirectHandle<Tuple2> WasmTrustedInstanceData::GetInterpreterObject(
    DirectHandle<WasmInstanceObject> instance) {
  DCHECK(v8_flags.wasm_jitless);
  Isolate* isolate = Isolate::Current();
  DirectHandle<WasmTrustedInstanceData> trusted_data(
      instance->trusted_data(isolate), isolate);
  CHECK(trusted_data->has_interpreter_object());
  return direct_handle(trusted_data->interpreter_object(), isolate);
}
#endif  // V8_ENABLE_DRUMBRAKE
1725
1727 Isolate* isolate, DirectHandle<WasmModuleObject> module_object,
1728 bool shared) {
1729 // Read the link to the {std::shared_ptr<NativeModule>} once from the
1730 // `module_object` and use it to initialize the fields of the
1731 // `WasmTrustedInstanceData`. It will then be stored in a `TrustedManaged` in
1732 // the `WasmTrustedInstanceData` where it is safe from manipulation.
1733 std::shared_ptr<wasm::NativeModule> native_module =
1734 module_object->shared_native_module();
1735
1736 // Do first allocate all objects that will be stored in instance fields,
1737 // because otherwise we would have to allocate when the instance is not fully
1738 // initialized yet, which can lead to heap verification errors.
1740
1741 int num_imported_functions = module->num_imported_functions;
1743 isolate->factory()->NewWasmDispatchTable(num_imported_functions,
1745 DirectHandle<FixedArray> well_known_imports =
1746 isolate->factory()->NewFixedArray(num_imported_functions);
1747
1749 isolate->factory()->NewFixedArrayWithZeroes(
1750 static_cast<int>(module->functions.size()));
1751
1752 int num_imported_mutable_globals = module->num_imported_mutable_globals;
1753 // The imported_mutable_globals is essentially a FixedAddressArray (storing
1754 // sandboxed pointers), but some entries (the indices for reference-type
1755 // globals) are accessed as 32-bit integers which is more convenient with a
1756 // raw ByteArray.
1757 DirectHandle<FixedAddressArray> imported_mutable_globals =
1758 FixedAddressArray::New(isolate, num_imported_mutable_globals);
1759
1760 int num_data_segments = module->num_declared_data_segments;
1762 FixedAddressArray::New(isolate, num_data_segments);
1763 DirectHandle<FixedUInt32Array> data_segment_sizes =
1764 FixedUInt32Array::New(isolate, num_data_segments);
1765
1766#if V8_ENABLE_DRUMBRAKE
1767 DirectHandle<FixedInt32Array> imported_function_indices =
1768 FixedInt32Array::New(isolate, num_imported_functions);
1769#endif // V8_ENABLE_DRUMBRAKE
1770
1771 static_assert(wasm::kV8MaxWasmMemories < kMaxInt / 2);
1772 int num_memories = static_cast<int>(module->memories.size());
1774 isolate->factory()->NewFixedArray(num_memories);
1775 DirectHandle<TrustedFixedAddressArray> memory_bases_and_sizes =
1776 TrustedFixedAddressArray::New(isolate, 2 * num_memories);
1777
1778 // TODO(clemensb): Should we have singleton empty dispatch table in the
1779 // trusted space?
1780 DirectHandle<WasmDispatchTable> empty_dispatch_table =
1781 isolate->factory()->NewWasmDispatchTable(0, wasm::kWasmFuncRef);
1782 DirectHandle<ProtectedFixedArray> empty_protected_fixed_array =
1783 isolate->factory()->empty_protected_fixed_array();
1784
1785 // Use the same memory estimate as the (untrusted) Managed in
1786 // WasmModuleObject. This is not security critical, and we at least always
1787 // read the memory estimation of *some* NativeModule here.
1788 size_t estimated_size =
1789 module_object->managed_native_module()->estimated_size();
1791 trusted_managed_native_module = TrustedManaged<wasm::NativeModule>::From(
1792 isolate, estimated_size, native_module);
1793
1794 // Now allocate the WasmTrustedInstanceData.
1795 // During this step, no more allocations should happen because the instance is
1796 // incomplete yet, so we should not trigger heap verification at this point.
1798 isolate->factory()->NewWasmTrustedInstanceData();
1799 {
1801
1802 // Some constants:
1803 uint8_t* empty_backing_store_buffer =
1804 reinterpret_cast<uint8_t*>(EmptyBackingStoreBuffer());
1805 ReadOnlyRoots ro_roots{isolate};
1806 Tagged<FixedArray> empty_fixed_array = ro_roots.empty_fixed_array();
1807
1808 trusted_data->set_dispatch_table_for_imports(*dispatch_table_for_imports);
1809 trusted_data->set_imported_mutable_globals(*imported_mutable_globals);
1810 trusted_data->set_dispatch_table0(*empty_dispatch_table);
1811 trusted_data->set_dispatch_tables(*empty_protected_fixed_array);
1812 trusted_data->set_shared_part(*trusted_data); // TODO(14616): Good enough?
1813 trusted_data->set_data_segment_starts(*data_segment_starts);
1814 trusted_data->set_data_segment_sizes(*data_segment_sizes);
1815 trusted_data->set_element_segments(empty_fixed_array);
1816 trusted_data->set_managed_native_module(*trusted_managed_native_module);
1817 trusted_data->set_new_allocation_limit_address(
1818 isolate->heap()->NewSpaceAllocationLimitAddress());
1819 trusted_data->set_new_allocation_top_address(
1820 isolate->heap()->NewSpaceAllocationTopAddress());
1821 trusted_data->set_old_allocation_limit_address(
1822 isolate->heap()->OldSpaceAllocationLimitAddress());
1823 trusted_data->set_old_allocation_top_address(
1824 isolate->heap()->OldSpaceAllocationTopAddress());
1825 trusted_data->set_globals_start(empty_backing_store_buffer);
1826#if V8_ENABLE_DRUMBRAKE
1827 trusted_data->set_imported_function_indices(*imported_function_indices);
1828#endif // V8_ENABLE_DRUMBRAKE
1829 trusted_data->set_native_context(*isolate->native_context());
1830 trusted_data->set_jump_table_start(native_module->jump_table_start());
1831 trusted_data->set_hook_on_function_call_address(
1832 isolate->debug()->hook_on_function_call_address());
1833 trusted_data->set_managed_object_maps(
1834 *isolate->factory()->empty_fixed_array());
1835 trusted_data->set_well_known_imports(*well_known_imports);
1836 trusted_data->set_func_refs(*func_refs);
1837 trusted_data->set_feedback_vectors(
1838 *isolate->factory()->empty_fixed_array());
1839 trusted_data->set_tiering_budget_array(
1841 trusted_data->set_break_on_entry(module_object->script()->break_on_entry());
1842 trusted_data->InitDataSegmentArrays(native_module.get());
1843 trusted_data->set_memory0_start(empty_backing_store_buffer);
1844 trusted_data->set_memory0_size(0);
1845 trusted_data->set_memory_objects(*memory_objects);
1846 trusted_data->set_memory_bases_and_sizes(*memory_bases_and_sizes);
1847 trusted_data->set_stress_deopt_counter_address(
1848 ExternalReference::stress_deopt_count(isolate).address());
1849
1850 for (int i = 0; i < num_memories; ++i) {
1851 memory_bases_and_sizes->set(
1852 2 * i, reinterpret_cast<Address>(empty_backing_store_buffer));
1853 memory_bases_and_sizes->set(2 * i + 1, 0);
1854 }
1855 }
1856
1857 // Allocate the exports object, to be store in the instance object.
1858 DirectHandle<JSObject> exports_object =
1859 isolate->factory()->NewJSObjectWithNullProto();
1860
1862
1863 if (!shared) {
1864 // Allocate the WasmInstanceObject (JS wrapper).
1865 DirectHandle<JSFunction> instance_cons(
1866 isolate->native_context()->wasm_instance_constructor(), isolate);
1868 isolate->factory()->NewJSObject(instance_cons, AllocationType::kOld));
1869 instance_object->set_trusted_data(*trusted_data);
1870 instance_object->set_module_object(*module_object);
1871 instance_object->set_exports_object(*exports_object);
1872 trusted_data->set_instance_object(*instance_object);
1873 }
1874
1875 // Insert the new instance into the scripts weak list of instances. This list
1876 // is used for breakpoints affecting all instances belonging to the script.
1877 if (module_object->script()->type() == Script::Type::kWasm &&
1878 !instance_object.is_null()) {
1879 DirectHandle<WeakArrayList> weak_instance_list(
1880 module_object->script()->wasm_weak_instance_list(), isolate);
1881 weak_instance_list =
1882 WeakArrayList::Append(isolate, weak_instance_list,
1884 module_object->script()->set_wasm_weak_instance_list(*weak_instance_list);
1885 }
1886
1887 return trusted_data;
1888}
1889
1891 const wasm::NativeModule* native_module) {
// NOTE(review): this is the tail of InitDataSegmentArrays; its opening
// signature line (and the local {module}/{wire_bytes} declarations) were
// lost in this extraction.
1894 uint32_t num_data_segments = module->num_declared_data_segments;
1895 // The number of declared data segments will be zero if there is no DataCount
1896 // section. These arrays will not be allocated nor initialized in that case,
1897 // since they cannot be used (since the validator checks that number of
1898 // declared data segments when validating the memory.init and memory.drop
1899 // instructions).
1900 DCHECK(num_data_segments == 0 ||
1901 num_data_segments == module->data_segments.size());
1902 for (uint32_t i = 0; i < num_data_segments; ++i) {
1903 const wasm::WasmDataSegment& segment = module->data_segments[i];
1904 // Initialize the pointer and size of passive segments.
// {source_bytes} points directly into the module's wire bytes; presumably
// those are owned by {native_module} and outlive these arrays -- TODO confirm.
1905 auto source_bytes = wire_bytes.SubVector(segment.source.offset(),
1906 segment.source.end_offset());
1907 data_segment_starts()->set(i,
1908 reinterpret_cast<Address>(source_bytes.begin()));
1909 // Set the active segments to being already dropped, since memory.init on
1910 // a dropped passive segment and an active segment have the same
1911 // behavior.
1912 data_segment_sizes()->set(static_cast<int>(i),
1913 segment.active ? 0 : source_bytes.length());
1914 }
1915 }
1916
// NOTE(review): a new definition starts here (appears to be
// WasmTrustedInstanceData::GetCallTarget, judging by the call in
// GetOrCreateFuncRef below; its signature lines are missing).
1920 if (func_index < native_module->num_imported_functions()) {
// Imported functions are called through the import dispatch table.
1921 return dispatch_table_for_imports()->target(func_index);
1922 }
1923
1924 if (v8_flags.wasm_jitless) {
// NOTE(review): the body of this branch (the jitless return value) was lost
// in extraction.
1926 }
1927
1928 return native_module->GetCodePointerHandle(func_index);
1929 }
1930
1931 // static
// NOTE(review): the signature line of this definition (a bool-returning
// CopyTableEntries, per the returns below) was lost in extraction.
1933 Isolate* isolate,
1934 DirectHandle<WasmTrustedInstanceData> trusted_instance_data,
1935 uint32_t table_dst_index, uint32_t table_src_index, uint32_t dst,
1936 uint32_t src, uint32_t count) {
1937 CHECK_LT(table_dst_index, trusted_instance_data->tables()->length());
1938 CHECK_LT(table_src_index, trusted_instance_data->tables()->length());
1939 auto table_dst =
1941 trusted_instance_data->tables()->get(table_dst_index)),
1942 isolate);
1943 auto table_src =
1945 trusted_instance_data->tables()->get(table_src_index)),
1946 isolate);
1947 uint32_t max_dst = table_dst->current_length();
1948 uint32_t max_src = table_src->current_length();
// If the ranges overlap with dst > src, copy backwards so that source
// entries are read before they are overwritten.
1949 bool copy_backward = src < dst;
// Returning {false} signals an out-of-bounds access to the caller.
1950 if (!base::IsInBounds(dst, count, max_dst) ||
1951 !base::IsInBounds(src, count, max_src)) {
1952 return false;
1953 }
1954
1955 // no-op
1956 if ((dst == src && table_dst_index == table_src_index) || count == 0) {
1957 return true;
1958 }
1959
// Element-wise copy through the generic table Get/Set accessors.
1960 for (uint32_t i = 0; i < count; ++i) {
1961 uint32_t src_index = copy_backward ? (src + count - i - 1) : src + i;
1962 uint32_t dst_index = copy_backward ? (dst + count - i - 1) : dst + i;
1963 auto value = WasmTableObject::Get(isolate, table_src, src_index);
1964 WasmTableObject::Set(isolate, table_dst, dst_index, value);
1965 }
1966 return true;
1967 }
1968
1969 // static
1970std::optional<MessageTemplate> WasmTrustedInstanceData::InitTableEntries(
1971 Isolate* isolate,
1972 DirectHandle<WasmTrustedInstanceData> trusted_instance_data,
1973 DirectHandle<WasmTrustedInstanceData> shared_trusted_instance_data,
1974 uint32_t table_index, uint32_t segment_index, uint32_t dst, uint32_t src,
1975 uint32_t count) {
// Returns the MessageTemplate of the trap on out-of-bounds access, or an
// empty optional on success.
1976 AccountingAllocator allocator;
1977 // This {Zone} will be used only by the temporary WasmFullDecoder allocated
1978 // down the line from this call. Therefore it is safe to stack-allocate it
1979 // here.
1980 Zone zone(&allocator, "LoadElemSegment");
1981
1982 const WasmModule* module = trusted_instance_data->module();
1983
// A shared table or segment is owned by {shared_trusted_instance_data};
// select the owning instance data accordingly.
1984 bool table_is_shared = module->tables[table_index].shared;
1985 bool segment_is_shared = module->elem_segments[segment_index].shared;
1986
1987 DirectHandle<WasmTableObject> table_object(
1988 Cast<WasmTableObject>((table_is_shared ? shared_trusted_instance_data
1989 : trusted_instance_data)
1990 ->tables()
1991 ->get(table_index)),
1992 isolate);
1993
1994 // If needed, try to lazily initialize the element segment.
1995 std::optional<MessageTemplate> opt_error = wasm::InitializeElementSegment(
1996 &zone, isolate, trusted_instance_data, shared_trusted_instance_data,
1997 segment_index);
1998 if (opt_error.has_value()) return opt_error;
1999
2000 DirectHandle<FixedArray> elem_segment(
2001 Cast<FixedArray>((segment_is_shared ? shared_trusted_instance_data
2002 : trusted_instance_data)
// NOTE(review): the accessor between these lines (likely
// ->element_segments()) was lost in extraction.
2004 ->get(segment_index)),
2005 isolate);
// 64-bit bounds checks so that {dst + count} / {src + count} cannot wrap.
2006 if (!base::IsInBounds<uint64_t>(dst, count, table_object->current_length())) {
2007 return {MessageTemplate::kWasmTrapTableOutOfBounds};
2008 }
2009 if (!base::IsInBounds<uint64_t>(src, count, elem_segment->length())) {
2010 return {MessageTemplate::kWasmTrapElementSegmentOutOfBounds};
2011 }
2012
// Copy the materialized segment entries into the table.
2013 for (size_t i = 0; i < count; i++) {
// NOTE(review): the call head on the line before these arguments (likely
// WasmTableObject::Set) was lost in extraction.
2015 isolate, table_object, static_cast<int>(dst + i),
2016 direct_handle(elem_segment->get(static_cast<int>(src + i)), isolate));
2017 }
2018
2019 return {};
2020 }
2021
// NOTE(review): a new definition starts here (appears to be
// WasmTrustedInstanceData::try_get_func_ref; its signature lines are
// missing from this extraction).
2024 Tagged<Object> val = func_refs()->get(index);
// A Smi in the func_refs array means no WasmFuncRef has been created for
// this function yet.
2025 if (IsSmi(val)) return false;
2026 *result = Cast<WasmFuncRef>(val);
2027 return true;
2028 }
2029
2031 Isolate* isolate,
2032 DirectHandle<WasmTrustedInstanceData> trusted_instance_data,
2033 int function_index) {
// Fast path: a funcref for this function was already created and cached.
2034 Tagged<WasmFuncRef> existing_func_ref;
2035 if (trusted_instance_data->try_get_func_ref(function_index,
2036 &existing_func_ref)) {
2037 return direct_handle(existing_func_ref, isolate);
2038 }
2039
2040 const WasmModule* module = trusted_instance_data->module();
2041 bool is_import =
2042 function_index < static_cast<int>(module->num_imported_functions);
2043 wasm::ModuleTypeIndex sig_index = module->functions[function_index].sig_index;
// Imports carry the import dispatch table's implicit arg; declared
// functions use the instance data itself.
2044 DirectHandle<TrustedObject> implicit_arg =
2045 is_import ? direct_handle(
2047 trusted_instance_data->dispatch_table_for_imports()
2048 ->implicit_arg(function_index)),
2049 isolate)
2050 : trusted_instance_data;
2051
2052 // TODO(14034): Create funcref RTTs lazily?
2054 Cast<Map>(
2055 trusted_instance_data->managed_object_maps()->get(sig_index.index)),
2056 isolate};
2057
// Allocate a fresh WasmInternalFunction / WasmFuncRef pair and cache it in
// {func_refs} so future lookups hit the fast path above.
2058 DirectHandle<WasmInternalFunction> internal_function =
2059 isolate->factory()->NewWasmInternalFunction(implicit_arg, function_index);
2060 DirectHandle<WasmFuncRef> func_ref =
2061 isolate->factory()->NewWasmFuncRef(internal_function, rtt);
2062 trusted_instance_data->func_refs()->set(function_index, *func_ref);
2063
2064 // Reuse the call target of the instance. In case of import wrappers, the
2065 // wrapper will automatically get tiered up together since it will use the
2066 // same CPT entry.
2067 internal_function->set_call_target(
2068 trusted_instance_data->GetCallTarget(function_index));
2069
2070 return func_ref;
2071 }
2072
// NOTE(review): a new definition starts here (appears to be
// WasmInternalFunction::try_get_external; its signature line is missing).
// {external()} presumably stays undefined until GetOrCreateExternal() below
// has run -- TODO confirm.
2074 if (IsUndefined(external())) return false;
2075 *result = Cast<JSFunction>(external());
2076 return true;
2077 }
2078
2079 // static
// NOTE(review): several hyperlinked declaration lines of this function were
// lost in this extraction (signature, some local declarations).
2082 Isolate* isolate = Isolate::Current();
2083
// Fast path: the external JSFunction was already created and cached.
2084 Tagged<JSFunction> existing_external;
2085 if (internal->try_get_external(&existing_external)) {
2086 return direct_handle(existing_external, isolate);
2087 }
2088
2089 // {this} can either be:
2090 // - a declared function, i.e. {implicit_arg()} is a WasmTrustedInstanceData,
2091 // - or an imported callable, i.e. {implicit_arg()} is a WasmImportData which
2092 // refers to the imported instance.
2093 // It cannot be a JS/C API function as for those, the external function is set
2094 // at creation.
2095 DirectHandle<TrustedObject> implicit_arg{internal->implicit_arg(), isolate};
2097 IsWasmTrustedInstanceData(*implicit_arg)
2098 ? Cast<WasmTrustedInstanceData>(implicit_arg)
2099 : direct_handle(Cast<WasmImportData>(*implicit_arg)->instance_data(),
2100 isolate);
2101 const WasmModule* module = instance_data->module();
2102 const WasmFunction& function = module->functions[internal->function_index()];
2104 module->canonical_sig_id(function.sig_index);
2105 const wasm::CanonicalSig* sig =
2108 int wrapper_index = sig_id.index;
2109
// Look up a cached JS-to-Wasm wrapper for this canonical signature.
2110 Tagged<MaybeObject> entry =
2111 isolate->heap()->js_to_wasm_wrappers()->get(wrapper_index);
2112
2113 DirectHandle<Code> wrapper_code;
2114 // {entry} can be cleared or a weak reference to a ready {CodeWrapper}.
2115 if (!entry.IsCleared()) {
2116 wrapper_code = direct_handle(
2117 Cast<CodeWrapper>(entry.GetHeapObjectAssumeWeak())->code(isolate),
2118 isolate);
2119#if V8_ENABLE_DRUMBRAKE
2120 } else if (v8_flags.wasm_jitless) {
2121 wrapper_code = isolate->builtins()->code_handle(
2122 Builtin::kGenericJSToWasmInterpreterWrapper);
2123#endif // V8_ENABLE_DRUMBRAKE
2124 } else if (CanUseGenericJsToWasmWrapper(module, sig)) {
2125 if (v8_flags.stress_wasm_stack_switching) {
2126 wrapper_code =
2127 isolate->builtins()->code_handle(Builtin::kWasmStressSwitch);
2128 } else {
2129 wrapper_code =
2130 isolate->builtins()->code_handle(Builtin::kJSToWasmWrapper);
2131 }
2132 } else {
2133 // The wrapper does not exist yet; compile it now.
2135 isolate, sig, sig_id);
2136 // This should have added an entry in the per-isolate cache.
2137 DCHECK_EQ(MakeWeak(wrapper_code->wrapper()),
2138 isolate->heap()->js_to_wasm_wrappers()->get(wrapper_index));
2139 }
2142 instance_data->func_refs()->get(internal->function_index())),
2143 isolate};
2144 DCHECK_EQ(func_ref->internal(isolate), *internal);
2146 isolate, instance_data, func_ref, internal,
2147 static_cast<int>(sig->parameter_count()), wrapper_code);
2148
// Cache the freshly created external function on the internal function.
2149 internal->set_external(*result);
2150 return result;
2151 }
2152
// NOTE(review): tail of a WasmImportData call-origin setter (signature line
// missing). Records the dispatch table and slot through which this import
// is called.
2154 Tagged<WasmDispatchTable> table, int entry_index) {
2155 set_call_origin(table);
2156 set_table_slot(entry_index);
2157 }
2158
// NOTE(review): a companion setter (signature line missing) that records a
// function as the call origin instead of a table slot.
2160 set_call_origin(func);
2161 }
2162
// NOTE(review): tail of GetGlobalStorage (signature line missing); returns
// the raw storage address of a non-reference global.
2164 const wasm::WasmGlobal& global) {
2165 DCHECK(!global.type.is_reference());
// Imported mutable globals are reached through a sandboxed pointer stored
// in {imported_mutable_globals()}.
2166 if (global.mutability && global.imported) {
2167 return reinterpret_cast<uint8_t*>(
2168 imported_mutable_globals()->get_sandboxed_pointer(global.index));
2169 }
2170 return globals_start() + global.offset;
2171 }
2172
2173std::pair<Tagged<FixedArray>, uint32_t>
2175 const wasm::WasmGlobal& global) {
2177 DCHECK(global.type.is_reference());
// For imported mutable reference globals the value lives in a separate
// tagged buffer; {imported_mutable_globals()} stores the index into it.
2178 if (global.mutability && global.imported) {
2179 Tagged<FixedArray> buffer =
2180 Cast<FixedArray>(imported_mutable_globals_buffers()->get(global.index));
2181 Address idx = imported_mutable_globals()->get(global.index);
2182 DCHECK_LE(idx, std::numeric_limits<uint32_t>::max());
2183 return {buffer, static_cast<uint32_t>(idx)};
2184 }
2185 return {tagged_globals_buffer(), global.offset};
2186 }
2187
2189 Isolate* isolate, const wasm::WasmGlobal& global) {
// Reference globals live in a tagged buffer; numeric globals in raw
// untagged storage read below.
2191 if (global.type.is_reference()) {
2192 Tagged<FixedArray> global_buffer; // The buffer of the global.
2193 uint32_t global_index = 0; // The index into the buffer.
2194 std::tie(global_buffer, global_index) = GetGlobalBufferAndIndex(global);
2195 return wasm::WasmValue(
2196 direct_handle(global_buffer->get(global_index), isolate),
2197 module()->canonical_type(global.type));
2198 }
2199 Address ptr = reinterpret_cast<Address>(GetGlobalStorage(global));
2200 switch (global.type.kind()) {
2201#define CASE_TYPE(valuetype, ctype) \
2202 case wasm::valuetype: \
2203 return wasm::WasmValue(base::ReadUnalignedValue<ctype>(ptr));
// NOTE(review): the macro invocation listing the value types (line 2204)
// was lost in extraction.
2205#undef CASE_TYPE
2206 default:
2207 UNREACHABLE();
2208 }
2209 }
2210
// NOTE(review): a new definition starts here (a canonical struct-type
// lookup keyed on a Map; its signature lines are missing).
2212 DCHECK_EQ(WASM_STRUCT_TYPE, map->instance_type());
2213 Tagged<HeapObject> raw = Cast<HeapObject>(map->constructor_or_back_pointer());
2214 // The {WasmTypeInfo} might be in the middle of being moved, which is why we
2215 // can't read its map for a checked cast. But we can rely on its native type
2216 // pointer being intact in the old location.
// NOTE(review): the unchecked cast producing {type_info} (line 2217) was
// lost in extraction.
2218 return wasm::GetTypeCanonicalizer()->LookupStruct(type_info->type_index());
2219 }
2220
2221// Allocates a Wasm Struct that is a descriptor for another type, leaving
2222// its fields uninitialized.
2223// Descriptor structs have a 1:1 relationship with the internal "RTT" (aka
2224// v8::internal::Map) of the struct type they are describing, so this RTT
2225// is allocated along with the descriptor below, and the links between them
2226// are set up. RTTs with custom descriptors always are subtypes of the
2227// canonical RTT for the same type, so that canonical RTT is installed as the
2228// super-RTT of the customized RTT.
2229// The RTT/map of the descriptor itself is provided by the caller as {map}.
2230//
2231// The eventual on-heap object structure will be something like the following,
2232// where (A) is the object returned by this function, and (B) is allocated
2233// along with it. There will likely be many instance of (C), and they will be
2234// allocated (much) later, by one or more {struct.new} instructions that
2235// take (A) as input and retrieve (B) from it.
2236//
2237// Wasm struct (C): Wasm Descriptor Struct (A):
2238// +-----------+ +-----------+
2239// | Map |------\ | Map |
2240// +-----------+ | +-----------+
2241// | hash | | | hash |
2242// +-----------+ | /--------| RTT |
2243// | fields... | v (B) v +-----------+
2244// | | +-------------+ | fields... |
2245// +-----------+ | Meta-map | | |
2246// +-------------+ +-----------+
2247// | ... | ^
2248// | Descriptor |---------/
2249// | ... |
2250// +-------------+
2251// static
2255 const wasm::WasmModule* module = trusted_data->module();
2256 const wasm::TypeDefinition& type = module->type(index);
2257 DCHECK(type.is_descriptor());
2258 // TODO(jkummerow): Figure out support for shared objects.
2259 if (type.is_shared) UNIMPLEMENTED();
// The canonical RTT of the described type becomes the parent of the
// customized RTT allocated below (see the diagram above this function).
2260 wasm::CanonicalTypeIndex described_index =
2261 module->canonical_type_id(type.describes);
2262 DirectHandle<Map> rtt_parent{
2263 Cast<Map>(trusted_data->managed_object_maps()->get(type.describes.index)),
2264 isolate};
2266 Cast<NativeContext>(trusted_data->native_context()), isolate);
2267 DirectHandle<Map> rtt =
2268 CreateStructMap(isolate, described_index, rtt_parent, context);
2269 DirectHandle<WasmStruct> descriptor =
2270 isolate->factory()->NewWasmStructUninitialized(type.struct_type, map,
2272 // The struct's body is uninitialized. As soon as we return, callers will
2273 // take care of that. Until then, no allocations are allowed.
// Link the descriptor and its RTT to each other.
2275 descriptor->set_described_rtt(*rtt);
2276 rtt->set_custom_descriptor(*descriptor);
2277 return descriptor;
2278 }
2279
// NOTE(review): tail of a WasmStruct field getter (signature line missing);
// boxes the field at {index} as a WasmValue.
2281 const wasm::CanonicalStructType* type =
2283 map()->wasm_type_info()->type_index());
2284 wasm::CanonicalValueType field_type = type->field(index);
2285 int field_offset = WasmStruct::kHeaderSize + type->field_offset(index);
2286 Address field_address = GetFieldAddress(field_offset);
2287 switch (field_type.kind()) {
2288#define CASE_TYPE(valuetype, ctype) \
2289 case wasm::valuetype: \
2290 return wasm::WasmValue(base::ReadUnalignedValue<ctype>(field_address));
2291 CASE_TYPE(kI8, int8_t)
2292 CASE_TYPE(kI16, int16_t)
2294#undef CASE_TYPE
// f16 fields are widened to f32 on read.
2295 case wasm::kF16:
2296 return wasm::WasmValue(fp16_ieee_to_fp32_value(
2297 base::ReadUnalignedValue<uint16_t>(field_address)));
2298 case wasm::kRef:
2299 case wasm::kRefNull: {
2300 DirectHandle<Object> ref(TaggedField<Object>::load(*this, field_offset),
2302 return wasm::WasmValue(ref, field_type);
2303 }
2304 case wasm::kVoid:
2305 case wasm::kTop:
2306 case wasm::kBottom:
2307 UNREACHABLE();
2308 }
2309 }
2310
// NOTE(review): a new definition starts here (a WasmArray element getter;
// signature line missing). Mirrors the struct field getter above.
2312 wasm::CanonicalValueType element_type =
2313 map()->wasm_type_info()->element_type();
2314 int element_offset =
2315 WasmArray::kHeaderSize + index * element_type.value_kind_size();
2316 Address element_address = GetFieldAddress(element_offset);
2317 switch (element_type.kind()) {
2318#define CASE_TYPE(value_type, ctype) \
2319 case wasm::value_type: \
2320 return wasm::WasmValue(base::ReadUnalignedValue<ctype>(element_address));
2321 CASE_TYPE(kI8, int8_t)
2322 CASE_TYPE(kI16, int16_t)
2324#undef CASE_TYPE
2325 case wasm::kF16:
2326 return wasm::WasmValue(fp16_ieee_to_fp32_value(
2327 base::ReadUnalignedValue<uint16_t>(element_address)));
2328 case wasm::kRef:
2329 case wasm::kRefNull: {
// NOTE(review): the declaration of {ref} was lost in extraction here.
2332 return wasm::WasmValue(ref, element_type);
2333 }
2334 case wasm::kVoid:
2335 case wasm::kTop:
2336 case wasm::kBottom:
2337 UNREACHABLE();
2338 }
2339 }
2340
// NOTE(review): tail of a tagged-element setter on WasmArray (signature
// line missing); writes {value} with the requested write-barrier mode.
2342 WriteBarrierMode mode) {
2343 DCHECK(map()->wasm_type_info()->element_type().is_reference());
2344 TaggedField<Object>::store(*this, element_offset(index), *value);
2345 CONDITIONAL_WRITE_BARRIER(*this, element_offset(index), *value, mode);
2346 }
2347
2348 // static
// NOTE(review): the signature line of WasmTagObject::New and some local
// declarations were lost in extraction.
2350 Isolate* isolate, const wasm::FunctionSig* sig,
2353 DirectHandle<JSFunction> tag_cons(
2354 isolate->native_context()->wasm_tag_constructor(), isolate);
2355
2356 // Serialize the signature.
2357 DCHECK_EQ(0, sig->return_count());
2358 DCHECK_LE(sig->parameter_count(), std::numeric_limits<int>::max());
2359 int sig_size = static_cast<int>(sig->parameter_count());
2362 int index = 0; // Index into the {PodArray} above.
// Copy each parameter type into the serialized signature array.
2363 for (wasm::ValueType param : sig->parameters()) {
2364 serialized_sig->set(index++, param);
2365 }
2366
2367 DirectHandle<JSObject> tag_object =
2368 isolate->factory()->NewJSObject(tag_cons, AllocationType::kOld);
2369 DirectHandle<WasmTagObject> tag_wrapper = Cast<WasmTagObject>(tag_object);
2370 tag_wrapper->set_serialized_signature(*serialized_sig);
2371 tag_wrapper->set_canonical_type_index(type_index.index);
2372 tag_wrapper->set_tag(*tag);
2373 if (!trusted_data.is_null()) {
2374 tag_wrapper->set_trusted_data(*trusted_data);
2375 } else {
2376 tag_wrapper->clear_trusted_data();
2377 }
2378
2379 return tag_wrapper;
2380 }
2381
// NOTE(review): a new definition starts here (signature line missing);
// compares the stored canonical type index against {expected_index}.
2383 return wasm::CanonicalTypeIndex{static_cast<uint32_t>(
2384 this->canonical_type_index())} == expected_index;
2385 }
2386
// NOTE(review): a new definition starts here (signature line missing);
// returns the C-API function's signature via the shared function info.
2388 return shared()->wasm_capi_function_data()->sig();
2389 }
2390
2391#ifdef DEBUG
2392WasmCodePointer WasmDispatchTableData::WrapperCodePointerForDebugging(
2393 int index) {
2394 auto it = wrappers_.find(index);
2395 CHECK_NE(it, wrappers_.end());
2396 return it->second.call_target;
2397}
2398#endif
2399
// NOTE(review): tail of WasmDispatchTableData::IsAWrapper (signature line
// missing); an entry in {wrappers_} marks the slot as holding a wrapper.
2401 return wrappers_.contains(index);
2402 }
2403
// NOTE(review): a new definition starts here (the WasmDispatchTableData
// destructor, judging by the batched WasmCode release; its signature and
// the final ref-count decrement call were lost in extraction).
2405 if (wrappers_.empty()) return;
2406 std::vector<wasm::WasmCode*> codes;
2407 for (auto [index, entry] : wrappers_) {
2409 if (entry.code) codes.push_back(entry.code);
2410 }
2412 }
2413
// NOTE(review): a new definition starts here (WasmDispatchTableData::Add,
// per the call sites below; leading signature lines missing). Registers a
// wrapper entry and returns the code pointer for the slot.
2415 wasm::WasmCode* compiled_wrapper,
2416 uint64_t signature_hash) {
2417 WasmCodePointer code_pointer;
2418 auto it = wrappers_.find(index);
2419 if (it == wrappers_.end()) {
// First registration for this slot: allocate a fresh entry.
2420 code_pointer =
2422 call_target, signature_hash);
2423 auto [wrapper_cache, was_inserted] =
2424 wrappers_.emplace(index, WrapperEntry{code_pointer, compiled_wrapper});
2425 USE(was_inserted);
2426 DCHECK(was_inserted);
2427 } else {
// Re-registration: reuse the existing code pointer table entry and
// remember the now-compiled wrapper.
2428 auto& [existing_code_pointer, wrapper_code] = it->second;
2429 code_pointer = existing_code_pointer;
2431 code_pointer, call_target, signature_hash);
2432 DCHECK_NULL(wrapper_code);
2433 DCHECK_NOT_NULL(compiled_wrapper);
2434 wrapper_code = compiled_wrapper;
2435 }
// Only compiled wrappers are ref-counted; a null {compiled_wrapper} must
// not correspond to anything in the import wrapper cache.
2436 if (compiled_wrapper) {
2437 compiled_wrapper->IncRef();
2438 } else {
2439 DCHECK_NULL(wasm::GetWasmImportWrapperCache()->FindWrapper(code_pointer));
2440 }
2441
2442 return code_pointer;
2443 }
2444
// NOTE(review): tail of WasmDispatchTableData::Remove (signature line
// missing); drops the wrapper entry for {index}, releasing its WasmCode.
2446 if (call_target == wasm::kInvalidWasmCodePointer) return;
2447
2448 auto entry = wrappers_.find(index);
2449 if (entry == wrappers_.end()) {
2450 // This is certainly not a wrapper.
2451 DCHECK_NULL(wasm::GetWasmImportWrapperCache()->FindWrapper(call_target));
2452 return;
2453 }
2454 auto& [code_pointer, wrapper_code] = entry->second;
2456 if (wrapper_code) {
2457 // TODO(clemensb): We should speed this up by doing
2458 // {WasmCodeRefScope::AddRef} and then {DecRefOnLiveCode}.
2459 wasm::WasmCode::DecrementRefCount({&wrapper_code, 1});
2460 }
2461
2462 wrappers_.erase(entry);
2463 }
2464
// NOTE(review): a new definition starts here (a WasmDispatchTable::Set
// overload; leading signature lines missing). A Smi-zero {implicit_arg}
// means "clear the entry".
2466 WasmCodePointer call_target,
2468#if V8_ENABLE_DRUMBRAKE
2469 uint32_t function_index,
2470#endif // V8_ENABLE_DRUMBRAKE
2471 NewOrExistingEntry new_or_existing) {
2472 if (implicit_arg == Smi::zero()) {
2474 Clear(index, new_or_existing);
2475 return;
2476 }
2477
2478 SBXCHECK_BOUNDS(index, length());
2479 DCHECK(IsWasmImportData(implicit_arg) ||
2480 IsWasmTrustedInstanceData(implicit_arg));
2481 DCHECK(sig_id.valid());
2482 const int offset = OffsetOf(index);
2483 if (!v8_flags.wasm_jitless) {
2484 // When overwriting an existing entry, we must decrement the refcount
2485 // of any overwritten wrappers. When initializing an entry, we must not
2486 // read uninitialized memory.
2487 if (new_or_existing == kExistingEntry) {
2488 WasmCodePointer old_target =
2490 offheap_data()->Remove(index, old_target);
2491 }
2492 WriteField<uint32_t>(offset + kTargetBias, call_target.value());
2493 } else {
2494#if V8_ENABLE_DRUMBRAKE
2495 // Ignore call_target, not used in jitless mode.
2496 WriteField<int>(offset + kFunctionIndexBias, function_index);
2497#endif // V8_ENABLE_DRUMBRAKE
2498 }
// NOTE(review): the trailing field writes of this overload (lines
// 2499-2503) were lost in extraction.
2504 }
2505
// NOTE(review): tail of another WasmDispatchTable::Set overload (leading
// signature lines missing); this variant also registers an off-heap
// wrapper entry for the target.
2507 Address call_target,
2509 uint64_t signature_hash,
2510#if V8_ENABLE_DRUMBRAKE
2511 uint32_t function_index,
2512#endif // V8_ENABLE_DRUMBRAKE
2513 wasm::WasmCode* compiled_wrapper,
2514 NewOrExistingEntry new_or_existing) {
2516 SBXCHECK(!compiled_wrapper || !compiled_wrapper->is_dying());
2517 SBXCHECK_BOUNDS(index, length());
2518 DCHECK(IsWasmImportData(implicit_arg) ||
2519 IsWasmTrustedInstanceData(implicit_arg));
2520 DCHECK(sig_id.valid());
2521 const int offset = OffsetOf(index);
2526 if (!v8_flags.wasm_jitless) {
2527 // When overwriting an existing entry, we must decrement the refcount
2528 // of any overwritten wrappers. When initializing an entry, we must not
2529 // read uninitialized memory.
2530 if (new_or_existing == kExistingEntry) {
2531 WasmCodePointer old_target =
2533 offheap_data()->Remove(index, old_target);
2534 }
2535 WasmCodePointer code_pointer = offheap_data()->Add(
2536 index, call_target, compiled_wrapper, signature_hash);
2537 WriteField<uint32_t>(offset + kTargetBias, code_pointer.value());
2538 } else {
2539#if V8_ENABLE_DRUMBRAKE
2540 // Ignore call_target, not used in jitless mode.
2541 WriteField<int>(offset + kFunctionIndexBias, function_index);
2542#endif // V8_ENABLE_DRUMBRAKE
2543 }
2544
2546 }
2547
// Resets the entry at {index} to the cleared state.
2548void WasmDispatchTable::Clear(int index, NewOrExistingEntry new_or_existing) {
2549 SBXCHECK_BOUNDS(index, length());
2550 const int offset = OffsetOf(index);
2551 // When clearing an existing entry, we must update the refcount of any
2552 // wrappers. When clear-initializing new entries, we must not read
2553 // uninitialized memory.
2554 if (new_or_existing == kExistingEntry) {
2555 WasmCodePointer old_target =
2557 offheap_data()->Remove(index, old_target);
2558 }
// NOTE(review): the field writes performing the actual clearing (lines
// 2559-2562) were lost in extraction.
2563 }
2564
// NOTE(review): tail of a compiled-wrapper installation method (signature
// line missing); replaces the generic wrapper entry with compiled code.
2566 wasm::WasmCode* wrapper) {
2567 SBXCHECK_BOUNDS(index, length());
2568 if (v8_flags.wasm_jitless) return; // Nothing to do.
2569
2570 WasmCodePointer call_target = offheap_data()->Add(
2571 index, wrapper->instruction_start(), wrapper, wrapper->signature_hash());
2572 USE(call_target);
2573 // When installing a compiled wrapper, we already had the generic wrapper in
2574 // place, which shares the same code pointer table entry.
2576 call_target);
2577 }
2578
2579bool WasmDispatchTable::IsAWrapper(int index) const {
2580 return offheap_data()->IsAWrapper(index);
2581}
2582
2583 // static
// NOTE(review): the signature line of AddUse is missing. Appends the
// ({instance}, {table_index}) pair to the dispatch table's uses list (see
// the "uses" list helpers at the top of this file).
2585 DirectHandle<WasmDispatchTable> dispatch_table,
2587 int table_index) {
2589 MaybeGrowUsesList(isolate, dispatch_table);
2590 int cursor = GetUsedLength(uses);
2591 // {MaybeGrowUsesList} ensures that we have enough capacity.
2592 SetEntry(uses, cursor, *instance, table_index);
// Each entry occupies two slots (weak instance ref + table index).
2593 SetUsedLength(uses, cursor + 2);
2594 }
2595
2596 // static
// NOTE(review): the signature line of MaybeGrowUsesList is missing.
// Ensures at least one free entry in the uses list, compacting cleared
// weak entries first and growing the backing array only when needed.
2598 Isolate* isolate, DirectHandle<WasmDispatchTable> dispatch_table) {
2599 Tagged<ProtectedWeakFixedArray> uses = dispatch_table->protected_uses();
2600 int capacity = uses->length();
2601 if (capacity == 0) {
2602 constexpr int kInitialLength = 3; // 1 slot + 1 pair.
2604 isolate->factory()->NewProtectedWeakFixedArray(kInitialLength);
2605 SetUsedLength(*new_uses, kReservedSlotOffset);
2606 dispatch_table->set_protected_uses(*new_uses);
2607 return *new_uses;
2608 }
2609 DCHECK_GT(uses->length(), 0);
2610 int used_length = GetUsedLength(uses);
2611 if (used_length < capacity) return uses;
2612 // Try to compact, grow if that doesn't free up enough space.
2613 int cleared_entries = 0;
2614 int write_cursor = kReservedSlotOffset;
2615 for (int i = kReservedSlotOffset; i < capacity; i += 2) {
2616 DCHECK(uses->get(i).IsWeakOrCleared());
2617 if (uses->get(i).IsCleared()) {
2618 cleared_entries++;
2619 continue;
2620 }
2621 if (write_cursor != i) {
2622 CopyEntry(uses, write_cursor, uses, i);
2623 }
2624 write_cursor += 2;
2625 }
2626 // We need at least one free entry. We want at least half the array to be
2627 // empty; each entry needs two slots.
2628 int min_free_entries = 1 + (capacity >> 2);
2629 if (cleared_entries >= min_free_entries) {
2630 SetUsedLength(uses, write_cursor);
2631 return uses;
2632 }
2633 // Grow by 50%, at least one entry.
// The raw {uses} pointer must not be held across the allocation below;
// invalidate it and re-read it from the handle afterwards.
2634 DirectHandle<ProtectedWeakFixedArray> uses_handle(uses, isolate);
2635 uses = {};
2636 int old_entries = capacity >> 1; // Two slots per entry.
2637 int new_entries = std::max(old_entries + 1, old_entries + (old_entries >> 1));
2638 int new_capacity = new_entries * 2 + kReservedSlotOffset;
2640 isolate->factory()->NewProtectedWeakFixedArray(new_capacity);
2641 // The allocation could have triggered GC, freeing more entries.
2642 // The previous compaction's {write_cursor} is the new upper bound on
2643 // existing entries.
2644 used_length = write_cursor;
2646 uses = *uses_handle;
2647 write_cursor = kReservedSlotOffset;
2648 for (int i = kReservedSlotOffset; i < used_length; i += 2) {
2649 if (uses->get(i).IsCleared()) continue;
2650 CopyEntry(*new_uses, write_cursor, uses, i);
2651 write_cursor += 2;
2652 }
2653 SetUsedLength(*new_uses, write_cursor);
2654 dispatch_table->set_protected_uses(*new_uses);
2655 return *new_uses;
2656 }
2657
2658 // static
// NOTE(review): the signature line is missing; thin factory wrapper that
// allocates a WasmDispatchTable of the given length and table type.
2660 Isolate* isolate, int length, wasm::CanonicalValueType table_type) {
2661 return isolate->factory()->NewWasmDispatchTable(length, table_type);
2662 }
2663
2664// static
// Grows {old_table} so that it can hold at least {new_length} entries.
// Returns {old_table} unchanged if its capacity already suffices; otherwise
// allocates a bigger table, copies all entries over, re-points call origins
// and all using instances at the new table, and returns it.
// NOTE(review): this listing dropped several source lines (e.g. 2665, 2676,
// 2683, 2691, 2700, 2703, 2713, 2745-2748) — tokens below are kept verbatim;
// verify against upstream before relying on the exact control flow.
2666 Isolate* isolate, DirectHandle<WasmDispatchTable> old_table,
2667 uint32_t new_length) {
2668 uint32_t old_length = old_table->length();
2669 // This method should only be called if we actually grow. For sandbox
2670 // purposes we also want to ensure tables can never shrink below their
2671 // static minimum size.
2672 SBXCHECK_LT(old_length, new_length);
2673
2674 uint32_t old_capacity = old_table->capacity();
2675 // Catch possible corruption. {new_length} is computed from untrusted data.
2677 // {old_length} and {old_capacity} are read from trusted space, so we trust
2678 // them. The DCHECKs give fuzzers a chance to catch potential bugs.
2679 DCHECK_LE(old_length, wasm::max_table_size());
2680 DCHECK_LE(old_capacity, wasm::max_table_size());
2681
// Fast path: still room in the existing backing store.
2682 if (new_length < old_capacity) {
2684 // All fields within the old capacity are already cleared (see below).
2685 return old_table;
2686 }
2687
2688 // Grow table exponentially to guarantee amortized constant allocation and gc
2689 // time.
2690 uint32_t limit =
2692 uint32_t max_grow = limit - old_capacity;
2693 uint32_t min_grow = new_length - old_capacity;
2694 CHECK_LE(min_grow, max_grow);
2695 // Grow by old capacity, and at least by 8. Clamp to min_grow and max_grow.
2696 uint32_t exponential_grow = std::max(old_capacity, 8u);
2697 uint32_t grow = std::clamp(exponential_grow, min_grow, max_grow);
2698 uint32_t new_capacity = old_capacity + grow;
2699 DCHECK_LE(new_capacity, limit);
2701 WasmDispatchTable::New(isolate, new_capacity, old_table->table_type());
2702
2704 // Writing non-atomically is fine here because this is a freshly allocated
2705 // object.
2706 new_table->WriteField<int>(kLengthOffset, new_length);
// Copy every live entry from the old table, entry by entry.
2707 for (uint32_t i = 0; i < old_length; ++i) {
2708 WasmCodePointer call_target = old_table->target(i);
2709 // Update any stored call origins, so that future compiled wrappers
2710 // get installed into the new dispatch table.
2711 Tagged<Object> implicit_arg = old_table->implicit_arg(i);
2712 if (IsWasmImportData(implicit_arg)) {
2714 // After installing a compiled wrapper, we don't set or update
2715 // call origins any more.
2716 if (import_data->has_call_origin()) {
2717 if (import_data->call_origin() == *old_table) {
2718 import_data->set_call_origin(*new_table);
2719 } else {
2720#if DEBUG
2721 wasm::WasmCodeRefScope code_ref_scope;
2723 wasm::GetWasmImportWrapperCache()->FindWrapper(call_target));
2724#endif // DEBUG
2725 }
2726 }
2727 }
2728
// An implicit arg of Smi::zero() marks an empty slot.
2729 if (implicit_arg == Smi::zero()) {
2730 new_table->Clear(i, kNewEntry);
2731 continue;
2732 }
2733
2734 const int offset = OffsetOf(i);
2735 if (!v8_flags.wasm_jitless) {
2736 new_table->WriteField<uint32_t>(offset + kTargetBias,
2737 call_target.value());
2738 } else {
2739#if V8_ENABLE_DRUMBRAKE
2740 // Ignore call_target, not used in jitless mode.
2741 new_table->WriteField<int>(offset + kFunctionIndexBias,
2742 old_table->function_index(i));
2743#endif // V8_ENABLE_DRUMBRAKE
2744 }
2745 new_table->WriteProtectedPointerField(offset + kImplicitArgBias,
2749 new_table->WriteField<uint32_t>(offset + kSigBias, old_table->sig(i).index);
2750 }
2751
// Transfer ownership of the off-heap wrapper refs to the new table.
2752 new_table->offheap_data()->wrappers_ =
2753 std::move(old_table->offheap_data()->wrappers_);
2754
2755 // Update users.
2756 Tagged<ProtectedWeakFixedArray> uses = old_table->protected_uses();
2757 new_table->set_protected_uses(uses);
2758 int used_length = GetUsedLength(uses);
2759 for (int i = kReservedSlotOffset; i < used_length; i += 2) {
2760 if (uses->get(i).IsCleared()) continue;
2761 Tagged<WasmTrustedInstanceData> instance = GetInstance(uses, i);
2762 int table_index = GetTableIndex(uses, i);
2763 DCHECK_EQ(instance->dispatch_tables()->get(table_index), *old_table);
2764 instance->dispatch_tables()->set(table_index, *new_table);
2765 if (table_index == 0) {
2766 DCHECK_EQ(instance->dispatch_table0(), *old_table);
2767 instance->set_dispatch_table0(*new_table);
2768 }
2769 }
2770 return new_table;
2771}
2772
// Returns true iff this C API function's canonical signature index equals
// {other_canonical_sig_index}. Exact equality only — no subtyping (see TODO).
// NOTE(review): the function-header line (2773) was dropped by the listing.
2774 wasm::CanonicalTypeIndex other_canonical_sig_index) const {
2775#if DEBUG
2776 // TODO(14034): Change this if indexed types are allowed.
2777 for (wasm::CanonicalValueType type : this->sig()->all()) {
2778 CHECK(!type.has_index());
2779 }
2780#endif
2781 // TODO(14034): Check for subtyping instead if C API functions can define
2782 // signature supertype.
2783 return shared()->wasm_capi_function_data()->sig_index() ==
2784 other_canonical_sig_index;
2785}
2786
2787// static
// Convenience overload: allocates a values array of {size} elements and
// delegates to the (tag, values) overload below.
2789 Isolate* isolate, DirectHandle<WasmExceptionTag> exception_tag, int size) {
2790 DirectHandle<FixedArray> values = isolate->factory()->NewFixedArray(size);
2791 return New(isolate, exception_tag, values);
2792}
2793
// Creates a JS exception object via the native context's wasm exception
// constructor and stores {exception_tag} and {values} into its two in-object
// slots (kTagIndex / kValuesIndex).
// NOTE(review): the function-header line (2794) was dropped by the listing.
2795 Isolate* isolate, DirectHandle<WasmExceptionTag> exception_tag,
2796 DirectHandle<FixedArray> values) {
2797 DirectHandle<JSFunction> exception_cons(
2798 isolate->native_context()->wasm_exception_constructor(), isolate);
2799 DirectHandle<JSObject> exception =
2800 isolate->factory()->NewJSObject(exception_cons);
2801 exception->InObjectPropertyAtPut(kTagIndex, *exception_tag);
2802 exception->InObjectPropertyAtPut(kValuesIndex, *values);
2803 return Cast<WasmExceptionPackage>(exception);
2804}
2805
2806// static
// Reads the hidden tag property off an exception package; returns undefined
// if the property lookup fails (e.g. the property was removed).
// NOTE(review): lines 2807 (header) and 2809 (likely the `tag` declaration)
// were dropped by the listing.
2808 Isolate* isolate, DirectHandle<WasmExceptionPackage> exception_package) {
2810 if (JSReceiver::GetProperty(isolate, exception_package,
2811 isolate->factory()->wasm_exception_tag_symbol())
2812 .ToHandle(&tag)) {
2813 return tag;
2814 }
2815 return isolate->factory()->undefined_value();
2816}
2817
2818// static
// Reads the hidden values property off an exception package; returns
// undefined if the lookup fails. When present, the value is either undefined
// or a FixedArray (checked in debug builds).
// NOTE(review): lines 2819 (header) and 2822 (start of the GetProperty call)
// were dropped by the listing.
2820 Isolate* isolate, DirectHandle<WasmExceptionPackage> exception_package) {
2821 DirectHandle<Object> values;
2823 isolate, exception_package,
2824 isolate->factory()->wasm_exception_values_symbol())
2825 .ToHandle(&values)) {
2826 DCHECK_IMPLIES(!IsUndefined(*values), IsFixedArray(*values));
2827 return values;
2828 }
2829 return isolate->factory()->undefined_value();
2830}
2831
// Splits a 32-bit value into two 16-bit halves (high first) and appends them
// to {encoded_values} as Smis, advancing {*encoded_index} by two.
// NOTE(review): the function-header line (2832) was dropped by the listing.
2833 uint32_t* encoded_index, uint32_t value) {
2834 encoded_values->set((*encoded_index)++, Smi::FromInt(value >> 16));
2835 encoded_values->set((*encoded_index)++, Smi::FromInt(value & 0xffff));
2836}
2837
// Encodes a 64-bit value as two 32-bit encodings (high word first), i.e.
// four 16-bit Smi elements in total.
// NOTE(review): the function-header line (2838) was dropped by the listing.
2839 uint32_t* encoded_index, uint64_t value) {
2840 EncodeI32ExceptionValue(encoded_values, encoded_index,
2841 static_cast<uint32_t>(value >> 32));
2842 EncodeI32ExceptionValue(encoded_values, encoded_index,
2843 static_cast<uint32_t>(value));
2844}
2845
// Inverse of EncodeI32ExceptionValue: reads two 16-bit Smi halves (high
// first) and reassembles the 32-bit value, advancing {*encoded_index}.
// NOTE(review): the function-header line (2846) was dropped by the listing.
2847 uint32_t* encoded_index, uint32_t* value) {
2848 uint32_t msb = Cast<Smi>(encoded_values->get((*encoded_index)++)).value();
2849 uint32_t lsb = Cast<Smi>(encoded_values->get((*encoded_index)++)).value();
2850 *value = (msb << 16) | (lsb & 0xffff);
2851}
2852
// Inverse of EncodeI64ExceptionValue: decodes the high word, then the low
// word, and reassembles the 64-bit value.
// NOTE(review): the function-header line (2853) was dropped by the listing.
2854 uint32_t* encoded_index, uint64_t* value) {
2855 uint32_t lsb = 0, msb = 0;
2856 DecodeI32ExceptionValue(encoded_values, encoded_index, &msb);
2857 DecodeI32ExceptionValue(encoded_values, encoded_index, &lsb);
2858 *value = (static_cast<uint64_t>(msb) << 32) | static_cast<uint64_t>(lsb);
2859}
2860
// Decides whether the generic (shared) wasm-to-JS wrapper can be used for an
// import call: only on architectures that implement it, only without
// suspension, and only when the flag is enabled.
// NOTE(review): lines 2861 (header), 2864-2865 and 2868 were dropped by the
// listing — the first early-return's condition is not visible here.
2862 const wasm::CanonicalSig* sig,
2863 wasm::Suspend suspend) {
2866 return false;
2867 }
2869#if !V8_TARGET_ARCH_X64 && !V8_TARGET_ARCH_ARM64 && !V8_TARGET_ARCH_ARM && \
2870 !V8_TARGET_ARCH_IA32 && !V8_TARGET_ARCH_RISCV64 && \
2871 !V8_TARGET_ARCH_RISCV32 && !V8_TARGET_ARCH_PPC64 && \
2872 !V8_TARGET_ARCH_S390X && !V8_TARGET_ARCH_LOONG64 && !V8_TARGET_ARCH_MIPS64
2873 return false;
2874#else
2875 if (suspend != wasm::Suspend::kNoSuspend) return false;
2876
2877 return v8_flags.wasm_generic_wrapper;
2878#endif
2879}
2880
2881#ifdef DEBUG
2882
2883namespace {
2884
2885constexpr uint32_t kBytesPerExceptionValuesArrayElement = 2;
2886
2887size_t ComputeEncodedElementSize(wasm::ValueType type) {
2888 size_t byte_size = type.value_kind_size();
2889 DCHECK_EQ(byte_size % kBytesPerExceptionValuesArrayElement, 0);
2890 DCHECK_LE(1, byte_size / kBytesPerExceptionValuesArrayElement);
2891 return byte_size / kBytesPerExceptionValuesArrayElement;
2892}
2893
2894} // namespace
2895
2896#endif // DEBUG
2897
2898// static
// Tag overload: forwards to the signature-based overload below.
// NOTE(review): the function-header line (2899) was dropped by the listing.
2900 return GetEncodedSize(tag->sig);
2901}
2902
2903// static
// Computes the number of values-array elements needed to encode one value of
// each parameter of {sig}: numeric kinds use 2 bytes per element (i32/f32 ->
// 2, i64/f64 -> 4, s128 -> 8 elements), references use a single element.
// NOTE(review): the function-header line (2904) was dropped by the listing.
2905 uint32_t encoded_size = 0;
2906 for (size_t i = 0; i < sig->parameter_count(); ++i) {
2907 switch (sig->GetParam(i).kind()) {
2908 case wasm::kI32:
2909 case wasm::kF32:
2910 DCHECK_EQ(2, ComputeEncodedElementSize(sig->GetParam(i)));
2911 encoded_size += 2;
2912 break;
2913 case wasm::kI64:
2914 case wasm::kF64:
2915 DCHECK_EQ(4, ComputeEncodedElementSize(sig->GetParam(i)));
2916 encoded_size += 4;
2917 break;
2918 case wasm::kS128:
2919 DCHECK_EQ(8, ComputeEncodedElementSize(sig->GetParam(i)));
2920 encoded_size += 8;
2921 break;
2922 case wasm::kRef:
2923 case wasm::kRefNull:
// References are stored directly as one tagged element.
2924 encoded_size += 1;
2925 break;
2926 case wasm::kVoid:
2927 case wasm::kTop:
2928 case wasm::kBottom:
2929 case wasm::kI8:
2930 case wasm::kI16:
2931 case wasm::kF16:
// These kinds cannot appear as exception parameter types.
2932 UNREACHABLE();
2933 }
2934 }
2935 return encoded_size;
2936}
2937
// Type check: {object} is a wasm-exported function iff it is a JSFunction
// whose code is a JS-to-wasm wrapper (compiled or one of the builtin
// wrapper/promising/stress-switch variants).
// NOTE(review): the function-header line (2938) was dropped by the listing.
2939 if (!IsJSFunction(object)) return false;
2940 Tagged<JSFunction> js_function = Cast<JSFunction>(object);
2941 Tagged<Code> code = js_function->code(GetCurrentIsolateForSandbox());
2942 if (CodeKind::JS_TO_WASM_FUNCTION != code->kind() &&
2943#if V8_ENABLE_DRUMBRAKE
2944 code->builtin_id() != Builtin::kGenericJSToWasmInterpreterWrapper &&
2945#endif // V8_ENABLE_DRUMBRAKE
2946 code->builtin_id() != Builtin::kJSToWasmWrapper &&
2947 code->builtin_id() != Builtin::kWasmPromising &&
2948 code->builtin_id() != Builtin::kWasmStressSwitch) {
2949 return false;
2950 }
2951 DCHECK(js_function->shared()->HasWasmExportedFunctionData());
2952 return true;
2953}
2954
// Type check: {object} is a C API function iff it is a JSFunction whose
// SharedFunctionInfo carries WasmCapiFunctionData. The stricter code-kind
// check is intentionally disabled (see TODO below).
// NOTE(review): the function-header line (2955) was dropped by the listing.
2956 if (!IsJSFunction(object)) return false;
2957 Tagged<JSFunction> js_function = Cast<JSFunction>(object);
2958 // TODO(jkummerow): Enable this when there is a JavaScript wrapper
2959 // able to call this function.
2960 // if (js_function->code()->kind() != CodeKind::WASM_TO_CAPI_FUNCTION) {
2961 // return false;
2962 // }
2963 // DCHECK(js_function->shared()->HasWasmCapiFunctionData());
2964 // return true;
2965 return js_function->shared()->HasWasmCapiFunctionData();
2966}
2967
// Creates a JSFunction-backed C API function object around {call_target}.
// The resulting function is only callable from Wasm code (its JS code object
// is the Illegal builtin).
// NOTE(review): several lines were dropped by the listing (e.g. 2968, 2970,
// 2980, 2984, 2986, 2990) — including the header and some declarations.
2969 Isolate* isolate, Address call_target, DirectHandle<Foreign> embedder_data,
2971 // TODO(jkummerow): Install a JavaScript wrapper. For now, calling
2972 // these functions directly is unsupported; they can only be called
2973 // from Wasm code.
2974
2975 // To support simulator builds, we potentially have to redirect the
2976 // call target (which is an address pointing into the C++ binary).
2977 call_target = ExternalReference::Create(call_target).address();
2978
2979 DirectHandle<Map> rtt = isolate->factory()->wasm_func_ref_map();
2981 isolate->factory()->NewWasmCapiFunctionData(
2982 call_target, embedder_data, BUILTIN_CODE(isolate, Illegal), rtt,
2983 sig_index, sig);
2985 isolate->factory()->NewSharedFunctionInfoForWasmCapiFunction(fun_data);
2987 Factory::JSFunctionBuilder{isolate, shared, isolate->native_context()}
2988 .Build();
// Link the internal function back to its external JS representation.
2989 fun_data->internal()->set_external(*result);
2991}
2992
// Builds the external JSFunction for an exported wasm function: creates the
// WasmExportedFunctionData, derives a name (asm.js name or the function
// index as a string), picks the right function map per module origin, and
// wires the internal function to the new JS object.
// NOTE(review): the listing dropped a number of lines in this function (e.g.
// 2993-2995, 3009, 3012-3013, 3015, 3018-3019, 3029, 3033-3036, 3045, 3049,
// 3051, 3054, 3063, 3066, 3072, 3077) — tokens below are verbatim; verify
// against upstream.
2996 DirectHandle<WasmInternalFunction> internal_function, int arity,
2997 DirectHandle<Code> export_wrapper) {
2998 DCHECK(CodeKind::JS_TO_WASM_FUNCTION == export_wrapper->kind() ||
2999 (export_wrapper->is_builtin() &&
3000 (export_wrapper->builtin_id() == Builtin::kJSToWasmWrapper ||
3001#if V8_ENABLE_DRUMBRAKE
3002 export_wrapper->builtin_id() ==
3003 Builtin::kGenericJSToWasmInterpreterWrapper ||
3004#endif // V8_ENABLE_DRUMBRAKE
3005 export_wrapper->builtin_id() == Builtin::kWasmPromising ||
3006 export_wrapper->builtin_id() == Builtin::kWasmStressSwitch)));
3007 int func_index = internal_function->function_index();
3008 Factory* factory = isolate->factory();
3010 wasm::Promise promise =
3011 export_wrapper->builtin_id() == Builtin::kWasmPromising
3014 const wasm::WasmModule* module = instance_data->module();
3016 module->canonical_sig_id(module->functions[func_index].sig_index);
3017 const wasm::CanonicalSig* sig =
3020 factory->NewWasmExportedFunctionData(
3021 export_wrapper, instance_data, func_ref, internal_function, sig,
3022 sig_id, v8_flags.wasm_wrapper_tiering_budget, promise);
3023
3024#if V8_ENABLE_DRUMBRAKE
3025 if (v8_flags.wasm_jitless) {
3026 const wasm::FunctionSig* function_sig =
3027 reinterpret_cast<const wasm::FunctionSig*>(sig);
3028 uint32_t aligned_size =
3030 bool hasRefArgs = wasm::WasmBytecode::RefArgsCount(function_sig) > 0;
3031 bool hasRefRets = wasm::WasmBytecode::RefRetsCount(function_sig) > 0;
3032 function_data->set_packed_args_size(
3034 aligned_size) |
3037 }
3038#endif // V8_ENABLE_DRUMBRAKE
3039
3040 MaybeDirectHandle<String> maybe_name;
3041 bool is_asm_js_module = is_asmjs_module(module);
3042 if (is_asm_js_module) {
3043 // We can use the function name only for asm.js. For WebAssembly, the
3044 // function name is specified as the function_index.toString().
3046 isolate, direct_handle(instance_data->module_object(), isolate),
3047 func_index);
3048 }
// Fallback name: the decimal function index.
3050 if (!maybe_name.ToHandle(&name)) {
3052 int length = SNPrintF(buffer, "%d", func_index);
3053 name = factory
3055 base::Vector<uint8_t>::cast(buffer.SubVector(0, length)))
3056 .ToHandleChecked();
3057 }
3058 DirectHandle<Map> function_map;
3059 switch (module->origin) {
3060 case wasm::kWasmOrigin:
3061 function_map = isolate->wasm_exported_function_map();
3062 break;
3064 function_map = isolate->sloppy_function_map();
3065 break;
3067 function_map = isolate->strict_function_map();
3068 break;
3069 }
3070
3071 DirectHandle<NativeContext> context(isolate->native_context());
3073 factory->NewSharedFunctionInfoForWasmExportedFunction(name, function_data,
3074 arity, kAdapt);
3075
3076 DirectHandle<JSFunction> js_function =
3078 .set_map(function_map)
3079 .Build();
3080
3081 // According to the spec, exported functions should not have a [[Construct]]
3082 // method. This does not apply to functions exported from asm.js however.
3083 DCHECK_EQ(is_asm_js_module, IsConstructor(*js_function));
3084 if (instance_data->has_instance_object()) {
3085 shared->set_script(instance_data->module_object()->script(), kReleaseStore);
3086 } else {
3087 shared->set_script(*isolate->factory()->undefined_value(), kReleaseStore);
3088 }
3089 function_data->internal()->set_external(*js_function);
3090 return Cast<WasmExportedFunction>(js_function);
3091}
3092
// Signature check against {other_canonical_type_index} using this function's
// canonical sig_index().
// NOTE(review): lines 3093 (header) and 3095 (the call being returned,
// presumably a subtype/equivalence check) were dropped by the listing.
3094 wasm::CanonicalTypeIndex other_canonical_type_index) {
3096 sig_index(), other_canonical_type_index);
3097}
3098
3099// static
// Builds a heap-allocated debug name of the form "js-to-wasm:<sig>", where
// the signature is printed by PrintSignature. The buffer is sized as
// prefix + one char per sig entry + delimiter + terminating zero byte.
// NOTE(review): the function-header line (3100) was dropped by the listing.
3101 const wasm::CanonicalSig* sig) {
3102 constexpr const char kPrefix[] = "js-to-wasm:";
3103 // prefix + parameters + delimiter + returns + zero byte
3104 size_t len = strlen(kPrefix) + sig->all().size() + 2;
3105 auto buffer = base::OwnedVector<char>::New(len);
3106 memcpy(buffer.begin(), kPrefix, strlen(kPrefix));
3107 PrintSignature(buffer.as_vector() + strlen(kPrefix), sig);
// Transfers ownership of the buffer to the caller.
3108 return buffer.ReleaseData();
3109}
3110
3111// static
// Type check: a WasmJSFunction is a JSFunction whose SharedFunctionInfo
// carries WasmJSFunctionData.
// NOTE(review): the function-header line (3112) was dropped by the listing.
3113 if (!IsJSFunction(object)) return false;
3114 Tagged<JSFunction> js_function = Cast<JSFunction>(object);
3115 return js_function->shared()->HasWasmJSFunctionData();
3116}
3117
// Creates the RTT map for a canonical wasm struct type: builds a
// WasmTypeInfo (with optional supertype RTT parent), allocates a contextless
// or contextful Map depending on {opt_native_context}, and encodes the real
// instance size separately because Map's instance-size field is capped.
// NOTE(review): lines 3118 (header), 3123, 3134 and 3138 were dropped by the
// listing.
3119 Isolate* isolate, wasm::CanonicalTypeIndex struct_index,
3120 DirectHandle<Map> opt_rtt_parent,
3121 DirectHandle<NativeContext> opt_native_context) {
3122 const wasm::CanonicalStructType* type =
3124 const int inobject_properties = 0;
3125 // We have to use the variable size sentinel because the instance size
3126 // stored directly in a Map is capped at 255 pointer sizes.
3127 const int map_instance_size = kVariableSizeSentinel;
3128 const InstanceType instance_type = WASM_STRUCT_TYPE;
3129 // TODO(jkummerow): If NO_ELEMENTS were supported, we could use that here.
3130 const ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND;
3131 const wasm::CanonicalValueType no_array_element = wasm::kWasmBottom;
3132 constexpr bool shared = false; // TODO(42204563): Implement.
3133 // If we had a CanonicalHeapType, we could use that here.
3135 struct_index, shared, wasm::RefTypeKind::kStruct);
3136 DirectHandle<WasmTypeInfo> type_info = isolate->factory()->NewWasmTypeInfo(
3137 heaptype, no_array_element, opt_rtt_parent);
3139 if (opt_native_context.is_null()) {
3140 map = isolate->factory()->NewContextlessMap(
3141 instance_type, map_instance_size, elements_kind, inobject_properties);
3142 } else {
3143 map = isolate->factory()->NewContextfulMap(
3144 opt_native_context, instance_type, map_instance_size, elements_kind,
3145 inobject_properties);
3146 }
3147 map->set_wasm_type_info(*type_info);
3148 map->set_is_extensible(false);
3149 const int real_instance_size = WasmStruct::Size(type);
3150 WasmStruct::EncodeInstanceSizeInMap(real_instance_size, *map);
3151 return map;
3152}
3153
// Creates the RTT map for a canonical wasm array type, recording the element
// type in the WasmTypeInfo. Arrays always use the variable-size sentinel and
// a contextless map.
// NOTE(review): lines 3154 (header), 3158, 3165, 3174 and 3176 were dropped
// by the listing.
3155 wasm::CanonicalTypeIndex array_index,
3156 DirectHandle<Map> opt_rtt_parent) {
3157 const wasm::CanonicalArrayType* type =
3159 wasm::CanonicalValueType element_type = type->element_type();
3160 const int inobject_properties = 0;
3161 const int instance_size = kVariableSizeSentinel;
3162 const InstanceType instance_type = WASM_ARRAY_TYPE;
3163 const ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND;
3164 constexpr bool shared = false; // TODO(42204563): Implement.
3166 array_index, shared, wasm::RefTypeKind::kArray);
3167 DirectHandle<WasmTypeInfo> type_info = isolate->factory()->NewWasmTypeInfo(
3168 heaptype, element_type, opt_rtt_parent);
3169 DirectHandle<Map> map = isolate->factory()->NewContextlessMap(
3170 instance_type, instance_size, elements_kind, inobject_properties);
3171 map->set_wasm_type_info(*type_info);
3172 map->SetInstanceDescriptors(isolate,
3173 *isolate->factory()->empty_descriptor_array(), 0,
3175 map->set_is_extensible(false);
3177 return map;
3178}
3179
// Creates the RTT map for a canonical funcref type. Unlike structs/arrays,
// WasmFuncRef instances have a fixed size, asserted against the root map.
// NOTE(review): lines 3180-3181 (header) and 3189 were dropped by the
// listing.
3182 DirectHandle<Map> opt_rtt_parent) {
3183 const int inobject_properties = 0;
3184 const InstanceType instance_type = WASM_FUNC_REF_TYPE;
3185 const ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND;
3186 const wasm::CanonicalValueType no_array_element = wasm::kWasmBottom;
3187 constexpr bool shared = false; // TODO(42204563): Implement.
3188 wasm::CanonicalValueType heaptype =
3190 DirectHandle<WasmTypeInfo> type_info = isolate->factory()->NewWasmTypeInfo(
3191 heaptype, no_array_element, opt_rtt_parent);
3192 constexpr int kInstanceSize = WasmFuncRef::kSize;
3193 DCHECK_EQ(
3194 kInstanceSize,
3195 Cast<Map>(isolate->root(RootIndex::kWasmFuncRefMap))->instance_size());
3196 DirectHandle<Map> map = isolate->factory()->NewContextlessMap(
3197 instance_type, kInstanceSize, elements_kind, inobject_properties);
3198 map->set_wasm_type_info(*type_info);
3199 return map;
3200}
3201
// Creates a WebAssembly.Function wrapping a JS {callable}: canonicalizes the
// signature, finds or creates the funcref RTT, allocates the
// WasmJSFunctionData, installs an appropriate wasm-to-JS wrapper (invalid-sig
// builtin, cached compiled wrapper, generic asm wrapper, or a freshly
// compiled one), and finally builds the external JSFunction.
// NOTE(review): the listing dropped several lines here (e.g. 3202, 3209,
// 3214-3215, 3217, 3219, 3261, 3267, 3269, 3272, 3275, 3310, 3313, 3316,
// 3318) — tokens below are verbatim; verify against upstream.
3203 Isolate* isolate, const wasm::FunctionSig* sig,
3204 DirectHandle<JSReceiver> callable, wasm::Suspend suspend) {
3205 DCHECK_LE(sig->all().size(), kMaxInt);
3206 int parameter_count = static_cast<int>(sig->parameter_count());
3207 Factory* factory = isolate->factory();
3208
3210 DirectHandle<NativeContext> context(isolate->native_context());
3211
3212 static_assert(wasm::kMaxCanonicalTypes <= kMaxInt);
3213 // TODO(clemensb): Merge the next two lines into a single call.
3216 const wasm::CanonicalSig* canonical_sig =
3218
3220
// Look up (or lazily create) the cached canonical RTT for this signature.
3221 DirectHandle<WeakFixedArray> canonical_rtts(
3222 isolate->heap()->wasm_canonical_rtts(), isolate);
3223
3224 Tagged<MaybeObject> maybe_canonical_map = canonical_rtts->get(sig_id.index);
3225
3226 if (!maybe_canonical_map.IsCleared()) {
3227 rtt = direct_handle(
3228 Cast<Map>(maybe_canonical_map.GetHeapObjectAssumeWeak()), isolate);
3229 } else {
3230 rtt = CreateFuncRefMap(isolate, sig_id, DirectHandle<Map>());
3231 canonical_rtts->set(sig_id.index, MakeWeak(*rtt));
3232 }
3233
3234 DirectHandle<Code> js_to_js_wrapper_code =
3235 wasm::IsJSCompatibleSignature(canonical_sig)
3236 ? isolate->builtins()->code_handle(Builtin::kJSToJSWrapper)
3237 : isolate->builtins()->code_handle(Builtin::kJSToJSWrapperInvalidSig);
3238
3239 DirectHandle<WasmJSFunctionData> function_data =
3240 factory->NewWasmJSFunctionData(sig_id, callable, js_to_js_wrapper_code,
3241 rtt, suspend, wasm::kNoPromise);
3242 DirectHandle<WasmInternalFunction> internal_function{
3243 function_data->internal(), isolate};
3244
// Choose the wasm-side call target for this function.
3245 if (!wasm::IsJSCompatibleSignature(canonical_sig)) {
3246 Address builtin_entry =
3247 Builtins::EntryOf(Builtin::kWasmToJsWrapperInvalidSig, isolate);
3248 WasmCodePointer wrapper_code_pointer =
3249 function_data->offheap_data()->set_generic_wrapper(builtin_entry);
3250 internal_function->set_call_target(wrapper_code_pointer);
3251#if V8_ENABLE_DRUMBRAKE
3252 } else if (v8_flags.wasm_jitless) {
3253 Address builtin_entry =
3254 Builtins::EntryOf(Builtin::kGenericWasmToJSInterpreterWrapper, isolate);
3255 WasmCodePointer wrapper_code_pointer =
3256 function_data->offheap_data()->set_generic_wrapper(builtin_entry);
3257 internal_function->set_call_target(wrapper_code_pointer);
3258#endif // V8_ENABLE_DRUMBRAKE
3259 } else {
3260 int expected_arity = parameter_count;
3262 if (IsJSFunction(*callable)) {
3263 Tagged<SharedFunctionInfo> shared = Cast<JSFunction>(callable)->shared();
3264 expected_arity =
3265 shared->internal_formal_parameter_count_without_receiver();
3266 if (expected_arity == parameter_count) {
3268 } else {
3270 }
3271 } else {
3273 }
3274 wasm::WasmCodeRefScope code_ref_scope;
3276 wasm::WasmCode* wrapper =
3277 cache->MaybeGet(kind, sig_id, expected_arity, suspend);
3278 WasmCodePointer code_pointer;
3279 if (wrapper) {
3280 code_pointer =
3281 function_data->offheap_data()->set_compiled_wrapper(wrapper);
3282 // Some later DCHECKs assume that we don't have a {call_origin} when
3283 // the function already uses a compiled wrapper.
3284 Cast<WasmImportData>(internal_function->implicit_arg())
3285 ->clear_call_origin();
3286 } else if (UseGenericWasmToJSWrapper(kind, canonical_sig, suspend)) {
3287 Address code_entry =
3288 Builtins::EntryOf(Builtin::kWasmToJsWrapperAsm, isolate);
3289 code_pointer =
3290 function_data->offheap_data()->set_generic_wrapper(code_entry);
3291 } else {
3292 // Initialize the import wrapper cache if that hasn't happened yet.
3293 cache->LazyInitialize(isolate);
3294 constexpr bool kNoSourcePositions = false;
3295 wrapper = cache->CompileWasmImportCallWrapper(
3296 isolate, kind, canonical_sig, sig_id, kNoSourcePositions,
3297 expected_arity, suspend);
3298 code_pointer =
3299 function_data->offheap_data()->set_compiled_wrapper(wrapper);
3300 // Some later DCHECKs assume that we don't have a {call_origin} when
3301 // the function already uses a compiled wrapper.
3302 Cast<WasmImportData>(internal_function->implicit_arg())
3303 ->clear_call_origin();
3304 }
3305 internal_function->set_call_target(code_pointer);
3306 }
3307
3308 DirectHandle<String> name = factory->Function_string();
3309 if (IsJSFunction(*callable)) {
3311 name = String::Flatten(isolate, name);
3312 }
3314 factory->NewSharedFunctionInfoForWasmJSFunction(name, function_data);
3315 shared->set_internal_formal_parameter_count(
3317 DirectHandle<JSFunction> js_function =
3319 .set_map(isolate->wasm_exported_function_map())
3320 .Build();
3321 internal_function->set_external(*js_function);
3322 return Cast<WasmJSFunction>(js_function);
3323}
3324
3342
// Records {code_entry} as this function's generic wrapper and allocates a
// wasm code pointer for it. May only be called once (checked below).
// NOTE(review): lines 3343 (header) and 3347 (the allocating call) were
// dropped by the listing.
3344 Address code_entry) {
3345 DCHECK_EQ(wrapper_code_pointer_, wasm::kInvalidWasmCodePointer);
3346 wrapper_code_pointer_ =
3348 code_entry, signature_hash_);
3349 return wrapper_code_pointer_;
3350}
3351
// Teardown: drops the ref-count on a compiled wrapper (if any) and frees the
// allocated wasm code pointer (if any).
// NOTE(review): lines 3352 (header — presumably the OffheapData destructor)
// and 3357 (the freeing call) were dropped by the listing.
3353 if (wrapper_) {
3354 wasm::WasmCode::DecrementRefCount({&wrapper_, 1});
3355 }
3356 if (wrapper_code_pointer_ != wasm::kInvalidWasmCodePointer) {
3358 wrapper_code_pointer_);
3359 }
3360}
3361
// Returns the wrapped JS callable stored in this function's WasmImportData.
// NOTE(review): the function-header line was dropped by the listing.
3363 return Cast<JSReceiver>(
3364 Cast<WasmImportData>(internal()->implicit_arg())->callable());
3365}
3366
// Returns the suspend setting stored in this function's WasmImportData.
// NOTE(review): the function-header line was dropped by the listing.
3368 return Cast<WasmImportData>(internal()->implicit_arg())->suspend();
3369}
3370
3375
// Exact canonical-signature-index equality check (no subtyping; see TODO).
// NOTE(review): lines 3376 (header) and 3380 (likely the `sig` lookup used
// by the DEBUG loop) were dropped by the listing.
3377 wasm::CanonicalTypeIndex other_canonical_sig_index) const {
3378#if DEBUG
3379 // TODO(14034): Change this if indexed types are allowed.
3381 for (wasm::CanonicalValueType type : sig->all()) CHECK(!type.has_index());
3382#endif
3383 // TODO(14034): Check for subtyping instead if WebAssembly.Function can define
3384 // signature supertype.
3385 return sig_index() == other_canonical_sig_index;
3386}
3387
3393
// Allocates a WasmExceptionTag struct in old space and stores {index}.
// NOTE(review): the function-header line was dropped by the listing.
3395 int index) {
3396 auto result = Cast<WasmExceptionTag>(isolate->factory()->NewStruct(
3397 WASM_EXCEPTION_TAG_TYPE, AllocationType::kOld));
3398 result->set_index(index);
3399 return result;
3400}
3401
// Wraps a NativeModule into an AsmWasmData struct: the module is held via a
// Managed<NativeModule> (sized by a memory estimate so GC pressure reflects
// the off-heap allocation), together with the asm.js uses bitset.
// NOTE(review): lines 3402 (header), 3407-3408 and 3412 were dropped by the
// listing.
3403 Isolate* isolate, std::shared_ptr<wasm::NativeModule> native_module,
3404 DirectHandle<HeapNumber> uses_bitset) {
3405 const WasmModule* module = native_module->module();
3406 size_t memory_estimate =
3409 DirectHandle<Managed<wasm::NativeModule>> managed_native_module =
3410 Managed<wasm::NativeModule>::From(isolate, memory_estimate,
3411 std::move(native_module));
3413 isolate->factory()->NewStruct(ASM_WASM_DATA_TYPE, AllocationType::kOld));
3414 result->set_managed_native_module(*managed_native_module);
3415 result->set_uses_bitset(*uses_bitset);
3416 return result;
3417}
3418
3419namespace {
3420constexpr int32_t kInt31MaxValue = 0x3fffffff;
3421constexpr int32_t kInt31MinValue = -kInt31MaxValue - 1;
3422
3423// Tries to canonicalize a HeapNumber to an i31ref Smi. Returns the original
3424// HeapNumber if it fails.
3425DirectHandle<Object> CanonicalizeHeapNumber(DirectHandle<Object> number,
3426 Isolate* isolate) {
3427 double double_value = Cast<HeapNumber>(number)->value();
3428 if (double_value >= kInt31MinValue && double_value <= kInt31MaxValue &&
3429 !IsMinusZero(double_value) &&
3430 double_value == FastI2D(FastD2I(double_value))) {
3431 return direct_handle(Smi::FromInt(FastD2I(double_value)), isolate);
3432 }
3433 return number;
3434}
3435
3436// Tries to canonicalize a Smi into an i31 Smi. Returns a HeapNumber if it
3437// fails.
3438DirectHandle<Object> CanonicalizeSmi(DirectHandle<Object> smi,
3439 Isolate* isolate) {
3440 if constexpr (SmiValuesAre31Bits()) return smi;
3441
3442 int32_t value = Cast<Smi>(*smi).value();
3443
3444 if (value <= kInt31MaxValue && value >= kInt31MinValue) {
3445 return smi;
3446 } else {
3447 return isolate->factory()->NewHeapNumber(value);
3448 }
3449}
3450} // namespace
3451
3452namespace wasm {
// Converts/validates a JS {value} against the expected wasm reference type.
// On success returns the wasm-side representation (possibly the input, a
// canonicalized Smi, a wasm null, or a funcref); on failure returns an empty
// handle and sets {*error_message}.
// NOTE(review): the listing dropped lines 3453-3454 (the function header)
// and 3591 (a local declaration in the indexed-type branch) — tokens below
// are verbatim.
3455 CanonicalValueType expected,
3456 const char** error_message) {
3457 DCHECK(expected.is_object_reference());
// Nullable types: map JS null to the appropriate wasm null, rejecting types
// that have no JS representation.
3458 if (expected.kind() == kRefNull && IsNull(*value, isolate)) {
3459 switch (expected.heap_representation()) {
3460 case HeapType::kStringViewWtf8:
3461 *error_message = "stringview_wtf8 has no JS representation";
3462 return {};
3463 case HeapType::kStringViewWtf16:
3464 *error_message = "stringview_wtf16 has no JS representation";
3465 return {};
3466 case HeapType::kStringViewIter:
3467 *error_message = "stringview_iter has no JS representation";
3468 return {};
3469 case HeapType::kExn:
3470 *error_message = "invalid type (ref null exn)";
3471 return {};
3472 case HeapType::kNoExn:
3473 *error_message = "invalid type (ref null noexn)";
3474 return {};
3475 case HeapType::kNoCont:
3476 *error_message = "invalid type (ref null nocont)";
3477 return {};
3478 case HeapType::kCont:
3479 *error_message = "invalid type (ref null cont)";
3480 return {};
3481 default:
3482 return expected.use_wasm_null() ? isolate->factory()->wasm_null()
3483 : value;
3484 }
3485 }
3486
// Non-null values: dispatch on the expected heap type.
3487 switch (expected.heap_representation_non_shared()) {
3488 case HeapType::kFunc: {
3489 if (!(WasmExternalFunction::IsWasmExternalFunction(*value) ||
3490 WasmCapiFunction::IsWasmCapiFunction(*value))) {
3491 *error_message =
3492 "function-typed object must be null (if nullable) or a Wasm "
3493 "function object";
3494 return {};
3495 }
3496 return direct_handle(
3497 Cast<JSFunction>(*value)->shared()->wasm_function_data()->func_ref(),
3498 isolate);
3499 }
3500 case HeapType::kExtern: {
3501 if (!IsNull(*value, isolate)) return value;
3502 *error_message = "null is not allowed for (ref extern)";
3503 return {};
3504 }
3505 case HeapType::kAny: {
3506 if (IsSmi(*value)) return CanonicalizeSmi(value, isolate);
3507 if (IsHeapNumber(*value)) {
3508 return CanonicalizeHeapNumber(value, isolate);
3509 }
3510 if (!IsNull(*value, isolate)) return value;
3511 *error_message = "null is not allowed for (ref any)";
3512 return {};
3513 }
3514 case HeapType::kExn:
3515 *error_message = "invalid type (ref exn)";
3516 return {};
3517 case HeapType::kCont:
3518 *error_message = "invalid type (ref cont)";
3519 return {};
3520 case HeapType::kStruct: {
3521 if (IsWasmStruct(*value)) {
3522 return value;
3523 }
3524 *error_message =
3525 "structref object must be null (if nullable) or a wasm struct";
3526 return {};
3527 }
3528 case HeapType::kArray: {
3529 if (IsWasmArray(*value)) {
3530 return value;
3531 }
3532 *error_message =
3533 "arrayref object must be null (if nullable) or a wasm array";
3534 return {};
3535 }
3536 case HeapType::kEq: {
3537 if (IsSmi(*value)) {
3538 DirectHandle<Object> truncated = CanonicalizeSmi(value, isolate);
3539 if (IsSmi(*truncated)) return truncated;
3540 } else if (IsHeapNumber(*value)) {
3541 DirectHandle<Object> truncated = CanonicalizeHeapNumber(value, isolate);
3542 if (IsSmi(*truncated)) return truncated;
3543 } else if (IsWasmStruct(*value) || IsWasmArray(*value)) {
3544 return value;
3545 }
3546 *error_message =
3547 "eqref object must be null (if nullable), or a wasm "
3548 "struct/array, or a Number that fits in i31ref range";
3549 return {};
3550 }
3551 case HeapType::kI31: {
3552 if (IsSmi(*value)) {
3553 DirectHandle<Object> truncated = CanonicalizeSmi(value, isolate);
3554 if (IsSmi(*truncated)) return truncated;
3555 } else if (IsHeapNumber(*value)) {
3556 DirectHandle<Object> truncated = CanonicalizeHeapNumber(value, isolate);
3557 if (IsSmi(*truncated)) return truncated;
3558 }
3559 *error_message =
3560 "i31ref object must be null (if nullable) or a Number that fits "
3561 "in i31ref range";
3562 return {};
3563 }
3564 case HeapType::kString:
3565 if (IsString(*value)) return value;
3566 *error_message = "wrong type (expected a string)";
3567 return {};
3568 case HeapType::kStringViewWtf8:
3569 *error_message = "stringview_wtf8 has no JS representation";
3570 return {};
3571 case HeapType::kStringViewWtf16:
3572 *error_message = "stringview_wtf16 has no JS representation";
3573 return {};
3574 case HeapType::kStringViewIter:
3575 *error_message = "stringview_iter has no JS representation";
3576 return {};
3577 case HeapType::kNoFunc:
3578 case HeapType::kNoExtern:
3579 case HeapType::kNoExn:
3580 case HeapType::kNoCont:
3581 case HeapType::kNone: {
3582 *error_message = "only null allowed for null types";
3583 return {};
3584 }
// Indexed (user-defined) types: check canonical subtyping.
3585 default: {
3586 DCHECK(expected.has_index());
3587 CanonicalTypeIndex canonical_index = expected.ref_index();
3588 auto type_canonicalizer = GetWasmEngine()->type_canonicalizer();
3589
3590 if (WasmExportedFunction::IsWasmExportedFunction(*value)) {
3592 Cast<WasmExportedFunction>(*value);
3593 CanonicalTypeIndex real_type_index =
3594 function->shared()->wasm_exported_function_data()->sig_index();
3595 if (!type_canonicalizer->IsCanonicalSubtype(real_type_index,
3596 canonical_index)) {
3597 *error_message =
3598 "assigned exported function has to be a subtype of the "
3599 "expected type";
3600 return {};
3601 }
3602 return direct_handle(Cast<WasmExternalFunction>(*value)->func_ref(),
3603 isolate);
3604 } else if (WasmJSFunction::IsWasmJSFunction(*value)) {
3605 if (!Cast<WasmJSFunction>(*value)
3606 ->shared()
3607 ->wasm_js_function_data()
3608 ->MatchesSignature(canonical_index)) {
3609 *error_message =
3610 "assigned WebAssembly.Function has to be a subtype of the "
3611 "expected type";
3612 return {};
3613 }
3614 return direct_handle(Cast<WasmExternalFunction>(*value)->func_ref(),
3615 isolate);
3616 } else if (WasmCapiFunction::IsWasmCapiFunction(*value)) {
3617 if (!Cast<WasmCapiFunction>(*value)->MatchesSignature(
3618 canonical_index)) {
3619 *error_message =
3620 "assigned C API function has to be a subtype of the expected "
3621 "type";
3622 return {};
3623 }
3624 return direct_handle(Cast<WasmExternalFunction>(*value)->func_ref(),
3625 isolate);
3626 } else if (IsWasmStruct(*value) || IsWasmArray(*value)) {
3627 DirectHandle<WasmObject> wasm_obj = Cast<WasmObject>(value);
3628 Tagged<WasmTypeInfo> type_info = wasm_obj->map()->wasm_type_info();
3629 CanonicalTypeIndex actual_type = type_info->type_index();
3630 if (!type_canonicalizer->IsCanonicalSubtype(actual_type,
3631 canonical_index)) {
3632 *error_message = "object is not a subtype of expected type";
3633 return {};
3634 }
3635 return value;
3636 } else {
3637 *error_message = "JS object does not match expected wasm type";
3638 return {};
3639 }
3640 }
3641 }
3642}
3643
3644// Utility which canonicalizes {expected} in addition.
// Module-relative overload: resolves {expected} to its canonical form using
// {module}'s type table when it is an indexed type, then delegates.
// NOTE(review): lines 3645 (header) and 3647 were dropped by the listing.
3646 const WasmModule* module,
3648 ValueType expected,
3649 const char** error_message) {
3650 CanonicalValueType canonical;
3651 if (expected.has_index()) {
3652 canonical = module->canonical_type(expected);
3653 } else {
3654 canonical = CanonicalValueType{expected};
3655 }
3656 return JSToWasmObject(isolate, value, canonical, error_message);
3657}
3658
// Converts a wasm-side {value} to its JS representation: wasm null becomes
// JS null, funcrefs become their external JSFunction, anything else passes
// through unchanged.
// NOTE(review): the function-header line (3659) was dropped by the listing.
3660 DirectHandle<Object> value) {
3661 if (IsWasmNull(*value)) {
3662 return isolate->factory()->null_value();
3663 } else if (IsWasmFuncRef(*value)) {
3664 return i::WasmInternalFunction::GetOrCreateExternal(i::direct_handle(
3665 i::Cast<i::WasmFuncRef>(*value)->internal(isolate), isolate));
3666 } else {
3667 return value;
3668 }
3669}
3670
3671} // namespace wasm
3672
3673} // namespace internal
3674} // namespace v8
3675
3677#undef TRACE_IFT
int16_t parameter_count
Definition builtins.cc:67
Builtins::Kind kind
Definition builtins.cc:40
#define BUILTIN_CODE(isolate, name)
Definition builtins.h:45
#define SBXCHECK_LE(lhs, rhs)
Definition check.h:67
#define SBXCHECK_LT(lhs, rhs)
Definition check.h:66
#define SBXCHECK_BOUNDS(index, limit)
Definition check.h:68
#define SBXCHECK_GE(lhs, rhs)
Definition check.h:65
#define SBXCHECK(condition)
Definition check.h:61
static bool ValidateEncoding(const uint8_t *str, size_t length)
Definition unicode.cc:234
static constexpr U encode(T value)
Definition bit-field.h:55
static OwnedVector< T > New(size_t size)
Definition vector.h:287
int length() const
Definition vector.h:64
Vector< T > SubVector(size_t from, size_t to) const
Definition vector.h:41
constexpr T * begin() const
Definition vector.h:96
static Vector< T > cast(Vector< S > input)
Definition vector.h:157
static Handle< AsmWasmData > New(Isolate *isolate, std::shared_ptr< wasm::NativeModule > native_module, DirectHandle< HeapNumber > uses_bitset)
static V8_EXPORT_PRIVATE Handle< BigInt > FromUint64(Isolate *isolate, uint64_t n)
Definition bigint.cc:1355
static Address EntryOf(Builtin builtin, Isolate *isolate)
V8_INLINE bool is_null() const
Definition handles.h:693
V8_INLINE bool is_identical_to(Handle< S > other) const
Definition handles.h:716
V8_EXPORT_PRIVATE Address address() const
static ExternalReference Create(const SCTableReference &table_ref)
MaybeHandle< String > NewStringFromOneByte(base::Vector< const uint8_t > string, AllocationType allocation=AllocationType::kYoung)
JSFunctionBuilder & set_map(DirectHandle< Map > v)
Definition factory.h:1116
V8_WARN_UNUSED_RESULT Handle< JSFunction > Build()
Definition factory.cc:4732
static DirectHandle< FixedAddressArrayBase > New(Isolate *isolate, int length, MoreArgs &&... more_args)
static Handle< FixedIntegerArrayBase< T, Base > > New(Isolate *isolate, int length, MoreArgs &&... more_args)
FunctionTargetAndImplicitArg(Isolate *isolate, DirectHandle< WasmTrustedInstanceData > target_instance_data, int target_func_index)
DirectHandle< TrustedObject > implicit_arg_
T ReadField(size_t offset) const
Address address() const
void WriteField(size_t offset, T value) const
V8_EXPORT_PRIVATE void SetCompiledWasmToJs(Isolate *, DirectHandle< JSReceiver > callable, wasm::WasmCode *wasm_to_js_wrapper, wasm::Suspend suspend, const wasm::CanonicalSig *sig, wasm::CanonicalTypeIndex sig_id)
void SetGenericWasmToJs(Isolate *, DirectHandle< JSReceiver > callable, wasm::Suspend suspend, const wasm::CanonicalSig *sig, wasm::CanonicalTypeIndex sig_id)
DirectHandle< WasmTrustedInstanceData > const instance_data_
void SetWasmToWasm(Tagged< WasmTrustedInstanceData > target_instance_object, WasmCodePointer call_target, wasm::CanonicalTypeIndex sig_id)
static V8_INLINE Isolate * Current()
Definition isolate-inl.h:35
V8_EXPORT_PRIVATE static V8_WARN_UNUSED_RESULT Maybe< bool > Detach(DirectHandle< JSArrayBuffer > buffer, bool force_for_wasm_memory=false, DirectHandle< Object > key={})
Tagged< Context > context()
static DirectHandle< String > GetDebugName(DirectHandle< JSFunction > function)
static V8_WARN_UNUSED_RESULT MaybeHandle< Object > GetProperty(Isolate *isolate, DirectHandle< JSReceiver > receiver, const char *key)
static DirectHandle< Managed< CppType > > From(Isolate *isolate, size_t estimated_size, std::shared_ptr< CppType > shared_ptr, AllocationType allocation_type=AllocationType::kYoung)
Definition managed-inl.h:27
V8_WARN_UNUSED_RESULT V8_INLINE bool ToHandle(DirectHandle< S > *out) const
V8_INLINE bool is_null() const
static MaybeObjectDirectHandle Weak(Tagged< Object > object, Isolate *isolate)
V8_EXPORT_PRIVATE static V8_WARN_UNUSED_RESULT Maybe< bool > SetProperty(LookupIterator *it, DirectHandle< Object > value, StoreOrigin store_origin, Maybe< ShouldThrow > should_throw=Nothing< ShouldThrow >())
Definition objects.cc:2439
static Handle< PodArray< T > > New(Isolate *isolate, int length, AllocationType allocation=AllocationType::kYoung)
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
static constexpr Tagged< Smi > zero()
Definition smi.h:99
static V8_INLINE HandleType< String > Flatten(Isolate *isolate, HandleType< T > string, AllocationType allocation=AllocationType::kYoung)
static void store(Tagged< HeapObject > host, PtrType value)
V8_INLINE constexpr StorageType ptr() const
constexpr bool IsCleared() const
Tagged< HeapObject > GetHeapObjectAssumeWeak() const
static DirectHandle< TrustedManaged< CppType > > From(Isolate *isolate, size_t estimated_size, std::shared_ptr< CppType > shared_ptr)
Definition managed-inl.h:52
void ClearProtectedPointerField(int offset)
void WriteProtectedPointerField(int offset, Tagged< TrustedObject > value)
V8_EXPORT_PRIVATE wasm::WasmValue GetElement(uint32_t index)
uint32_t element_offset(uint32_t index)
void SetTaggedElement(uint32_t index, DirectHandle< Object > value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static void EncodeElementSizeInMap(int element_size, Tagged< Map > map)
const wasm::CanonicalSig * sig() const
static DirectHandle< WasmCapiFunction > New(Isolate *isolate, Address call_target, DirectHandle< Foreign > embedder_data, wasm::CanonicalTypeIndex sig_index, const wasm::CanonicalSig *sig)
static bool IsWasmCapiFunction(Tagged< Object > object)
bool MatchesSignature(wasm::CanonicalTypeIndex other_canonical_sig_index) const
std::unordered_map< int, WrapperEntry > wrappers_
void Remove(int index, WasmCodePointer call_target)
V8_EXPORT_PRIVATE bool IsAWrapper(int index) const
WasmCodePointer Add(int index, Address call_target, wasm::WasmCode *compiled_wrapper, uint64_t signature_hash)
void Clear(int index, NewOrExistingEntry new_or_existing)
static constexpr int kMaxLength
static V8_WARN_UNUSED_RESULT DirectHandle< WasmDispatchTable > Grow(Isolate *, DirectHandle< WasmDispatchTable >, uint32_t new_length)
bool V8_EXPORT_PRIVATE IsAWrapper(int index) const
static constexpr size_t kTargetBias
void V8_EXPORT_PRIVATE SetForNonWrapper(int index, Tagged< Object > implicit_arg, WasmCodePointer call_target, wasm::CanonicalTypeIndex sig_id, NewOrExistingEntry new_or_existing)
static constexpr size_t kSigBias
static void V8_EXPORT_PRIVATE AddUse(Isolate *isolate, DirectHandle< WasmDispatchTable > dispatch_table, DirectHandle< WasmTrustedInstanceData > instance, int table_index)
static V8_EXPORT_PRIVATE V8_WARN_UNUSED_RESULT DirectHandle< WasmDispatchTable > New(Isolate *isolate, int length, wasm::CanonicalValueType table_type)
static constexpr size_t kImplicitArgBias
static constexpr size_t kLengthOffset
void V8_EXPORT_PRIVATE SetForWrapper(int index, Tagged< Object > implicit_arg, Address call_target, wasm::CanonicalTypeIndex sig_id, uint64_t signature_hash, wasm::WasmCode *compiled_wrapper, NewOrExistingEntry new_or_existing)
Tagged< Object > implicit_arg(int index) const
static Tagged< ProtectedWeakFixedArray > MaybeGrowUsesList(Isolate *isolate, DirectHandle< WasmDispatchTable > dispatch_table)
void InstallCompiledWrapper(int index, wasm::WasmCode *wrapper)
static constexpr int OffsetOf(int index)
static DirectHandle< WasmExceptionPackage > New(Isolate *isolate, DirectHandle< WasmExceptionTag > exception_tag, int encoded_size)
static DirectHandle< Object > GetExceptionValues(Isolate *isolate, DirectHandle< WasmExceptionPackage > exception_package)
static uint32_t GetEncodedSize(const wasm::WasmTagSig *tag)
static DirectHandle< Object > GetExceptionTag(Isolate *isolate, DirectHandle< WasmExceptionPackage > exception_package)
static V8_EXPORT_PRIVATE DirectHandle< WasmExceptionTag > New(Isolate *isolate, int index)
wasm::CanonicalTypeIndex sig_index() const
bool MatchesSignature(wasm::CanonicalTypeIndex other_canonical_sig_index)
static std::unique_ptr< char[]> GetDebugName(const wasm::CanonicalSig *sig)
static V8_EXPORT_PRIVATE bool IsWasmExportedFunction(Tagged< Object > object)
static V8_EXPORT_PRIVATE DirectHandle< WasmExportedFunction > New(Isolate *isolate, DirectHandle< WasmTrustedInstanceData > instance_data, DirectHandle< WasmFuncRef > func_ref, DirectHandle< WasmInternalFunction > internal_function, int arity, DirectHandle< Code > export_wrapper)
static bool IsWasmExternalFunction(Tagged< Object > object)
static V8_EXPORT_PRIVATE MaybeDirectHandle< WasmGlobalObject > New(Isolate *isolate, DirectHandle< WasmTrustedInstanceData > instance_object, MaybeDirectHandle< JSArrayBuffer > maybe_untagged_buffer, MaybeDirectHandle< FixedArray > maybe_tagged_buffer, wasm::ValueType type, int32_t offset, bool is_mutable)
void SetFuncRefAsCallOrigin(Tagged< WasmInternalFunction > func)
void SetIndexInTableAsCallOrigin(Tagged< WasmDispatchTable > table, int entry_index)
static V8_EXPORT_PRIVATE DirectHandle< JSFunction > GetOrCreateExternal(DirectHandle< WasmInternalFunction > internal)
bool try_get_external(Tagged< JSFunction > *result)
static DirectHandle< Tuple2 > New(DirectHandle< WasmInstanceObject >)
WasmCodePointer set_compiled_wrapper(wasm::WasmCode *wrapper)
WasmCodePointer set_generic_wrapper(Address call_target)
bool MatchesSignature(wasm::CanonicalTypeIndex other_canonical_sig_index) const
const wasm::CanonicalSig * GetSignature() const
wasm::Suspend GetSuspend() const
wasm::CanonicalTypeIndex sig_index() const
Tagged< JSReceiver > GetCallable() const
static DirectHandle< WasmJSFunction > New(Isolate *isolate, const wasm::FunctionSig *sig, DirectHandle< JSReceiver > callable, wasm::Suspend suspend)
static bool IsWasmJSFunction(Tagged< Object > object)
V8_EXPORT_PRIVATE size_t MapDescriptor(DirectHandle< WasmMemoryObject > memory, size_t offset)
V8_EXPORT_PRIVATE bool UnmapDescriptor()
static V8_EXPORT_PRIVATE MaybeDirectHandle< WasmMemoryMapDescriptor > NewFromAnonymous(Isolate *isolate, size_t length)
static V8_EXPORT_PRIVATE DirectHandle< WasmMemoryMapDescriptor > NewFromFileDescriptor(Isolate *isolate, v8::WasmMemoryMapDescriptor::WasmFileDescriptor file_descriptor)
static DirectHandle< JSArrayBuffer > RefreshSharedBuffer(Isolate *isolate, DirectHandle< WasmMemoryObject > memory, ResizableFlag resizable_by_js)
static DirectHandle< JSArrayBuffer > ToFixedLengthBuffer(Isolate *isolate, DirectHandle< WasmMemoryObject > memory)
static DirectHandle< JSArrayBuffer > ToResizableBuffer(Isolate *isolate, DirectHandle< WasmMemoryObject > memory)
static DirectHandle< JSArrayBuffer > RefreshBuffer(Isolate *isolate, DirectHandle< WasmMemoryObject > memory, std::shared_ptr< BackingStore > new_backing_store)
static constexpr int kNoMaximum
static V8_EXPORT_PRIVATE void UseInInstance(Isolate *isolate, DirectHandle< WasmMemoryObject > memory, DirectHandle< WasmTrustedInstanceData > trusted_instance_data, DirectHandle< WasmTrustedInstanceData > shared_trusted_instance_data, int memory_index_in_instance)
void SetNewBuffer(Isolate *isolate, Tagged< JSArrayBuffer > new_buffer)
static V8_EXPORT_PRIVATE int32_t Grow(Isolate *, DirectHandle< WasmMemoryObject >, uint32_t pages)
static V8_EXPORT_PRIVATE DirectHandle< WasmMemoryObject > New(Isolate *isolate, DirectHandle< JSArrayBuffer > buffer, int maximum, wasm::AddressType address_type)
void UpdateInstances(Isolate *isolate)
void FixUpResizableArrayBuffer(Tagged< JSArrayBuffer > new_buffer)
const wasm::WasmModule * module() const
static V8_EXPORT_PRIVATE DirectHandle< WasmModuleObject > New(Isolate *isolate, std::shared_ptr< wasm::NativeModule > native_module, DirectHandle< Script > script)
static MaybeDirectHandle< String > GetFunctionNameOrNull(Isolate *, DirectHandle< WasmModuleObject >, uint32_t func_index)
static MaybeDirectHandle< String > GetModuleNameOrNull(Isolate *, DirectHandle< WasmModuleObject >)
wasm::NativeModule * native_module() const
base::Vector< const uint8_t > GetRawFunctionName(int func_index)
static DirectHandle< String > ExtractUtf8StringFromModuleBytes(Isolate *, DirectHandle< WasmModuleObject >, wasm::WireBytesRef, InternalizeString)
static int Size(const wasm::StructType *type)
static const wasm::CanonicalStructType * GcSafeType(Tagged< Map > map)
static void EncodeInstanceSizeInMap(int instance_size, Tagged< Map > map)
V8_EXPORT_PRIVATE wasm::WasmValue GetFieldValue(uint32_t field_index)
static DirectHandle< WasmStruct > AllocateDescriptorUninitialized(Isolate *isolate, DirectHandle< WasmTrustedInstanceData > trusted_data, wasm::ModuleTypeIndex index, DirectHandle< Map > map)
static V8_EXPORT_PRIVATE DirectHandle< WasmSuspendingObject > New(Isolate *isolate, DirectHandle< JSReceiver > callable)
static void GetFunctionTableEntry(Isolate *isolate, DirectHandle< WasmTableObject > table, int entry_index, bool *is_valid, bool *is_null, MaybeDirectHandle< WasmTrustedInstanceData > *instance_data, int *function_index, MaybeDirectHandle< WasmJSFunction > *maybe_js_function)
static void UpdateDispatchTable(Isolate *isolate, DirectHandle< WasmTableObject > table, int entry_index, const wasm::WasmFunction *func, DirectHandle< WasmTrustedInstanceData > target_instance)
static V8_EXPORT_PRIVATE DirectHandle< WasmTableObject > New(Isolate *isolate, DirectHandle< WasmTrustedInstanceData > trusted_data, wasm::ValueType type, wasm::CanonicalValueType canonical_type, uint32_t initial, bool has_maximum, uint64_t maximum, DirectHandle< Object > initial_value, wasm::AddressType address_type, DirectHandle< WasmDispatchTable > *out_dispatch_table=nullptr)
static void SetFunctionTableEntry(Isolate *isolate, DirectHandle< WasmTableObject > table, int entry_index, DirectHandle< Object > entry)
wasm::CanonicalValueType canonical_type(const wasm::WasmModule *module)
static V8_EXPORT_PRIVATE DirectHandle< Object > Get(Isolate *isolate, DirectHandle< WasmTableObject > table, uint32_t index)
static MaybeDirectHandle< Object > JSToWasmElement(Isolate *isolate, DirectHandle< WasmTableObject > table, DirectHandle< Object > entry, const char **error_message)
static V8_EXPORT_PRIVATE void Fill(Isolate *isolate, DirectHandle< WasmTableObject > table, uint32_t start, DirectHandle< Object > entry, uint32_t count)
static V8_EXPORT_PRIVATE void Set(Isolate *isolate, DirectHandle< WasmTableObject > table, uint32_t index, DirectHandle< Object > entry)
static V8_EXPORT_PRIVATE int Grow(Isolate *isolate, DirectHandle< WasmTableObject > table, uint32_t count, DirectHandle< Object > init_value)
static V8_EXPORT_PRIVATE void SetFunctionTablePlaceholder(Isolate *isolate, DirectHandle< WasmTableObject > table, int entry_index, DirectHandle< WasmTrustedInstanceData > trusted_instance_data, int func_index)
bool MatchesSignature(wasm::CanonicalTypeIndex expected_index)
static DirectHandle< WasmTagObject > New(Isolate *isolate, const wasm::FunctionSig *sig, wasm::CanonicalTypeIndex type_index, DirectHandle< HeapObject > tag, DirectHandle< WasmTrustedInstanceData > instance)
static DirectHandle< WasmTrustedInstanceData > New(Isolate *, DirectHandle< WasmModuleObject >, bool shared)
wasm::NativeModule * native_module() const
static DirectHandle< WasmFuncRef > GetOrCreateFuncRef(Isolate *isolate, DirectHandle< WasmTrustedInstanceData > trusted_instance_data, int function_index)
std::pair< Tagged< FixedArray >, uint32_t > GetGlobalBufferAndIndex(const wasm::WasmGlobal &)
void SetRawMemory(int memory_index, uint8_t *mem_start, size_t mem_size)
Tagged< WasmModuleObject > module_object() const
static constexpr std::array< const char *, 6 > kProtectedFieldNames
bool try_get_func_ref(int index, Tagged< WasmFuncRef > *result)
static constexpr std::array< uint16_t, 6 > kProtectedFieldOffsets
void InitDataSegmentArrays(const wasm::NativeModule *)
uint8_t * GetGlobalStorage(const wasm::WasmGlobal &)
static std::optional< MessageTemplate > InitTableEntries(Isolate *isolate, DirectHandle< WasmTrustedInstanceData > trusted_instance_data, DirectHandle< WasmTrustedInstanceData > shared_trusted_instance_data, uint32_t table_index, uint32_t segment_index, uint32_t dst, uint32_t src, uint32_t count) V8_WARN_UNUSED_RESULT
static constexpr std::array< const char *, kTaggedFieldsCount > kTaggedFieldNames
static constexpr std::array< uint16_t, kTaggedFieldsCount > kTaggedFieldOffsets
wasm::WasmValue GetGlobalValue(Isolate *, const wasm::WasmGlobal &)
const wasm::WasmModule * module() const
static bool CopyTableEntries(Isolate *isolate, DirectHandle< WasmTrustedInstanceData > trusted_instance_data, uint32_t table_dst_index, uint32_t table_src_index, uint32_t dst, uint32_t src, uint32_t count) V8_WARN_UNUSED_RESULT
WasmCodePointer GetCallTarget(uint32_t func_index)
static V8_WARN_UNUSED_RESULT DirectHandle< WeakArrayList > Append(Isolate *isolate, DirectHandle< WeakArrayList > array, MaybeObjectDirectHandle value, AllocationType allocation=AllocationType::kYoung)
constexpr HeapType::Representation heap_representation() const
static constexpr CanonicalValueType Ref(CanonicalTypeIndex index, bool shared, RefTypeKind kind)
constexpr HeapType::Representation heap_representation_non_shared() const
constexpr CanonicalTypeIndex ref_index() const
constexpr bool IsFunctionType() const
static DirectHandle< Code > CompileJSToWasmWrapper(Isolate *isolate, const CanonicalSig *sig, CanonicalTypeIndex sig_index)
std::atomic< uint32_t > * tiering_budget_array() const
const WasmModule * module() const
base::Vector< const uint8_t > wire_bytes() const
WasmCodePointer GetCodePointerHandle(int index) const
V8_EXPORT_PRIVATE bool IsCanonicalSubtype(CanonicalTypeIndex sub_index, CanonicalTypeIndex super_index)
V8_EXPORT_PRIVATE const CanonicalStructType * LookupStruct(CanonicalTypeIndex index) const
V8_EXPORT_PRIVATE const CanonicalArrayType * LookupArray(CanonicalTypeIndex index) const
V8_EXPORT_PRIVATE const CanonicalSig * LookupFunctionSignature(CanonicalTypeIndex index) const
V8_EXPORT_PRIVATE void AddRecursiveGroup(WasmModule *module, uint32_t size)
static V8_EXPORT_PRIVATE void PrepareForCanonicalTypeId(Isolate *isolate, CanonicalTypeIndex id)
constexpr int value_kind_size() const
Definition value-type.h:485
constexpr ValueKind kind() const
Definition value-type.h:631
constexpr bool is_reference() const
Definition value-type.h:600
constexpr bool has_index() const
Definition value-type.h:367
constexpr bool is_object_reference() const
Definition value-type.h:601
constexpr bool use_wasm_null() const
Definition value-type.h:462
constexpr GenericKind generic_kind() const
Definition value-type.h:420
static uint32_t JSToWasmWrapperPackedArraySize(const FunctionSig *sig)
static uint32_t RefArgsCount(const FunctionSig *sig)
static uint32_t RefRetsCount(const FunctionSig *sig)
static size_t EstimateNativeModuleMetaDataSize(const WasmModule *)
static size_t EstimateNativeModuleCodeSize(const WasmModule *)
void UpdateEntrypoint(WasmCodePointer index, Address value, uint64_t signature_hash)
Address GetEntrypointWithoutSignatureCheck(WasmCodePointer index) const
WasmCodePointer AllocateAndInitializeEntry(Address entrypoint, uint64_t signature_hash)
base::Vector< uint8_t > instructions() const
base::Vector< const uint8_t > reloc_info() const
static void DecrementRefCount(base::Vector< WasmCode *const >)
TypeCanonicalizer * type_canonicalizer()
WasmCode * AddWrapper(const CacheKey &key, WasmCompilationResult result, WasmCode::Kind kind, uint64_t signature_hash)
static void ClearIndirectCallCacheEntry(Isolate *isolate, DirectHandle< WasmInstanceObject > instance, uint32_t table_index, uint32_t entry_index)
static void UpdateIndirectCallTable(Isolate *isolate, DirectHandle< WasmInstanceObject > instance, uint32_t table_index)
static void UpdateMemoryAddress(DirectHandle< WasmInstanceObject > instance)
int start
uint32_t count
Isolate * isolate
int32_t offset
TNode< Context > context
SharedFunctionInfoRef shared
std::map< const std::string, const std::string > map
ZoneVector< RpoNumber > & result
ZoneVector< Entry > entries
static V ReadUnalignedValue(Address p)
Definition memory.h:28
constexpr bool IsInBounds(T index, T length, T max)
Definition bounds.h:49
constexpr Vector< T > VectorOf(T *start, size_t size)
Definition vector.h:360
wasm::WasmCompilationResult CompileWasmCapiCallWrapper(const wasm::CanonicalSig *sig)
V8_EXPORT_PRIVATE WasmCodePointerTable * GetProcessWideWasmCodePointerTable()
constexpr int kAnonymousFuncIndex
uint32_t max_mem32_pages()
static constexpr size_t kMaxCanonicalTypes
WasmImportWrapperCache * GetWasmImportWrapperCache()
uint32_t max_table_size()
MaybeDirectHandle< Object > JSToWasmObject(Isolate *isolate, DirectHandle< Object > value, CanonicalValueType expected, const char **error_message)
uint32_t max_mem64_pages()
std::optional< MessageTemplate > InitializeElementSegment(Zone *zone, Isolate *isolate, DirectHandle< WasmTrustedInstanceData > trusted_instance_data, DirectHandle< WasmTrustedInstanceData > shared_trusted_instance_data, uint32_t segment_index)
constexpr ImportCallKind kDefaultImportCallKind
TypeCanonicalizer * GetTypeCanonicalizer()
bool IsJSCompatibleSignature(const CanonicalSig *sig)
constexpr WasmCodePointer kInvalidWasmCodePointer
constexpr IndependentHeapType kWasmFuncRef
uint64_t max_mem32_bytes()
constexpr size_t kV8MaxWasmMemories
Definition wasm-limits.h:61
WasmEngine * GetWasmEngine()
constexpr size_t kWasmPageSize
V8_INLINE bool IsSubtypeOf(ValueType subtype, ValueType supertype, const WasmModule *sub_module, const WasmModule *super_module)
constexpr IndependentHeapType kWasmBottom
constexpr size_t kV8MaxWasmTableSize
Definition wasm-limits.h:58
uint64_t max_mem64_bytes()
DirectHandle< Object > WasmToJSObject(Isolate *isolate, DirectHandle< Object > value)
DirectHandle< Map > CreateStructMap(Isolate *isolate, wasm::CanonicalTypeIndex struct_index, DirectHandle< Map > opt_rtt_parent, DirectHandle< NativeContext > opt_native_context)
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset uint8_t kGlobalsStartOffset kJumpTableStartOffset std::atomic< uint32_t > kTieringBudgetArrayOffset kDataSegmentStartsOffset kElementSegmentsOffset kInstanceObjectOffset kMemoryObjectsOffset kTaggedGlobalsBufferOffset kTablesOffset kProtectedDispatchTable0Offset dispatch_table_for_imports
@ SKIP_WRITE_BARRIER
Definition objects.h:52
@ UPDATE_WRITE_BARRIER
Definition objects.h:55
void EncodeI32ExceptionValue(DirectHandle< FixedArray > encoded_values, uint32_t *encoded_index, uint32_t value)
Tagged< T > MakeStrong(Tagged< T > value)
Definition tagged.h:903
void DecodeI32ExceptionValue(DirectHandle< FixedArray > encoded_values, uint32_t *encoded_index, uint32_t *value)
Tagged(T object) -> Tagged< T >
V8_INLINE IsolateForSandbox GetCurrentIsolateForSandbox()
Definition isolate.h:78
kWasmInternalFunctionIndirectPointerTag instance_data
V8_INLINE Isolate * GetIsolateFromWritableObject(Tagged< HeapObject > object)
DirectHandle< Map > CreateArrayMap(Isolate *isolate, wasm::CanonicalTypeIndex array_index, DirectHandle< Map > opt_rtt_parent)
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:665
@ TERMINAL_FAST_ELEMENTS_KIND
V8_INLINE DirectHandle< T > direct_handle(Tagged< T > object, Isolate *isolate)
double FastI2D(int x)
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in name
Definition flags.cc:2086
kWasmInternalFunctionIndirectPointerTag kProtectedInstanceDataOffset sig
bool UseGenericWasmToJSWrapper(wasm::ImportCallKind kind, const wasm::CanonicalSig *sig, wasm::Suspend suspend)
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset globals_start
const int kVariableSizeSentinel
Definition objects.h:84
v8::PageAllocator * GetArrayBufferPageAllocator()
Definition allocation.h:125
Handle< To > UncheckedCast(Handle< From > value)
Definition handles-inl.h:55
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset uint8_t kGlobalsStartOffset kJumpTableStartOffset std::atomic< uint32_t > kTieringBudgetArrayOffset kDataSegmentStartsOffset kElementSegmentsOffset kInstanceObjectOffset memory_objects
constexpr int kSystemPointerSize
Definition globals.h:410
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset uint8_t kGlobalsStartOffset kJumpTableStartOffset std::atomic< uint32_t > kTieringBudgetArrayOffset kDataSegmentStartsOffset kElementSegmentsOffset kInstanceObjectOffset kMemoryObjectsOffset kTaggedGlobalsBufferOffset tables
void DecodeI64ExceptionValue(DirectHandle< FixedArray > encoded_values, uint32_t *encoded_index, uint64_t *value)
V8_INLINE void * EmptyBackingStoreBuffer()
Definition sandbox.h:345
constexpr bool SmiValuesAre31Bits()
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset uint8_t kGlobalsStartOffset kJumpTableStartOffset std::atomic< uint32_t > kTieringBudgetArrayOffset data_segment_starts
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset uint8_t kGlobalsStartOffset kJumpTableStartOffset std::atomic< uint32_t > kTieringBudgetArrayOffset kDataSegmentStartsOffset kElementSegmentsOffset kInstanceObjectOffset kMemoryObjectsOffset kTaggedGlobalsBufferOffset kTablesOffset kProtectedDispatchTable0Offset kProtectedDispatchTableForImportsOffset func_refs
Tagged< MaybeWeak< T > > MakeWeak(Tagged< T > value)
Definition tagged.h:893
size_t AllocatePageSize()
Tagged< ClearedWeakValue > ClearedValue(PtrComprCageBase cage_base)
constexpr AdaptArguments kAdapt
Definition globals.h:2775
int FastD2I(double x)
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset uint8_t kGlobalsStartOffset kJumpTableStartOffset std::atomic< uint32_t > kTieringBudgetArrayOffset kDataSegmentStartsOffset element_segments
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset uint8_t kGlobalsStartOffset kJumpTableStartOffset std::atomic< uint32_t > kTieringBudgetArrayOffset kDataSegmentStartsOffset kElementSegmentsOffset kInstanceObjectOffset kMemoryObjectsOffset tagged_globals_buffer
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr int JSParameterCount(int param_count_without_receiver)
Definition globals.h:2782
refactor address components for immediate indexing make OptimizeMaglevOnNextCall optimize to turbofan instead of maglev filter for tracing turbofan compilation nullptr
Definition flags.cc:1263
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in allocation gc speed threshold for starting incremental marking via a task in percent of available threshold for starting incremental marking immediately in percent of available Use a single schedule for determining a marking schedule between JS and C objects schedules the minor GC task with kUserVisible priority max worker number of concurrent for NumberOfWorkerThreads start background threads that allocate memory 
concurrent_array_buffer_sweeping use parallel threads to clear weak refs in the atomic pause trace progress of the incremental marking trace object counts and memory usage report a tick only when allocated zone memory changes by this amount TracingFlags::gc_stats TracingFlags::gc_stats track native contexts that are expected to be garbage collected verify heap pointers before and after GC memory reducer runs GC with ReduceMemoryFootprint flag Maximum number of memory reducer GCs scheduled Old gen GC speed is computed directly from gc tracer counters Perform compaction on full GCs based on V8 s default heuristics Perform compaction on every full GC Perform code space compaction when finalizing a full GC with stack Stress GC compaction to flush out bugs with moving objects flush of baseline code when it has not been executed recently Use time base code flushing instead of age Use a progress bar to scan large objects in increments when incremental marking is active force incremental marking for small heaps and run it more often force marking at random points between and force scavenge at random points between and reclaim otherwise unreachable unmodified wrapper objects when possible less compaction in non memory reducing mode use high priority threads for concurrent Marking Test mode only flag It allows an unit test to select evacuation candidates pages(requires --stress_compaction).") DEFINE_BOOL(cppheap_incremental_marking
return value
Definition map-inl.h:893
static bool IsMinusZero(double value)
constexpr int kMaxInt
Definition globals.h:374
DirectHandle< Map > CreateFuncRefMap(Isolate *isolate, wasm::CanonicalTypeIndex type, DirectHandle< Map > opt_rtt_parent)
kMemory0SizeOffset Address kNewAllocationLimitAddressOffset Address kOldAllocationLimitAddressOffset uint8_t kGlobalsStartOffset kJumpTableStartOffset std::atomic< uint32_t > kTieringBudgetArrayOffset kDataSegmentStartsOffset kElementSegmentsOffset instance_object
constexpr uint32_t kMaxUInt32
Definition globals.h:387
kInstanceDescriptorsOffset kTransitionsOrPrototypeInfoOffset IsNull(value)||IsJSProxy(value)||IsWasmObject(value)||(IsJSObject(value) &&(HeapLayout
Definition map-inl.h:70
void EncodeI64ExceptionValue(DirectHandle< FixedArray > encoded_values, uint32_t *encoded_index, uint64_t value)
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
static constexpr ReleaseStoreTag kReleaseStore
Definition globals.h:2910
Definition c-api.cc:87
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode)
#define RELEASE_WRITE_INT32_FIELD(p, offset, value)
#define FATAL(...)
Definition logging.h:47
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define CHECK_GE(lhs, rhs)
#define DCHECK_NULL(val)
Definition logging.h:491
#define CHECK_IMPLIES(lhs, rhs)
#define CHECK(condition)
Definition logging.h:124
#define CHECK_LT(lhs, rhs)
#define CHECK_LE(lhs, rhs)
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define CHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define CHECK_NE(lhs, rhs)
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define DCHECK_GT(v1, v2)
Definition logging.h:487
#define USE(...)
Definition macros.h:293
constexpr T RoundUp(T x, intptr_t m)
Definition macros.h:387
WasmName GetNameOrNull(WireBytesRef ref) const
constexpr bool valid() const
Definition value-type.h:58
BoundsCheckStrategy bounds_checks
bool has_signature(ModuleTypeIndex index) const
bool has_array(ModuleTypeIndex index) const
std::vector< WasmFunction > functions
CanonicalTypeIndex canonical_sig_id(ModuleTypeIndex index) const
bool has_struct(ModuleTypeIndex index) const
const WasmTagSig * sig
#define TRACE_EVENT0(category_group, name)
#define V8_LIKELY(condition)
Definition v8config.h:661
std::unique_ptr< ValueMirror > value
#define FOREACH_WASMVALUE_CTYPES(V)
#define CASE_TYPE(valuetype, ctype)
#define TRACE_IFT(...)