#if V8_ENABLE_DRUMBRAKE

#if defined(V8_OS_WIN64)

#define TRACE_HEAP(...)                                           \
  do {                                                            \
    if (v8_flags.trace_wasm_native_heap) PrintF(__VA_ARGS__);     \
  } while (false)

using trap_handler::ProtectedInstructionData;

static_assert(sizeof(WasmCode) <= 104);
    merged_region = {below->begin(), below->size() + merged_region.size()};
  // ...
  regions_.insert(insert_pos, merged_region);
  return merged_region;
  // ...
  return merged_region;
  auto it = regions_.lower_bound(region);
  // ...
    if (size > overlap.size()) continue;
    // ...
    auto insert_pos = regions_.erase(it);
    if (size == old.size()) {
      // We used the full region --> nothing to re-insert.
    } else if (ret.begin() == old.begin()) {
      // We return a region at the start --> shrink the old region from the
      // front.
      regions_.insert(insert_pos, {old.begin() + size, old.size() - size});
    } else if (ret.end() == old.end()) {
      // We return a region at the end --> shrink the old region from the back.
      regions_.insert(insert_pos, {old.begin(), old.size() - size});
    } else {
      // We return a region in the middle --> split the old region in two.
      regions_.insert(insert_pos, {old.begin(), ret.begin() - old.begin()});
      regions_.insert(insert_pos, {ret.end(), old.end() - ret.end()});
    }
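// A minimal usage sketch of the pool logic above (illustrative only; the
// addresses and the exact DisjointAllocationPool construction are assumed):
//
//   DisjointAllocationPool pool;
//   pool.Merge({0x1000, 0x100});  // pool covers [0x1000, 0x1100)
//   pool.Merge({0x1100, 0x100});  // adjacent -> coalesced to [0x1000, 0x1200)
//   base::AddressRegion r = pool.AllocateInRegion(0x80, {0x1080, 0x180});
//   // r == [0x1080, 0x1100): the pool keeps [0x1000, 0x1080) and
//   // [0x1100, 0x1200) -- the "middle" split handled by the final branch.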
  size_t total_size = 0;
  for (auto& vec : vectors) total_size += vec.size();
  std::unique_ptr<uint8_t[]> result{new uint8_t[total_size]};
  uint8_t* ptr = result.get();
  for (auto& vec : vectors) {
    if (vec.empty()) continue;  // {vec.begin()} could be nullptr.
    memcpy(ptr, vec.begin(), vec.size());
    ptr += vec.size();
  }
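// Usage sketch (hypothetical byte vectors): the helper packs several metadata
// sections into one owned allocation, in argument order:
//
//   auto bytes = ConcatenateBytes({reloc_info, source_positions, deopt_data});
//   // `bytes` owns total_size contiguous bytes; empty inputs are skipped so
//   // memcpy never sees a null source pointer.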
  RegisterHandlerData(base, size, protected_instruction_data.size(),
                      protected_instruction_data.begin());
  // ...
  return isolate->IsLoggingCodeCreation();
  // ...
#if V8_ENABLE_DRUMBRAKE
    case kInterpreterEntry:
      return "interpreter entry";
      module->lazily_generated_names.LookupFunctionName(wire_bytes, index());
  std::string name_buffer;
  name_buffer.resize(32);
  name_buffer.resize(
      SNPrintF(base::VectorOf(&name_buffer.front(), name_buffer.size()),
               "wasm-function[%d]", index()));
  name_buffer.append(name.begin(), name.end());
void WasmCode::LogCode(Isolate* isolate, const char* source_url,
                       int script_id) const {
  // ...
      module->debug_symbols[WasmDebugSymbols::Type::SourceMap];
  auto load_wasm_source_map = isolate->wasm_load_source_map_callback();
  // ...
      !symbol.external_url.is_empty() && load_wasm_source_map) {
    std::string external_url_string(external_url.data(), external_url.size());
    // ...
    load_wasm_source_map(v8_isolate, external_url_string.c_str());
    // ...
        std::make_unique<WasmModuleSourceMap>(v8_isolate, source_map_str));
  }
  // ...
  PROFILE(isolate, CodeCreateEvent(LogEventListener::CodeTag::kFunction, this,
                                   name, source_url, code_offset, script_id));
  return std::binary_search(instructions.begin(), instructions.end(), offset,
                            ProtectedInstructionDataCompare);
  // ...
            std::numeric_limits<uint16_t>::max());
  static constexpr int kMaxSlotsPerParam = 4;
  // ...
            std::numeric_limits<uint16_t>::max());
  // ...
       !it.done(); it.next()) {
      Address target = it.rinfo()->wasm_call_address();
      // ...
      CHECK(code->contains(target));
      // ...
      Address target = it.rinfo()->wasm_stub_call_address();
      // ...
      CHECK(code->contains(target));
      // ...
      uint32_t sig_id = it.rinfo()->wasm_canonical_sig_id();
      // ...
          it.rinfo()->wasm_code_pointer_table_entry();
      uint32_t function_index = function_index_map.at(call_target);
      // ...
      Address target = it.rinfo()->target_internal_reference();
      // ...
      FATAL("Unexpected mode: %d", mode);
  bool function_index_matches =
      // ...
       v8_flags.print_wasm_code_function_index == static_cast<int>(index()));
  // ...
          ? (v8_flags.print_wasm_code || function_index_matches)
          : v8_flags.print_wasm_stub_code.value())) {
  os << "--- WebAssembly code ---\n";
  // ...
  if (auto* debug_side_table =
      // ...
    debug_side_table->Print(os);
  // ...
  os << "--- End code ---\n";

  if (name) os << "name: " << name << "\n";
  // ...
  const char* compiler =
      // ...
  os << "compiler: " << compiler << "\n";
#ifdef ENABLE_DISASSEMBLER
  os << "Instructions (size = " << instruction_size << ")\n";
  // ...
  os << "Exception Handler Table (size = " << table.NumberOfReturnEntries()
  // ...
  table.HandlerTableReturnPrint(os);
  // ...
  os << "Protected instructions:\n pc offset\n";
  // ...
    os << std::setw(10) << std::hex << data.instr_offset << std::setw(10)
  // ...
  os << "Source positions:\n pc offset position\n";
  // ...
    os << std::setw(10) << std::hex << it.code_offset() << std::dec
       << std::setw(10) << it.source_position().ScriptOffset()
       << (it.is_statement() ? " statement" : "") << "\n";
  // ...
  os << "Deopt exits (entries = " << data.entry_count
  // ...
  constexpr char pc_offset[] = "pc-offset";
  constexpr char source_offset[] = " source-offset";
  constexpr char translation_index[] = " translation-index";
  os << pc_offset << source_offset << translation_index << '\n';
  uint32_t code_offset = data.deopt_exit_start_offset;
  for (uint32_t i = 0; i < data.entry_count; ++i) {
    os << std::setw(sizeof pc_offset - 1) << std::hex << code_offset
       << std::dec << std::setw(sizeof source_offset - 1)
  // ...
       !it.done(); it.next()) {
    it.rinfo()->Print(nullptr, os);
  // ...
  os << "Instructions (size = " << instruction_size << ", "
     // ...
     << static_cast<void*>(instructions().begin() + instruction_size) << ")\n";
      return "wasm function";
      // ...
      return "wasm-to-capi";
      // ...
#if V8_ENABLE_DRUMBRAKE
    case WasmCode::kInterpreterEntry:
      return "interpreter entry";
      // ...
  return "unknown kind";
  std::vector<WasmCode*> dead_wrappers;
  // ...
    if (!code->DecRef()) continue;
    // ...
    dead_wrappers.push_back(code);
  // ...
  if (dead_code.empty() && dead_wrappers.empty()) return;
       !iterator.done() && iterator.code_offset() < code_offset;
       iterator.Advance()) {
    position = iterator.source_position();
    int inlining_id) const {
  const size_t elem_size = sizeof(int) + sizeof(bool) + sizeof(SourcePosition);
  // ...
  std::tuple<int, bool, SourcePosition> result;
  // ...
              sizeof std::get<1>(result));
  std::memcpy(&std::get<2>(result),
              // ...
              sizeof std::get<2>(result));
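// Layout sketch: each inlining-position record occupies elem_size =
// sizeof(int) + sizeof(bool) + sizeof(SourcePosition) packed bytes, so record
// i starts at byte offset i * elem_size. With (hypothetical) sizes 4 + 1 + 8,
// elem_size == 13 and record 2 starts at byte 26. The fields are read with
// memcpy rather than a reinterpret_cast because the packed records give no
// alignment guarantees.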
constexpr bool kNeedsToSplitRangeByReservations = true;
#else
constexpr bool kNeedsToSplitRangeByReservations = false;
    const std::vector<VirtualMemory>& owned_code_space) {
  if (!kNeedsToSplitRangeByReservations) return {range};
  // ...
  size_t missing_begin = range.begin();
  size_t missing_end = range.end();
  // ...
    Address overlap_begin = std::max(missing_begin, vmem.address());
    Address overlap_end = std::min(missing_end, vmem.end());
    if (overlap_begin >= overlap_end) continue;
    split_ranges.emplace_back(overlap_begin, overlap_end - overlap_begin);
    // ...
    if (missing_begin == overlap_begin) missing_begin = overlap_end;
    if (missing_end == overlap_end) missing_end = overlap_begin;
    if (missing_begin >= missing_end) break;
  // ...
#ifdef ENABLE_SLOW_DCHECKS
  size_t total_split_size = 0;
  for (auto split : split_ranges) total_split_size += split.size();
  DCHECK_EQ(range.size(), total_split_size);
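// Worked example (hypothetical addresses): with two owned reservations
// [0x10000, 0x20000) and [0x20000, 0x30000) and range == [0x18000, 0x28000),
// the loop emits the overlaps [0x18000, 0x20000) and [0x20000, 0x28000) and
// shrinks the "missing" interval to empty; the slow DCHECK then confirms the
// pieces sum to range.size() == 0x10000.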
int NumWasmFunctionsInFarJumpTable(uint32_t num_declared_functions) {
  // ...
             ? static_cast<int>(num_declared_functions)
  // ...

size_t OverheadPerCodeSpace(uint32_t num_declared_functions) {
  // ...
#if defined(V8_OS_WIN64)
  overhead += Heap::GetCodeRangeReservedAreaSize();
  // ...
      NumWasmFunctionsInFarJumpTable(num_declared_functions)));
size_t ReservationSizeForWasmCode(size_t needed_size,
                                  int num_declared_functions,
                                  size_t total_reserved_so_far) {
  DCHECK_EQ(needed_size == 0, num_declared_functions == 0);
  if (needed_size == 0) return 0;

  size_t overhead = OverheadPerCodeSpace(num_declared_functions);
  // ...
  size_t minimum_size = 2 * overhead;
  size_t suggested_size = std::max(
      // ...
      total_reserved_so_far / 4);

  const size_t max_code_space_size =
      size_t{v8_flags.wasm_max_code_space_size_mb} * MB;
  if (V8_UNLIKELY(minimum_size > max_code_space_size)) {
    // ...
        << "required reservation minimum (" << minimum_size
        << ") is bigger than supported maximum ("
        << max_code_space_size << ")";
    // ...
        "Exceeding maximum wasm code space size",
        oom_detail.PrintToArray().data());
  }
  // ...
  return std::min(max_code_space_size, suggested_size);
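// Sizing sketch (hypothetical numbers): with overhead == 128 KB and
// total_reserved_so_far == 8 MB, minimum_size is 256 KB while the
// total_reserved_so_far / 4 term suggests 2 MB, so roughly 2 MB is reserved
// (capped at max_code_space_size). Growing each new reservation in proportion
// to what is already reserved keeps the number of code spaces -- and hence of
// jump-table copies -- logarithmic in the total code size.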
size_t ReservationSizeForWrappers(size_t needed_size,
                                  size_t total_reserved_so_far) {
  // ...
  size_t suggested_size = std::max(needed_size, total_reserved_so_far / 4);

  const size_t max_code_space_size =
      size_t{v8_flags.wasm_max_code_space_size_mb} * MB;
  if (V8_UNLIKELY(needed_size > max_code_space_size)) {
    // ...
        << "required reservation minimum (" << needed_size
        << ") is bigger than supported maximum ("
        << max_code_space_size << ")";
    // ...
        "Exceeding maximum wasm code space size",
        oom_detail.PrintToArray().data());
  }
  // ...
  return std::min(max_code_space_size, suggested_size);
constexpr base::AddressRegion kUnrestrictedRegion{
    kNullAddress, std::numeric_limits<size_t>::max()};
#if defined(V8_OS_WIN64)
  // ...
  if (WasmCodeManager::CanRegisterUnwindInfoForNonABICompliantCodeRange()) {
    size_t size = Heap::GetCodeRangeReservedAreaSize();
    // ...
        reinterpret_cast<void*>(region.begin()), region.size());
  CHECK_EQ(kUnrestrictedRegion, region);
  // ...
  size_t total_reserved = 0;
  // ...
  size_t reserve_size =
      // ...
          ? ReservationSizeForWasmCode(
                size, native_module->module()->num_declared_functions,
                // ...
          : ReservationSizeForWrappers(size, total_reserved);
  if (reserve_size < size) {
    // ...
        << "cannot reserve space for " << size
        << " bytes of code (maximum reservation size is "
        << reserve_size << ")";
    // ...
        oom_detail.PrintToArray().data());
  }
  // ...
        << "cannot allocate more code space (" << reserve_size
        << " bytes, currently " << total_reserved << ")";
    // ...
        oom_detail.PrintToArray().data());
  if (commit_start < commit_end) {
    // ...
              v8_flags.wasm_max_committed_code_mb * MB);
  }
  // ...
  TRACE_HEAP("Code alloc for %p: 0x%" PRIxPTR ",+%zu\n", this,
             code_space.begin(), size);
  return {reinterpret_cast<uint8_t*>(code_space.begin()), code_space.size()};
  size_t code_size = 0;
  // ...
    code_size += code->instructions().size();
    // ...
                   code->instructions().size()});
    // ...
                   code->instructions().size());
  for (auto region : freed_regions.regions()) {
    // ...
        std::max(RoundUp(merged_region.begin(), commit_page_size),
                 RoundDown(region.begin(), commit_page_size));
    // ...
        std::min(RoundDown(merged_region.end(), commit_page_size),
                 RoundUp(region.end(), commit_page_size));
    if (discard_start >= discard_end) continue;
    regions_to_decommit.Merge({discard_start, discard_end - discard_start});
  }
  // ...
  for (auto region : regions_to_decommit.regions()) {
    [[maybe_unused]] size_t old_committed =
    std::shared_ptr<const WasmModule> module,
    std::shared_ptr<Counters> async_counters,
    std::shared_ptr<NativeModule>* shared_this)
    : engine_scope_(
          GetWasmEngine()->GetBarrierForBackgroundCompile()->TryLock()),
      code_allocator_(async_counters),
      // ...
      compile_imports_(std::move(compile_imports)),
      // ...
      fast_api_signatures_(
      // ...
  shared_this->reset(this);
  // ...
      *shared_this, std::move(async_counters), detected_features);
  // ...
  if (module_->num_declared_functions > 0) {
    // ...
        std::make_unique<WasmCode*[]>(module_->num_declared_functions);
    // ...
        module_->num_declared_functions);
  auto initial_region = code_space.region();
  // ...
  const bool has_code_space = initial_region.size() > 0;
  // ...
  if (has_code_space) {
  TRACE_EVENT1("v8.wasm", "wasm.LogWasmCodes", "functions",
               module_->num_declared_functions);
  // ...
  DCHECK(IsString(url_obj) || IsUndefined(url_obj));
  std::unique_ptr<char[]> source_url =
      // ...
      : std::unique_ptr<char[]>(new char[1]{'\0'});
  // ...
    code->LogCode(isolate, source_url.get(), script->id());
    uint64_t signature_hash) {
  const size_t relocation_size = code->relocation_size();
  // ...
  int source_pos_len = source_pos_table->length();
  // ...
      reinterpret_cast<uint8_t*>(code->body_start()),
      static_cast<size_t>(code->body_size()));
  // ...
  const int base_offset = code->instruction_size();
  // ...
  const int safepoint_table_offset =
      code->has_safepoint_table() ? base_offset + code->safepoint_table_offset()
                                  : 0;
  const int handler_table_offset = base_offset + code->handler_table_offset();
  const int constant_pool_offset = base_offset + code->constant_pool_offset();
  const int code_comments_offset = base_offset + code->code_comments_offset();
  // ...
      dst_code_bytes.size(),
  jit_allocation.CopyCode(0, instructions.begin(), instructions.size());
  // ...
  intptr_t delta = reinterpret_cast<Address>(dst_code_bytes.begin()) -
                   code->instruction_start();
  // ...
  auto jump_tables_ref =
      // ...
  Address constant_pool_start = dst_code_addr + constant_pool_offset;
  // ...
      reloc_info.as_vector(), constant_pool_start,
      // ...
       !it.done(); it.next(), orig_it.next()) {
    // ...
                 static_cast<uint32_t>(Builtin::kFirstBytecodeHandler));
      // ...
      it.rinfo()->set_wasm_stub_call_address(entry);
      // ...
      uint32_t function_index =
          it.rinfo()->wasm_code_pointer_table_entry().value();
      // ...
      it.rinfo()->set_wasm_code_pointer_table_entry(target,
      // ...
      it.rinfo()->apply(delta);
  // ...
  std::unique_ptr<WasmCode> new_code{
      // ...
      safepoint_table_offset,
      handler_table_offset,
      constant_pool_offset,
      code_comments_offset,
      // ...
  new_code->MaybePrint();
  new_code->Validate();
    uint32_t num_wasm_functions) {
  if (!num_wasm_functions) return;
  // ...
  Address compile_lazy_address =
      // ...
      module_->num_imported_functions, compile_lazy_address);
  // ...
      "Initialize WasmCodePointerTable");
  // ...
  for (uint32_t i = 0; i < num_wasm_functions; i++) {
    uint64_t signature_hash = module_->signature_hash(
        type_canonicalizer, module_->num_imported_functions + i);
    // ...
        signature_hash, write_scope);
  }
  // ...
      module_->num_imported_functions + module_->num_declared_functions);
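// Net effect of the loop above: every declared function's code pointer entry
// starts out carrying its signature hash and pointing at the lazy-compile
// table, so the first call to declared function i (function index
// module_->num_imported_functions + i) lands in the CompileLazy builtin
// rather than in compiled code; compilation then patches the jump table slot.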
  uint64_t signature_hash =
      // ...

    uint32_t tagged_parameter_slots,
    // ...
  bool frame_has_feedback_slot = false;
  // ...
  return AddCodeWithCodeSpace(
      index, desc, stack_slots, ool_spill_count, tagged_parameter_slots,
      protected_instructions_data, source_position_table, inlining_positions,
      deopt_data, kind, tier, for_debugging, frame_has_feedback_slot,
      code_space, jump_table_ref);
    uint32_t num_wasm_functions) {
  // ...
      std::make_unique<WasmCodePointer[]>(num_wasm_functions);
  // ...
  for (uint32_t i = 0; i < num_wasm_functions; i++) {
    uint32_t tagged_parameter_slots,
    // ...
      desc.buffer + desc.buffer_size - desc.reloc_size,
      static_cast<size_t>(desc.reloc_size)};
  // ...
  const int safepoint_table_offset =
      desc.safepoint_table_size == 0 ? 0 : desc.safepoint_table_offset;
  const int handler_table_offset = desc.handler_table_offset;
  const int constant_pool_offset = desc.constant_pool_offset;
  const int code_comments_offset = desc.code_comments_offset;
  const int instr_size = desc.instr_size;
  // ...
  jit_allocation.CopyCode(0, desc.buffer, desc.instr_size);
  // ...
  intptr_t delta = dst_code_bytes.begin() - desc.buffer;
  // ...
  Address constant_pool_start = code_start + constant_pool_offset;
  // ...
                         constant_pool_start, mode_mask);
       !it.done(); it.next()) {
    // ...
      uint32_t call_tag = it.rinfo()->wasm_call_tag();
      // ...
      it.rinfo()->set_wasm_call_address(target);
      // ...
      uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
      // ...
               static_cast<uint32_t>(Builtin::kFirstBytecodeHandler));
      // ...
      it.rinfo()->set_wasm_stub_call_address(entry);
      // ...
      uint32_t function_index =
          it.rinfo()->wasm_code_pointer_table_entry().value();
      // ...
      it.rinfo()->set_wasm_code_pointer_table_entry(target,
      // ...
      it.rinfo()->apply(delta);
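// Relocation sketch: `delta` is the distance between the assembler's scratch
// buffer (desc.buffer) and the final code space, so internal-reference modes
// are shifted by apply(delta), while wasm call and stub-call sites are
// re-targeted through the jump tables resolved above. E.g. (hypothetical
// addresses): code assembled at 0x7f0000000 and installed at 0x7f0100000
// gives delta == 0x100000, added to each internal reference.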
  uint64_t signature_hash =
      // ...
  std::unique_ptr<WasmCode> code{new WasmCode{this,
                                              // ...
                                              tagged_parameter_slots,
                                              safepoint_table_offset,
                                              handler_table_offset,
                                              constant_pool_offset,
                                              code_comments_offset,
                                              // ...
                                              protected_instructions_data,
                                              // ...
                                              source_position_table,
                                              // ...
                                              frame_has_feedback_slot}};
1376 "wasm.PublishCode");
1385 "wasm.PublishCode",
"number", unpublished_codes.
size());
1386 std::vector<WasmCode*> published_code;
1387 published_code.reserve(unpublished_codes.
size());
1390 for (
auto& unpublished_code : unpublished_codes) {
1392 unpublished_code.assumptions.get());
1393 if (code ==
nullptr) {
1398 published_code.push_back(code);
1400 return published_code;
#if V8_ENABLE_DRUMBRAKE
    case WasmCompilationResult::kInterpreterEntry:
      return WasmCode::Kind::kInterpreterEntry;
  if (assumptions != nullptr) {
    // ...
             static_cast<int>(module_->num_imported_functions));
    // ...
      if (V8_UNLIKELY(current.get(import_index) != status)) {
  // ...
  if (code->index() < static_cast<int>(module_->num_imported_functions)) {
  // ...
  code->RegisterTrapHandlerData();
  // ...
                "Assume an order on execution tiers");
  // ...
                "for_debugging is ordered");
  // ...
        code->instruction_start(), code->signature_hash());
    // ...
    code->DecRefOnLiveCode();
  // ...
      prior_code->tier() > new_code->tier() &&
  // ...
  DCHECK(!code->IsAnonymous());
  // ...
    prior_code->DecRefOnLiveCode();
    // ...
        code->instruction_start(), code->signature_hash());
  // ...
  return {code_space, jump_tables};
    int ool_spills, uint32_t tagged_parameter_slots, int safepoint_table_offset,
    int handler_table_offset, int constant_pool_offset,
    int code_comments_offset, int unpadded_binary_size,
    // ...
  uint64_t signature_hash =
      // ...
  return std::unique_ptr<WasmCode>{new WasmCode{this,
                                                // ...
                                                tagged_parameter_slots,
                                                safepoint_table_offset,
                                                handler_table_offset,
                                                constant_pool_offset,
                                                code_comments_offset,
                                                unpadded_binary_size,
                                                protected_instructions_data,
                                                // ...
                                                source_position_table,
std::pair<std::vector<WasmCode*>, std::vector<WellKnownImport>>
// ...
  std::vector<WellKnownImport> import_statuses(
      module_->num_imported_functions);
  for (uint32_t i = 0; i < module_->num_imported_functions; i++) {
    import_statuses[i] = module_->type_feedback.well_known_imports.get(i);
  }
  return {std::vector<WasmCode*>{start, end}, std::move(import_statuses)};
  std::vector<WasmCode*> all_code(owned_code_.size());
  // ...
                 [](auto& entry) { return entry.second.get(); });
  // ...
    std::unique_ptr<WasmModuleSourceMap> source_map) {
  // ...
      kUnrestrictedRegion, type);
  // ...
      ToAllocationType(type));
  // ...
  std::unique_ptr<WasmCode> code{
    Address code_pointer_table_target,
    uint64_t signature_hash) {
  // ...
    DCHECK_IMPLIES(code_space_data.jump_table, code_space_data.far_jump_table);
    if (!code_space_data.jump_table) continue;
    WritableJumpTablePair writable_jump_tables =
        ThreadIsolation::LookupJumpTableAllocations(
            code_space_data.jump_table->instruction_start(),
            code_space_data.jump_table->instructions_size_,
            code_space_data.far_jump_table->instruction_start(),
            code_space_data.far_jump_table->instructions_size_);
    uint32_t slot_index, Address target) {
  // ...
  bool has_far_jump_slot =
      far_jump_table_offset <
      // ...
  Address far_jump_table_start =
      // ...
      has_far_jump_slot ? far_jump_table_start + far_jump_table_offset
      // ...
      far_jump_table_slot, target);
  WasmCode* far_jump_table = nullptr;
  const uint32_t num_wasm_functions = module_->num_declared_functions;
  // ...
  const bool needs_far_jump_table =
      // ...
  const bool needs_jump_table = num_wasm_functions > 0 && needs_far_jump_table;

  if (needs_jump_table) {
    // ...
    int jump_table_size =
        // ...
  if (needs_far_jump_table) {
    int num_function_slots = NumWasmFunctionsInFarJumpTable(num_wasm_functions);
    // ...
    int far_jump_table_size =
        // ...
  if (is_first_code_space) {
    // ...
  if (is_first_code_space) {
    // ...
  if (jump_table && !is_first_code_space) {
    // ...
    WritableJumpTablePair writable_jump_tables =
        ThreadIsolation::LookupJumpTableAllocations(
            // ...
    for (uint32_t slot_index = 0; slot_index < num_wasm_functions;
         // ...
      Address main_jump_table_target =
          // ...
          slot_index, main_jump_table_target);
  explicit NativeModuleWireBytesStorage(
      // ...
    return std::atomic_load(&wire_bytes_)
        // ...
        .SubVector(ref.offset(), ref.end_offset());
  }

  std::optional<ModuleWireBytes> GetModuleBytes() const final {
    return std::optional<ModuleWireBytes>(
        std::atomic_load(&wire_bytes_)->as_vector());
  }
  auto shared_wire_bytes =
      std::make_shared<base::OwnedVector<const uint8_t>>(std::move(wire_bytes));
  std::atomic_store(&wire_bytes_, shared_wire_bytes);
  if (!shared_wire_bytes->empty()) {
    // ...
        std::make_shared<NativeModuleWireBytesStorage>(
            std::move(shared_wire_bytes)));
  }
      std::memory_order_relaxed);
  // ...
  while (sample_in_micro_sec > max &&
         // ...
             max, sample_in_micro_sec, std::memory_order_relaxed,
             std::memory_order_relaxed)) {
      [](const std::unique_ptr<WasmCode>& a,
         const std::unique_ptr<WasmCode>& b) {
        return a->instruction_start() > b->instruction_start();
      });
  // ...
             insertion_hint->first > code->instruction_start());
  // ...
      insertion_hint, code->instruction_start(), std::move(code));
  WasmCode* candidate = iter->second.get();
  // ...
  if (!candidate->contains(pc)) return nullptr;
  auto jump_table_usable = [code_region](const WasmCode* jump_table) {
    // ...
    Address table_start = jump_table->instruction_start();
    Address table_end = table_start + jump_table->instructions().size();
    // ...
    size_t max_distance = std::max(
        code_region.end() > table_start ? code_region.end() - table_start : 0,
        table_end > code_region.begin() ? table_end - code_region.begin() : 0);
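// Distance sketch: max_distance is the farthest span between any pc in
// code_region and any slot in the candidate table. E.g. (hypothetical): a
// code region [0x10000000, 0x12000000) with a table at 0x0f000000 gives
// max_distance == 0x12000000 - 0x0f000000 == 48 MB; the table only counts as
// usable if that distance still fits the architecture's near-jump range,
// otherwise a jump table in this code space must be used instead.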
    DCHECK_IMPLIES(code_space_data.jump_table, code_space_data.far_jump_table);
    if (!code_space_data.far_jump_table) continue;
    // ...
        (!jump_table_usable(code_space_data.far_jump_table) ||
         (code_space_data.jump_table &&
          !jump_table_usable(code_space_data.jump_table)))) {
    // ...
    return {code_space_data.jump_table
                ? code_space_data.jump_table->instruction_start()
                : kNullAddress,
            code_space_data.far_jump_table->instruction_start()};
  }

    uint32_t func_index, const JumpTablesRef& jump_tables) const {
  uint32_t slot_offset =
      static_cast<uint32_t>(slot_address - code->instruction_start());
  // ...
      code->instruction_start() +
      // ...
  return module_->num_imported_functions + slot_idx;
    if (code_space_data.far_jump_table != nullptr &&
        code_space_data.far_jump_table->contains(target)) {
      uint32_t offset = static_cast<uint32_t>(
          target - code_space_data.far_jump_table->instruction_start());
  // ...
      static_cast<uint32_t>(index) < module_->num_imported_functions) {
  // ...
  TRACE_HEAP("Deleting native module: %p\n", this);
    : max_committed_code_space_(v8_flags.wasm_max_committed_code_mb * MB),
      critical_committed_code_space_(max_committed_code_space_ / 2),
      next_code_space_hint_(reinterpret_cast<Address>(
          // ...
           v8_flags.wasm_max_code_space_size_mb);
#if defined(V8_OS_WIN64)
bool WasmCodeManager::CanRegisterUnwindInfoForNonABICompliantCodeRange() {
        << "trying to commit " << region.size()
        << ", already committed " << old_value;
    // ...
        "Exceeding maximum wasm committed code space",
        oom_detail.PrintToArray().data());
  // ...
          old_value, old_value + region.size())) {
  // ...
  TRACE_HEAP("Setting rwx permissions for 0x%" PRIxPTR ":0x%" PRIxPTR "\n",
             region.begin(), region.end());
  // ...
          reinterpret_cast<void*>(region.begin()), region.size(),
          // ...
        oom_detail.PrintToArray().data());
  [[maybe_unused]] size_t old_committed =
      // ...
  DCHECK_LE(region.size(), old_committed);
  TRACE_HEAP("Decommitting system pages 0x%" PRIxPTR ":0x%" PRIxPTR "\n",
             region.begin(), region.end());
  // ...
          reinterpret_cast<void*>(region.begin()), region.size()))) {
    // ...
        oom_detail.PrintToArray().data());
  // ...
      region.begin(), std::make_pair(region.end(), native_module)));
  size = RoundUp(size, allocate_page_size);
  // ...
  VirtualMemory mem(page_allocator, size, reinterpret_cast<void*>(hint),
  // ...
      std::memory_order_relaxed);

#if !defined(V8_OS_WIN) && !defined(V8_OS_IOS)
#if V8_HAS_PKU_JIT_WRITE_PROTECT
  CHECK(base::MemoryProtectionKey::SetPermissionsAndKey(
      // ...
      RwxMemoryWriteScope::memory_protection_key()));
#if V8_TARGET_ARCH_X64
constexpr size_t kTurbofanFunctionOverhead = 24;
constexpr size_t kTurbofanCodeSizeMultiplier = 3;
constexpr size_t kLiftoffFunctionOverhead = 56;
constexpr size_t kLiftoffCodeSizeMultiplier = 4;
#elif V8_TARGET_ARCH_IA32
constexpr size_t kTurbofanFunctionOverhead = 20;
constexpr size_t kTurbofanCodeSizeMultiplier = 3;
constexpr size_t kLiftoffFunctionOverhead = 48;
constexpr size_t kLiftoffCodeSizeMultiplier = 3;
#elif V8_TARGET_ARCH_ARM
constexpr size_t kTurbofanFunctionOverhead = 44;
constexpr size_t kTurbofanCodeSizeMultiplier = 3;
constexpr size_t kLiftoffFunctionOverhead = 96;
constexpr size_t kLiftoffCodeSizeMultiplier = 5;
#elif V8_TARGET_ARCH_ARM64
constexpr size_t kTurbofanFunctionOverhead = 40;
constexpr size_t kTurbofanCodeSizeMultiplier = 3;
constexpr size_t kLiftoffFunctionOverhead = 68;
constexpr size_t kLiftoffCodeSizeMultiplier = 4;
#else
// Conservative defaults for other architectures.
constexpr size_t kTurbofanFunctionOverhead = 44;
constexpr size_t kTurbofanCodeSizeMultiplier = 4;
constexpr size_t kLiftoffFunctionOverhead = 96;
constexpr size_t kLiftoffCodeSizeMultiplier = 5;
#endif

         body_size * kLiftoffCodeSizeMultiplier;
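// Worked example (x64 constants above, assuming the usual
// overhead-plus-multiplier form of EstimateLiftoffCodeSize): a 1000-byte
// function body is estimated at roughly
// kLiftoffFunctionOverhead + 4 * 1000 ≈ 4 KB of Liftoff code.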
  int code_section_length = 0;
  // ...
    auto* first_fn = &module->functions[module->num_imported_functions];
    auto* last_fn = &module->functions.back();
    code_section_length =
        static_cast<int>(last_fn->code.end_offset() - first_fn->code.offset());
  // ...
                                        code_section_length);
    int code_section_length) {
  // ...
  if (num_functions == 0) return 0;
  // ...
  const size_t lazy_compile_table_size =
      // ...
  const size_t overhead_per_function_turbofan =
      // ...
  size_t size_of_turbofan = overhead_per_function_turbofan * num_functions +
                            kTurbofanCodeSizeMultiplier * code_section_length;
  // ...
  const size_t overhead_per_function_liftoff =
      // ...
  size_t size_of_liftoff =
      v8_flags.liftoff ? overhead_per_function_liftoff * num_functions +
                             kLiftoffCodeSizeMultiplier * code_section_length
                       : 0;
  // ...
  if (v8_flags.wasm_lazy_compilation) size_of_liftoff /= 2;
  // ...
  if (v8_flags.liftoff && v8_flags.wasm_dynamic_tiering) size_of_turbofan /= 4;

  return lazy_compile_table_size + size_of_liftoff + size_of_turbofan;
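// Worked example (x64 constants, hypothetical module): 1000 declared
// functions with a 100 KB code section give
// size_of_liftoff ≈ 1000 * overhead_per_function_liftoff + 4 * 100 KB and
// size_of_turbofan ≈ 1000 * overhead_per_function_turbofan + 3 * 100 KB;
// with dynamic tiering the TurboFan share is divided by 4, reflecting that
// most functions are expected to stay on Liftoff, and with lazy compilation
// the Liftoff share is halved since many functions are never compiled.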
  size_t wasm_module_estimate = module->EstimateStoredSize();
  // ...
  uint32_t num_wasm_functions = module->num_declared_functions;
  // ...
  size_t native_module_estimate =
      // ...
      (sizeof(WasmCode*) * num_wasm_functions) +
      (sizeof(WasmCode) * num_wasm_functions);
  // ...
  size_t far_jump_table_size =
      // ...
          NumWasmFunctionsInFarJumpTable(num_wasm_functions)));

  return wasm_module_estimate + native_module_estimate + jump_table_size +
         far_jump_table_size;
#if V8_HAS_PKU_JIT_WRITE_PROTECT
// ...
#if V8_HAS_PKU_JIT_WRITE_PROTECT
  return RwxMemoryWriteScope::IsPKUWritable();
    size_t code_size_estimate, std::shared_ptr<const WasmModule> module) {
#if V8_ENABLE_DRUMBRAKE
  // ...
  std::shared_ptr<NativeModule> ret;
  new NativeModule(enabled_features, detected_features, compile_imports,
                   std::move(code_space), std::move(module),
                   isolate->async_counters(), &ret);
  // ...
  TRACE_HEAP("New NativeModule (wasm-jitless) %p\n", ret.get());
  auto [code_size, metadata_size] =
      // ...
  isolate->counters()->wasm_flushed_liftoff_code_size_bytes()->AddSample(
      static_cast<int>(code_size));
  // ...
      ->wasm_flushed_liftoff_metadata_size_bytes()
      ->AddSample(static_cast<int>(metadata_size));
    code_size_estimate /= 2;
  // ...
  size_t code_vmem_size = ReservationSizeForWasmCode(
      code_size_estimate, module->num_declared_functions, 0);
  DCHECK_EQ(code_vmem_size == 0, module->num_declared_functions == 0);
  // ...
  if (v8_flags.wasm_max_initial_code_space_reservation > 0) {
    size_t flag_max_bytes =
        static_cast<size_t>(v8_flags.wasm_max_initial_code_space_reservation) *
        MB;
    if (flag_max_bytes < code_vmem_size) code_vmem_size = flag_max_bytes;
  }
  static constexpr int kAllocationRetries = 2;
  // ...
  if (code_vmem_size != 0) {
    for (int retries = 0;; ++retries) {
      // ...
      if (retries == kAllocationRetries) {
        // ...
            << "NewNativeModule cannot allocate code space of "
            << code_vmem_size << " bytes";
        // ...
            oom_detail.PrintToArray().data());
      }
      // ...
      isolate->heap()->MemoryPressureNotification(
          // ...
    }
    code_space_region = code_space.region();
  std::shared_ptr<NativeModule> ret;
  // ...
                   std::move(compile_imports),
                   std::move(code_space), std::move(module),
                   isolate->async_counters(), &ret);
  // ...
  TRACE_HEAP("New NativeModule %p: Mem: 0x%" PRIxPTR ",+%zu\n", ret.get(),
             code_space_region.begin(), code_space_region.size());
  // ...
      std::make_pair(code_space_region.begin(),
                     std::make_pair(code_space_region.end(), ret.get())));
  int code_size_mb = static_cast<int>(code_size / MB);
#if V8_ENABLE_DRUMBRAKE
  // ...
  if (auto interpreter = module_->interpreter_.lock()) {
    code_size_mb = static_cast<int>(interpreter->TotalBytecodeSize() / MB);
  }
  // ...
  counters->wasm_module_code_size_mb()->AddSample(code_size_mb);
  int code_size_kb = static_cast<int>(code_size / KB);
  counters->wasm_module_code_size_kb()->AddSample(code_size_kb);
  Histogram* metadata_histogram = counters->wasm_module_metadata_size_kb();
  if (metadata_histogram->Enabled()) {
    // ...
    int metadata_size_kb =
        // ...
    metadata_histogram->AddSample(metadata_size_kb);
  }
  // ...
    int freed_percent = static_cast<int>(100 * freed_size / generated_size);
    counters->wasm_module_freed_code_size_percent()->AddSample(freed_percent);
  // ...
  return std::move(code[0]);
2592 "wasm.AddCompiledCode",
"num", results.
size());
2594 std::vector<UnpublishedWasmCode> generated_code;
2595 generated_code.reserve(results.
size());
2602 size_t max_code_batch_size =
v8_flags.wasm_max_code_space_size_mb * MB / 2;
2603 size_t total_code_space = 0;
2604 for (
auto&
result : results) {
2606 size_t new_code_space =
2608 if (total_code_space + new_code_space > max_code_batch_size) {
2612 if (split_point == 0) {
2616 if (
v8_flags.wasm_max_code_space_size_mb <
2619 <<
"--wasm-max-code-space-size="
2620 <<
v8_flags.wasm_max_code_space_size_mb.value();
2622 "A single code object needs more than "
2623 "half of the code space size",
2624 oom_detail.PrintToArray().data());
2629 "A single code object needs more than half of the code space "
2634 generated_code.insert(generated_code.end(),
2635 std::make_move_iterator(first_results.begin()),
2636 std::make_move_iterator(first_results.end()));
2640 results += split_point;
2641 total_code_space = 0;
2643 total_code_space += new_code_space;
  std::vector<size_t> sizes;
  for (const auto& result : results) {
    // ...
  }
  // ...
  for (auto& result : results) {
    // ...
    code_space += code_size;
    generated_code.emplace_back(
        // ...
            result.protected_instructions_data.as_vector(),
            result.source_positions.as_vector(),
            result.inlining_positions.as_vector(),
            // ...
            result.frame_has_feedback_slot, this_code_space, jump_tables),
        std::move(result.assumptions));
  }
  // ...
  DCHECK_EQ(generated_code.capacity(), generated_code.size());
  // ...
  return generated_code;
      !code->for_debugging()) {
  // ...
      code->for_debugging()) {
  // ...
      !code->is_liftoff()) {
  // ...
      !code->is_turbofan()) {
  // ...

  const uint32_t num_imports = module_->num_imported_functions;
  // ...
  size_t removed_codesize = 0;
  size_t removed_metadatasize = 0;
  // ...
    if (code && ShouldRemoveCode(code, filter)) {
      removed_codesize += code->instructions_size();
      removed_metadatasize += code->EstimateCurrentMemoryConsumption();
      // ...
      code->DecRefOnLiveCode();
      uint32_t func_index = i + num_imports;
      // ...
  return std::make_pair(removed_codesize, removed_metadatasize);
  size_t codesize_liftoff = 0;
  // ...
    if (code && code->is_liftoff()) {
      codesize_liftoff += code->instructions_size();
    }
  // ...
  return codesize_liftoff;
  // ...
    CHECK(code->is_dying());
    CHECK_EQ(code->ref_count_.load(std::memory_order_acquire), 0);
  std::shared_ptr<base::OwnedVector<const uint8_t>> wire_bytes =
      // ...
  result += wire_bytes_size;
  // ...
  result += module_->num_declared_functions * sizeof(uint32_t);
  // ...
    if (external_storage > sizeof(std::string)) {
      result += external_storage;
    }
  // ...
  for (auto& [address, unique_code_ptr] : owned_code_) {
    result += unique_code_ptr->EstimateCurrentMemoryConsumption();
  }
  // ...
    result += code->EstimateCurrentMemoryConsumption();
  if (v8_flags.trace_wasm_offheap_memory) {
    PrintF("NativeModule wire bytes: %zu\n", wire_bytes_size);
  // ...
  PrintF("Off-heap memory size of NativeModule: %zu\n",
  for (auto& code_space : owned_code_space) {
    DCHECK(code_space.IsReserved());
    TRACE_HEAP("VMem Release: 0x%" PRIxPTR ":0x%" PRIxPTR " (%zu)\n",
               code_space.address(), code_space.end(), code_space.size());

#if defined(V8_OS_WIN64)
    if (CanRegisterUnwindInfoForNonABICompliantCodeRange()) {
      // ...
          reinterpret_cast<void*>(code_space.address()));
    }
    // ...
    DCHECK(!code_space.IsReserved());
  }
  // ...
  [[maybe_unused]] size_t old_committed =
      // ...
  DCHECK_LE(committed_size, old_committed);
  Address region_start = iter->first;
  Address region_end = iter->second.first;
  // ...
  return region_start <= pc && pc < region_end ? candidate : nullptr;
  // ...
  if (candidate) return candidate->Lookup(pc);
  // ...
  return isolate->wasm_code_look_up_cache()->GetCacheEntry(pc)->code;
  // ...
  auto* entry = isolate->wasm_code_look_up_cache()->GetCacheEntry(pc);
  auto expect_safepoint = [code, pc]() {
    const bool is_protected_instruction = code->IsProtectedInstruction(
        pc - WasmFrameConstants::kProtectedInstructionReturnAddressOffset);
    return !is_protected_instruction || code->for_debugging();
  };
  if (!entry->safepoint_entry.is_initialized() && expect_safepoint()) {
    // ...
    CHECK(entry->safepoint_entry.is_initialized());
  } else if (expect_safepoint()) {
    // ...
  } else {
    DCHECK(!entry->safepoint_entry.is_initialized());
  }
  return std::make_pair(code, entry->safepoint_entry);
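// Caching rationale: computing a SafepointEntry is comparatively expensive,
// so the lookup cache memoizes it per pc. For protected instructions (the
// trap-handler-based out-of-bounds traps) no safepoint is expected unless the
// code was compiled for debugging, which is exactly the condition encoded by
// expect_safepoint() above.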
  return isolate->wasm_code_look_up_cache()->Flush();
  // ...
    : previous_scope_(current_code_refs_scope) {
  current_code_refs_scope = this;
  // ...
  DCHECK_EQ(this, current_code_refs_scope);
  // ...
    if (entry->pc.load(std::memory_order_acquire) == pc) {
  // ...
  entry->pc.store(pc, std::memory_order_release);
  // ...
  entry->safepoint_entry.Reset();