v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
setup-heap-internal.cc
Go to the documentation of this file.
1// Copyright 2017 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6#include "src/api/api.h"
12#include "src/heap/factory.h"
13#include "src/heap/heap-inl.h"
14#include "src/heap/new-spaces.h"
27#include "src/objects/foreign.h"
37#include "src/objects/map.h"
42#include "src/objects/promise.h"
44#include "src/objects/script.h"
46#include "src/objects/smi.h"
48#include "src/objects/string.h"
55#include "src/regexp/regexp.h"
56#include "src/roots/roots.h"
58
59#if V8_ENABLE_WEBASSEMBLY
61#endif // V8_ENABLE_WEBASSEMBLY
62
63namespace v8 {
64namespace internal {
65
66namespace {
67
// Creates a SharedFunctionInfo for a builtin, with the empty string as its
// name, `len` as its (adapted) formal parameter count, and the function kind
// referenced as `kind` in the call below.
// NOTE(review): this is a Doxygen extraction; original line 70 (the tail of
// the parameter list, presumably `FunctionKind kind) {`) was dropped —
// restore from the upstream file before compiling.
68DirectHandle<SharedFunctionInfo> CreateSharedFunctionInfo(
69 Isolate* isolate, Builtin builtin, int len,
71 DirectHandle<SharedFunctionInfo> shared =
72 isolate->factory()->NewSharedFunctionInfoForBuiltin(
73 isolate->factory()->empty_string(), builtin, len, kAdapt, kind);
74 return shared;
75}
76
77#ifdef DEBUG
// Debug-only predicate: returns true when a map with this instance type /
// elements kind must live in a mutable space (it cannot be read-only).
// Per the comment below: JSObject maps carry a mutable
// prototype_validity_cell, and Wasm struct/array maps carry mutable subtype
// lists, so both are mutable unless specifically exempted.
// NOTE(review): extraction dropped original line 80 (the right-hand side of
// `is_maybe_read_only_js_object`) and line 90 (part of the DCHECK_IMPLIES
// condition) — restore from upstream.
78bool IsMutableMap(InstanceType instance_type, ElementsKind elements_kind) {
79 bool is_maybe_read_only_js_object =
81 bool is_js_object = InstanceTypeChecker::IsJSObject(instance_type);
82 bool is_always_shared_space_js_object =
83 InstanceTypeChecker::IsAlwaysSharedSpaceJSObject(instance_type);
84 bool is_wasm_object = false;
85#if V8_ENABLE_WEBASSEMBLY
86 is_wasm_object =
87 instance_type == WASM_STRUCT_TYPE || instance_type == WASM_ARRAY_TYPE;
88#endif // V8_ENABLE_WEBASSEMBLY
// Sanity check: JSObject maps created here use only the elements kinds
// listed below (dictionary, terminal, or shared-array elements).
89 DCHECK_IMPLIES(is_js_object &&
91 IsDictionaryElementsKind(elements_kind) ||
92 IsTerminalElementsKind(elements_kind) ||
93 is_maybe_read_only_js_object ||
94 (is_always_shared_space_js_object &&
95 elements_kind == SHARED_ARRAY_ELEMENTS));
96 // JSObjects have maps with a mutable prototype_validity_cell, so they cannot
97 // go in RO_SPACE. Maps for managed Wasm objects have mutable subtype lists.
98 return (is_js_object && !is_maybe_read_only_js_object &&
99 !is_always_shared_space_js_object) ||
100 is_wasm_object;
101}
102#endif
103
// One row of the constant-string tables below: the string's characters plus
// the root slot that receives the internalized string (loops later read
// `entry.contents` and `entry.index`).
// NOTE(review): extraction dropped original line 106 — presumably
// `RootIndex index;`, given the brace-initializers below supply a
// `RootIndex::k##name` second element. Restore from upstream.
104struct ConstantStringInit {
105 base::Vector<const char> contents;
107};
108
// Strings allocated early so their compressed addresses stay small (see the
// V8_STATIC_ROOTS_BOOL comment in CreateReadOnlyHeapObjects).
// NOTE(review): extraction dropped original lines 113 and 115 — the two
// macro-list invocations that expand CONSTANT_STRING_ELEMENT (each surviving
// line ending in `, /* not used */)` is the tail of one). Restore from
// upstream.
109constexpr std::initializer_list<ConstantStringInit>
110 kImportantConstantStringTable{
111#define CONSTANT_STRING_ELEMENT(_, name, contents) \
112 {{contents, arraysize(contents) - 1}, RootIndex::k##name},
114 CONSTANT_STRING_ELEMENT, /* not used */)
116 CONSTANT_STRING_ELEMENT, /* not used */)
117#undef CONSTANT_STRING_ELEMENT
118 };
119
// The remaining constant strings, allocated after the "important" table;
// their addresses need not fit small immediates.
// NOTE(review): extraction dropped original line 124 — the macro-list
// invocation expanding CONSTANT_STRING_ELEMENT. Restore from upstream.
120constexpr std::initializer_list<ConstantStringInit>
121 kNotImportantConstantStringTable{
122#define CONSTANT_STRING_ELEMENT(_, name, contents) \
123 {{contents, arraysize(contents) - 1}, RootIndex::k##name},
125 CONSTANT_STRING_ELEMENT, /* not used */)
126#undef CONSTANT_STRING_ELEMENT
127 };
128
// One row of kStringTypeTable: the string map's instance type, instance
// size, and the root slot receiving the map (loops below read `entry.type`,
// `entry.size`, `entry.index`).
// NOTE(review): extraction dropped original line 130 — presumably
// `InstanceType type;`, which the consuming loops require. Restore from
// upstream.
129struct StringTypeInit {
131 int size;
132 RootIndex index;
133};
134
// Table of all string map variants, filled in by a list macro.
// NOTE(review): extraction dropped original line 138 — the list-macro
// invocation (presumably `STRING_TYPE_LIST(STRING_TYPE_ELEMENT)`) that
// actually populates the table. Restore from upstream.
135constexpr std::initializer_list<StringTypeInit> kStringTypeTable{
136#define STRING_TYPE_ELEMENT(type, size, name, CamelName) \
137 {type, size, RootIndex::k##CamelName##Map},
139#undef STRING_TYPE_ELEMENT
140};
141
142struct StructInit {
143 InstanceType type;
144 int size;
145 RootIndex index;
146};
147
148constexpr bool is_important_struct(InstanceType type) {
149 return type == ENUM_CACHE_TYPE || type == CALL_SITE_INFO_TYPE;
150}
151
152template <typename StructType>
153constexpr int StructSize() {
154 if constexpr (std::is_base_of_v<StructLayout, StructType>) {
155 return sizeof(StructType);
156 } else {
157 return StructType::kSize;
158 }
159}
160
// Alias used by the STRUCT_TABLE macro expansion so AllocationSite rows can
// name a "without weak next" variant that shares AllocationSite's layout.
161using AllocationSiteWithoutWeakNext = AllocationSite;
// Table of every struct / allocation-site / data-handler map to create.
// NOTE(review): extraction dropped original lines 165, 169 and 173 — the
// three list-macro invocations (one per #define block) that populate the
// table. Restore from upstream.
162constexpr std::initializer_list<StructInit> kStructTable{
163#define STRUCT_TABLE_ELEMENT(TYPE, Name, name) \
164 {TYPE, StructSize<Name>(), RootIndex::k##Name##Map},
166#undef STRUCT_TABLE_ELEMENT
167#define ALLOCATION_SITE_ELEMENT(_, TYPE, Name, Size, name) \
168 {TYPE, sizeof(Name##Size), RootIndex::k##Name##Size##Map},
170#undef ALLOCATION_SITE_ELEMENT
171#define DATA_HANDLER_ELEMENT(_, TYPE, Name, Size, name) \
172 {TYPE, Name::SizeFor(Size), RootIndex::k##Name##Size##Map},
174#undef DATA_HANDLER_ELEMENT
175};
176
177} // namespace
178
// Top-level heap-object bootstrap: creates the read-only objects once per
// read-only heap, then the mutable heap objects, returning false on
// allocation failure.
// NOTE(review): extraction dropped original line 179 — the function
// signature. From the body it takes an `Isolate*` named `isolate` and
// returns bool (presumably SetupIsolateDelegate::SetupHeapInternal) —
// confirm against upstream.
180 auto heap = isolate->heap();
// Read-only roots are created at most once; re-entry (e.g. with a shared
// read-only heap) skips straight to the mutable objects.
181 if (!isolate->read_only_heap()->roots_init_complete()) {
182 if (!heap->CreateReadOnlyHeapObjects()) return false;
183 isolate->VerifyStaticRoots();
184 isolate->read_only_heap()->OnCreateRootsComplete(isolate);
185 }
186 // We prefer to fit all of read-only space in one page.
187 CHECK_EQ(heap->read_only_space()->pages().size(), 1);
188 auto ro_size = heap->read_only_space()->Size();
189 DCHECK_EQ(heap->old_space()->Size(), 0);
190 DCHECK_IMPLIES(heap->new_space(), heap->new_space()->Size() == 0);
191 auto res = heap->CreateMutableHeapObjects();
// Creating mutable objects must not have grown read-only space.
192 DCHECK_EQ(heap->read_only_space()->Size(), ro_size);
193 USE(ro_size);
194 return res;
195}
196
// Creates every read-only-space object, in a deliberate order: early maps,
// important (small-address) objects, late non-JSReceiver maps, remaining
// objects, then JSReceiver maps last (see the ordering comment below).
// Returns false on allocation failure.
// NOTE(review): extraction dropped original lines 206, 220 and 224-225 —
// line 206 presumably sized/checked ro_space under V8_STATIC_ROOTS_BOOL,
// and 224-225 opened the loop over root indices whose body (`DCHECK(...)`
// and closing `}`) survives below. Restore from upstream.
197bool Heap::CreateReadOnlyHeapObjects() {
198 // Create initial maps and important objects.
199 if (!CreateEarlyReadOnlyMapsAndObjects()) return false;
200 if (!CreateImportantReadOnlyObjects()) return false;
201
202#if V8_STATIC_ROOTS_BOOL
203 // The read only heap is sorted such that often used objects are allocated
204 // early for their compressed address to fit into 12bit arm immediates.
205 ReadOnlySpace* ro_space = isolate()->heap()->read_only_space();
207 USE(ro_space);
208#endif
209
210 if (!CreateLateReadOnlyNonJSReceiverMaps()) return false;
211 if (!CreateReadOnlyObjects()) return false;
212
213 // Order is important. JSReceiver maps must come after all non-JSReceiver maps
214 // in RO space with a sufficiently large gap in address. Currently there are
215 // no JSReceiver instances in RO space.
216 //
217 // See InstanceTypeChecker::kNonJsReceiverMapLimit.
218 if (!CreateLateReadOnlyJSReceiverMaps()) return false;
219
221
222#ifdef DEBUG
// Debug check: every read-only root slot must have been initialized above.
223 ReadOnlyRoots roots(isolate());
226 DCHECK(roots.is_initialized(pos));
227 }
228 roots.VerifyTypes();
229#endif
230 return true;
231}
232
// Creates the mutable (non-read-only) bootstrap objects and initializes the
// heap's object lists to undefined sentinels. Returns true on success.
// NOTE(review): extraction dropped original lines 240 and 243-244 —
// presumably the calls that create the initial mutable maps/objects (the
// "Create initial objects" comment below has no statement under it).
// Restore from upstream.
233bool Heap::CreateMutableHeapObjects() {
234 ReadOnlyRoots roots(this);
235
236 // Ensure that all young generation pages are iterable. It must be after heap
237 // setup, so that the maps have been created.
238 if (new_space()) new_space()->MakeIterable();
239
241
242 // Create initial objects
// No GC may have happened yet at this point of bootstrapping.
245 CHECK_EQ(0u, gc_count_);
246
// Seed the various heap-managed lists with undefined (i.e. "empty").
247 set_native_contexts_list(roots.undefined_value());
248 set_allocation_sites_list(roots.undefined_value());
249 set_dirty_js_finalization_registries_list(roots.undefined_value());
250 set_dirty_js_finalization_registries_list_tail(roots.undefined_value());
251
252 return true;
253}
254
255// Allocates contextless map in read-only or map (old) space.
// Allocates Map::kSize bytes in `allocation_type` space, stamps the meta map
// on the result, and initializes it via Factory::InitializeMap.
// NOTE(review): extraction dropped original lines 262, 264-265, 271 and 276:
// 262/264-265 held the rest of the DCHECK_EQ (the expected AllocationType
// arms) and the `Tagged<HeapObject> result;` declaration used below; 271 the
// write-barrier argument of set_map_after_allocation; 276 presumably
// `return AllocationResult::FromObject(map);`. Restore from upstream.
256AllocationResult Heap::AllocateMap(AllocationType allocation_type,
257 InstanceType instance_type,
258 int instance_size,
259 ElementsKind elements_kind,
260 int inobject_properties) {
261 static_assert(LAST_JS_OBJECT_TYPE == LAST_TYPE);
// Debug check that the caller chose the space IsMutableMap() demands.
263 DCHECK_EQ(allocation_type, IsMutableMap(instance_type, elements_kind)
266 AllocationResult allocation = AllocateRaw(Map::kSize, allocation_type);
267 if (!allocation.To(&result)) return allocation;
268
269 ReadOnlyRoots roots(this);
270 result->set_map_after_allocation(isolate(), roots.meta_map(),
272 Tagged<Map> map = isolate()->factory()->InitializeMap(
273 Cast<Map>(result), instance_type, instance_size, elements_kind,
274 inobject_properties, roots);
275
277}
278
279namespace {
280void InitializePartialMap(Isolate* isolate, Tagged<Map> map,
281 Tagged<Map> meta_map, InstanceType instance_type,
282 int instance_size) {
283 map->set_map_after_allocation(isolate, meta_map, SKIP_WRITE_BARRIER);
284 map->set_instance_type(instance_type);
285 map->set_instance_size(instance_size);
286 map->set_visitor_id(Map::GetVisitorId(map));
287 map->set_inobject_properties_start_or_constructor_function_index(0);
288 DCHECK(!IsJSObjectMap(map));
289 map->set_prototype_validity_cell(Map::kPrototypeChainValidSmi, kRelaxedStore);
290 map->SetInObjectUnusedPropertyFields(0);
291 map->set_bit_field(0);
292 map->set_bit_field2(0);
293 int bit_field3 =
294 Map::Bits3::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
295 Map::Bits3::OwnsDescriptorsBit::encode(true) |
296 Map::Bits3::ConstructionCounterBits::encode(Map::kNoSlackTracking);
297 map->set_bit_field3(bit_field3);
298 DCHECK(!map->is_in_retained_map_list());
299 map->clear_padding();
300 map->set_elements_kind(TERMINAL_FAST_ELEMENTS_KIND);
301}
302} // namespace
303
// Allocates a map in read-only space and runs InitializePartialMap on it;
// the caller must later call FinalizePartialMap once the shared roots exist.
// NOTE(review): extraction dropped original lines 306, 308, 311 and 315 —
// presumably the `Tagged<HeapObject> result;` declaration, the
// `AllocateRaw(Map::kSize, AllocationType::kReadOnly)` expression the
// dangling `AllocationResult allocation =` below needs, the
// `Tagged<Map> map = UncheckedCast<Map>(result);` binding used by the
// InitializePartialMap call, and the final
// `return AllocationResult::FromObject(map);`. Restore from upstream.
304AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
305 int instance_size) {
307 AllocationResult allocation =
309 if (!allocation.To(&result)) return allocation;
310 // Cast<Map> cannot be used due to uninitialized map field.
312 InitializePartialMap(isolate(), map,
313 UncheckedCast<Map>(isolate()->root(RootIndex::kMetaMap)),
314 instance_type, instance_size);
316}
317
// Completes a map created by AllocatePartialMap: installs empty dependent
// code, empty transitions, the (now existing) empty descriptor array, and
// the null prototype / constructor back pointer.
// NOTE(review): extraction dropped original line 323 — the tail of the
// SetInstanceDescriptors call (presumably a write-barrier-mode argument).
// Restore from upstream.
318void Heap::FinalizePartialMap(Tagged<Map> map) {
319 ReadOnlyRoots roots(this);
320 map->set_dependent_code(DependentCode::empty_dependent_code(roots));
321 map->set_raw_transitions(Smi::zero());
322 map->SetInstanceDescriptors(isolate(), roots.empty_descriptor_array(), 0,
324 map->init_prototype_and_constructor_or_back_pointer(roots);
325}
326
// Allocates an object of `map`'s instance size in the requested space and
// stamps `map` on it, skipping the write barrier for young allocations
// (new-space objects are allocated white — see comment below).
// NOTE(review): extraction dropped original lines 327, 331, 337 and 339 —
// the function signature (presumably
// `AllocationResult Heap::Allocate(DirectHandle<Map> map,`), the
// `Tagged<HeapObject> result;` declaration, the `: UPDATE_WRITE_BARRIER;`
// arm of the conditional, and the final
// `return AllocationResult::FromObject(result);`. Restore from upstream.
328 AllocationType allocation_type) {
// Maps themselves must go through AllocateMap/AllocatePartialMap instead.
329 DCHECK(map->instance_type() != MAP_TYPE);
330 int size = map->instance_size();
332 AllocationResult allocation = AllocateRaw(size, allocation_type);
333 if (!allocation.To(&result)) return allocation;
334 // New space objects are allocated white.
335 WriteBarrierMode write_barrier_mode =
336 allocation_type == AllocationType::kYoung ? SKIP_WRITE_BARRIER
338 result->set_map_after_allocation(isolate(), *map, write_barrier_mode);
340}
341
// Bootstraps the very first read-only objects: the falsy/true oddballs and
// empty string (in a carefully chosen address order — see the numbered
// rationale below), the string/symbol/meta/oddball/number maps, the empty
// fixed arrays / enum cache / descriptor array, and finally the bulk of the
// frequently used read-only maps. Returns false on allocation failure.
// NOTE(review): this Doxygen extraction dropped many interior lines
// (original 371, 390-391, 398, 401, 434, 448, 452, 456, 460, 469, 499, 511,
// 520, 523, 530, 533, 539-540, 543, 552, 563, 575-576, 578, 660, 662, 665
// at least): among them the `Tagged<HeapObject> obj;` declaration the
// ALLOCATE_AND_SET_ROOT macro stores into, the empty-string allocation, the
// string-map allocation expressions, several AllocateRaw(...) expressions
// left dangling after `AllocationResult alloc =`, the write-barrier
// arguments of several set_map_after_allocation calls, and the COW
// fixed-array ALLOCATE_PARTIAL_MAP head whose `fixed_cow_array)` tail
// survives. Restore all of these from upstream before compiling.
342bool Heap::CreateEarlyReadOnlyMapsAndObjects() {
343 // Setup maps and objects which are used often, or used in
344 // CreateImportantReadOnlyObjects.
345 ReadOnlyRoots roots(this);
346
347 // First create the following, in the following order:
348 // - Undefined value
349 // - Null value
350 // - Empty string
351 // - False value
352 // - True value
353 // - /String maps
354 // \...
355 // - Symbol map
356 // - Meta-map
357 // - Undefined map
358 // - Null map
359 // - Boolean map
360 //
361 // This is so that:
362 // 1. The falsy values are the first in the space, allowing ToBoolean false
363 // checks to be a single less-than.
364 // 2. The true value is immediately after the falsy values, so that we can
365 // use a single compare's condition flags to check both falsy and true.
366 // 3. The string maps are all together, and are the first maps, allowing
367 // them to be checked with a single less-than if we know we have a map.
368 // 4. The symbol map is with the string maps, for similarly fast Name
369 // checks.
370
372 {
373 // We're a bit loose with raw pointers here for readability -- this is all
374 // guaranteed to be safe anyway since the allocations can't cause a GC, so
375 // disable gcmole in this range.
376 DisableGCMole no_gc_mole;
377
378 // First, set up the roots to all point to the right offset in the
379 // allocation folded allocation.
380#define ALLOCATE_AND_SET_ROOT(Type, name, Size) \
381 { \
382 AllocationResult alloc = AllocateRaw(Size, AllocationType::kReadOnly); \
383 if (!alloc.To(&obj)) return false; \
384 } \
385 Tagged<Type> name = UncheckedCast<Type>(obj); \
386 set_##name(name)
387
388 ALLOCATE_AND_SET_ROOT(Undefined, undefined_value, sizeof(Undefined));
389 ALLOCATE_AND_SET_ROOT(Null, null_value, sizeof(Null));
392 ALLOCATE_AND_SET_ROOT(False, false_value, sizeof(False));
393 ALLOCATE_AND_SET_ROOT(True, true_value, sizeof(True));
394
395 for (const StringTypeInit& entry : kStringTypeTable) {
396 {
397 AllocationResult alloc =
399 if (!alloc.To(&obj)) return false;
400 }
402 roots_table()[entry.index] = map.ptr();
403 }
404 ALLOCATE_AND_SET_ROOT(Map, symbol_map, Map::kSize);
405
406 ALLOCATE_AND_SET_ROOT(Map, meta_map, Map::kSize);
407 // Keep HeapNumber and Oddball maps together for cheap NumberOrOddball
408 // checks.
409 ALLOCATE_AND_SET_ROOT(Map, undefined_map, Map::kSize);
410 ALLOCATE_AND_SET_ROOT(Map, null_map, Map::kSize);
411 // Keep HeapNumber and Boolean maps together for cheap NumberOrBoolean
412 // checks.
413 ALLOCATE_AND_SET_ROOT(Map, boolean_map, Map::kSize);
414 // Keep HeapNumber and BigInt maps together for cheaper numerics checks.
415 ALLOCATE_AND_SET_ROOT(Map, heap_number_map, Map::kSize);
416 ALLOCATE_AND_SET_ROOT(Map, bigint_map, Map::kSize);
417
418#undef ALLOCATE_AND_SET_ROOT
419
420 // Then, initialise the initial maps.
421 InitializePartialMap(isolate(), meta_map, meta_map, MAP_TYPE, Map::kSize);
422 InitializePartialMap(isolate(), undefined_map, meta_map, ODDBALL_TYPE,
423 sizeof(Undefined));
424 InitializePartialMap(isolate(), null_map, meta_map, ODDBALL_TYPE,
425 sizeof(Null));
426 InitializePartialMap(isolate(), boolean_map, meta_map, ODDBALL_TYPE,
427 sizeof(Boolean));
428 boolean_map->SetConstructorFunctionIndex(Context::BOOLEAN_FUNCTION_INDEX);
429 InitializePartialMap(isolate(), heap_number_map, meta_map, HEAP_NUMBER_TYPE,
430 sizeof(HeapNumber));
431 heap_number_map->SetConstructorFunctionIndex(
432 Context::NUMBER_FUNCTION_INDEX);
433 InitializePartialMap(isolate(), bigint_map, meta_map, BIGINT_TYPE,
435
436 for (const StringTypeInit& entry : kStringTypeTable) {
437 Tagged<Map> map = UncheckedCast<Map>(roots.object_at(entry.index));
438 InitializePartialMap(isolate(), map, meta_map, entry.type, entry.size);
439 map->SetConstructorFunctionIndex(Context::STRING_FUNCTION_INDEX);
440 if (StringShape(entry.type).IsCons()) map->mark_unstable();
441 }
442 InitializePartialMap(isolate(), symbol_map, meta_map, SYMBOL_TYPE,
443 sizeof(Symbol));
444 symbol_map->SetConstructorFunctionIndex(Context::SYMBOL_FUNCTION_INDEX);
445
446 // Finally, initialise the non-map objects using those maps.
447 undefined_value->set_map_after_allocation(isolate(), undefined_map,
449 undefined_value->set_kind(Oddball::kUndefined);
450
451 null_value->set_map_after_allocation(isolate(), null_map,
453 null_value->set_kind(Oddball::kNull);
454
455 true_value->set_map_after_allocation(isolate(), boolean_map,
457 true_value->set_kind(Oddball::kTrue);
458
459 false_value->set_map_after_allocation(isolate(), boolean_map,
461 false_value->set_kind(Oddball::kFalse);
462
463 // The empty string is initialised with an empty hash despite being
464 // internalized -- this will be calculated once the hashseed is available.
465 // TODO(leszeks): Unify this initialisation with normal string
466 // initialisation.
467 empty_string->set_map_after_allocation(
468 isolate(), roots.unchecked_internalized_one_byte_string_map(),
470 empty_string->clear_padding_destructively(0);
471 empty_string->set_length(0);
472 empty_string->set_raw_hash_field(String::kEmptyHashField);
473 }
474
475 // Now that the initial objects are allocated, we can start allocating other
476 // objects where the order matters less.
477
478#define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name) \
479 { \
480 Tagged<Map> map; \
481 if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \
482 set_##field_name##_map(map); \
483 }
484
485 { // Partial map allocation
486 ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array);
487 ALLOCATE_PARTIAL_MAP(TRUSTED_FIXED_ARRAY_TYPE, kVariableSizeSentinel,
488 trusted_fixed_array);
489 ALLOCATE_PARTIAL_MAP(PROTECTED_FIXED_ARRAY_TYPE, kVariableSizeSentinel,
490 protected_fixed_array);
491 ALLOCATE_PARTIAL_MAP(WEAK_FIXED_ARRAY_TYPE, kVariableSizeSentinel,
492 weak_fixed_array);
493 ALLOCATE_PARTIAL_MAP(TRUSTED_WEAK_FIXED_ARRAY_TYPE, kVariableSizeSentinel,
494 trusted_weak_fixed_array);
495 ALLOCATE_PARTIAL_MAP(PROTECTED_WEAK_FIXED_ARRAY_TYPE, kVariableSizeSentinel,
496 protected_weak_fixed_array);
497 ALLOCATE_PARTIAL_MAP(WEAK_ARRAY_LIST_TYPE, kVariableSizeSentinel,
498 weak_array_list);
500 fixed_cow_array)
501 DCHECK_NE(roots.fixed_array_map(), roots.fixed_cow_array_map());
502
503 ALLOCATE_PARTIAL_MAP(DESCRIPTOR_ARRAY_TYPE, kVariableSizeSentinel,
504 descriptor_array)
505
506 ALLOCATE_PARTIAL_MAP(HOLE_TYPE, Hole::kSize, hole);
507
508 // Some struct maps which we need for later dependencies
509 for (const StructInit& entry : kStructTable) {
510 if (!is_important_struct(entry.type)) continue;
512 if (!AllocatePartialMap(entry.type, entry.size).To(&map)) return false;
513 roots_table()[entry.index] = map.ptr();
514 }
515 }
516#undef ALLOCATE_PARTIAL_MAP
517
518 {
519 AllocationResult alloc =
521 if (!alloc.To(&obj)) return false;
522 obj->set_map_after_allocation(isolate(), roots.fixed_array_map(),
524 Cast<FixedArray>(obj)->set_length(0);
525 }
526 set_empty_fixed_array(Cast<FixedArray>(obj));
527
528 {
529 AllocationResult alloc =
531 if (!alloc.To(&obj)) return false;
532 obj->set_map_after_allocation(isolate(), roots.weak_fixed_array_map(),
534 Cast<WeakFixedArray>(obj)->set_length(0);
535 }
536 set_empty_weak_fixed_array(Cast<WeakFixedArray>(obj));
537
538 {
541 if (!allocation.To(&obj)) return false;
542 obj->set_map_after_allocation(isolate(), roots.weak_array_list_map(),
544 Cast<WeakArrayList>(obj)->set_capacity(0);
545 Cast<WeakArrayList>(obj)->set_length(0);
546 }
547 set_empty_weak_array_list(Cast<WeakArrayList>(obj));
548
549 DCHECK(!HeapLayout::InYoungGeneration(roots.undefined_value()));
550 {
551 AllocationResult allocation =
553 if (!allocation.To(&obj)) return false;
554 }
555 set_the_hole_value(Cast<Hole>(obj));
556
557 // Set preliminary exception sentinel value before actually initializing it.
558 set_exception(Cast<Hole>(obj));
559
560 // Allocate the empty enum cache.
561 {
562 AllocationResult allocation =
564 if (!allocation.To(&obj)) return false;
565 }
566 set_empty_enum_cache(Cast<EnumCache>(obj));
567 Cast<EnumCache>(obj)->set_keys(roots.empty_fixed_array());
568 Cast<EnumCache>(obj)->set_indices(roots.empty_fixed_array());
569
570 // Allocate the empty descriptor array.
571 {
572 int size = DescriptorArray::SizeFor(0);
573 if (!AllocateRaw(size, AllocationType::kReadOnly).To(&obj)) return false;
574 obj->set_map_after_allocation(isolate(), roots.descriptor_array_map(),
577 array->Initialize(roots.empty_enum_cache(), roots.undefined_value(), 0, 0,
579 }
580 set_empty_descriptor_array(Cast<DescriptorArray>(obj));
581
582 // Fix the instance_descriptors for the existing maps.
583 FinalizePartialMap(roots.meta_map());
584 FinalizePartialMap(roots.fixed_array_map());
585 FinalizePartialMap(roots.trusted_fixed_array_map());
586 FinalizePartialMap(roots.protected_fixed_array_map());
587 FinalizePartialMap(roots.weak_fixed_array_map());
588 FinalizePartialMap(roots.weak_array_list_map());
589 FinalizePartialMap(roots.trusted_weak_fixed_array_map());
590 FinalizePartialMap(roots.protected_weak_fixed_array_map());
591 FinalizePartialMap(roots.fixed_cow_array_map());
592 FinalizePartialMap(roots.descriptor_array_map());
593 FinalizePartialMap(roots.undefined_map());
594 roots.undefined_map()->set_is_undetectable(true);
595 FinalizePartialMap(roots.null_map());
596 roots.null_map()->set_is_undetectable(true);
597 FinalizePartialMap(roots.boolean_map());
598 FinalizePartialMap(roots.heap_number_map());
599 FinalizePartialMap(roots.bigint_map());
600 FinalizePartialMap(roots.hole_map());
601 FinalizePartialMap(roots.symbol_map());
602 for (const StructInit& entry : kStructTable) {
603 if (!is_important_struct(entry.type)) continue;
604 FinalizePartialMap(Cast<Map>(roots.object_at(entry.index)));
605 }
606 for (const StringTypeInit& entry : kStringTypeTable) {
607 FinalizePartialMap(Cast<Map>(roots.object_at(entry.index)));
608 }
609
610#define ALLOCATE_MAP(instance_type, size, field_name) \
611 { \
612 Tagged<Map> map; \
613 if (!AllocateMap(AllocationType::kReadOnly, (instance_type), size) \
614 .To(&map)) { \
615 return false; \
616 } \
617 set_##field_name##_map(map); \
618 }
619
620#define ALLOCATE_VARSIZE_MAP(instance_type, field_name) \
621 ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name)
622
623#define ALLOCATE_PRIMITIVE_MAP(instance_type, size, field_name, \
624 constructor_function_index) \
625 { \
626 ALLOCATE_MAP((instance_type), (size), field_name); \
627 roots.field_name##_map()->SetConstructorFunctionIndex( \
628 (constructor_function_index)); \
629 }
630
631 { // Map allocation
632 ALLOCATE_VARSIZE_MAP(SCOPE_INFO_TYPE, scope_info)
633 ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_info)
634 ALLOCATE_VARSIZE_MAP(CLOSURE_FEEDBACK_CELL_ARRAY_TYPE,
635 closure_feedback_cell_array)
636 ALLOCATE_VARSIZE_MAP(FEEDBACK_VECTOR_TYPE, feedback_vector)
637
638 ALLOCATE_MAP(FOREIGN_TYPE, Foreign::kSize, foreign)
639 ALLOCATE_MAP(TRUSTED_FOREIGN_TYPE, TrustedForeign::kSize, trusted_foreign)
640 ALLOCATE_MAP(MEGA_DOM_HANDLER_TYPE, MegaDomHandler::kSize, mega_dom_handler)
641
642 ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array)
643 roots.fixed_double_array_map()->set_elements_kind(HOLEY_DOUBLE_ELEMENTS);
644 ALLOCATE_VARSIZE_MAP(FEEDBACK_METADATA_TYPE, feedback_metadata)
645 ALLOCATE_VARSIZE_MAP(BYTE_ARRAY_TYPE, byte_array)
646 ALLOCATE_VARSIZE_MAP(TRUSTED_BYTE_ARRAY_TYPE, trusted_byte_array)
647 ALLOCATE_VARSIZE_MAP(BYTECODE_ARRAY_TYPE, bytecode_array)
648 ALLOCATE_VARSIZE_MAP(FREE_SPACE_TYPE, free_space)
649 ALLOCATE_VARSIZE_MAP(PROPERTY_ARRAY_TYPE, property_array)
650 ALLOCATE_VARSIZE_MAP(SMALL_ORDERED_HASH_MAP_TYPE, small_ordered_hash_map)
651 ALLOCATE_VARSIZE_MAP(SMALL_ORDERED_HASH_SET_TYPE, small_ordered_hash_set)
652 ALLOCATE_VARSIZE_MAP(SMALL_ORDERED_NAME_DICTIONARY_TYPE,
653 small_ordered_name_dictionary)
654
655 ALLOCATE_VARSIZE_MAP(INSTRUCTION_STREAM_TYPE, instruction_stream)
656
657 ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell);
658 {
659 // The invalid_prototype_validity_cell is needed for JSObject maps.
661 AllocationResult alloc =
663 if (!alloc.To(&obj)) return false;
664 obj->set_map_after_allocation(isolate(), roots.cell_map(),
666 Cast<Cell>(obj)->set_value(value);
667 set_invalid_prototype_validity_cell(Cast<Cell>(obj));
668 }
669
670 ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
671 ALLOCATE_MAP(FILLER_TYPE, kTaggedSize, one_pointer_filler)
672 ALLOCATE_MAP(FILLER_TYPE, 2 * kTaggedSize, two_pointer_filler)
673
674 // The "no closures" and "one closure" FeedbackCell maps need
675 // to be marked unstable because their objects can change maps.
676 ALLOCATE_MAP(FEEDBACK_CELL_TYPE, FeedbackCell::kAlignedSize,
677 no_closures_cell)
678 roots.no_closures_cell_map()->mark_unstable();
679 ALLOCATE_MAP(FEEDBACK_CELL_TYPE, FeedbackCell::kAlignedSize,
680 one_closure_cell)
681 roots.one_closure_cell_map()->mark_unstable();
682 ALLOCATE_MAP(FEEDBACK_CELL_TYPE, FeedbackCell::kAlignedSize,
683 many_closures_cell)
684
685 ALLOCATE_VARSIZE_MAP(TRANSITION_ARRAY_TYPE, transition_array)
686
687 ALLOCATE_VARSIZE_MAP(HASH_TABLE_TYPE, hash_table)
688 ALLOCATE_VARSIZE_MAP(ORDERED_NAME_DICTIONARY_TYPE, ordered_name_dictionary)
689 ALLOCATE_VARSIZE_MAP(NAME_DICTIONARY_TYPE, name_dictionary)
690 ALLOCATE_VARSIZE_MAP(SWISS_NAME_DICTIONARY_TYPE, swiss_name_dictionary)
691 ALLOCATE_VARSIZE_MAP(GLOBAL_DICTIONARY_TYPE, global_dictionary)
692 ALLOCATE_VARSIZE_MAP(NUMBER_DICTIONARY_TYPE, number_dictionary)
693
694 ALLOCATE_VARSIZE_MAP(REGISTERED_SYMBOL_TABLE_TYPE, registered_symbol_table)
695
696 ALLOCATE_VARSIZE_MAP(ARRAY_LIST_TYPE, array_list)
697
698 ALLOCATE_MAP(ACCESSOR_INFO_TYPE, AccessorInfo::kSize, accessor_info)
699 ALLOCATE_MAP(INTERCEPTOR_INFO_TYPE, InterceptorInfo::kSize,
700 interceptor_info)
701
702 ALLOCATE_VARSIZE_MAP(PREPARSE_DATA_TYPE, preparse_data)
703 ALLOCATE_MAP(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kSize,
704 shared_function_info)
705 ALLOCATE_MAP(CODE_TYPE, Code::kSize, code)
706
707 return true;
708 }
709}
710
// Creates the remaining non-JSReceiver read-only maps: the non-important
// struct maps, Torque-defined fixed/varsize maps, hash tables, regexp data,
// modules, and (when WebAssembly is enabled) the Wasm data maps. Must run
// before CreateLateReadOnlyJSReceiverMaps (see the ordering comment in
// CreateReadOnlyHeapObjects). Uses the ALLOCATE_MAP / ALLOCATE_VARSIZE_MAP
// macros defined in CreateEarlyReadOnlyMapsAndObjects and #undef'd in
// CreateLateReadOnlyJSReceiverMaps.
// NOTE(review): extraction dropped original line 717 — presumably the
// `Tagged<Map> map;` declaration the loop below stores into. Restore from
// upstream.
711bool Heap::CreateLateReadOnlyNonJSReceiverMaps() {
712 ReadOnlyRoots roots(this);
713 {
714 // Setup the struct maps.
715 for (const StructInit& entry : kStructTable) {
716 if (is_important_struct(entry.type)) continue;
718 if (!AllocateMap(AllocationType::kReadOnly, entry.type, entry.size)
719 .To(&map))
720 return false;
721 roots_table()[entry.index] = map.ptr();
722 }
723
724#define TORQUE_ALLOCATE_MAP(NAME, Name, name) \
725 ALLOCATE_MAP(NAME, Name::SizeFor(), name)
726 TORQUE_DEFINED_FIXED_INSTANCE_TYPE_LIST(TORQUE_ALLOCATE_MAP);
727#undef TORQUE_ALLOCATE_MAP
728
729#define TORQUE_ALLOCATE_VARSIZE_MAP(NAME, Name, name) \
730 /* The DescriptorArray map is pre-allocated and initialized above. */ \
731 if (NAME != DESCRIPTOR_ARRAY_TYPE) { \
732 ALLOCATE_VARSIZE_MAP(NAME, name) \
733 }
734 TORQUE_DEFINED_VARSIZE_INSTANCE_TYPE_LIST(TORQUE_ALLOCATE_VARSIZE_MAP);
735#undef TORQUE_ALLOCATE_VARSIZE_MAP
736
737 ALLOCATE_VARSIZE_MAP(ORDERED_HASH_MAP_TYPE, ordered_hash_map)
738 ALLOCATE_VARSIZE_MAP(ORDERED_HASH_SET_TYPE, ordered_hash_set)
739
740 ALLOCATE_VARSIZE_MAP(SIMPLE_NUMBER_DICTIONARY_TYPE,
741 simple_number_dictionary)
742 ALLOCATE_VARSIZE_MAP(NAME_TO_INDEX_HASH_TABLE_TYPE,
743 name_to_index_hash_table)
744
745 ALLOCATE_VARSIZE_MAP(EMBEDDER_DATA_ARRAY_TYPE, embedder_data_array)
746 ALLOCATE_VARSIZE_MAP(EPHEMERON_HASH_TABLE_TYPE, ephemeron_hash_table)
747
748 ALLOCATE_VARSIZE_MAP(SCRIPT_CONTEXT_TABLE_TYPE, script_context_table)
749
750 ALLOCATE_VARSIZE_MAP(OBJECT_BOILERPLATE_DESCRIPTION_TYPE,
751 object_boilerplate_description)
752
753 ALLOCATE_VARSIZE_MAP(COVERAGE_INFO_TYPE, coverage_info);
754 ALLOCATE_VARSIZE_MAP(REG_EXP_MATCH_INFO_TYPE, regexp_match_info);
755
756 ALLOCATE_MAP(REG_EXP_DATA_TYPE, RegExpData::kSize, regexp_data);
757 ALLOCATE_MAP(ATOM_REG_EXP_DATA_TYPE, AtomRegExpData::kSize,
758 atom_regexp_data);
759 ALLOCATE_MAP(IR_REG_EXP_DATA_TYPE, IrRegExpData::kSize, ir_regexp_data);
760
761 ALLOCATE_MAP(SOURCE_TEXT_MODULE_TYPE, SourceTextModule::kSize,
762 source_text_module)
763 ALLOCATE_MAP(SYNTHETIC_MODULE_TYPE, SyntheticModule::kSize,
764 synthetic_module)
765
766 ALLOCATE_MAP(CONTEXT_SIDE_PROPERTY_CELL_TYPE,
767 ContextSidePropertyCell::kSize,
768 global_context_side_property_cell)
769
770 IF_WASM(ALLOCATE_MAP, WASM_IMPORT_DATA_TYPE, WasmImportData::kSize,
771 wasm_import_data)
772 IF_WASM(ALLOCATE_MAP, WASM_CAPI_FUNCTION_DATA_TYPE,
773 WasmCapiFunctionData::kSize, wasm_capi_function_data)
774 IF_WASM(ALLOCATE_MAP, WASM_EXPORTED_FUNCTION_DATA_TYPE,
775 WasmExportedFunctionData::kSize, wasm_exported_function_data)
776 IF_WASM(ALLOCATE_MAP, WASM_INTERNAL_FUNCTION_TYPE,
777 WasmInternalFunction::kSize, wasm_internal_function)
778 IF_WASM(ALLOCATE_MAP, WASM_FUNC_REF_TYPE, WasmFuncRef::kSize, wasm_func_ref)
779 IF_WASM(ALLOCATE_MAP, WASM_JS_FUNCTION_DATA_TYPE, WasmJSFunctionData::kSize,
780 wasm_js_function_data)
781 IF_WASM(ALLOCATE_MAP, WASM_RESUME_DATA_TYPE, WasmResumeData::kSize,
782 wasm_resume_data)
783 IF_WASM(ALLOCATE_MAP, WASM_SUSPENDER_OBJECT_TYPE,
784 WasmSuspenderObject::kSize, wasm_suspender_object)
785 IF_WASM(ALLOCATE_MAP, WASM_TYPE_INFO_TYPE, kVariableSizeSentinel,
786 wasm_type_info)
787 IF_WASM(ALLOCATE_MAP, WASM_NULL_TYPE, kVariableSizeSentinel, wasm_null);
788 IF_WASM(ALLOCATE_MAP, WASM_TRUSTED_INSTANCE_DATA_TYPE,
789 WasmTrustedInstanceData::kSize, wasm_trusted_instance_data);
790 IF_WASM(ALLOCATE_VARSIZE_MAP, WASM_DISPATCH_TABLE_TYPE,
791 wasm_dispatch_table);
792
793 ALLOCATE_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell)
794 ALLOCATE_MAP(INTERPRETER_DATA_TYPE, InterpreterData::kSize,
795 interpreter_data)
796 ALLOCATE_MAP(SHARED_FUNCTION_INFO_WRAPPER_TYPE,
797 SharedFunctionInfoWrapper::kSize, shared_function_info_wrapper)
798
799 ALLOCATE_MAP(DICTIONARY_TEMPLATE_INFO_TYPE, DictionaryTemplateInfo::kSize,
800 dictionary_template_info)
801 }
802
803 return true;
804}
805
// Creates the few JSReceiver maps allowed in read-only space (message
// object, external object, and the always-shared-space JS objects:
// JSSharedArray, JSAtomicsMutex, JSAtomicsCondition). Must run after all
// non-JSReceiver maps (see InstanceTypeChecker::kNonJsReceiverMapLimit).
// Also #undef's the ALLOCATE_* macros opened in
// CreateEarlyReadOnlyMapsAndObjects.
// NOTE(review): extraction dropped original lines 844-845, 849, 851-852,
// 855-856 and 862 — among them the AllocateMap arguments for the shared
// array map, the PrepareMapNoEnumerableProperties call head whose
// `shared_array_map);` tail survives, the descriptor-array allocation, the
// remaining Descriptor::DataField arguments, and the
// ALLOCATE_ALWAYS_SHARED_SPACE_JSOBJECT_MAP( head of the js_atomics_mutex
// entry. Restore from upstream.
807bool Heap::CreateLateReadOnlyJSReceiverMaps() {
808#define ALLOCATE_ALWAYS_SHARED_SPACE_JSOBJECT_MAP(instance_type, size, \
809 field_name) \
810 { \
811 Tagged<Map> map; \
812 if (!AllocateMap(AllocationType::kReadOnly, (instance_type), size, \
813 DICTIONARY_ELEMENTS) \
814 .To(&map)) { \
815 return false; \
816 } \
817 AlwaysSharedSpaceJSObject::PrepareMapNoEnumerableProperties(map); \
818 set_##field_name##_map(map); \
819 }
820
821 HandleScope late_jsreceiver_maps_handle_scope(isolate());
822 Factory* factory = isolate()->factory();
823 ReadOnlyRoots roots(this);
824
825 {
826 // JSMessageObject and JSExternalObject types are wrappers around a set
827 // of primitive values and exist only for the purpose of passing the data
828 // across V8 Api. They are not supposed to be leaked to user JS code
829 // except from d8 tests and they are not proper JSReceivers.
830 ALLOCATE_MAP(JS_MESSAGE_OBJECT_TYPE, JSMessageObject::kHeaderSize,
831 message_object)
832 roots.message_object_map()->SetEnumLength(0);
833 roots.message_object_map()->set_is_extensible(false);
834
835 ALLOCATE_MAP(JS_EXTERNAL_OBJECT_TYPE, JSExternalObject::kHeaderSize,
836 external)
837 roots.external_map()->SetEnumLength(0);
838 roots.external_map()->set_is_extensible(false);
839 }
840
841 // Shared space object maps are immutable and can be in RO space.
842 {
843 Tagged<Map> shared_array_map;
844 if (!AllocateMap(AllocationType::kReadOnly, JS_SHARED_ARRAY_TYPE,
846 .To(&shared_array_map)) {
847 return false;
848 }
850 shared_array_map);
// Install the single "length" data-field descriptor on the shared array map.
853 Descriptor length_descriptor = Descriptor::DataField(
854 factory->length_string(), JSSharedArray::kLengthFieldIndex,
857 descriptors->Set(InternalIndex(0), &length_descriptor);
858 shared_array_map->InitializeDescriptors(isolate(), *descriptors);
859 set_js_shared_array_map(shared_array_map);
860 }
861
863 JS_ATOMICS_MUTEX_TYPE, JSAtomicsMutex::kHeaderSize, js_atomics_mutex)
864 ALLOCATE_ALWAYS_SHARED_SPACE_JSOBJECT_MAP(JS_ATOMICS_CONDITION_TYPE,
865 JSAtomicsCondition::kHeaderSize,
866 js_atomics_condition)
867
868#undef ALLOCATE_ALWAYS_SHARED_SPACE_JSOBJECT_MAP
869#undef ALLOCATE_PRIMITIVE_MAP
870#undef ALLOCATE_VARSIZE_MAP
871#undef ALLOCATE_MAP
872
873 return true;
874}
875
876// For static roots we need the r/o space to have identical layout on all
877// compile targets. Varying objects are padded to their biggest size.
// Pads `obj` up to `required` bytes by allocating a filler object directly
// after it, then checks the filler is exactly adjacent.
// NOTE(review): extraction dropped original lines 880, 888-889 and 891 —
// presumably an enclosing `if (V8_STATIC_ROOTS_BOOL || ...) {` guard (note
// the extra closing brace before the final one), the AllocateRawWith
// arguments (size/space), and the filler-kind argument of
// CreateFillerObjectAt. Restore from upstream.
878void Heap::StaticRootsEnsureAllocatedSize(DirectHandle<HeapObject> obj,
879 int required) {
881 int obj_size = obj->Size();
882 if (required == obj_size) return;
883 CHECK_LT(obj_size, required);
884 int filler_size = required - obj_size;
885
886 Tagged<HeapObject> filler =
887 allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>(
890 CreateFillerObjectAt(filler.address(), filler_size,
892
// The filler must start exactly where obj ends and end exactly at the
// required size — i.e. allocation was contiguous.
893 CHECK_EQ(filler.address(), obj->address() + obj_size);
894 CHECK_EQ(filler.address() + filler->Size(), obj->address() + required);
895 }
896}
897
// Allocates the read-only objects whose addresses should be small (arm64
// immediates): hash seed, the important internalized strings, private
// symbols, the canonical empty containers, the empty ScopeInfo, and the
// canonical heap numbers. Returns false on allocation failure.
// NOTE(review): extraction dropped many interior lines (original 900, 908,
// 929, 934, 936, 943, 948, 953, 960-961, 964, 968, 970, 980, 985, 993,
// 999, 1001, 1003, 1005, 1007, 1009, 1011 at least): among them the
// hash-seed initialization following NewByteArray, the IMPORTANT_PRIVATE_
// SYMBOL list invocation of SYMBOL_INIT, the DirectHandle declaration and
// NameDictionary::New arguments for empty_property_dictionary, the
// OrderedNameDictionary allocation expression, several AllocateRaw/
// AllocateMap condition heads whose `.To(&obj)) {` tails survive, the
// write-barrier arguments of set_map_after_allocation calls, and the
// NewHeapNumber arguments for minus-zero/infinity/max-safe-integer/etc.
// Restore from upstream before compiling.
898bool Heap::CreateImportantReadOnlyObjects() {
899 // Allocate some objects early to get addresses to fit as arm64 immediates.
901 ReadOnlyRoots roots(isolate());
902 HandleScope initial_objects_handle_scope(isolate());
903
904 // Hash seed for strings
905
906 Factory* factory = isolate()->factory();
907 set_hash_seed(*factory->NewByteArray(kInt64Size, AllocationType::kReadOnly));
909
910 // Important strings and symbols
911 for (const ConstantStringInit& entry : kImportantConstantStringTable) {
912 if (entry.index == RootIndex::kempty_string) {
913 // Special case the empty string, since it's allocated and initialised in
914 // the initial section.
915 isolate()->string_table()->InsertEmptyStringForBootstrapping(isolate());
916 } else {
917 DirectHandle<String> str = factory->InternalizeString(entry.contents);
918 roots_table()[entry.index] = str->ptr();
919 }
920 }
921
922 {
923#define SYMBOL_INIT(_, name) \
924 { \
925 DirectHandle<Symbol> symbol( \
926 isolate()->factory()->NewPrivateSymbol(AllocationType::kReadOnly)); \
927 roots_table()[RootIndex::k##name] = symbol->ptr(); \
928 }
930 // SYMBOL_INIT used again later.
931 }
932
933 // Empty elements
935 empty_property_dictionary = NameDictionary::New(
937 DCHECK(!empty_property_dictionary->HasSufficientCapacityToAdd(1));
938
939 set_empty_property_dictionary(*empty_property_dictionary);
940
941 // Allocate the empty OrderedNameDictionary
942 DirectHandle<OrderedNameDictionary> empty_ordered_property_dictionary =
944 .ToHandleChecked();
945 set_empty_ordered_property_dictionary(*empty_ordered_property_dictionary);
946
947 {
949 .To(&obj)) {
950 return false;
951 }
952 obj->set_map_after_allocation(isolate(), roots.byte_array_map(),
954 Cast<ByteArray>(obj)->set_length(0);
955 set_empty_byte_array(Cast<ByteArray>(obj));
956 }
957
958 {
959 AllocationResult alloc =
962 if (!alloc.To(&obj)) return false;
963 obj->set_map_after_allocation(isolate(), roots.scope_info_map(),
// The empty ScopeInfo encodes "empty" flags: sloppy mode, no receiver, no
// function variable (the DCHECKs below pin those decoded defaults).
965 int flags = ScopeInfo::IsEmptyBit::encode(true);
966 DCHECK_EQ(ScopeInfo::LanguageModeBit::decode(flags), LanguageMode::kSloppy);
967 DCHECK_EQ(ScopeInfo::ReceiverVariableBits::decode(flags),
969 DCHECK_EQ(ScopeInfo::FunctionVariableBits::decode(flags),
971 Cast<ScopeInfo>(obj)->set_flags(flags, kRelaxedStore);
972 Cast<ScopeInfo>(obj)->set_context_local_count(0);
973 Cast<ScopeInfo>(obj)->set_parameter_count(0);
974 Cast<ScopeInfo>(obj)->set_position_info_start(0);
975 Cast<ScopeInfo>(obj)->set_position_info_end(0);
976 }
977 set_empty_scope_info(Cast<ScopeInfo>(obj));
978
979 {
981 .To(&obj)) {
982 return false;
983 }
984 obj->set_map_after_allocation(isolate(), roots.property_array_map(),
986 Cast<PropertyArray>(obj)->initialize_length(0);
987 set_empty_property_array(Cast<PropertyArray>(obj));
988 }
989
990 // Heap Numbers
991 // The -0 value must be set before NewNumber works.
992 set_minus_zero_value(
994 DCHECK(std::signbit(Object::NumberValue(roots.minus_zero_value())));
995
996 set_nan_value(*factory->NewHeapNumber<AllocationType::kReadOnly>(
997 std::numeric_limits<double>::quiet_NaN()));
998 set_hole_nan_value(*factory->NewHeapNumberFromBits<AllocationType::kReadOnly>(
1000 set_infinity_value(
1002 set_minus_infinity_value(
1004 set_max_safe_integer(
1006 set_max_uint_32(
1008 set_smi_min_value(
1010 set_smi_max_value_plus_one(
1012
1013 return true;
1014}
1015
// Creates the remaining read-only roots: canonical empty collections, the
// single-character string table, fully-initialized oddballs and hole
// values, read-only symbols and internalized strings (including the
// protector-related names), canonical scope infos and, when WebAssembly is
// enabled, the specially laid-out wasm-null object. Returns false if a raw
// allocation fails.
// NOTE(review): this excerpt has line-number gaps; statements that look
// truncated are artifacts of the excerpt, not the real file.
1016bool Heap::CreateReadOnlyObjects() {
1017 HandleScope initial_objects_handle_scope(isolate());
1018 Factory* factory = isolate()->factory();
1019 ReadOnlyRoots roots(this);
1021
// Canonical empty ArrayList. Uses UncheckedCast because the checked cast
// would consult roots that are not initialized yet (see comment below).
1022 {
1023 AllocationResult alloc =
1025 if (!alloc.To(&obj)) return false;
1026 obj->set_map_after_allocation(isolate(), roots.array_list_map(),
1028 // Unchecked to skip failing checks since required roots are uninitialized.
1029 UncheckedCast<ArrayList>(obj)->set_capacity(0);
1030 UncheckedCast<ArrayList>(obj)->set_length(0);
1031 }
1032 set_empty_array_list(UncheckedCast<ArrayList>(obj));
1033
// Canonical empty ObjectBoilerplateDescription.
1034 {
1037 if (!alloc.To(&obj)) return false;
1038 obj->set_map_after_allocation(isolate(),
1039 roots.object_boilerplate_description_map(),
1041
1042 Cast<ObjectBoilerplateDescription>(obj)->set_capacity(0);
1043 Cast<ObjectBoilerplateDescription>(obj)->set_backing_store_size(0);
1044 Cast<ObjectBoilerplateDescription>(obj)->set_flags(0);
1045 }
1046 set_empty_object_boilerplate_description(
1048
1049 {
1050 // Empty array boilerplate description
1051 AllocationResult alloc =
1052 Allocate(roots_table().array_boilerplate_description_map(),
1054 if (!alloc.To(&obj)) return false;
1055
1056 Cast<ArrayBoilerplateDescription>(obj)->set_constant_elements(
1057 roots.empty_fixed_array());
1058 Cast<ArrayBoilerplateDescription>(obj)->set_elements_kind(
1060 }
1061 set_empty_array_boilerplate_description(
1063
1064 // Empty arrays.
1065 {
1068 .To(&obj)) {
1069 return false;
1070 }
1071 obj->set_map_after_allocation(
1072 isolate(), roots.closure_feedback_cell_array_map(), SKIP_WRITE_BARRIER);
1073 Cast<ClosureFeedbackCellArray>(obj)->set_length(0);
1074 set_empty_closure_feedback_cell_array(Cast<ClosureFeedbackCellArray>(obj));
1075 }
1076
1077 DCHECK(!HeapLayout::InYoungGeneration(roots.empty_fixed_array()));
1078
1079 // Allocate the empty SwissNameDictionary
1080 DirectHandle<SwissNameDictionary> empty_swiss_property_dictionary =
1082 set_empty_swiss_property_dictionary(*empty_swiss_property_dictionary);
// Pad so the static-roots layout is identical on all targets (see
// StaticRootsEnsureAllocatedSize above).
1083 StaticRootsEnsureAllocatedSize(empty_swiss_property_dictionary,
1084 8 * kTaggedSize);
1085
1086 roots.bigint_map()->SetConstructorFunctionIndex(
1087 Context::BIGINT_FUNCTION_INDEX);
1088
1089 for (const ConstantStringInit& entry : kNotImportantConstantStringTable) {
1090 DirectHandle<String> str = factory->InternalizeString(entry.contents);
1091 roots_table()[entry.index] = str->ptr();
1092 }
1093
// Compile-time check that every "single character" constant string really
// has length 1 (the macro invocation line is missing from this excerpt).
1094#define ENSURE_SINGLE_CHAR_STRINGS_ARE_SINGLE_CHAR(_, name, contents) \
1095 static_assert(arraysize(contents) - 1 == 1);
1098 /* not used */)
1099#undef ENSURE_SINGLE_CHAR_STRINGS_ARE_SINGLE_CHAR
1100
1101 // Allocate and initialize table for single character one byte strings.
1102 int table_size = String::kMaxOneByteCharCode + 1;
1103 set_single_character_string_table(
1104 *factory->NewFixedArray(table_size, AllocationType::kReadOnly));
1105 for (int i = 0; i < table_size; ++i) {
1109 single_character_string_table()->set(i, *str);
1110 }
1111
1112 // Finish initializing oddballs after creating the string table.
1113 Oddball::Initialize(isolate(), factory->undefined_value(), "undefined",
1114 factory->nan_value(), "undefined", Oddball::kUndefined);
1115
1116 // Initialize the null_value.
1117 Oddball::Initialize(isolate(), factory->null_value(), "null",
1118 direct_handle(Smi::zero(), isolate()), "object",
1120
1121 // Initialize the true_value.
1122 Oddball::Initialize(isolate(), factory->true_value(), "true",
1123 direct_handle(Smi::FromInt(1), isolate()), "boolean",
1125
1126 // Initialize the false_value.
1127 Oddball::Initialize(isolate(), factory->false_value(), "false",
1128 direct_handle(Smi::zero(), isolate()), "boolean",
1130
1131 // Initialize the_hole_value.
1132 Hole::Initialize(isolate(), factory->the_hole_value(),
1133 factory->hole_nan_value());
1134
// Each of these roots is a distinct Hole object so they can be told apart
// by identity.
1135 set_property_cell_hole_value(*factory->NewHole());
1136 set_hash_table_hole_value(*factory->NewHole());
1137 set_promise_hole_value(*factory->NewHole());
1138 set_uninitialized_value(*factory->NewHole());
1139 set_arguments_marker(*factory->NewHole());
1140 set_termination_exception(*factory->NewHole());
1141 set_exception(*factory->NewHole());
1142 set_optimized_out(*factory->NewHole());
1143 set_stale_register(*factory->NewHole());
1144
1145 // Initialize marker objects used during compilation.
1146 set_self_reference_marker(*factory->NewHole())%3B
1147 set_basic_block_counters_marker(*factory->NewHole());
1148
// Second use of SYMBOL_INIT (defined in CreateImportantReadOnlyObjects);
// its invocation line is missing from this excerpt.
1149 {
1150 HandleScope handle_scope(isolate());
1152#undef SYMBOL_INIT
1153 }
1154
1155 {
1156 HandleScope handle_scope(isolate());
1157#define PUBLIC_SYMBOL_INIT(_, name, description) \
1158 DirectHandle<Symbol> name = factory->NewSymbol(AllocationType::kReadOnly); \
1159 DirectHandle<String> name##d = factory->InternalizeUtf8String(#description); \
1160 name->set_description(*name##d); \
1161 roots_table()[RootIndex::k##name] = name->ptr();
1162
1164
1165#define WELL_KNOWN_SYMBOL_INIT(_, name, description) \
1166 DirectHandle<Symbol> name = factory->NewSymbol(AllocationType::kReadOnly); \
1167 DirectHandle<String> name##d = factory->InternalizeUtf8String(#description); \
1168 name->set_is_well_known_symbol(true); \
1169 name->set_description(*name##d); \
1170 roots_table()[RootIndex::k##name] = name->ptr();
1171
1173
1174 // Mark "Interesting Symbols" appropriately.
1175 to_string_tag_symbol->set_is_interesting_symbol(true);
1176 }
1177
1178 {
1179 // All Names that can cause protector invalidation have to be allocated
1180 // consecutively to allow for fast checks
1181
1182 // Allocate the symbols's internal strings first, so we don't get
1183 // interleaved string allocations for the symbols later.
1184#define ALLOCATE_SYMBOL_STRING(_, name, description) \
1185 Handle<String> name##symbol_string = \
1186 factory->InternalizeUtf8String(#description); \
1187 USE(name##symbol_string);
1188
1190 /* not used */)
1192 /* not used */)
1194 /* not used */)
1195#undef ALLOCATE_SYMBOL_STRING
1196
1197#define INTERNALIZED_STRING_INIT(_, name, description) \
1198 DirectHandle<String> name = factory->InternalizeUtf8String(description); \
1199 roots_table()[RootIndex::k##name] = name->ptr();
1200
1202 /* not used */)
1204 /* not used */)
1206 /* not used */)
1208 /* not used */)
1209
1210 // Mark "Interesting Symbols" appropriately.
1211 to_primitive_symbol->set_is_interesting_symbol(true);
1212
1213#ifdef DEBUG
1214 roots.VerifyNameForProtectors();
1215#endif
1217
1218#undef INTERNALIZED_STRING_INIT
1219#undef PUBLIC_SYMBOL_INIT
1220#undef WELL_KNOWN_SYMBOL_INIT
1221 }
1222
1223 DirectHandle<NumberDictionary> slow_element_dictionary =
1224 NumberDictionary::New(isolate(), 1, AllocationType::kReadOnly,
1226 DCHECK(!slow_element_dictionary->HasSufficientCapacityToAdd(1));
1227 set_empty_slow_element_dictionary(*slow_element_dictionary);
1228
1229 DirectHandle<RegisteredSymbolTable> empty_symbol_table =
1230 RegisteredSymbolTable::New(isolate(), 1, AllocationType::kReadOnly,
1232 DCHECK(!empty_symbol_table->HasSufficientCapacityToAdd(1));
1233 set_empty_symbol_table(*empty_symbol_table);
1234
1235 // Allocate the empty OrderedHashMap.
1236 DirectHandle<OrderedHashMap> empty_ordered_hash_map =
1238 .ToHandleChecked();
1239 set_empty_ordered_hash_map(*empty_ordered_hash_map);
1240
1241 // Allocate the empty OrderedHashSet.
1242 DirectHandle<OrderedHashSet> empty_ordered_hash_set =
1244 .ToHandleChecked();
1245 set_empty_ordered_hash_set(*empty_ordered_hash_set);
1246
1247 // Allocate the empty FeedbackMetadata.
1248 DirectHandle<FeedbackMetadata> empty_feedback_metadata =
1250 set_empty_feedback_metadata(*empty_feedback_metadata);
1251
1252 // Canonical scope arrays.
1253 DirectHandle<ScopeInfo> global_this_binding =
1255 set_global_this_binding_scope_info(*global_this_binding);
1256
1257 DirectHandle<ScopeInfo> empty_function =
1259 set_empty_function_scope_info(*empty_function);
1260
1261 DirectHandle<ScopeInfo> native_scope_info =
1263 set_native_scope_info(*native_scope_info);
1264
1265 DirectHandle<ScopeInfo> shadow_realm_scope_info =
1267 set_shadow_realm_scope_info(*shadow_realm_scope_info);
1268
1269 // Allocate FeedbackCell for builtins.
1270 DirectHandle<FeedbackCell> many_closures_cell =
1272 set_many_closures_cell(*many_closures_cell);
1273
1274 // Initialize the wasm null_value.
1275
// NOTE(review): the guard at the top of this file uses
// "#if V8_ENABLE_WEBASSEMBLY", whereas this one uses "#ifdef", which is
// also true when the macro is defined to 0 — confirm this is intentional.
1276#ifdef V8_ENABLE_WEBASSEMBLY
1277 // Allocate the wasm-null object. It is a regular V8 heap object contained in
1278 // a V8 page.
1279 // In static-roots builds, it is large enough so that its payload (other than
1280 // its map word) can be mprotected on OS page granularity. We adjust the
1281 // layout such that we have a filler object in the current OS page, and the
1282 // wasm-null map word at the end of the current OS page. The payload then is
1283 // contained on a separate OS page which can be protected.
1284 // In non-static-roots builds, it is a regular object of size {kTaggedSize}
1285 // and does not need padding.
1286
1287 constexpr size_t kLargestPossibleOSPageSize = 64 * KB;
1288 static_assert(kLargestPossibleOSPageSize >= kMinimumOSPageSize);
1289
1291 // Ensure all of the following lands on the same V8 page.
1292 constexpr int kOffsetAfterMapWord = HeapObject::kMapOffset + kTaggedSize;
1293 static_assert(kOffsetAfterMapWord % kObjectAlignment == 0);
1295 kLargestPossibleOSPageSize + WasmNull::kSize - kOffsetAfterMapWord);
1296 Address next_page = RoundUp(read_only_space_->top() + kOffsetAfterMapWord,
1297 kLargestPossibleOSPageSize);
1298
1299 // Add some filler to end up right before an OS page boundary.
1300 int filler_size = static_cast<int>(next_page - read_only_space_->top() -
1301 kOffsetAfterMapWord);
1302 // TODO(v8:7748) Depending on where we end up this might actually not hold,
1303 // in which case we would need to use a one or two-word filler.
1304 CHECK(filler_size > 2 * kTaggedSize);
1305 Tagged<HeapObject> filler =
1306 allocator()->AllocateRawWith<HeapAllocator::kRetryOrFail>(
1309 CreateFillerObjectAt(filler.address(), filler_size,
1311 set_wasm_null_padding(filler);
1312 CHECK_EQ(read_only_space_->top() + kOffsetAfterMapWord, next_page);
1313 } else {
1314 set_wasm_null_padding(roots.undefined_value());
1315 }
1316
1317 // Finally, allocate the wasm-null object.
1318 {
1319 Tagged<HeapObject> wasm_null_obj;
1321 .To(&wasm_null_obj));
1322 // No need to initialize the payload since it's either empty or unmapped.
1324 WasmNull::kSize == sizeof(Tagged_t));
1325 wasm_null_obj->set_map_after_allocation(isolate(), roots.wasm_null_map(),
1327 set_wasm_null(Cast<WasmNull>(wasm_null_obj));
1329 CHECK_EQ(read_only_space_->top() % kLargestPossibleOSPageSize, 0);
1330 }
1331 }
1332#endif
1333
1334 return true;
1335}
1336
1337void Heap::CreateMutableApiObjects() {
1338 HandleScope scope(isolate());
1339 set_message_listeners(*ArrayList::New(isolate(), 2, AllocationType::kOld));
1340}
1341
// Creates the read-only objects backing the V8 API; currently just the
// no-op InterceptorInfo root.
// NOTE(review): original lines 1349-1350 (presumably the static check
// referenced by the comment below) are missing from this excerpt.
1342void Heap::CreateReadOnlyApiObjects() {
1343 HandleScope scope(isolate());
1344 auto info =
1345 isolate()->factory()->NewInterceptorInfo(AllocationType::kReadOnly);
1346 set_noop_interceptor_info(*info);
1347 // Make sure read only heap layout does not depend on the size of
1348 // ExternalPointer fields.
1351}
1352
// Populates the mutable (old-space) roots needed before any JS runs:
// caches, protector cells, the empty script, Error.stack accessor
// templates, the internal SharedFunctionInfos for builtin closures, and
// the empty trusted/protected arrays.
// NOTE(review): this excerpt has line-number gaps; statements that look
// truncated are artifacts of the excerpt, not the real file.
1353void Heap::CreateInitialMutableObjects() {
1354 HandleScope initial_objects_handle_scope(isolate());
1355 Factory* factory = isolate()->factory();
1356 ReadOnlyRoots roots(this);
1357
1358 // There's no "current microtask" in the beginning.
1359 set_current_microtask(roots.undefined_value());
1360
1361 set_weak_refs_keep_during_job(roots.undefined_value());
1362
// The three symbol tables start out as the shared read-only empty table
// and are replaced on first insertion.
1363 set_public_symbol_table(roots.empty_symbol_table());
1364 set_api_symbol_table(roots.empty_symbol_table());
1365 set_api_private_symbol_table(roots.empty_symbol_table());
1366
1367 set_number_string_cache(*factory->NewFixedArray(
1369
1370 // Unchecked to skip failing checks since required roots are uninitialized.
1371 set_basic_block_profiling_data(roots.unchecked_empty_array_list())%3B
1372
1373 // Allocate regexp caches.
1374 set_string_split_cache(*factory->NewFixedArray(
1376 set_regexp_multiple_cache(*factory->NewFixedArray(
1378 set_regexp_match_global_atom_cache(*factory->NewFixedArray(
1380
1381 set_detached_contexts(roots.empty_weak_array_list());
1382
1383 set_feedback_vectors_for_profiling_tools(roots.undefined_value());
1384 set_functions_marked_for_manual_optimization(roots.undefined_value());
1385 set_shared_wasm_memories(roots.empty_weak_array_list());
1386 set_locals_block_list_cache(roots.undefined_value());
1387#ifdef V8_ENABLE_WEBASSEMBLY
1388 set_active_suspender(roots.undefined_value());
1389 set_js_to_wasm_wrappers(roots.empty_weak_fixed_array());
1390 set_wasm_canonical_rtts(roots.empty_weak_fixed_array());
1391#endif  // V8_ENABLE_WEBASSEMBLY
1392
1393 set_script_list(roots.empty_weak_array_list());
1394
1395 set_materialized_objects(*factory->NewFixedArray(0, AllocationType::kOld));
1396
1397 // Handling of script id generation is in Heap::NextScriptId().
1398 set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId));
1399 set_last_debugging_id(Smi::FromInt(DebugInfo::kNoDebuggingId));
1400 set_last_stack_trace_id(Smi::zero());
1401 set_next_template_serial_number(
1403
1404 // Allocate the empty script.
1405 DirectHandle<Script> script = factory->NewScript(factory->empty_string());
1406 script->set_type(Script::Type::kNative);
1407 // This is used for exceptions thrown with no stack frames. Such exceptions
1408 // can be shared everywhere.
1409 script->set_origin_options(ScriptOriginOptions(true, false));
1410 set_empty_script(*script);
1411
1412 // Protectors
// Each protector below is a fresh PropertyCell created by
// Factory::NewProtector().
1413 set_array_buffer_detaching_protector(*factory->NewProtector());
1414 set_array_constructor_protector(*factory->NewProtector());
1415 set_array_iterator_protector(*factory->NewProtector());
1416 set_array_species_protector(*factory->NewProtector());
1417 set_is_concat_spreadable_protector(*factory->NewProtector());
1418 set_map_iterator_protector(*factory->NewProtector());
1419 set_no_elements_protector(*factory->NewProtector());
1420 set_mega_dom_protector(*factory->NewProtector());
1421 set_no_profiling_protector(*factory->NewProtector());
1422 set_no_undetectable_objects_protector(*factory->NewProtector());
1423 set_promise_hook_protector(*factory->NewProtector());
1424 set_promise_resolve_protector(*factory->NewProtector());
1425 set_promise_species_protector(*factory->NewProtector());
1426 set_promise_then_protector(*factory->NewProtector());
1427 set_regexp_species_protector(*factory->NewProtector());
1428 set_set_iterator_protector(*factory->NewProtector());
1429 set_string_iterator_protector(*factory->NewProtector());
1430 set_string_length_protector(*factory->NewProtector());
1431 set_string_wrapper_to_primitive_protector(*factory->NewProtector());
1432 set_number_string_not_regexp_like_protector(*factory->NewProtector());
1433 set_typed_array_length_protector(*factory->NewProtector());
1434 set_typed_array_species_protector(*factory->NewProtector());
1435
1436 set_serialized_objects(roots.empty_fixed_array());
1437 set_serialized_global_proxy_sizes(roots.empty_fixed_array());
1438
1439 // Evaluate the hash values which will then be cached in the strings.
1440 isolate()->factory()->zero_string()->EnsureHash();
1441 isolate()->factory()->one_string()->EnsureHash();
1442
1443 // Initialize builtins constants table.
1444 set_builtins_constants_table(roots.empty_fixed_array());
1445
1446 // Initialize descriptor cache.
1447 isolate_->descriptor_lookup_cache()->Clear();
1448
1449 // Initialize compilation cache.
1450 isolate_->compilation_cache()->Clear();
1451
1452 // Error.stack accessor callbacks and their SharedFunctionInfos:
1453 {
1454 DirectHandle<FunctionTemplateInfo> function_template;
1456 isolate_, Accessors::ErrorStackGetter, 0,
1459 isolate_, function_template);
1460 set_error_stack_getter_fun_template(*function_template);
1461
1463 isolate_, Accessors::ErrorStackSetter, 1,
1466 isolate_, function_template);
1467 set_error_stack_setter_fun_template(*function_template);
1468 }
1469
1470 // Create internal SharedFunctionInfos.
// Each block below builds one SharedFunctionInfo per builtin closure via
// the file-local CreateSharedFunctionInfo() helper and stores it as a root.
1471 // Async functions:
1472 {
1473 DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1474 isolate(), Builtin::kAsyncFunctionAwaitRejectClosure, 1);
1475 set_async_function_await_reject_closure_shared_fun(*info);
1476
1477 info = CreateSharedFunctionInfo(
1478 isolate(), Builtin::kAsyncFunctionAwaitResolveClosure, 1);
1479 set_async_function_await_resolve_closure_shared_fun(*info);
1480 }
1481
1482 // Async generators:
1483 {
1484 DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1485 isolate(), Builtin::kAsyncGeneratorAwaitResolveClosure, 1);
1486 set_async_generator_await_resolve_closure_shared_fun(*info);
1487
1488 info = CreateSharedFunctionInfo(
1489 isolate(), Builtin::kAsyncGeneratorAwaitRejectClosure, 1);
1490 set_async_generator_await_reject_closure_shared_fun(*info);
1491
1492 info = CreateSharedFunctionInfo(
1493 isolate(), Builtin::kAsyncGeneratorYieldWithAwaitResolveClosure, 1);
1494 set_async_generator_yield_with_await_resolve_closure_shared_fun(*info);
1495
1496 info = CreateSharedFunctionInfo(
1497 isolate(), Builtin::kAsyncGeneratorReturnResolveClosure, 1);
1498 set_async_generator_return_resolve_closure_shared_fun(*info);
1499
1500 info = CreateSharedFunctionInfo(
1501 isolate(), Builtin::kAsyncGeneratorReturnClosedResolveClosure, 1);
1502 set_async_generator_return_closed_resolve_closure_shared_fun(*info);
1503
1504 info = CreateSharedFunctionInfo(
1505 isolate(), Builtin::kAsyncGeneratorReturnClosedRejectClosure, 1);
1506 set_async_generator_return_closed_reject_closure_shared_fun(*info);
1507 }
1508
1509 // AsyncIterator:
1510 {
1511 DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1512 isolate_, Builtin::kAsyncIteratorValueUnwrap, 1);
1513 set_async_iterator_value_unwrap_shared_fun(*info);
1514
1515 info = CreateSharedFunctionInfo(
1516 isolate_, Builtin::kAsyncIteratorPrototypeAsyncDisposeResolveClosure,
1517 0);
1518 set_async_iterator_prototype_async_dispose_resolve_closure_shared_fun(
1519 *info);
1520 }
1521
1522 // AsyncFromSyncIterator:
1523 {
1524 DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1525 isolate_, Builtin::kAsyncFromSyncIteratorCloseSyncAndRethrow, 1);
1526 set_async_from_sync_iterator_close_sync_and_rethrow_shared_fun(*info);
1527 }
1528
1529 // Promises:
1530 {
1531 DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1532 isolate_, Builtin::kPromiseCapabilityDefaultResolve, 1,
1534 info->set_native(true);
1535 info->set_function_map_index(
1536 Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX);
1537 set_promise_capability_default_resolve_shared_fun(*info);
1538
1539 info = CreateSharedFunctionInfo(isolate_,
1540 Builtin::kPromiseCapabilityDefaultReject, 1,
1542 info->set_native(true);
1543 info->set_function_map_index(
1544 Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX);
1545 set_promise_capability_default_reject_shared_fun(*info);
1546
1547 info = CreateSharedFunctionInfo(
1548 isolate_, Builtin::kPromiseGetCapabilitiesExecutor, 2);
1549 set_promise_get_capabilities_executor_shared_fun(*info);
1550 }
1551
1552 // Promises / finally:
1553 {
1555 CreateSharedFunctionInfo(isolate(), Builtin::kPromiseThenFinally, 1);
1556 info->set_native(true);
1557 set_promise_then_finally_shared_fun(*info);
1558
1559 info =
1560 CreateSharedFunctionInfo(isolate(), Builtin::kPromiseCatchFinally, 1);
1561 info->set_native(true);
1562 set_promise_catch_finally_shared_fun(*info);
1563
1564 info = CreateSharedFunctionInfo(isolate(),
1565 Builtin::kPromiseValueThunkFinally, 0);
1566 set_promise_value_thunk_finally_shared_fun(*info);
1567
1568 info =
1569 CreateSharedFunctionInfo(isolate(), Builtin::kPromiseThrowerFinally, 0);
1570 set_promise_thrower_finally_shared_fun(*info);
1571 }
1572
1573 // Promise combinators:
1574 {
1575 DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1576 isolate_, Builtin::kPromiseAllResolveElementClosure, 1);
1577 set_promise_all_resolve_element_closure_shared_fun(*info);
1578
1579 info = CreateSharedFunctionInfo(
1580 isolate_, Builtin::kPromiseAllSettledResolveElementClosure, 1);
1581 set_promise_all_settled_resolve_element_closure_shared_fun(*info);
1582
1583 info = CreateSharedFunctionInfo(
1584 isolate_, Builtin::kPromiseAllSettledRejectElementClosure, 1);
1585 set_promise_all_settled_reject_element_closure_shared_fun(*info);
1586
1587 info = CreateSharedFunctionInfo(
1588 isolate_, Builtin::kPromiseAnyRejectElementClosure, 1);
1589 set_promise_any_reject_element_closure_shared_fun(*info);
1590 }
1591
1592 // ProxyRevoke:
1593 {
1595 CreateSharedFunctionInfo(isolate_, Builtin::kProxyRevoke, 0);
1596 set_proxy_revoke_shared_fun(*info);
1597 }
1598
1599 // ShadowRealm:
1600 {
1601 DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1602 isolate_, Builtin::kShadowRealmImportValueFulfilled, 1);
1603 set_shadow_realm_import_value_fulfilled_shared_fun(*info);
1604 }
1605
1606 // SourceTextModule:
1607 {
1608 DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1609 isolate_, Builtin::kCallAsyncModuleFulfilled, 0);
1610 set_source_text_module_execute_async_module_fulfilled_sfi(*info);
1611
1612 info = CreateSharedFunctionInfo(isolate_, Builtin::kCallAsyncModuleRejected,
1613 0);
1614 set_source_text_module_execute_async_module_rejected_sfi(*info);
1615 }
1616
1617 // Array.fromAsync:
1618 {
1619 DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1620 isolate_, Builtin::kArrayFromAsyncIterableOnFulfilled, 1);
1621 set_array_from_async_iterable_on_fulfilled_shared_fun(*info);
1622
1623 info = CreateSharedFunctionInfo(
1624 isolate_, Builtin::kArrayFromAsyncIterableOnRejected, 1);
1625 set_array_from_async_iterable_on_rejected_shared_fun(*info);
1626
1627 info = CreateSharedFunctionInfo(
1628 isolate_, Builtin::kArrayFromAsyncArrayLikeOnFulfilled, 1);
1629 set_array_from_async_array_like_on_fulfilled_shared_fun(*info);
1630
1631 info = CreateSharedFunctionInfo(
1632 isolate_, Builtin::kArrayFromAsyncArrayLikeOnRejected, 1);
1633 set_array_from_async_array_like_on_rejected_shared_fun(*info);
1634 }
1635
1636 // Atomics.Mutex
1637 {
1638 DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1639 isolate_, Builtin::kAtomicsMutexAsyncUnlockResolveHandler, 1);
1640 set_atomics_mutex_async_unlock_resolve_handler_sfi(*info);
1641 info = CreateSharedFunctionInfo(
1642 isolate_, Builtin::kAtomicsMutexAsyncUnlockRejectHandler, 1);
1643 set_atomics_mutex_async_unlock_reject_handler_sfi(*info);
1644 }
1645
1646 // Atomics.Condition
1647 {
1648 DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1649 isolate_, Builtin::kAtomicsConditionAcquireLock, 0);
1650 set_atomics_condition_acquire_lock_sfi(*info);
1651 }
1652
1653 // Async Disposable Stack
1654 {
1655 DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1656 isolate_, Builtin::kAsyncDisposableStackOnFulfilled, 0);
1657 set_async_disposable_stack_on_fulfilled_shared_fun(*info);
1658
1659 info = CreateSharedFunctionInfo(
1660 isolate_, Builtin::kAsyncDisposableStackOnRejected, 0);
1661 set_async_disposable_stack_on_rejected_shared_fun(*info);
1662
1663 info = CreateSharedFunctionInfo(isolate_,
1664 Builtin::kAsyncDisposeFromSyncDispose, 0);
1665 set_async_dispose_from_sync_dispose_shared_fun(*info);
1666 }
1667
1668 // Trusted roots:
1669 // TODO(saelo): these would ideally be read-only and shared, but we currently
1670 // don't have a trusted RO space.
1671 {
1672 set_empty_trusted_byte_array(*TrustedByteArray::New(isolate_, 0));
1673 set_empty_trusted_fixed_array(*TrustedFixedArray::New(isolate_, 0));
1674 set_empty_trusted_weak_fixed_array(
1676 set_empty_protected_fixed_array(*ProtectedFixedArray::New(isolate_, 0));
1677 set_empty_protected_weak_fixed_array(
1679 }
1680}
1681
// Creates one AccessorInfo root per internal accessor and then tags each
// with its getter/setter side-effect type — presumably driven by
// ACCESSOR_INFO_LIST_GENERATOR; the macro-invocation lines (originals
// 1690 and 1701) are missing from this excerpt, so confirm against the
// full file.
1682void Heap::CreateInternalAccessorInfoObjects() {
1683 Isolate* isolate = this->isolate();
1684 HandleScope scope(isolate);
1685 DirectHandle<AccessorInfo> accessor_info;
1686
1687#define INIT_ACCESSOR_INFO(_, accessor_name, AccessorName, ...) \
1688 accessor_info = Accessors::Make##AccessorName##Info(isolate); \
1689 roots_table()[RootIndex::k##AccessorName##Accessor] = accessor_info->ptr();
1691#undef INIT_ACCESSOR_INFO
1692
1693#define INIT_SIDE_EFFECT_FLAG(_, accessor_name, AccessorName, GetterType, \
1694 SetterType) \
1695 Cast<AccessorInfo>( \
1696 Tagged<Object>(roots_table()[RootIndex::k##AccessorName##Accessor])) \
1697 ->set_getter_side_effect_type(SideEffectType::GetterType); \
1698 Cast<AccessorInfo>( \
1699 Tagged<Object>(roots_table()[RootIndex::k##AccessorName##Accessor])) \
1700 ->set_setter_side_effect_type(SideEffectType::SetterType);
1702#undef INIT_SIDE_EFFECT_FLAG
1703}
1704
1705} // namespace internal
1706} // namespace v8
#define ACCESSOR_INFO_LIST_GENERATOR(V, _)
Definition accessors.h:25
Isolate * isolate_
constexpr int kMinimumOSPageSize
Builtins::Kind kind
Definition builtins.cc:40
SourcePosition pos
static const int kNoScriptId
Definition v8-script.h:91
bool To(Tagged< T > *obj) const
static AllocationResult FromObject(Tagged< HeapObject > heap_object)
static void PrepareMapNoEnumerableProperties(Tagged< Map > map)
Definition js-struct.cc:34
static DirectHandle< FunctionTemplateInfo > CreateAccessorFunctionTemplateInfo(Isolate *isolate, FunctionCallback callback, int length, v8::SideEffectType side_effect_type)
static DirectHandle< ArrayList > New(IsolateT *isolate, int capacity, AllocationType allocation=AllocationType::kYoung)
Heap * heap() const
Definition base-space.h:27
static const int kNoDebuggingId
static V8_EXPORT_PRIVATE Tagged< DependentCode > empty_dependent_code(const ReadOnlyRoots &roots)
static constexpr RawGCStateType kInitialGCState
static constexpr int SizeFor(int number_of_all_descriptors)
static Descriptor DataField(Isolate *isolate, DirectHandle< Name > key, int field_index, PropertyAttributes attributes, Representation representation)
Definition property.cc:81
V8_INLINE Address address() const
Definition handles.h:695
Handle< HeapNumber > NewHeapNumberFromBits(uint64_t bits)
Handle< HeapNumber > NewHeapNumber(double value)
Handle< ByteArray > NewByteArray(int length, AllocationType allocation=AllocationType::kYoung)
Handle< DescriptorArray > NewDescriptorArray(int number_of_descriptors, int slack=0, AllocationType allocation=AllocationType::kYoung)
Handle< Script > NewScript(DirectHandle< UnionOf< String, Undefined > > source, ScriptEventType event_type=ScriptEventType::kCreate)
Handle< FixedArray > NewFixedArray(int length, AllocationType allocation=AllocationType::kYoung)
Handle< FeedbackMetadata > NewFeedbackMetadata(int slot_count, int create_closure_slot_count, AllocationType allocation=AllocationType::kOld)
DirectHandle< SwissNameDictionary > CreateCanonicalEmptySwissNameDictionary()
Definition factory.cc:586
DirectHandle< PropertyCell > NewProtector()
Definition factory.cc:2251
DirectHandle< Hole > NewHole()
Definition factory.cc:406
DirectHandle< FeedbackCell > NewManyClosuresCell(AllocationType allocation=AllocationType::kOld)
Definition factory.cc:2204
Handle< String > InternalizeString(base::Vector< const char > str, bool convert_encoding=false)
Definition factory.h:216
static const int kAlignedSize
static V8_EXPORT_PRIVATE Tagged< FieldType > Any()
Definition field-type.cc:22
static void SealAndPrepareForPromotionToReadOnly(Isolate *isolate, DirectHandle< FunctionTemplateInfo > info)
Definition templates.cc:127
static V8_INLINE bool InYoungGeneration(Tagged< Object > object)
static constexpr int kMapOffset
void StaticRootsEnsureAllocatedSize(DirectHandle< HeapObject > obj, int required)
NewSpace * new_space() const
Definition heap.h:727
void set_native_contexts_list(Tagged< Object > object)
Definition heap.h:457
void set_allocation_sites_list(Tagged< UnionOf< Smi, Undefined, AllocationSiteWithWeakNext > > object)
Definition heap.h:466
V8_WARN_UNUSED_RESULT AllocationResult AllocatePartialMap(InstanceType instance_type, int instance_size)
void InitializeHashSeed()
Definition heap.cc:5889
V8_EXPORT_PRIVATE void CreateFillerObjectAt(Address addr, int size, ClearFreedMemoryMode clear_memory_mode=ClearFreedMemoryMode::kDontClearFreedMemory)
Definition heap.cc:3202
static const int kInitialNumberStringCacheSize
Definition heap.h:1705
V8_INLINE RootsTable & roots_table()
Definition heap-inl.h:69
V8_INLINE Tagged< FixedArray > single_character_string_table()
Definition heap-inl.h:78
ReadOnlySpace * read_only_space_
Definition heap.h:2150
V8_WARN_UNUSED_RESULT AllocationResult AllocateMap(AllocationType allocation_type, InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND, int inobject_properties=0)
void set_dirty_js_finalization_registries_list_tail(Tagged< Object > object)
Definition heap.h:481
V8_WARN_UNUSED_RESULT AllocationResult Allocate(DirectHandle< Map > map, AllocationType allocation)
ReadOnlySpace * read_only_space() const
Definition heap.h:738
void set_dirty_js_finalization_registries_list(Tagged< Object > object)
Definition heap.h:475
unsigned int gc_count_
Definition heap.h:2210
void FinalizePartialMap(Tagged< Map > map)
V8_WARN_UNUSED_RESULT V8_INLINE AllocationResult AllocateRaw(int size_in_bytes, AllocationType allocation, AllocationOrigin origin=AllocationOrigin::kRuntime, AllocationAlignment alignment=kTaggedAligned)
Definition heap-inl.h:194
static void Initialize(Isolate *isolate, DirectHandle< Hole > hole, DirectHandle< HeapNumber > numeric_value)
Definition hole-inl.h:31
static constexpr int kSize
Definition hole.h:33
static constexpr int kSize
static constexpr Tagged< Smi > kPrototypeChainValidSmi
Definition map.h:519
static const int kNoSlackTracking
Definition map.h:349
bool CanHaveFastTransitionableElementsKind() const
Definition map-inl.h:169
static constexpr int kPrototypeChainInvalid
Definition map.h:518
static V8_EXPORT_PRIVATE VisitorId GetVisitorId(Tagged< Map > map)
Definition map.cc:65
static V8_WARN_UNUSED_RESULT Handle< NameDictionary > New(IsolateT *isolate, int at_least_space_for, AllocationType allocation=AllocationType::kYoung, MinimumCapacity capacity_option=USE_DEFAULT_MINIMUM_CAPACITY)
static constexpr int kEmptyHashField
Definition name.h:133
virtual void MakeIterable()=0
static double NumberValue(Tagged< Number > obj)
static constexpr uint8_t kNull
Definition oddball.h:56
static constexpr uint8_t kUndefined
Definition oddball.h:57
static constexpr uint8_t kFalse
Definition oddball.h:53
static void Initialize(Isolate *isolate, DirectHandle< Oddball > oddball, const char *to_string, DirectHandle< Number > to_number, const char *type_of, uint8_t kind)
Definition objects.cc:4274
static constexpr uint8_t kTrue
Definition oddball.h:54
static MaybeHandle< OrderedHashMap > AllocateEmpty(Isolate *isolate, AllocationType allocation=AllocationType::kReadOnly)
static MaybeHandle< OrderedHashSet > AllocateEmpty(Isolate *isolate, AllocationType allocation=AllocationType::kReadOnly)
static MaybeHandle< OrderedNameDictionary > AllocateEmpty(Isolate *isolate, AllocationType allocation=AllocationType::kReadOnly)
static Handle< ProtectedFixedArray > New(IsolateT *isolate, int capacity)
static Handle< ProtectedWeakFixedArray > New(IsolateT *isolate, int capacity)
static V8_EXPORT_PRIVATE bool Contains(Address address)
V8_INLINE Tagged< Object > object_at(RootIndex root_index) const
Definition roots-inl.h:143
V8_INLINE void VerifyNameForProtectorsPages() const
Definition roots-inl.h:136
V8_INLINE bool is_initialized(RootIndex root_index) const
Definition roots-inl.h:163
void EnsureSpaceForAllocation(int size_in_bytes)
static constexpr int kRegExpResultsCacheSize
Definition regexp.h:240
static constexpr Representation Smi()
static constexpr RootIndex SingleCharacterStringIndex(int c)
Definition roots.h:629
static DirectHandle< ScopeInfo > CreateGlobalThisBinding(Isolate *isolate)
static DirectHandle< ScopeInfo > CreateForShadowRealmNativeContext(Isolate *isolate)
static constexpr int SizeFor(int length)
Definition scope-info.h:299
static V8_EXPORT_PRIVATE DirectHandle< ScopeInfo > CreateForEmptyFunction(Isolate *isolate)
static DirectHandle< ScopeInfo > CreateForNativeContext(Isolate *isolate)
static V8_INLINE constexpr int32_t SizeFor(int32_t length)
static bool SetupHeapInternal(Isolate *isolate)
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
static constexpr Tagged< Smi > zero()
Definition smi.h:99
V8_INLINE bool IsCons() const
Definition string-inl.h:170
static const int32_t kMaxOneByteCharCode
Definition string.h:500
static const int kUninitializedSerialNumber
Definition templates.h:41
static Handle< TrustedByteArray > New(IsolateT *isolate, int capacity, AllocationType allocation_type=AllocationType::kTrusted)
static Handle< TrustedFixedArray > New(IsolateT *isolate, int capacity, AllocationType allocation=AllocationType::kTrusted)
static Handle< TrustedWeakFixedArray > New(IsolateT *isolate, int capacity)
static V8_INLINE constexpr Tagged_t CompressAny(Address tagged)
static constexpr int kSize
static constexpr int SizeForCapacity(int capacity)
#define V8_INFINITY
Definition globals.h:23
#define V8_STATIC_ROOTS_GENERATION_BOOL
Definition globals.h:135
Isolate * isolate
#define WELL_KNOWN_SYMBOL_LIST_GENERATOR(V, _)
#define INTERNALIZED_STRING_FOR_PROTECTOR_LIST_GENERATOR(V, _)
#define IMPORTANT_INTERNALIZED_STRING_LIST_GENERATOR(V, _)
#define NOT_IMPORTANT_INTERNALIZED_STRING_LIST_GENERATOR(V, _)
#define PUBLIC_SYMBOL_LIST_GENERATOR(V, _)
#define WELL_KNOWN_SYMBOL_FOR_PROTECTOR_LIST_GENERATOR(V, _)
#define EXTRA_IMPORTANT_INTERNALIZED_STRING_LIST_GENERATOR(V, _)
#define SYMBOL_FOR_PROTECTOR_LIST_GENERATOR(V, _)
#define PUBLIC_SYMBOL_FOR_PROTECTOR_LIST_GENERATOR(V, _)
#define SINGLE_CHARACTER_INTERNALIZED_STRING_LIST_GENERATOR(V_, _)
#define NOT_IMPORTANT_PRIVATE_SYMBOL_LIST_GENERATOR(V, _)
#define IMPORTANT_PRIVATE_SYMBOL_LIST_GENERATOR(V, _)
SharedFunctionInfoRef shared
std::map< const std::string, const std::string > map
ZoneVector< RpoNumber > & result
V8_INLINE constexpr bool IsMaybeReadOnlyJSObject(InstanceType instance_type)
constexpr int kTaggedSize
Definition globals.h:542
constexpr double kMaxSafeInteger
Definition globals.h:1985
constexpr int kInt64Size
Definition globals.h:402
@ SKIP_WRITE_BARRIER
Definition objects.h:52
@ UPDATE_WRITE_BARRIER
Definition objects.h:55
constexpr intptr_t kObjectAlignment
Definition globals.h:930
@ USE_CUSTOM_MINIMUM_CAPACITY
Definition globals.h:1568
constexpr uint64_t kHoleNanInt64
Definition globals.h:1960
Address Tagged_t
Definition globals.h:547
@ TERMINAL_FAST_ELEMENTS_KIND
V8_INLINE DirectHandle< T > direct_handle(Tagged< T > object, Isolate *isolate)
const int kVariableSizeSentinel
Definition objects.h:84
Handle< To > UncheckedCast(Handle< From > value)
Definition handles-inl.h:55
constexpr int kSystemPointerSize
Definition globals.h:410
static const int kInvalidEnumCacheSentinel
bool IsTerminalElementsKind(ElementsKind kind)
refactor address components for immediate indexing make OptimizeMaglevOnNextCall optimize to turbofan instead of maglev filter for tracing turbofan compilation trace turbo cfg trace TurboFan s graph trimmer trace TurboFan s control equivalence trace TurboFan s register allocator trace stack load store counters for optimized code in run fuzzing &&concurrent_recompilation trace_turbo trace_turbo_scheduled trace_turbo_stack_accesses verify TurboFan machine graph of code stubs enable FixedArray bounds checks print TurboFan statistics of wasm compilations maximum cumulative size of bytecode considered for inlining scale factor of bytecode size used to calculate the inlining budget * KB
Definition flags.cc:1366
DONT_OVERRIDE DISABLE_ALLOCATION_SITES DISABLE_ALLOCATION_SITES HOLEY_DOUBLE_ELEMENTS
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit Map::Bits2::IsImmutablePrototypeBit Map::Bits3::IsDeprecatedBit Map::Bits3::IsPrototypeMapBit bit_field3
Definition map-inl.h:137
bool IsDictionaryElementsKind(ElementsKind kind)
constexpr AdaptArguments kAdapt
Definition globals.h:2775
const int kSmiMinValue
constexpr uint32_t kMaxUInt32
Definition globals.h:387
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
static constexpr RelaxedStoreTag kRelaxedStore
Definition globals.h:2911
#define STRUCT_LIST(V)
#define STRING_TYPE_LIST(V)
#define DATA_HANDLER_LIST(V, _)
#define ALLOCATION_SITE_LIST(V, _)
#define PUBLIC_SYMBOL_INIT(_, name, description)
#define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name)
base::Vector< const char > contents
#define STRUCT_TABLE_ELEMENT(TYPE, Name, name)
#define ALLOCATE_ALWAYS_SHARED_SPACE_JSOBJECT_MAP(instance_type, size, field_name)
#define INTERNALIZED_STRING_INIT(_, name, description)
#define WELL_KNOWN_SYMBOL_INIT(_, name, description)
#define INIT_SIDE_EFFECT_FLAG(_, accessor_name, AccessorName, GetterType, SetterType)
#define ENSURE_SINGLE_CHAR_STRINGS_ARE_SINGLE_CHAR(_, name, contents)
#define ALLOCATE_SYMBOL_STRING(_, name, description)
#define TORQUE_ALLOCATE_MAP(NAME, Name, name)
#define TORQUE_ALLOCATE_VARSIZE_MAP(NAME, Name, name)
#define ALLOCATE_MAP(instance_type, size, field_name)
#define ALLOCATION_SITE_ELEMENT(_, TYPE, Name, Size, name)
#define DATA_HANDLER_ELEMENT(_, TYPE, Name, Size, name)
#define STRING_TYPE_ELEMENT(type, size, name, CamelName)
#define ALLOCATE_AND_SET_ROOT(Type, name, Size)
#define INIT_ACCESSOR_INFO(_, accessor_name, AccessorName,...)
#define CONSTANT_STRING_ELEMENT(_, name, contents)
#define SYMBOL_INIT(_, name)
#define ALLOCATE_VARSIZE_MAP(instance_type, field_name)
#define CHECK_IMPLIES(lhs, rhs)
#define CHECK(condition)
Definition logging.h:124
#define CHECK_LT(lhs, rhs)
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define USE(...)
Definition macros.h:293
constexpr T RoundUp(T x, intptr_t m)
Definition macros.h:387
#define IF_WASM(V,...)
Definition macros.h:472
#define V8_STATIC_ROOTS_BOOL
Definition v8config.h:1001