global-handles.cc
// Copyright 2009 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/handles/global-handles.h"

#include <algorithm>
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <map>

#include "src/api/api-inl.h"
#include "src/base/logging.h"
#include "src/common/globals.h"
#include "src/heap/base/stack.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/heap-inl.h"
#include "src/heap/local-heap.h"
#include "src/init/v8.h"
#include "src/objects/slots.h"
#include "src/sandbox/isolate.h"
#include "src/utils/utils.h"
namespace v8 {
namespace internal {

namespace {

constexpr size_t kBlockSize = 256;

}  // namespace

// Various internal weakness types for Persistent and Global handles.
enum class WeaknessType {
  // Weakness with custom callback and an embedder-provided parameter.
  kCallback,
  // Weakness with custom callback and an embedder-provided parameter. In
  // addition the first two embedder fields are passed along. Note that the
  // internal fields must contain aligned non-V8 pointers. Getting pointers to
  // V8 objects through this interface would be GC unsafe so in that case the
  // embedder gets a null pointer instead.
  kCallbackWithTwoEmbedderFields,
  // Weakness where the handle is automatically reset in the garbage collector
  // when the object is no longer reachable.
  kNoCallback,
};
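
// Rough embedder-side sketch (names like |state| and |OnGone| are
// placeholders; assumes the public v8::Global / PersistentBase::SetWeak API):
// each SetWeak overload maps onto one of the weakness types above.
//
//   v8::Global<v8::Object> g(isolate, obj);
//   g.SetWeak(&state, OnGone, v8::WeakCallbackType::kParameter);
//       // -> WeaknessType::kCallback
//   g.SetWeak(&state, OnGone, v8::WeakCallbackType::kInternalFields);
//       // -> WeaknessType::kCallbackWithTwoEmbedderFields
//   g.SetWeak();  // -> WeaknessType::kNoCallback (handle is just reset)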

template <class _NodeType>
class GlobalHandles::NodeBlock final {
 public:
  using BlockType = NodeBlock<_NodeType>;
  using NodeType = _NodeType;

  V8_INLINE static const NodeBlock* From(const NodeType* node);
  V8_INLINE static NodeBlock* From(NodeType* node);

  NodeBlock(GlobalHandles* global_handles,
            GlobalHandles::NodeSpace<NodeType>* space,
            NodeBlock* next) V8_NOEXCEPT : next_(next),
                                           global_handles_(global_handles),
                                           space_(space) {}

  NodeBlock(const NodeBlock&) = delete;
  NodeBlock& operator=(const NodeBlock&) = delete;

  NodeType* at(size_t index) { return &nodes_[index]; }
  const NodeType* at(size_t index) const { return &nodes_[index]; }
  GlobalHandles::NodeSpace<NodeType>* space() const { return space_; }
  GlobalHandles* global_handles() const { return global_handles_; }

  V8_INLINE bool IncreaseUsage();
  V8_INLINE bool DecreaseUsage();

  V8_INLINE void ListAdd(NodeBlock** top);
  V8_INLINE void ListRemove(NodeBlock** top);

  NodeBlock* next() const { return next_; }
  NodeBlock* next_used() const { return next_used_; }

  const void* begin_address() const { return nodes_; }
  const void* end_address() const { return &nodes_[kBlockSize]; }

 private:
  NodeType nodes_[kBlockSize];
  NodeBlock* const next_;
  GlobalHandles* const global_handles_;
  GlobalHandles::NodeSpace<NodeType>* const space_;
  NodeBlock* next_used_ = nullptr;
  NodeBlock* prev_used_ = nullptr;
  uint32_t used_nodes_ = 0;
};

template <class NodeType>
const GlobalHandles::NodeBlock<NodeType>*
GlobalHandles::NodeBlock<NodeType>::From(const NodeType* node) {
  const NodeType* firstNode = node - node->index();
  const BlockType* block = reinterpret_cast<const BlockType*>(firstNode);
  DCHECK_EQ(node, block->at(node->index()));
  return block;
}

template <class NodeType>
GlobalHandles::NodeBlock<NodeType>* GlobalHandles::NodeBlock<NodeType>::From(
    NodeType* node) {
  NodeType* firstNode = node - node->index();
  BlockType* block = reinterpret_cast<BlockType*>(firstNode);
  DCHECK_EQ(node, block->at(node->index()));
  return block;
}
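
// Block recovery is pure pointer arithmetic: nodes_ is the first member of
// NodeBlock, so stepping a node pointer back by its own index lands on
// nodes_[0], whose address equals the block's address. Invariant sketch
// (hypothetical values):
//
//   NodeBlock<Node>* b = ...;
//   Node* n = b->at(42);                      // n->index() == 42
//   DCHECK_EQ(b, NodeBlock<Node>::From(n));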

template <class NodeType>
bool GlobalHandles::NodeBlock<NodeType>::IncreaseUsage() {
  DCHECK_LT(used_nodes_, kBlockSize);
  return used_nodes_++ == 0;
}

template <class NodeType>
void GlobalHandles::NodeBlock<NodeType>::ListAdd(BlockType** top) {
  BlockType* old_top = *top;
  *top = this;
  next_used_ = old_top;
  prev_used_ = nullptr;
  if (old_top != nullptr) {
    old_top->prev_used_ = this;
  }
}

template <class NodeType>
bool GlobalHandles::NodeBlock<NodeType>::DecreaseUsage() {
  DCHECK_GT(used_nodes_, 0);
  return --used_nodes_ == 0;
}

template <class NodeType>
void GlobalHandles::NodeBlock<NodeType>::ListRemove(BlockType** top) {
  if (next_used_ != nullptr) next_used_->prev_used_ = prev_used_;
  if (prev_used_ != nullptr) prev_used_->next_used_ = next_used_;
  if (this == *top) {
    *top = next_used_;
  }
}
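
// The used-block list is doubly linked so a block can unlink itself in O(1)
// when its last node is released. Call-site pattern (mirrors Allocate/Free
// below):
//
//   if (block->IncreaseUsage()) block->ListAdd(&first_used_block_);     // 0 -> 1
//   if (block->DecreaseUsage()) block->ListRemove(&first_used_block_);  // 1 -> 0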

template <class BlockType>
class GlobalHandles::NodeIterator final {
 public:
  using NodeType = typename BlockType::NodeType;

  // Iterator traits.
  using iterator_category = std::forward_iterator_tag;
  using difference_type = std::ptrdiff_t;
  using value_type = NodeType*;
  using reference = value_type;
  using pointer = value_type*;

  explicit NodeIterator(BlockType* block) V8_NOEXCEPT : block_(block) {}
  NodeIterator(NodeIterator&& other) V8_NOEXCEPT : block_(other.block_),
                                                   index_(other.index_) {}

  NodeIterator(const NodeIterator&) = delete;
  NodeIterator& operator=(const NodeIterator&) = delete;

  bool operator==(const NodeIterator& other) const {
    return block_ == other.block_;
  }
  bool operator!=(const NodeIterator& other) const {
    return block_ != other.block_;
  }

  NodeIterator& operator++() {
    if (++index_ < kBlockSize) return *this;
    index_ = 0;
    block_ = block_->next_used();
    return *this;
  }

  NodeType* operator*() { return block_->at(index_); }
  NodeType* operator->() { return block_->at(index_); }

 private:
  BlockType* block_ = nullptr;
  size_t index_ = 0;
};
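
// Equality deliberately ignores index_: an iterator is only ever compared
// against NodeSpace::end(), i.e. NodeIterator(nullptr), so running off the
// used-block chain (block_ == nullptr) is the sole termination test.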

template <class NodeType>
class GlobalHandles::NodeSpace final {
 public:
  using BlockType = NodeBlock<NodeType>;
  using iterator = NodeIterator<BlockType>;

  static NodeSpace* From(NodeType* node);
  static void Release(NodeType* node);

  explicit NodeSpace(GlobalHandles* global_handles) V8_NOEXCEPT
      : global_handles_(global_handles) {}
  ~NodeSpace();

  V8_INLINE NodeType* Allocate();

  iterator begin() { return iterator(first_used_block_); }
  iterator end() { return iterator(nullptr); }

  size_t TotalSize() const { return blocks_ * sizeof(NodeType) * kBlockSize; }
  size_t handles_count() const { return handles_count_; }

 private:
  void PutNodesOnFreeList(BlockType* block);
  V8_INLINE void Free(NodeType* node);

  GlobalHandles* const global_handles_;
  BlockType* first_block_ = nullptr;
  BlockType* first_used_block_ = nullptr;
  NodeType* first_free_ = nullptr;
  size_t blocks_ = 0;
  size_t handles_count_ = 0;
};

template <class NodeType>
GlobalHandles::NodeSpace<NodeType>::~NodeSpace() {
  auto* block = first_block_;
  while (block != nullptr) {
    auto* tmp = block->next();
    delete block;
    block = tmp;
  }
}

template <class NodeType>
NodeType* GlobalHandles::NodeSpace<NodeType>::Allocate() {
  if (first_free_ == nullptr) {
    first_block_ = new BlockType(global_handles_, this, first_block_);
    blocks_++;
    PutNodesOnFreeList(first_block_);
  }
  DCHECK_NOT_NULL(first_free_);
  NodeType* node = first_free_;
  first_free_ = first_free_->next_free();
  BlockType* block = BlockType::From(node);
  if (block->IncreaseUsage()) {
    block->ListAdd(&first_used_block_);
  }
  global_handles_->isolate()->counters()->global_handles()->Increment();
  handles_count_++;
  node->CheckNodeIsFreeNode();
  return node;
}

template <class NodeType>
void GlobalHandles::NodeSpace<NodeType>::PutNodesOnFreeList(BlockType* block) {
  for (int32_t i = kBlockSize - 1; i >= 0; --i) {
    NodeType* node = block->at(i);
    const uint8_t index = static_cast<uint8_t>(i);
    DCHECK_EQ(i, index);
    node->set_index(index);
    node->Free(first_free_);
    first_free_ = node;
  }
}
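
// Nodes are pushed onto the free list in reverse index order, so a fresh
// block hands out nodes_[0], nodes_[1], ... in ascending address order.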

template <class NodeType>
void GlobalHandles::NodeSpace<NodeType>::Release(NodeType* node) {
  BlockType* block = BlockType::From(node);
  block->space()->Free(node);
}

template <class NodeType>
void GlobalHandles::NodeSpace<NodeType>::Free(NodeType* node) {
  CHECK(node->IsInUse());
  node->Release(first_free_);
  first_free_ = node;
  BlockType* block = BlockType::From(node);
  if (block->DecreaseUsage()) {
    block->ListRemove(&first_used_block_);
  }
  global_handles_->isolate()->counters()->global_handles()->Decrement();
  handles_count_--;
}

template <class Child>
class NodeBase {
 public:
  static const Child* FromLocation(const Address* location) {
    return reinterpret_cast<const Child*>(location);
  }

  static Child* FromLocation(Address* location) {
    return reinterpret_cast<Child*>(location);
  }

  NodeBase() {
    DCHECK_EQ(offsetof(NodeBase, object_), 0);
    DCHECK_EQ(offsetof(NodeBase, class_id_), Internals::kNodeClassIdOffset);
    DCHECK_EQ(offsetof(NodeBase, flags_), Internals::kNodeFlagsOffset);
  }

#ifdef ENABLE_GLOBAL_HANDLE_ZAPPING
  ~NodeBase() {
    ClearFields();
    data_.next_free = nullptr;
    index_ = 0;
  }
#endif

  void Free(Child* free_list) {
    ClearFields();
    AsChild()->MarkAsFree();
    data_.next_free = free_list;
  }

  // Publishes all internal state to be consumed by other threads.
  IndirectHandle<Object> Publish(Tagged<Object> object) {
    DCHECK(!AsChild()->IsInUse());
    data_.parameter = nullptr;
    AsChild()->MarkAsUsed();
    reinterpret_cast<std::atomic<Address>*>(&object_)->store(
        object.ptr(), std::memory_order_release);
    DCHECK(AsChild()->IsInUse());
    return handle();
  }
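
  // Note: Publish() marks the node used *before* the release store, so a
  // thread that reads the slot with a matching acquire load observes a fully
  // initialized node. Reader-side counterpart (sketch only):
  //
  //   Address obj = reinterpret_cast<const std::atomic<Address>*>(&object_)
  //                     ->load(std::memory_order_acquire);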

  void Release(Child* free_list) {
    DCHECK(AsChild()->IsInUse());
    Free(free_list);
    DCHECK(!AsChild()->IsInUse());
  }

  Tagged<Object> object() const { return Tagged<Object>(object_); }
  FullObjectSlot location() { return FullObjectSlot(&object_); }
  IndirectHandle<Object> handle() { return IndirectHandle<Object>(&object_); }
  Address raw_object() const { return object_; }

  uint8_t index() const { return index_; }
  void set_index(uint8_t value) { index_ = value; }

  uint16_t wrapper_class_id() const { return class_id_; }
  bool has_wrapper_class_id() const {
    return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId;
  }

  // Accessors for next free node in the free list.
  Child* next_free() {
    DCHECK(!AsChild()->IsInUse());
    return data_.next_free;
  }

  void set_parameter(void* parameter) {
    DCHECK(AsChild()->IsInUse());
    data_.parameter = parameter;
  }
  void* parameter() const {
    DCHECK(AsChild()->IsInUse());
    return data_.parameter;
  }

  void CheckNodeIsFreeNode() const {
    DCHECK_EQ(kNullAddress, object_);
    DCHECK_EQ(v8::HeapProfiler::kPersistentHandleNoClassId, class_id_);
    AsChild()->CheckNodeIsFreeNodeImpl();
  }

 protected:
  Child* AsChild() { return reinterpret_cast<Child*>(this); }
  const Child* AsChild() const { return reinterpret_cast<const Child*>(this); }

  void ClearFields() {
    // Zap the values for eager trapping.
    object_ = kGlobalHandleZapValue;
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    AsChild()->ClearImplFields();
  }

  // Storage for object pointer.
  //
  // Placed first to avoid offset computation. The stored data is equivalent to
  // an Object. It is stored as a plain Address for convenience (smallest number
  // of casts), and because it is a private implementation detail: the public
  // interface provides type safety.
  Address object_ = kNullAddress;

  // Class id set by the embedder.
  uint16_t class_id_ = 0;

  // Index in the containing handle block.
  uint8_t index_ = 0;

  uint8_t flags_ = 0;

  // The meaning of this field depends on node state:
  // - Node in free list: Stores next free node pointer.
  // - Otherwise, specific to the node implementation.
  union {
    Child* next_free = nullptr;
    void* parameter;
  } data_;
};
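
// The field order above lines up with v8::Internals::kNodeClassIdOffset and
// kNodeFlagsOffset: the inlined public API reads class id and flags straight
// out of a node at fixed offsets, which NodeBase() asserts via offsetof.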

namespace {

void ExtractInternalFields(Tagged<JSObject> jsobject, void** embedder_fields,
                           int len) {
  int field_count = jsobject->GetEmbedderFieldCount();
  IsolateForSandbox isolate = GetIsolateForSandbox(jsobject);
  for (int i = 0; i < len; ++i) {
    if (field_count == i) break;
    void* pointer;
    if (EmbedderDataSlot(jsobject, i).ToAlignedPointer(isolate, &pointer)) {
      embedder_fields[i] = pointer;
    }
  }
}

}  // namespace

class GlobalHandles::Node final : public NodeBase<GlobalHandles::Node> {
 public:
  // State transition diagram:
  // FREE -> NORMAL <-> WEAK -> {NEAR_DEATH, FREE} -> FREE
  enum State {
    FREE = 0,
    // Strong global handle.
    NORMAL,
    // Flagged as weak and still considered as live.
    WEAK,
    // Temporary state used in GC to sanity check that handles are reset in
    // their first pass callback.
    NEAR_DEATH,
  };
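
  // Lifetime of a weak handle through these states (sketch):
  //   Create()           FREE   -> NORMAL
  //   MakeWeak()         NORMAL -> WEAK
  //   GC finds the object dead:
  //     kNoCallback      WEAK -> FREE (slot reset in place)
  //     kCallback*       WEAK -> NEAR_DEATH (first-pass callback pending)
  //   first-pass callback must Reset() the handle: NEAR_DEATH -> FREE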

  Node() {
    static_assert(static_cast<int>(NodeState::kMask) ==
                  Internals::kNodeStateMask);
    static_assert(WEAK == Internals::kNodeStateIsWeakValue);
    set_in_young_list(false);
  }

  Node(const Node&) = delete;
  Node& operator=(const Node&) = delete;

  const char* label() const {
    return state() == NORMAL ? reinterpret_cast<char*>(data_.parameter)
                             : nullptr;
  }

  // State and flag accessors.

  State state() const { return NodeState::decode(flags_); }
  void set_state(State state) { flags_ = NodeState::update(flags_, state); }

  bool is_in_young_list() const { return IsInYoungList::decode(flags_); }
  void set_in_young_list(bool v) { flags_ = IsInYoungList::update(flags_, v); }

  WeaknessType weakness_type() const {
    return NodeWeaknessType::decode(flags_);
  }
  void set_weakness_type(WeaknessType weakness_type) {
    flags_ = NodeWeaknessType::update(flags_, weakness_type);
  }

  bool IsWeak() const { return state() == WEAK; }

  bool IsInUse() const { return state() != FREE; }

  bool IsPhantomResetHandle() const {
    return weakness_type() == WeaknessType::kNoCallback;
  }

  bool IsWeakOrStrongRetainer() const {
    return state() == NORMAL || state() == WEAK;
  }

  bool IsStrongRetainer() const { return state() == NORMAL; }

  bool IsWeakRetainer() const { return state() == WEAK; }

  bool has_callback() const { return weak_callback_ != nullptr; }

  // Accessors for next free node in the free list.
  Node* next_free() {
    DCHECK_EQ(FREE, state());
    return data_.next_free;
  }

  void MakeWeak(void* parameter,
                WeakCallbackInfo<void>::Callback phantom_callback,
                v8::WeakCallbackType type) {
    DCHECK_NOT_NULL(phantom_callback);
    DCHECK(IsInUse());
    CHECK_NE(object_, kGlobalHandleZapValue);
    set_state(WEAK);
    switch (type) {
      case v8::WeakCallbackType::kParameter:
        set_weakness_type(WeaknessType::kCallback);
        break;
      case v8::WeakCallbackType::kInternalFields:
        set_weakness_type(WeaknessType::kCallbackWithTwoEmbedderFields);
        break;
    }
    set_parameter(parameter);
    weak_callback_ = phantom_callback;
  }

  void MakeWeak(Address** location_addr) {
    DCHECK(IsInUse());
    CHECK_NE(object_, kGlobalHandleZapValue);
    set_state(WEAK);
    set_weakness_type(WeaknessType::kNoCallback);
    set_parameter(location_addr);
    weak_callback_ = nullptr;
  }

  void* ClearWeakness() {
    DCHECK(IsInUse());
    void* p = parameter();
    set_state(NORMAL);
    set_parameter(nullptr);
    return p;
  }

  void AnnotateStrongRetainer(const char* label) {
    DCHECK_EQ(NORMAL, state());
    data_.parameter = const_cast<char*>(label);
  }

  void CollectPhantomCallbackData(
      std::vector<std::pair<Node*, PendingPhantomCallback>>*
          pending_phantom_callbacks) {
    DCHECK(weakness_type() == WeaknessType::kCallback ||
           weakness_type() == WeaknessType::kCallbackWithTwoEmbedderFields);
    DCHECK_EQ(WEAK, state());

    void* embedder_fields[v8::kEmbedderFieldsInWeakCallback] = {nullptr,
                                                                nullptr};
    if (weakness_type() == WeaknessType::kCallbackWithTwoEmbedderFields &&
        IsJSObject(object())) {
      ExtractInternalFields(Cast<JSObject>(object()), embedder_fields,
                            v8::kEmbedderFieldsInWeakCallback);
    }

    // Zap with something dangerous.
    location().store(Tagged<Object>(0xCA11));

    pending_phantom_callbacks->push_back(std::make_pair(
        this,
        PendingPhantomCallback(weak_callback_, parameter(), embedder_fields)));
    DCHECK(IsInUse());
    set_state(NEAR_DEATH);
  }
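
  // From here on the slot holds the 0xCA11 ("call") zap pattern: any stale
  // read through the old location trips over a nonsense value instead of
  // resurrecting a dead object. The callback, its parameter, and the two
  // extracted embedder fields travel on via PendingPhantomCallback.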

  void ResetPhantomHandle() {
    DCHECK_EQ(WeaknessType::kNoCallback, weakness_type());
    DCHECK_EQ(WEAK, state());
    Address** handle = reinterpret_cast<Address**>(parameter());
    *handle = nullptr;
    NodeSpace<Node>::Release(this);
  }

  void MarkAsFree() { set_state(FREE); }
  void MarkAsUsed() { set_state(NORMAL); }

  GlobalHandles* global_handles() {
    return NodeBlock<Node>::From(this)->global_handles();
  }

 private:
  // Fields that are not used for managing node memory.
  void ClearImplFields() { weak_callback_ = nullptr; }

  void CheckNodeIsFreeNodeImpl() const {
    DCHECK_EQ(nullptr, weak_callback_);
    DCHECK(!IsInUse());
  }

  // This stores three flags (independent, partially_dependent and
  // in_young_list) and a State.
  using NodeState = base::BitField8<State, 0, 2>;
  // Tracks whether the node is contained in the set of young nodes. This bit
  // persists across allocating and freeing a node as it's only cleaned up
  // when young nodes are processed.
  using IsInYoungList = NodeState::Next<bool, 1>;
  using NodeWeaknessType = IsInYoungList::Next<WeaknessType, 2>;

  // Handle specific callback - might be a weak reference in disguise.
  WeakCallbackInfo<void>::Callback weak_callback_;

  friend class NodeBase<Node>;
};

size_t GlobalHandles::TotalSize() const { return regular_nodes_->TotalSize(); }

size_t GlobalHandles::UsedSize() const {
  return regular_nodes_->handles_count() * sizeof(Node);
}

size_t GlobalHandles::handles_count() const {
  return regular_nodes_->handles_count();
}

GlobalHandles::GlobalHandles(Isolate* isolate)
    : isolate_(isolate),
      regular_nodes_(std::make_unique<NodeSpace<GlobalHandles::Node>>(this)) {}

GlobalHandles::~GlobalHandles() = default;

namespace {

template <typename NodeType>
bool NeedsTrackingInYoungNodes(Tagged<Object> value, NodeType* node) {
  return HeapLayout::InYoungGeneration(value) && !node->is_in_young_list();
}

}  // namespace

IndirectHandle<Object> GlobalHandles::Create(Tagged<Object> value) {
  GlobalHandles::Node* node = regular_nodes_->Allocate();
  if (NeedsTrackingInYoungNodes(value, node)) {
    young_nodes_.push_back(node);
    node->set_in_young_list(true);
  }
  return node->Publish(value);
}

IndirectHandle<Object> GlobalHandles::Create(Address value) {
  return Create(Tagged<Object>(value));
}

IndirectHandle<Object> GlobalHandles::CopyGlobal(Address* location) {
  DCHECK_NOT_NULL(location);
  GlobalHandles* global_handles =
      Node::FromLocation(location)->global_handles();
#ifdef VERIFY_HEAP
  if (v8_flags.verify_heap) {
    Object::ObjectVerify(Tagged<Object>(*location), global_handles->isolate());
  }
#endif  // VERIFY_HEAP
  return global_handles->Create(*location);
}

// static
void GlobalHandles::MoveGlobal(Address** from, Address** to) {
  DCHECK_NOT_NULL(*from);
  DCHECK_NOT_NULL(*to);
  DCHECK_EQ(*from, *to);
  Node* node = Node::FromLocation(*from);
  if (node->IsWeak() && node->IsPhantomResetHandle()) {
    node->set_parameter(to);
  }
  // Strong handles do not require fixups.
}
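
// MoveGlobal() exists because a phantom reset node (kNoCallback) stores the
// address of the embedder's handle slot as its parameter. When the embedder
// moves the owning handle, the node must later reset the new location:
//
//   v8::Global<v8::Value> a(isolate, v);
//   a.SetWeak();                            // node parameter = &a's slot
//   v8::Global<v8::Value> b(std::move(a));  // MoveGlobal repoints it to &b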

void GlobalHandles::Destroy(Address* location) {
  if (location != nullptr) {
    NodeSpace<Node>::Release(Node::FromLocation(location));
  }
}

using GenericCallback = v8::WeakCallbackInfo<void>::Callback;

void GlobalHandles::MakeWeak(Address* location, void* parameter,
                             GenericCallback phantom_callback,
                             v8::WeakCallbackType type) {
  Node::FromLocation(location)->MakeWeak(parameter, phantom_callback, type);
}

void GlobalHandles::MakeWeak(Address** location_addr) {
  Node::FromLocation(*location_addr)->MakeWeak(location_addr);
}

void* GlobalHandles::ClearWeakness(Address* location) {
  return Node::FromLocation(location)->ClearWeakness();
}

void GlobalHandles::AnnotateStrongRetainer(Address* location,
                                           const char* label) {
  Node::FromLocation(location)->AnnotateStrongRetainer(label);
}

bool GlobalHandles::IsWeak(Address* location) {
  return Node::FromLocation(location)->IsWeak();
}

bool GlobalHandles::ResetWeakNodeIfDead(
    Node* node, WeakSlotCallbackWithHeap should_reset_handle) {
  DCHECK(node->IsWeakRetainer());

  if (!should_reset_handle(isolate()->heap(), node->location())) return false;

  switch (node->weakness_type()) {
    case WeaknessType::kNoCallback:
      node->ResetPhantomHandle();
      break;
    case WeaknessType::kCallback:
      [[fallthrough]];
    case WeaknessType::kCallbackWithTwoEmbedderFields:
      node->CollectPhantomCallbackData(&pending_phantom_callbacks_);
      break;
  }
  return true;
}

DISABLE_CFI_PERF
void GlobalHandles::IterateWeakRootsForPhantomHandles(
    WeakSlotCallbackWithHeap should_reset_handle) {
  for (Node* node : *regular_nodes_) {
    if (node->IsWeakRetainer()) ResetWeakNodeIfDead(node, should_reset_handle);
  }
}

void GlobalHandles::IterateYoungStrongAndDependentRoots(RootVisitor* v) {
  for (Node* node : young_nodes_) {
    if (node->IsStrongRetainer()) {
      v->VisitRootPointer(Root::kGlobalHandles, node->label(),
                          node->location());
    }
  }
}

void GlobalHandles::ProcessWeakYoungObjects(
    RootVisitor* v, WeakSlotCallbackWithHeap should_reset_handle) {
  for (Node* node : young_nodes_) {
    DCHECK(node->is_in_young_list());

    if (node->IsWeakRetainer() &&
        !ResetWeakNodeIfDead(node, should_reset_handle)) {
      // Node is weak and alive, so it should be passed onto the visitor if
      // present.
      if (v) {
        v->VisitRootPointer(Root::kGlobalHandles, node->label(),
                            node->location());
      }
    }
  }
}

void GlobalHandles::InvokeSecondPassPhantomCallbacks() {
  DCHECK(!AllowJavascriptExecution::IsAllowed(isolate()));
  DCHECK(AllowGarbageCollection::IsAllowed());

  if (second_pass_callbacks_.empty()) return;

  // The callbacks may allocate, which in turn may lead to another GC run. If we
  // are already processing the callbacks, we do not want to start over from
  // within the inner GC. Newly added callbacks will always be run by the
  // outermost GC run only.
  GCCallbacksScope scope(isolate()->heap());
  if (scope.CheckReenter()) {
    TRACE_EVENT0("v8", "V8.GCPhantomHandleProcessingCallback");
    isolate()->heap()->CallGCPrologueCallbacks(
        GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags,
        GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
    {
      TRACE_GC(isolate()->heap()->tracer(),
               GCTracer::Scope::HEAP_EXTERNAL_SECOND_PASS_CALLBACKS);
      while (!second_pass_callbacks_.empty()) {
        auto callback = second_pass_callbacks_.back();
        second_pass_callbacks_.pop_back();
        callback.Invoke(isolate(), PendingPhantomCallback::kSecondPass);
      }
    }
    isolate()->heap()->CallGCEpilogueCallbacks(
        GCType::kGCTypeProcessWeakCallbacks, kNoGCCallbackFlags,
        GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
  }
}

namespace {

template <typename T>
void UpdateListOfYoungNodesImpl(Isolate* isolate, std::vector<T*>* node_list) {
  size_t last = 0;
  for (T* node : *node_list) {
    DCHECK(node->is_in_young_list());
    if (node->IsInUse() && node->state() != T::NEAR_DEATH) {
      if (HeapLayout::InYoungGeneration(node->object())) {
        (*node_list)[last++] = node;
        isolate->heap()->IncrementNodesCopiedInNewSpace();
      } else {
        node->set_in_young_list(false);
        isolate->heap()->IncrementNodesPromoted();
      }
    } else {
      node->set_in_young_list(false);
      isolate->heap()->IncrementNodesDiedInNewSpace(1);
    }
  }
  DCHECK_LE(last, node_list->size());
  node_list->resize(last);
  node_list->shrink_to_fit();
}

template <typename T>
void ClearListOfYoungNodesImpl(Isolate* isolate, std::vector<T*>* node_list) {
  for (T* node : *node_list) {
    DCHECK(node->is_in_young_list());
    node->set_in_young_list(false);
    DCHECK_IMPLIES(node->IsInUse() && node->state() != T::NEAR_DEATH,
                   !HeapLayout::InYoungGeneration(node->object()));
  }
  isolate->heap()->IncrementNodesDiedInNewSpace(
      static_cast<int>(node_list->size()));
  node_list->clear();
  node_list->shrink_to_fit();
}

}  // namespace

void GlobalHandles::UpdateListOfYoungNodes() {
  UpdateListOfYoungNodesImpl(isolate_, &young_nodes_);
}

void GlobalHandles::ClearListOfYoungNodes() {
  ClearListOfYoungNodesImpl(isolate_, &young_nodes_);
}

size_t GlobalHandles::InvokeFirstPassWeakCallbacks() {
  last_gc_custom_callbacks_ = 0;
  if (pending_phantom_callbacks_.empty()) return 0;

  TRACE_GC(isolate()->heap()->tracer(),
           GCTracer::Scope::HEAP_EXTERNAL_WEAK_GLOBAL_HANDLES);

  size_t freed_nodes = 0;
  std::vector<std::pair<Node*, PendingPhantomCallback>>
      pending_phantom_callbacks;
  pending_phantom_callbacks.swap(pending_phantom_callbacks_);
  // The initial pass callbacks must simply clear the nodes.
  for (auto& pair : pending_phantom_callbacks) {
    Node* node = pair.first;
    DCHECK_EQ(Node::NEAR_DEATH, node->state());
    pair.second.Invoke(isolate(), PendingPhantomCallback::kFirstPass);

    // Transition to second pass. It is required that the first pass callback
    // resets the handle using |v8::PersistentBase::Reset|. Also see comments
    // on |v8::WeakCallbackInfo|.
    CHECK_WITH_MSG(Node::FREE == node->state(),
                   "Handle not reset in first callback. See comments on "
                   "|v8::WeakCallbackInfo|.");

    if (pair.second.callback()) second_pass_callbacks_.push_back(pair.second);
    freed_nodes++;
  }
  last_gc_custom_callbacks_ = freed_nodes;
  return 0;
}
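
// Embedder-side shape of the two-pass protocol (sketch only; |State|,
// |FirstPass|, and |SecondPass| are placeholders; assumes the public
// v8::WeakCallbackInfo API). Pass one must only reset the handle; heavy
// cleanup is deferred to the optional second pass:
//
//   void FirstPass(const v8::WeakCallbackInfo<State>& info) {
//     info.GetParameter()->handle.Reset();     // mandatory, checked above
//     info.SetSecondPassCallback(SecondPass);  // optional
//   }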

void GlobalHandles::PendingPhantomCallback::Invoke(Isolate* isolate,
                                                   InvocationType type) {
  Data::Callback* callback_addr = nullptr;
  if (type == kFirstPass) {
    callback_addr = &callback_;
  }
  Data data(reinterpret_cast<v8::Isolate*>(isolate), parameter_,
            embedder_fields_, callback_addr);
  Data::Callback callback = callback_;
  callback_ = nullptr;
  callback(data);
}

void GlobalHandles::PostGarbageCollectionProcessing(
    v8::GCCallbackFlags gc_callback_flags) {
  // Process weak global handle callbacks. This must be done after the
  // GC is completely done, because the callbacks may invoke arbitrary
  // API functions.
  DCHECK_EQ(Heap::NOT_IN_GC, isolate_->heap()->gc_state());

  if (second_pass_callbacks_.empty()) return;

  const bool synchronous_second_pass =
      isolate_->MemorySaverModeEnabled() || v8_flags.predictable ||
      isolate_->heap()->IsTearingDown() ||
      (gc_callback_flags &
       (kGCCallbackFlagForced | kGCCallbackFlagCollectAllAvailableGarbage |
        kGCCallbackFlagSynchronousPhantomCallbackProcessing)) != 0;
  if (synchronous_second_pass) {
    InvokeSecondPassPhantomCallbacks();
    return;
  }

  if (!second_pass_callbacks_task_posted_) {
    second_pass_callbacks_task_posted_ = true;
    V8::GetCurrentPlatform()
        ->GetForegroundTaskRunner(reinterpret_cast<v8::Isolate*>(isolate()))
        ->PostTask(MakeCancelableTask(isolate(), [this] {
          DCHECK(second_pass_callbacks_task_posted_);
          second_pass_callbacks_task_posted_ = false;
          DisallowJavascriptExecution no_js(isolate());
          InvokeSecondPassPhantomCallbacks();
        }));
  }
}

void GlobalHandles::IterateStrongRoots(RootVisitor* v) {
  for (Node* node : *regular_nodes_) {
    if (node->IsStrongRetainer()) {
      v->VisitRootPointer(Root::kGlobalHandles, node->label(),
                          node->location());
    }
  }
}

void GlobalHandles::IterateWeakRoots(RootVisitor* v) {
  for (Node* node : *regular_nodes_) {
    if (node->IsWeak()) {
      v->VisitRootPointer(Root::kGlobalHandles, node->label(),
                          node->location());
    }
  }
}

DISABLE_CFI_PERF
void GlobalHandles::IterateAllRoots(RootVisitor* v) {
  for (Node* node : *regular_nodes_) {
    if (node->IsWeakOrStrongRetainer()) {
      v->VisitRootPointer(Root::kGlobalHandles, node->label(),
                          node->location());
    }
  }
}

DISABLE_CFI_PERF
void GlobalHandles::IterateAllYoungRoots(RootVisitor* v) {
  for (Node* node : young_nodes_) {
    if (node->IsWeakOrStrongRetainer()) {
      v->VisitRootPointer(Root::kGlobalHandles, node->label(),
                          node->location());
    }
  }
}

DISABLE_CFI_PERF
void GlobalHandles::ApplyPersistentHandleVisitor(
    v8::PersistentHandleVisitor* visitor, GlobalHandles::Node* node) {
  Address* value = node->handle().location();
  visitor->VisitPersistentHandle(
      reinterpret_cast<v8::Persistent<v8::Value>*>(&value),
      node->wrapper_class_id());
}

DISABLE_CFI_PERF
void GlobalHandles::IterateAllRootsForTesting(
    v8::PersistentHandleVisitor* visitor) {
  for (Node* node : *regular_nodes_) {
    if (node->IsWeakOrStrongRetainer()) {
      ApplyPersistentHandleVisitor(visitor, node);
    }
  }
}

void GlobalHandles::RecordStats(HeapStats* stats) {
  stats->global_handle_count = 0;
  stats->weak_global_handle_count = 0;
  stats->pending_global_handle_count = 0;
  stats->near_death_global_handle_count = 0;
  stats->free_global_handle_count = 0;
  for (Node* node : *regular_nodes_) {
    stats->global_handle_count += 1;
    if (node->state() == Node::WEAK) {
      stats->weak_global_handle_count += 1;
    } else if (node->state() == Node::NEAR_DEATH) {
      stats->near_death_global_handle_count += 1;
    } else if (node->state() == Node::FREE) {
      stats->free_global_handle_count += 1;
    }
  }
}

#ifdef DEBUG

void GlobalHandles::PrintStats() {
  int total = 0;
  int weak = 0;
  int near_death = 0;
  int destroyed = 0;

  for (Node* node : *regular_nodes_) {
    total++;
    if (node->state() == Node::WEAK) weak++;
    if (node->state() == Node::NEAR_DEATH) near_death++;
    if (node->state() == Node::FREE) destroyed++;
  }

  PrintF("Global Handle Statistics:\n");
  PrintF("  allocated memory = %zuB\n", total * sizeof(Node));
  PrintF("  # weak       = %d\n", weak);
  PrintF("  # near_death = %d\n", near_death);
  PrintF("  # free       = %d\n", destroyed);
  PrintF("  # total      = %d\n", total);
}

void GlobalHandles::Print() {
  PrintF("Global handles:\n");
  for (Node* node : *regular_nodes_) {
    PrintF("  handle %p to %p%s\n", node->location().ToVoidPtr(),
           reinterpret_cast<void*>(node->object().ptr()),
           node->IsWeak() ? " (weak)" : "");
  }
}

#endif

EternalHandles::~EternalHandles() {
  for (Address* block : blocks_) delete[] block;
}

void EternalHandles::IterateAllRoots(RootVisitor* visitor) {
  int limit = size_;
  for (Address* block : blocks_) {
    DCHECK_GT(limit, 0);
    visitor->VisitRootPointers(
        Root::kEternalHandles, nullptr, FullObjectSlot(block),
        FullObjectSlot(block + std::min({limit, kSize})));
    limit -= kSize;
  }
}

void EternalHandles::IterateYoungRoots(RootVisitor* visitor) {
  for (int index : young_node_indices_) {
    visitor->VisitRootPointer(Root::kEternalHandles, nullptr,
                              FullObjectSlot(GetLocation(index)));
  }
}

void EternalHandles::PostGarbageCollectionProcessing() {
  size_t last = 0;
  for (int index : young_node_indices_) {
    if (HeapLayout::InYoungGeneration(Tagged<Object>(*GetLocation(index)))) {
      young_node_indices_[last++] = index;
    }
  }
  DCHECK_LE(last, young_node_indices_.size());
  young_node_indices_.resize(last);
}

void EternalHandles::Create(Isolate* isolate, Tagged<Object> object,
                            int* index) {
  DCHECK_EQ(kInvalidIndex, *index);
  if (object == Tagged<Object>()) return;
  Tagged<Object> the_hole = ReadOnlyRoots(isolate).the_hole_value();
  DCHECK_NE(the_hole, object);
  int block = size_ >> kShift;
  int offset = size_ & kMask;
  // Need to resize.
  if (offset == 0) {
    Address* next_block = new Address[kSize];
    MemsetPointer(FullObjectSlot(next_block), the_hole, kSize);
    blocks_.push_back(next_block);
  }
  DCHECK_EQ(the_hole.ptr(), blocks_[block][offset]);
  blocks_[block][offset] = object.ptr();
  if (HeapLayout::InYoungGeneration(object)) {
    young_node_indices_.push_back(size_);
  }
  *index = size_++;
}

}  // namespace internal
}  // namespace v8