v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
isolate.h
Go to the documentation of this file.
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_EXECUTION_ISOLATE_H_
6#define V8_EXECUTION_ISOLATE_H_
7
8#include <atomic>
9#include <cstddef>
10#include <functional>
11#include <list>
12#include <memory>
13#include <optional>
14#include <queue>
15#include <unordered_map>
16#include <vector>
17
18#include "include/v8-context.h"
19#include "include/v8-internal.h"
20#include "include/v8-isolate.h"
21#include "include/v8-metrics.h"
22#include "include/v8-snapshot.h"
23#include "src/base/macros.h"
27#include "src/common/globals.h"
37#include "src/handles/handles.h"
39#include "src/heap/factory.h"
40#include "src/heap/heap.h"
43#include "src/objects/code.h"
47#include "src/objects/tagged.h"
48#include "src/runtime/runtime.h"
53
54#ifdef DEBUG
56#endif
57
58#if V8_ENABLE_WEBASSEMBLY
59#include "src/wasm/stacks.h"
60#endif
61
62#ifdef V8_INTL_SUPPORT
63#include "unicode/uversion.h" // Define U_ICU_NAMESPACE.
64namespace U_ICU_NAMESPACE {
65class UMemory;
66} // namespace U_ICU_NAMESPACE
67#endif // V8_INTL_SUPPORT
68
69#if USE_SIMULATOR
71namespace v8 {
72namespace internal {
73class SimulatorData;
74}
75} // namespace v8
76#endif
77
78namespace v8_inspector {
79class V8Inspector;
80} // namespace v8_inspector
81
82namespace v8 {
83
84class EmbedderState;
85
86namespace base {
87class RandomNumberGenerator;
88} // namespace base
89
90namespace bigint {
91class Processor;
92}
93
94namespace debug {
95class ConsoleDelegate;
96class AsyncEventDelegate;
97} // namespace debug
98
99namespace internal {
100
102 v8::Isolate* isolate, v8::Local<v8::Context> context,
104 v8::Local<v8::Value> compilation_result, WasmAsyncSuccess success);
105
106namespace heap {
107class HeapTester;
108} // namespace heap
109
110namespace maglev {
111class MaglevConcurrentDispatcher;
112} // namespace maglev
113
114class AddressToIndexHashMap;
115class AstStringConstants;
116class Bootstrapper;
117class BuiltinsConstantsTableBuilder;
118class CancelableTaskManager;
119class Logger;
120class CodeTracer;
121class CommonFrame;
122class CompilationCache;
123class CompilationStatistics;
124class Counters;
125class Debug;
126class Deoptimizer;
127class DescriptorLookupCache;
128class EmbeddedFileWriterInterface;
129class EternalHandles;
130class GlobalHandles;
131class GlobalSafepoint;
132class HandleScopeImplementer;
133class HeapObjectToIndexHashMap;
134class HeapProfiler;
135class InnerPointerToCodeCache;
136class LazyCompileDispatcher;
137class LocalIsolate;
138class V8FileLogger;
139class MaterializedObjectStore;
140class Microtask;
141class MicrotaskQueue;
142class OptimizingCompileDispatcher;
143class OptimizingCompileTaskExecutor;
144class PersistentHandles;
145class PersistentHandlesList;
146class ReadOnlyArtifacts;
147class RegExpStack;
148class RootVisitor;
149class SetupIsolateDelegate;
150class SharedStructTypeRegistry;
151class Simulator;
152class SnapshotData;
153class StackFrame;
154class StringForwardingTable;
155class StringTable;
156class StubCache;
157class ThreadManager;
158class ThreadState;
159class ThreadVisitor; // Defined in v8threads.h
160class TieringManager;
161class TracingCpuProfilerImpl;
162class UnicodeCache;
163struct ManagedPtrDestructor;
164
165template <StateTag Tag>
166class VMState;
167
168namespace baseline {
169class BaselineBatchCompiler;
170} // namespace baseline
171
172namespace interpreter {
173class Interpreter;
174} // namespace interpreter
175
176namespace compiler {
177class NodeObserver;
178class PerIsolateCompilerCache;
179namespace turboshaft {
180class WasmRevecVerifier;
181} // namespace turboshaft
182} // namespace compiler
183
184namespace win64_unwindinfo {
185class BuiltinUnwindInfo;
186} // namespace win64_unwindinfo
187
188namespace metrics {
189class Recorder;
190} // namespace metrics
191
192namespace wasm {
193
194#if V8_ENABLE_DRUMBRAKE
195class WasmExecutionTimer;
196#endif // V8_ENABLE_DRUMBRAKE
197class WasmCodeLookupCache;
198class WasmOrphanedGlobalHandle;
199}
200
201namespace detail {
202class WaiterQueueNode;
203} // namespace detail
204
// Exits the enclosing function with the exception sentinel
// (ReadOnlyRoots::exception()) if `isolate` has a pending exception.
// For functions that return a raw Tagged<Object>. The argument is cached in
// a local so it is evaluated exactly once.
#define RETURN_FAILURE_IF_EXCEPTION(isolate)         \
  do {                                               \
    Isolate* __isolate__ = (isolate);                \
    if (__isolate__->has_exception()) {              \
      return ReadOnlyRoots(__isolate__).exception(); \
    }                                                \
  } while (false)

// As RETURN_FAILURE_IF_EXCEPTION, but additionally notifies `detector`
// (presumably the debug side-effect detector — confirm at call sites) that
// side effects have occurred before returning.
#define RETURN_FAILURE_IF_EXCEPTION_DETECTOR(isolate, detector) \
  do {                                                          \
    Isolate* __isolate__ = (isolate);                           \
    if (__isolate__->has_exception()) {                         \
      detector.AcceptSideEffects();                             \
      return ReadOnlyRoots(__isolate__).exception();            \
    }                                                           \
  } while (false)
221
// Macros for MaybeHandle.

// Returns `value` from the enclosing function if `isolate` has a pending
// exception. `isolate` is evaluated exactly once.
#define RETURN_VALUE_IF_EXCEPTION(isolate, value) \
  do {                                            \
    Isolate* __isolate__ = (isolate);             \
    if (__isolate__->has_exception()) {           \
      return value;                               \
    }                                             \
  } while (false)

// As RETURN_VALUE_IF_EXCEPTION, but first calls detector.AcceptSideEffects().
// The comma expression folds the detector notification into the returned
// value expression, so it only runs on the exception path.
#define RETURN_VALUE_IF_EXCEPTION_DETECTOR(isolate, detector, value) \
  RETURN_VALUE_IF_EXCEPTION(isolate, (detector.AcceptSideEffects(), value))

// Returns the empty MaybeHandle (kNullMaybeHandle) if `isolate` has a
// pending exception.
#define RETURN_EXCEPTION_IF_EXCEPTION(isolate) \
  RETURN_VALUE_IF_EXCEPTION(isolate, kNullMaybeHandle)

// Returns `value` if `call` (a Maybe-returning expression) produced nothing.
// In that case a pending exception must already be set on the isolate.
#define MAYBE_RETURN_ON_EXCEPTION_VALUE(isolate, call, value) \
  do {                                                        \
    if ((call).IsNothing()) {                                 \
      DCHECK((isolate)->has_exception());                     \
      return value;                                           \
    }                                                         \
  } while (false)
245
// Evaluates `call` (an expression yielding a MaybeHandle-like value that
// supports ToHandle). On success returns the dereferenced result object; on
// failure returns the exception sentinel. A pending exception must be set
// exactly on the failure path, which the DCHECKs assert in both directions.
#define RETURN_RESULT_OR_FAILURE(isolate, call)      \
  do {                                               \
    DirectHandle<Object> __result__;                 \
    Isolate* __isolate__ = (isolate);                \
    if (!(call).ToHandle(&__result__)) {             \
      DCHECK(__isolate__->has_exception());          \
      return ReadOnlyRoots(__isolate__).exception(); \
    }                                                \
    DCHECK(!__isolate__->has_exception());           \
    return *__result__;                              \
  } while (false)
275
// Assigns the result of `call` (via ToHandle) to `dst`; if the call failed,
// returns `value`. `dst` must be an lvalue handle, which is why it is used
// unparenthesized as the ToHandle out-argument.
#define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value) \
  do {                                                              \
    if (!(call).ToHandle(&dst)) {                                   \
      DCHECK((isolate)->has_exception());                           \
      return value;                                                 \
    }                                                               \
  } while (false)

// As above, but on failure returns the exception sentinel. `isolate` is
// cached so it is evaluated only once.
#define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call)      \
  do {                                                              \
    auto* __isolate__ = (isolate);                                  \
    ASSIGN_RETURN_ON_EXCEPTION_VALUE(__isolate__, dst, call,        \
                                     ReadOnlyRoots(__isolate__).exception()); \
  } while (false)

// As above, but on failure returns the empty MaybeHandle.
#define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call) \
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, kNullMaybeHandle)

// Throws the error built by the factory expression `call` (spelled without
// the `factory()->` prefix, e.g. NewTypeError(...)) and returns the result
// of Throw(), i.e. the exception sentinel.
#define THROW_NEW_ERROR_RETURN_FAILURE(isolate, call)         \
  do {                                                        \
    auto* __isolate__ = (isolate);                            \
    return __isolate__->Throw(*__isolate__->factory()->call); \
  } while (false)

// Throws the error built by the factory expression `call`, then returns
// `value` from the enclosing function.
#define THROW_NEW_ERROR_RETURN_VALUE(isolate, call, value) \
  do {                                                     \
    auto* __isolate__ = (isolate);                         \
    __isolate__->Throw(*__isolate__->factory()->call);     \
    return value;                                          \
  } while (false)

// Throws the error built by the factory expression `call` and returns the
// empty MaybeHandle.
#define THROW_NEW_ERROR(isolate, call) \
  THROW_NEW_ERROR_RETURN_VALUE(isolate, call, kNullMaybeHandle)
// Returns `value` if `call` (an expression yielding a handle-like value with
// is_null()) produced an empty result. A pending exception must already be
// set in that case.
#define RETURN_ON_EXCEPTION_VALUE(isolate, call, value) \
  do {                                                  \
    if ((call).is_null()) {                             \
      DCHECK((isolate)->has_exception());               \
      return value;                                     \
    }                                                   \
  } while (false)
347
// Returns the exception sentinel if `call` (an expression yielding a
// handle-like value with is_null()) produced an empty result. `isolate` is
// cached so it is evaluated exactly once.
//
// Fix: the macro previously ended with `} while (false);` — the stray
// semicolon defeats the do-while(0) statement-macro idiom: a use inside an
// unbraced if/else fails to compile, and ordinary uses expand to an extra
// empty statement. All sibling macros in this file omit the semicolon; this
// one now does too.
#define RETURN_FAILURE_ON_EXCEPTION(isolate, call)                     \
  do {                                                                 \
    Isolate* __isolate__ = (isolate);                                  \
    RETURN_ON_EXCEPTION_VALUE(__isolate__, call,                       \
                              ReadOnlyRoots(__isolate__).exception()); \
  } while (false)
374
// Returns the empty MaybeHandle if `call` produced an empty result.
#define RETURN_ON_EXCEPTION(isolate, call) \
  RETURN_ON_EXCEPTION_VALUE(isolate, call, kNullMaybeHandle)
397
// For functions returning Maybe<bool>: if `should_throw` is kDontThrow,
// quietly fails by returning Just(false); otherwise throws the error built
// by the factory expression `call` and returns Nothing<bool>().
//
// Fix: `isolate` was previously expanded unparenthesized and evaluated
// twice; it is now cached in a local, matching the hygiene of the sibling
// macros in this file (RETURN_FAILURE_IF_EXCEPTION etc.).
#define RETURN_FAILURE(isolate, should_throw, call)      \
  do {                                                   \
    if ((should_throw) == kDontThrow) {                  \
      return Just(false);                                \
    } else {                                             \
      Isolate* __isolate__ = (isolate);                  \
      __isolate__->Throw(*__isolate__->factory()->call); \
      return Nothing<bool>();                            \
    }                                                    \
  } while (false)
407
// Returns `value` if `call` (a Maybe-returning expression) produced nothing.
// Unlike MAYBE_RETURN_ON_EXCEPTION_VALUE, this does not assert that an
// exception is pending.
#define MAYBE_RETURN(call, value) \
  do {                            \
    if ((call).IsNothing()) return value; \
  } while (false)

// Returns the empty MaybeHandle if `call` produced nothing.
#define MAYBE_RETURN_NULL(call) MAYBE_RETURN(call, kNullMaybeHandle)
414
// Like ASSIGN_RETURN_ON_EXCEPTION_VALUE, but for v8 API MaybeLocal values:
// assigns through ToLocal instead of ToHandle. Returns `value` on failure,
// which requires a pending exception.
#define API_ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value) \
  do {                                                                  \
    if (!(call).ToLocal(&dst)) {                                        \
      DCHECK((isolate)->has_exception());                               \
      return value;                                                     \
    }                                                                   \
  } while (false)
422
// NOTE(review): this is a byte-identical redefinition of
// MAYBE_RETURN_ON_EXCEPTION_VALUE already defined earlier in this file.
// Identical macro redefinition is well-formed C++, so this is benign, but
// one of the two copies should be removed.
#define MAYBE_RETURN_ON_EXCEPTION_VALUE(isolate, call, value) \
  do {                                                        \
    if ((call).IsNothing()) {                                 \
      DCHECK((isolate)->has_exception());                     \
      return value;                                           \
    }                                                         \
  } while (false)
430
// Returns the exception sentinel if `call` (a Maybe-returning expression)
// produced nothing. A pending exception must already be set in that case.
// `isolate` is evaluated exactly once.
#define MAYBE_RETURN_FAILURE_ON_EXCEPTION(isolate, call) \
  do {                                                   \
    Isolate* __isolate__ = (isolate);                    \
    if ((call).IsNothing()) {                            \
      DCHECK((__isolate__)->has_exception());            \
      return ReadOnlyRoots(__isolate__).exception();     \
    }                                                    \
  } while (false)
439
// Extracts the result of `call` (a Maybe<T>) into `dst` via To(); if the
// Maybe is empty, returns `value`. A pending exception must be set on the
// failure path.
#define MAYBE_ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value) \
  do {                                                                    \
    if (!(call).To(&dst)) {                                               \
      DCHECK((isolate)->has_exception());                                 \
      return value;                                                       \
    }                                                                     \
  } while (false)

// As above, but on failure returns the exception sentinel. `isolate` is
// evaluated exactly once.
#define MAYBE_ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call) \
  do {                                                               \
    Isolate* __isolate__ = (isolate);                                \
    if (!(call).To(&dst)) {                                          \
      DCHECK(__isolate__->has_exception());                          \
      return ReadOnlyRoots(__isolate__).exception();                 \
    }                                                                \
  } while (false)
456
// A for-loop whose `body` allocates handles: opens a fresh HandleScope
// around every chunk of up to 1024 iterations so handles are released
// periodically instead of accumulating across the entire loop. Expands to an
// outer while that re-creates the scope and bumps the chunk limit, plus an
// inner for that runs `body` until the chunk limit or `limit_check` stops it.
// Usage mirrors a regular for statement, e.g.:
//   FOR_WITH_HANDLE_SCOPE(isolate, int, i = 0, i, i < n, i++, { ... });
#define FOR_WITH_HANDLE_SCOPE(isolate, loop_var_type, init, loop_var,      \
                              limit_check, increment, body)                \
  do {                                                                     \
    loop_var_type init;                                                    \
    loop_var_type for_with_handle_limit = loop_var;                        \
    Isolate* for_with_handle_isolate = isolate;                            \
    while (limit_check) {                                                  \
      for_with_handle_limit += 1024;                                       \
      HandleScope loop_scope(for_with_handle_isolate);                     \
      for (; limit_check && loop_var < for_with_handle_limit; increment) { \
        body                                                               \
      }                                                                    \
    }                                                                      \
  } while (false)
471
// While-loop counterpart of FOR_WITH_HANDLE_SCOPE: runs `body` while
// `limit_check` holds, re-opening a fresh HandleScope after every 1024
// iterations (counted by an internal index) so handles allocated in `body`
// do not accumulate for the whole loop.
#define WHILE_WITH_HANDLE_SCOPE(isolate, limit_check, body)                  \
  do {                                                                       \
    Isolate* for_with_handle_isolate = isolate;                              \
    while (limit_check) {                                                    \
      HandleScope loop_scope(for_with_handle_isolate);                       \
      for (int for_with_handle_it = 0;                                       \
           limit_check && for_with_handle_it < 1024; ++for_with_handle_it) { \
        body                                                                 \
      }                                                                      \
    }                                                                        \
  } while (false)
483
// Declares an inline setter/getter pair (set_name()/name()) forwarding to a
// member variable `name_` of the enclosing class.
#define FIELD_ACCESSOR(type, name)                 \
  inline void set_##name(type v) { name##_ = v; } \
  inline type name() const { return name##_; }
487
488// Controls for manual embedded blob lifecycle management, used by tests and
489// mksnapshot.
492
#ifdef DEBUG

// Debug-only per-isolate array fields; each entry is V(type, name, length).
#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V) \
  V(int, code_kind_statistics, kCodeKindCount)
#else

// In release builds the debug-only array list is empty.
#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

#endif
502
// X-macro list of per-isolate array fields; each entry is
// V(element_type, name, length). Includes the debug-only entries when
// compiled with DEBUG.
#define ISOLATE_INIT_ARRAY_LIST(V)                                            \
  /* SerializerDeserializer state. */                                         \
  V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
  V(int, bad_char_shift_table, kUC16AlphabetSize)                             \
  V(int, good_suffix_shift_table, (kBMMaxShift + 1))                          \
  V(int, suffix_table, (kBMMaxShift + 1))                                     \
  ISOLATE_INIT_DEBUG_ARRAY_LIST(V)
510
// Handles collected while producing string-stream debug output (referenced
// by the string_stream_debug_object_cache entry of ISOLATE_INIT_LIST below).
using DebugObjectCache = std::vector<Handle<HeapObject>>;
512
// X-macro list of scalar per-isolate fields; each entry is
// V(type, name, initial_value). NOTE(review): presumably expanded inside
// class Isolate to declare the fields and their accessors — the expansion
// site is outside this view; confirm before relying on that.
#define ISOLATE_INIT_LIST(V)                                                  \
  /* Assembler state. */                                                      \
  V(FatalErrorCallback, exception_behavior, nullptr)                          \
  V(OOMErrorCallback, oom_behavior, nullptr)                                  \
  V(LogEventCallback, event_logger, nullptr)                                  \
  V(ModifyCodeGenerationFromStringsCallback2, modify_code_gen_callback,       \
    nullptr)                                                                  \
  V(AllowWasmCodeGenerationCallback, allow_wasm_code_gen_callback, nullptr)   \
  V(ExtensionCallback, wasm_module_callback, &NoExtension)                    \
  V(ExtensionCallback, wasm_instance_callback, &NoExtension)                  \
  V(SharedArrayBufferConstructorEnabledCallback,                              \
    sharedarraybuffer_constructor_enabled_callback, nullptr)                  \
  V(WasmStreamingCallback, wasm_streaming_callback, nullptr)                  \
  V(WasmAsyncResolvePromiseCallback, wasm_async_resolve_promise_callback,     \
    DefaultWasmAsyncResolvePromiseCallback)                                   \
  V(WasmLoadSourceMapCallback, wasm_load_source_map_callback, nullptr)        \
  V(WasmImportedStringsEnabledCallback,                                       \
    wasm_imported_strings_enabled_callback, nullptr)                          \
  V(JavaScriptCompileHintsMagicEnabledCallback,                               \
    compile_hints_magic_enabled_callback, nullptr)                            \
  V(WasmJSPIEnabledCallback, wasm_jspi_enabled_callback, nullptr)             \
  V(IsJSApiWrapperNativeErrorCallback,                                        \
    is_js_api_wrapper_native_error_callback, nullptr)                         \
  /* State for Relocatable. */                                                \
  V(Relocatable*, relocatable_top, nullptr)                                   \
  V(DebugObjectCache*, string_stream_debug_object_cache, nullptr)             \
  V(Tagged<Object>, string_stream_current_security_token, Tagged<Object>())   \
  V(const intptr_t*, api_external_references, nullptr)                        \
  V(AddressToIndexHashMap*, external_reference_map, nullptr)                  \
  V(HeapObjectToIndexHashMap*, root_index_map, nullptr)                       \
  V(MicrotaskQueue*, default_microtask_queue, nullptr)                        \
  V(CodeTracer*, code_tracer, nullptr)                                        \
  V(PromiseRejectCallback, promise_reject_callback, nullptr)                  \
  V(ExceptionPropagationCallback, exception_propagation_callback, nullptr)    \
  V(const v8::StartupData*, snapshot_blob, nullptr)                           \
  V(int, code_and_metadata_size, 0)                                           \
  V(int, bytecode_and_metadata_size, 0)                                       \
  V(int, external_script_source_size, 0)                                      \
  /* Number of CPU profilers running on the isolate. */                       \
  V(size_t, num_cpu_profilers, 0)                                             \
  /* true if a trace is being formatted through Error.prepareStackTrace. */   \
  V(bool, formatting_stack_trace, false)                                      \
  V(bool, disable_bytecode_flushing, false)                                   \
  V(int, last_console_context_id, 0)                                          \
  V(v8_inspector::V8Inspector*, inspector, nullptr)                           \
  V(int, embedder_wrapper_type_index, -1)                                     \
  V(int, embedder_wrapper_object_index, -1)                                   \
  V(compiler::NodeObserver*, node_observer, nullptr)                          \
  V(bool, javascript_execution_assert, true)                                  \
  V(bool, javascript_execution_throws, true)                                  \
  V(bool, javascript_execution_dump, true)                                    \
  V(uint32_t, javascript_execution_counter, 0)                                \
  V(bool, deoptimization_assert, true)                                        \
  V(bool, compilation_assert, true)                                           \
  V(bool, no_exception_assert, true)                                          \
  V(uint32_t, wasm_switch_to_the_central_stack_counter, 0)
569
// Declares a setter/getter pair forwarding to the field `name_` stored on
// the isolate's ThreadLocalTop (via thread_local_top()).
#define THREAD_LOCAL_TOP_ACCESSOR(type, name)                         \
  inline void set_##name(type v) { thread_local_top()->name##_ = v; } \
  inline type name() const { return thread_local_top()->name##_; }

// Declares an accessor returning the address of the ThreadLocalTop field
// `name_` (used e.g. by generated code that writes the slot directly).
#define THREAD_LOCAL_TOP_ADDRESS(type, name) \
  inline type* name##_address() { return &thread_local_top()->name##_; }
576
577// Do not use this variable directly, use Isolate::Current() instead.
578// Defined outside of Isolate because Isolate uses V8_EXPORT_PRIVATE.
579__attribute__((tls_model(V8_TLS_MODEL))) extern thread_local Isolate*
580 g_current_isolate_ V8_CONSTINIT;
581
582// HiddenFactory exists so Isolate can privately inherit from it without making
583// Factory's members available to Isolate directly.
585
587 // These forward declarations are required to make the friend declarations in
588 // PerIsolateThreadData work on some older versions of gcc.
589 class ThreadDataTable;
590 class EntryStackItem;
591
592 public:
593 Isolate(const Isolate&) = delete;
594 Isolate& operator=(const Isolate&) = delete;
595
597 void* operator new(size_t) = delete;
598 void operator delete(void*) = delete;
599
600 // A thread has a PerIsolateThreadData instance for each isolate that it has
601 // entered. That instance is allocated when the isolate is initially entered
602 // and reused on subsequent entries.
604 public:
606 : isolate_(isolate),
607 thread_id_(thread_id),
608 stack_limit_(0),
609 thread_state_(nullptr)
610#if USE_SIMULATOR
611 ,
612 simulator_(nullptr)
613#endif
614 {
615 }
619 Isolate* isolate() const { return isolate_; }
620 ThreadId thread_id() const { return thread_id_; }
621
622 FIELD_ACCESSOR(uintptr_t, stack_limit)
623 FIELD_ACCESSOR(ThreadState*, thread_state)
624#if USE_SIMULATOR
625 FIELD_ACCESSOR(Simulator*, simulator)
626#endif
627
628 bool Matches(Isolate* isolate, ThreadId thread_id) const {
629 return isolate_ == isolate && thread_id_ == thread_id;
630 }
631
632 private:
635 uintptr_t stack_limit_;
637
638#if USE_SIMULATOR
639 Simulator* simulator_;
640#endif
641
642 friend class Isolate;
643 friend class ThreadDataTable;
644 friend class EntryStackItem;
645 };
646
647 // Used for walking the promise tree for catch prediction.
652
653 static void InitializeOncePerProcess();
654
655 // Creates Isolate object. Must be used instead of constructing Isolate with
656 // new operator.
657 static Isolate* New();
658 static Isolate* New(IsolateGroup* isolate_group);
659
660 // Deletes Isolate object. Must be used instead of delete operator.
661 // Destroys the non-default isolates.
662 // Sets default isolate into "has_been_disposed" state rather then destroying,
663 // for legacy API reasons.
664 static void Delete(Isolate* isolate);
665
666 void SetUpFromReadOnlyArtifacts(ReadOnlyArtifacts* artifacts);
667 void set_read_only_heap(ReadOnlyHeap* ro_heap) { read_only_heap_ = ro_heap; }
668
669 // Page allocator that must be used for allocating V8 heap pages.
670 v8::PageAllocator* page_allocator() const;
671
672 // Returns the PerIsolateThreadData for the current thread (or nullptr if one
673 // is not currently set).
674 V8_INLINE static PerIsolateThreadData* CurrentPerIsolateThreadData();
675
676 // Returns the isolate inside which the current thread is running or nullptr.
677 V8_TLS_DECLARE_GETTER(TryGetCurrent, Isolate*, g_current_isolate_)
678
679 // Returns the isolate inside which the current thread is running.
680 V8_INLINE static Isolate* Current();
681 static void SetCurrent(Isolate* isolate);
682
683 inline bool IsCurrent() const;
684
685 // Usually called by Init(), but can be called early e.g. to allow
686 // testing components that require logging but not the whole
687 // isolate.
688 //
689 // Safe to call more than once.
690 void InitializeLoggingAndCounters();
691 bool InitializeCounters(); // Returns false if already initialized.
692
693 bool InitWithoutSnapshot();
694 bool InitWithSnapshot(SnapshotData* startup_snapshot_data,
695 SnapshotData* read_only_snapshot_data,
696 SnapshotData* shared_heap_snapshot_data,
697 bool can_rehash);
698
699 // True if at least one thread Enter'ed this isolate.
700 bool IsInUse() { return entry_stack_ != nullptr; }
701
702 void ReleaseSharedPtrs();
703
704 void ClearSerializerData();
705
706 void UpdateLogObjectRelocation();
707
708 // Initializes the current thread to run this Isolate.
709 // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
710 // at the same time, this should be prevented using external locking.
711 void Enter();
712
713 // Exits the current thread. The previously entered Isolate is restored
714 // for the thread.
715 // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
716 // at the same time, this should be prevented using external locking.
717 void Exit();
718
719 // Find the PerThread for this particular (isolate, thread) combination.
720 // If one does not yet exist, allocate a new one.
721 PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();
722
723 // Find the PerThread for this particular (isolate, thread) combination
724 // If one does not yet exist, return null.
725 PerIsolateThreadData* FindPerThreadDataForThisThread();
726
727 // Find the PerThread for given (isolate, thread) combination
728 // If one does not yet exist, return null.
729 PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id);
730
731 // Discard the PerThread for this particular (isolate, thread) combination
732 // If one does not yet exist, no-op.
733 void DiscardPerThreadDataForThisThread();
734
735 // Mutex for serializing access to break control structures.
736 base::RecursiveMutex* break_access() { return &break_access_; }
737
738 // Shared mutex for allowing thread-safe concurrent reads of FeedbackVectors.
739 base::Mutex* feedback_vector_access() { return &feedback_vector_access_; }
740
741 // Shared mutex for allowing thread-safe concurrent reads of
742 // InternalizedStrings.
744 return &internalized_string_access_;
745 }
746
747 // Shared mutex for allowing thread-safe concurrent reads of TransitionArrays
748 // of kind kFullTransitionArray.
750 return &full_transition_array_access_;
751 }
752
753 // Shared mutex for allowing thread-safe concurrent reads of
754 // SharedFunctionInfos.
756 return &shared_function_info_access_;
757 }
758
759 // Protects (most) map update operations, see also MapUpdater.
760 base::Mutex* map_updater_access() { return &map_updater_access_; }
761
762 // Protects JSObject boilerplate migrations (i.e. calls to MigrateInstance on
763 // boilerplate objects; elements kind transitions are *not* protected).
764 // Note this lock interacts with `map_updater_access` as follows
765 //
766 // - boilerplate migrations may trigger map updates.
767 // - if so, `boilerplate_migration_access` is locked before
768 // `map_updater_access`.
769 // - backgrounds threads must use the same lock order to avoid deadlocks.
771 return &boilerplate_migration_access_;
772 }
773
775 ReadOnlyArtifacts* artifacts = isolate_group()->read_only_artifacts();
776 DCHECK_NOT_NULL(artifacts);
777 return artifacts;
778 }
779
780 // The isolate's string table.
782 return OwnsStringTables() ? string_table_.get()
783 : shared_space_isolate()->string_table_.get();
784 }
786 return OwnsStringTables()
787 ? string_forwarding_table_.get()
788 : shared_space_isolate()->string_forwarding_table_.get();
789 }
790
792 return is_shared_space_isolate()
793 ? shared_struct_type_registry_.get()
794 : shared_space_isolate()->shared_struct_type_registry_.get();
795 }
796
797 Address get_address_from_id(IsolateAddressId id);
798
799 // Access to top context (where the current function object was created).
800 Tagged<Context> context() const { return thread_local_top()->context_; }
801 inline void set_context(Tagged<Context> context);
802 Tagged<Context>* context_address() { return &thread_local_top()->context_; }
803
804 // The "topmost script-having execution context" from the Web IDL spec
805 // (i.e. the context of the topmost user JavaScript code, see
806 // https://html.spec.whatwg.org/multipage/webappapis.html#topmost-script-having-execution-context)
807 // if known or Context::kNoContext otherwise.
809 return thread_local_top()->topmost_script_having_context_;
810 }
811 inline void set_topmost_script_having_context(Tagged<Context> context);
812 inline void clear_topmost_script_having_context();
814 return &thread_local_top()->topmost_script_having_context_;
815 }
816
817 // Access to current thread id.
818 inline void set_thread_id(ThreadId id) {
819 thread_local_top()->thread_id_.store(id, std::memory_order_relaxed);
820 }
821 inline ThreadId thread_id() const {
822 return thread_local_top()->thread_id_.load(std::memory_order_relaxed);
823 }
824
825 void InstallConditionalFeatures(DirectHandle<NativeContext> context);
826
827#if V8_ENABLE_WEBASSEMBLY
828 void WasmInitJSPIFeature();
829#endif
830
831 bool IsSharedArrayBufferConstructorEnabled(
833
834 bool IsWasmStringRefEnabled(DirectHandle<NativeContext> context);
835 bool IsWasmImportedStringsEnabled(DirectHandle<NativeContext> context);
836 // Has the JSPI flag been requested?
837 // Used only during initialization of contexts.
838 bool IsWasmJSPIRequested(DirectHandle<NativeContext> context);
839 // Has JSPI been enabled successfully?
840 bool IsWasmJSPIEnabled(DirectHandle<NativeContext> context);
842
843 THREAD_LOCAL_TOP_ADDRESS(Tagged<Context>, pending_handler_context)
844 THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_entrypoint)
845 THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_constant_pool)
846 THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_fp)
847 THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_sp)
848 THREAD_LOCAL_TOP_ADDRESS(uintptr_t, num_frames_above_pending_handler)
849
850 v8::TryCatch* try_catch_handler() {
851 return thread_local_top()->try_catch_handler_;
852 }
853
854 // Interface to exception.
856 inline Tagged<Object> exception();
857 inline void set_exception(Tagged<Object> exception_obj);
858 // Clear thrown exception from V8 and a possible TryCatch.
859 inline void clear_exception();
860
861 // Clear the exception only from V8, not from a possible external try-catch.
862 inline void clear_internal_exception();
863 inline bool has_exception();
864
866 inline void clear_pending_message();
867 inline Tagged<Object> pending_message();
868 inline bool has_pending_message();
869 inline void set_pending_message(Tagged<Object> message_obj);
870
871#ifdef DEBUG
872 inline Tagged<Object> VerifyBuiltinsResult(Tagged<Object> result);
873 inline ObjectPair VerifyBuiltinsResult(ObjectPair pair);
874#endif
875
877 kJavaScriptHandler,
878 kExternalTryCatch,
879 kNone
880 };
881
882 ExceptionHandlerType TopExceptionHandlerType(Tagged<Object> exception);
883
884 inline bool is_catchable_by_javascript(Tagged<Object> exception);
885 inline bool is_catchable_by_wasm(Tagged<Object> exception);
886 inline bool is_execution_terminating();
887
888 // JS execution stack (see frames.h).
889 static Address c_entry_fp(ThreadLocalTop* thread) {
890 return thread->c_entry_fp_;
891 }
892 static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
893 Address c_function() { return thread_local_top()->c_function_; }
894
895 inline Address* c_entry_fp_address() {
896 return &thread_local_top()->c_entry_fp_;
897 }
898 static uint32_t c_entry_fp_offset() {
899 return static_cast<uint32_t>(OFFSET_OF(Isolate, isolate_data_) +
900 OFFSET_OF(IsolateData, thread_local_top_) +
901 OFFSET_OF(ThreadLocalTop, c_entry_fp_) -
902 isolate_root_bias());
903 }
904 inline Address* handler_address() { return &thread_local_top()->handler_; }
905 inline Address* c_function_address() {
906 return &thread_local_top()->c_function_;
907 }
908
909#if defined(DEBUG) || defined(VERIFY_HEAP)
910 // Count the number of active deserializers, so that the heap verifier knows
911 // whether there is currently an active deserialization happening.
912 //
913 // This is needed as the verifier currently doesn't support verifying objects
914 // which are partially deserialized.
915 //
916 // TODO(leszeks): Make the verifier a bit more deserialization compatible.
917 void RegisterDeserializerStarted() { ++num_active_deserializers_; }
918 void RegisterDeserializerFinished() {
919 CHECK_GE(--num_active_deserializers_, 0);
920 }
921 bool has_active_deserializer() const {
922 return num_active_deserializers_.load(std::memory_order_acquire) > 0;
923 }
924#else
928#endif
929
930 // Bottom JS entry.
931 Address js_entry_sp() { return thread_local_top()->js_entry_sp_; }
932 inline Address* js_entry_sp_address() {
933 return &thread_local_top()->js_entry_sp_;
934 }
935
936 std::vector<MemoryRange>* GetCodePages() const;
937
938 void SetCodePages(std::vector<MemoryRange>* new_code_pages);
939
940 // Returns the global object of the current context. It could be
941 // a builtin object, or a JS global object.
942 inline Handle<JSGlobalObject> global_object();
943
944 // Returns the global proxy object of the current context.
945 inline Handle<JSGlobalProxy> global_proxy();
946
947 static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
948 void FreeThreadResources() { thread_local_top()->Free(); }
949
950 // Walks the call stack and promise tree and calls a callback on every
951 // function an exception is likely to hit. Used in catch prediction.
952 // Returns true if the exception is expected to be caught.
953 bool WalkCallStackAndPromiseTree(
954 MaybeDirectHandle<JSPromise> rejected_promise,
955 const std::function<void(PromiseHandler)>& callback);
956
958 public:
959 // Scope currently can only be used for regular exceptions,
960 // not termination exception.
961 inline explicit ExceptionScope(Isolate* isolate);
962 inline ~ExceptionScope();
963
964 private:
967 };
968
969 void SetCaptureStackTraceForUncaughtExceptions(
970 bool capture, int frame_limit, StackTrace::StackTraceOptions options);
971 bool get_capture_stack_trace_for_uncaught_exceptions() const;
972
973 void SetAbortOnUncaughtExceptionCallback(
975
976 enum PrintStackMode { kPrintStackConcise, kPrintStackVerbose };
977 void PrintCurrentStackTrace(std::ostream& out,
979 should_include_frame_callback = nullptr);
980 void PrintStack(StringStream* accumulator,
981 PrintStackMode mode = kPrintStackVerbose);
982 void PrintStack(FILE* out, PrintStackMode mode = kPrintStackVerbose);
983 DirectHandle<String> StackTraceString();
984 // Stores a stack trace in a stack-allocated temporary buffer which will
985 // end up in the minidump for debugging purposes.
986 V8_NOINLINE void PushStackTraceAndDie(
987 void* ptr1 = nullptr, void* ptr2 = nullptr, void* ptr3 = nullptr,
988 void* ptr4 = nullptr, void* ptr5 = nullptr, void* ptr6 = nullptr);
989 // Similar to the above but without collecting the stack trace.
990 V8_NOINLINE void PushParamsAndDie(void* ptr1 = nullptr, void* ptr2 = nullptr,
991 void* ptr3 = nullptr, void* ptr4 = nullptr,
992 void* ptr5 = nullptr, void* ptr6 = nullptr);
993 // Like PushStackTraceAndDie but uses DumpWithoutCrashing to continue
994 // execution.
995 V8_NOINLINE void PushStackTraceAndContinue(
996 void* ptr1 = nullptr, void* ptr2 = nullptr, void* ptr3 = nullptr,
997 void* ptr4 = nullptr, void* ptr5 = nullptr, void* ptr6 = nullptr);
998 // Like PushParamsAndDie but uses DumpWithoutCrashing to continue
999 // execution.
1000 V8_NOINLINE void PushParamsAndContinue(
1001 void* ptr1 = nullptr, void* ptr2 = nullptr, void* ptr3 = nullptr,
1002 void* ptr4 = nullptr, void* ptr5 = nullptr, void* ptr6 = nullptr);
1003 DirectHandle<StackTraceInfo> CaptureDetailedStackTrace(
1004 int limit, StackTrace::StackTraceOptions options);
1005 MaybeDirectHandle<JSObject> CaptureAndSetErrorStack(
1006 DirectHandle<JSObject> error_object, FrameSkipMode mode,
1007 Handle<Object> caller);
1008 Handle<StackTraceInfo> GetDetailedStackTrace(
1009 DirectHandle<JSReceiver> error_object);
1010 Handle<FixedArray> GetSimpleStackTrace(DirectHandle<JSReceiver> error_object);
1011 // Walks the JS stack to find the first frame with a script name or
1012 // source URL. The inspected frames are the same as for the detailed stack
1013 // trace.
1014 DirectHandle<String> CurrentScriptNameOrSourceURL();
1015 MaybeDirectHandle<Script> CurrentReferrerScript();
1016 bool GetStackTraceLimit(Isolate* isolate, int* result);
1017
1018 Address GetAbstractPC(int* line, int* column);
1019
1020 // Returns if the given context may access the given global object. If
1021 // the result is false, the exception is guaranteed to be
1022 // set.
1023 bool MayAccess(DirectHandle<NativeContext> accessing_context,
1025
1026 void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
1027 V8_WARN_UNUSED_RESULT MaybeDirectHandle<Object> ReportFailedAccessCheck(
1029
1030 // Exception throwing support. The caller should use the result of Throw() as
1031 // its return value. Returns the Exception sentinel.
1033 MessageLocation* location = nullptr);
1034 Tagged<Object> ThrowAt(DirectHandle<JSObject> exception,
1035 MessageLocation* location);
1036 Tagged<Object> ThrowIllegalOperation();
1037
1038 void FatalProcessOutOfHeapMemory(const char* location) {
1039 heap()->FatalProcessOutOfMemory(location);
1040 }
1041
1043 console_delegate_ = delegate;
1044 }
1045 debug::ConsoleDelegate* console_delegate() { return console_delegate_; }
1046
1048 async_event_delegate_ = delegate;
1049 PromiseHookStateUpdated();
1050 }
1051
1052 // Async function and promise instrumentation support.
1053 void OnAsyncFunctionSuspended(DirectHandle<JSPromise> promise,
1055 void OnPromiseThen(DirectHandle<JSPromise> promise);
1056 void OnPromiseBefore(DirectHandle<JSPromise> promise);
1057 void OnPromiseAfter(DirectHandle<JSPromise> promise);
1058 void OnStackTraceCaptured(DirectHandle<StackTraceInfo> stack_trace);
1059 void OnTerminationDuringRunMicrotasks();
1060
1061 // Re-throw an exception. This involves no error reporting since error
1062 // reporting was handled when the exception was thrown originally.
1063 // The first overload doesn't set the corresponding pending message, which
1064 // has to be set separately or be guaranteed to not have changed.
1065 Tagged<Object> ReThrow(Tagged<Object> exception);
1066 Tagged<Object> ReThrow(Tagged<Object> exception, Tagged<Object> message);
1067
1068 // Find the correct handler for the current exception. This also
1069 // clears and returns the current exception.
1070 Tagged<Object> UnwindAndFindHandler();
1071
1072 // Tries to predict whether an exception will be caught. Note that this can
1073 // only produce an estimate, because it is undecidable whether a finally
1074 // clause will consume or re-throw an exception.
1082 CatchType PredictExceptionCatcher();
1083
1084 void ReportPendingMessages(bool report = true);
1085
1086 // Attempts to compute the current source location, storing the
1087 // result in the target out parameter. The source location is attached to a
1088 // Message object as the location which should be shown to the user. It's
1089 // typically the top-most meaningful location on the stack.
1090 bool ComputeLocation(MessageLocation* target);
1091 bool ComputeLocationFromException(MessageLocation* target,
1092 DirectHandle<Object> exception);
1093 bool ComputeLocationFromSimpleStackTrace(MessageLocation* target,
1094 DirectHandle<Object> exception);
1095 bool ComputeLocationFromDetailedStackTrace(MessageLocation* target,
1096 DirectHandle<Object> exception);
1097
1098 Handle<JSMessageObject> CreateMessage(DirectHandle<Object> exception,
1099 MessageLocation* location);
1100 DirectHandle<JSMessageObject> CreateMessageOrAbort(
1101 DirectHandle<Object> exception, MessageLocation* location);
1102 // Similar to Isolate::CreateMessage but DOESN'T inspect the JS stack and
1103 // only looks at the "detailed stack trace" as the "simple stack trace" might
1104 // have already been stringified.
1105 Handle<JSMessageObject> CreateMessageFromException(
1106 DirectHandle<Object> exception);
1107
1108 // Out of resource exception helpers.
1109 Tagged<Object> StackOverflow();
1110 Tagged<Object> TerminateExecution();
1111 void CancelTerminateExecution();
1112
1113 void RequestInterrupt(InterruptCallback callback, void* data);
1114 void InvokeApiInterruptCallbacks();
1115
1116 void RequestInvalidateNoProfilingProtector();
1117
1118 // Administration
1119 void Iterate(RootVisitor* v);
1120 void Iterate(RootVisitor* v, ThreadLocalTop* t);
1121 char* Iterate(RootVisitor* v, char* t);
1122 void IterateThread(ThreadVisitor* v, char* t);
1123
1124 // Returns the current native context.
1126 inline Tagged<NativeContext> raw_native_context();
1127
1128 inline DirectHandle<NativeContext> GetIncumbentContext();
1129 DirectHandle<NativeContext> GetIncumbentContextSlow();
1130
1131 void RegisterTryCatchHandler(v8::TryCatch* that);
1132 void UnregisterTryCatchHandler(v8::TryCatch* that);
1133
1134 char* ArchiveThread(char* to);
1135 char* RestoreThread(char* from);
1136
1137 static const int kUC16AlphabetSize = 256; // See StringSearchBase.
1138 static const int kBMMaxShift = 250; // See StringSearchBase.
1139
1140 // Accessors.
1141#define GLOBAL_ACCESSOR(type, name, initialvalue) \
1142 inline type name() const { \
1143 DCHECK_EQ(OFFSET_OF(Isolate, name##_), name##_debug_offset_); \
1144 return name##_; \
1145 } \
1146 inline void set_##name(type value) { \
1147 DCHECK_EQ(OFFSET_OF(Isolate, name##_), name##_debug_offset_); \
1148 name##_ = value; \
1149 }
1151#undef GLOBAL_ACCESSOR
1152
1154 if (value) {
1155 CollectSourcePositionsForAllBytecodeArrays();
1156 }
1157 detailed_source_positions_for_profiling_ = value;
1158 }
1159
1161 return detailed_source_positions_for_profiling_;
1162 }
1163
1164#define GLOBAL_ARRAY_ACCESSOR(type, name, length) \
1165 inline type* name() { \
1166 DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_); \
1167 return &(name##_)[0]; \
1168 }
1170#undef GLOBAL_ARRAY_ACCESSOR
1171
1172#define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name) \
1173 inline Handle<UNPAREN(type)> name(); \
1174 inline bool is_##name(Tagged<UNPAREN(type)> value);
1176#undef NATIVE_CONTEXT_FIELD_ACCESSOR
1177
1178 Bootstrapper* bootstrapper() { return bootstrapper_; }
1179 // Use for updating counters on a foreground thread.
1180 Counters* counters() { return async_counters().get(); }
1181 // Use for updating counters on a background thread.
1182 const std::shared_ptr<Counters>& async_counters() {
1183 // Make sure InitializeCounters() has been called.
1185 return async_counters_;
1186 }
1187 const std::shared_ptr<metrics::Recorder>& metrics_recorder() {
1188 return metrics_recorder_;
1189 }
1190 TieringManager* tiering_manager() { return tiering_manager_; }
1191 CompilationCache* compilation_cache() { return compilation_cache_; }
1193 // Call InitializeLoggingAndCounters() if logging is needed before
1194 // the isolate is fully initialized.
1195 DCHECK_NOT_NULL(v8_file_logger_);
1196 return v8_file_logger_;
1197 }
1198 StackGuard* stack_guard() { return isolate_data()->stack_guard(); }
1199 Heap* heap() { return &heap_; }
1200 const Heap* heap() const { return &heap_; }
1201 ReadOnlyHeap* read_only_heap() const { return read_only_heap_; }
1202 static Isolate* FromHeap(const Heap* heap) {
1203 return reinterpret_cast<Isolate*>(reinterpret_cast<Address>(heap) -
1205 }
1206
1207 const IsolateData* isolate_data() const { return &isolate_data_; }
1208 IsolateData* isolate_data() { return &isolate_data_; }
1209
1210 // When pointer compression is on, this is the base address of the pointer
1211 // compression cage, and the kPtrComprCageBaseRegister is set to this
1212 // value. When pointer compression is off, this is always kNullAddress.
1213 Address cage_base() const {
1215 isolate_data()->cage_base() == kNullAddress);
1216 return isolate_data()->cage_base();
1217 }
1218
1219 // When pointer compression and external code space are on, this is the base
1220 // address of the cage where the code space is allocated. Otherwise, it
1221 // defaults to cage_base().
1222 Address code_cage_base() const {
1223#ifdef V8_EXTERNAL_CODE_SPACE
1224 return code_cage_base_;
1225#else
1226 return cage_base();
1227#endif // V8_EXTERNAL_CODE_SPACE
1228 }
1229
1230 IsolateGroup* isolate_group() const { return isolate_group_; }
1231
1232#ifdef V8_COMPRESS_POINTERS
1233 VirtualMemoryCage* GetPtrComprCage() const {
1234 return isolate_group()->GetPtrComprCage();
1235 }
1236 VirtualMemoryCage* GetPtrComprCodeCageForTesting();
1237#endif
1238
1239 // Generated code can embed this address to get access to the isolate-specific
1240 // data (for example, roots, external references, builtins, etc.).
1241 // The kRootRegister is set to this value.
1242 Address isolate_root() const { return isolate_data()->isolate_root(); }
1243 constexpr static size_t isolate_root_bias() {
1244 return OFFSET_OF(Isolate, isolate_data_) + IsolateData::kIsolateRootBias;
1245 }
1246 static Isolate* FromRootAddress(Address isolate_root) {
1247 return reinterpret_cast<Isolate*>(isolate_root - isolate_root_bias());
1248 }
1249
1250 RootsTable& roots_table() { return isolate_data()->roots(); }
1251 const RootsTable& roots_table() const { return isolate_data()->roots(); }
1252
1253 // A sub-region of the Isolate object that has "predictable" layout which
1254 // depends only on the pointer size and therefore it's guaranteed that there
1255 // will be no compatibility issues because of different compilers used for
1256 // snapshot generator and actual V8 code.
1257 // Thus, kRootRegister may be used to address any location that falls into
1258 // this region.
1259 // See IsolateData::AssertPredictableLayout() for details.
1261 return base::AddressRegion(reinterpret_cast<Address>(&isolate_data_),
1262 sizeof(IsolateData));
1263 }
1264
1266 return Tagged<Object>(roots_table()[index]);
1267 }
1268
1270 return Handle<Object>(&roots_table()[index]);
1271 }
1272
1274 DCHECK(isolate_data()->external_reference_table()->is_initialized());
1275 return isolate_data()->external_reference_table();
1276 }
1277
1279 // The table may only be partially initialized at this point.
1280 return isolate_data()->external_reference_table();
1281 }
1282
1283 Address* builtin_entry_table() { return isolate_data_.builtin_entry_table(); }
1284
1285#ifdef V8_ENABLE_LEAPTIERING
1287 builtin_dispatch_handle(JSBuiltinDispatchHandleRoot::Idx idx) {
1288#if V8_STATIC_DISPATCH_HANDLES_BOOL
1289 return JSDispatchTable::GetStaticHandleForReadOnlySegmentEntry(idx);
1290#else
1291 return isolate_data_.builtin_dispatch_table()[idx];
1292#endif
1293 }
1294 V8_INLINE JSDispatchHandle builtin_dispatch_handle(Builtin builtin) {
1295 return builtin_dispatch_handle(
1296 JSBuiltinDispatchHandleRoot::to_idx(builtin));
1297 }
1298
1299 JSDispatchTable::Space* GetJSDispatchTableSpaceFor(Address owning_slot) {
1300 DCHECK(!ReadOnlyHeap::Contains(owning_slot));
1301 return heap()->js_dispatch_table_space();
1302 }
1303
1304#endif
1305 V8_INLINE Address* builtin_table() { return isolate_data_.builtin_table(); }
1307 return isolate_data_.builtin_tier0_table();
1308 }
1309
1310 bool IsBuiltinTableHandleLocation(Address* handle_location);
1311
1312 StubCache* load_stub_cache() const { return load_stub_cache_; }
1313 StubCache* store_stub_cache() const { return store_stub_cache_; }
1314 StubCache* define_own_stub_cache() const { return define_own_stub_cache_; }
1316 Deoptimizer* result = current_deoptimizer_;
1318 current_deoptimizer_ = nullptr;
1319 return result;
1320 }
1322 DCHECK_NULL(current_deoptimizer_);
1323 DCHECK_NOT_NULL(deoptimizer);
1324 current_deoptimizer_ = deoptimizer;
1325 }
1326 bool deoptimizer_lazy_throw() const { return deoptimizer_lazy_throw_; }
1328 deoptimizer_lazy_throw_ = value;
1329 }
1330 void InitializeThreadLocal();
1332 return &isolate_data_.thread_local_top_;
1333 }
1335 return &isolate_data_.thread_local_top_;
1336 }
1337
1338 static constexpr uint32_t thread_in_wasm_flag_address_offset() {
1339 // For WebAssembly trap handlers there is a flag in thread-local storage
1340 // which indicates that the executing thread executes WebAssembly code. To
1341 // access this flag directly from generated code, we store a pointer to the
1342 // flag in ThreadLocalTop in thread_in_wasm_flag_address_. This function
1343 // here returns the offset of that member from {isolate_root()}.
1344 return static_cast<uint32_t>(
1345 OFFSET_OF(Isolate, isolate_data_) +
1346 OFFSET_OF(IsolateData, thread_local_top_) +
1348 isolate_root_bias());
1349 }
1350
1351 constexpr static uint32_t context_offset() {
1352 return static_cast<uint32_t>(
1353 OFFSET_OF(Isolate, isolate_data_) +
1354 OFFSET_OF(IsolateData, thread_local_top_) +
1356 isolate_root_bias());
1357 }
1358
1359 constexpr static uint32_t central_stack_sp_offset() {
1360 return static_cast<uintptr_t>(OFFSET_OF(Isolate, isolate_data_) +
1361 OFFSET_OF(IsolateData, thread_local_top_) +
1362 OFFSET_OF(ThreadLocalTop, central_stack_sp_) -
1363 isolate_root_bias());
1364 }
1365
1366 constexpr static uint32_t central_stack_limit_offset() {
1367 return static_cast<uintptr_t>(
1368 OFFSET_OF(Isolate, isolate_data_) +
1369 OFFSET_OF(IsolateData, thread_local_top_) +
1370 OFFSET_OF(ThreadLocalTop, central_stack_limit_) - isolate_root_bias());
1371 }
1372
1373 static uint32_t error_message_param_offset() {
1374 return static_cast<uint32_t>(OFFSET_OF(Isolate, isolate_data_) +
1375 OFFSET_OF(IsolateData, error_message_param_) -
1376 isolate_root_bias());
1377 }
1378
1379 uint8_t error_message_param() { return isolate_data_.error_message_param_; }
1380
1381 THREAD_LOCAL_TOP_ADDRESS(Address, thread_in_wasm_flag_address)
1382
1383 THREAD_LOCAL_TOP_ADDRESS(uint8_t, is_on_central_stack_flag)
1384
1385 MaterializedObjectStore* materialized_object_store() const {
1386 return materialized_object_store_;
1387 }
1388
1390 return descriptor_lookup_cache_;
1391 }
1392
1394 return &isolate_data_.handle_scope_data_;
1395 }
1396
1398 DCHECK(handle_scope_implementer_);
1399 return handle_scope_implementer_;
1400 }
1401
1402 UnicodeCache* unicode_cache() const { return unicode_cache_; }
1403
1405 return inner_pointer_to_code_cache_;
1406 }
1407
1408#if V8_ENABLE_WEBASSEMBLY
1409 wasm::WasmCodeLookupCache* wasm_code_look_up_cache() {
1410 return wasm_code_look_up_cache_;
1411 }
1412 wasm::WasmOrphanedGlobalHandle* NewWasmOrphanedGlobalHandle();
1413 wasm::StackPool& stack_pool() { return stack_pool_; }
1414#endif // V8_ENABLE_WEBASSEMBLY
1415
1416 GlobalHandles* global_handles() const { return global_handles_; }
1417
1419
1420 EternalHandles* eternal_handles() const { return eternal_handles_; }
1421
1422 ThreadManager* thread_manager() const { return thread_manager_; }
1423
1424 bigint::Processor* bigint_processor() { return bigint_processor_; }
1425
1426#ifndef V8_INTL_SUPPORT
1428 return &jsregexp_uncanonicalize_;
1429 }
1430
1432 return &jsregexp_canonrange_;
1433 }
1434
1437 return &regexp_macro_assembler_canonicalize_;
1438 }
1439#endif // !V8_INTL_SUPPORT
1440
1441 RuntimeState* runtime_state() { return &runtime_state_; }
1442
1443 Builtins* builtins() { return &builtins_; }
1444
1445 RegExpStack* regexp_stack() const { return regexp_stack_; }
1446
1447 // Either points to jsregexp_static_offsets_vector, or nullptr if the static
1448 // vector is in use.
1450 return isolate_data()->regexp_static_result_offsets_vector();
1451 }
1453 DCHECK_EQ(value == nullptr,
1454 regexp_static_result_offsets_vector() != nullptr);
1455 isolate_data()->set_regexp_static_result_offsets_vector(value);
1456 }
1458 return isolate_data()->regexp_static_result_offsets_vector_address();
1459 }
1460
1461 std::unordered_set<int32_t*>& active_dynamic_regexp_result_vectors() {
1462 return active_dynamic_regexp_result_vectors_;
1463 }
1464
1465 // This data structure is only used for an optimization in StringSplit.
1466 // TODO(jgruber): Consider removing it.
1467 std::vector<int>* regexp_indices() { return &regexp_indices_; }
1468
1470 return total_regexp_code_generated_;
1471 }
1472 void IncreaseTotalRegexpCodeGenerated(DirectHandle<HeapObject> code);
1473
1474 Debug* debug() const { return debug_; }
1475
1476 bool is_profiling() const {
1477 return isolate_data_.execution_mode_ &
1478 IsolateExecutionModeFlag::kIsProfiling;
1479 }
1480
1481 void SetIsProfiling(bool enabled) {
1482 if (enabled) {
1483 CollectSourcePositionsForAllBytecodeArrays();
1484 RequestInvalidateNoProfilingProtector();
1485 }
1486 isolate_data_.execution_mode_.set(IsolateExecutionModeFlag::kIsProfiling,
1487 enabled);
1488 UpdateLogObjectRelocation();
1489 }
1490
1491 // Perform side effect checks on function calls and API callbacks.
1492 // See Debug::StartSideEffectCheckMode().
1494 return isolate_data_.execution_mode_ &
1495 IsolateExecutionModeFlag::kCheckSideEffects;
1496 }
1497
1499 return should_check_side_effects() ? DebugInfo::kSideEffects
1500 : DebugInfo::kBreakpoints;
1501 }
1503 bool check_side_effects = debug_execution_mode == DebugInfo::kSideEffects;
1504 isolate_data_.execution_mode_.set(
1505 IsolateExecutionModeFlag::kCheckSideEffects, check_side_effects);
1506 }
1507
1508 Logger* logger() const { return logger_; }
1509
1510#ifdef DEBUG
1511 static size_t non_disposed_isolates() { return non_disposed_isolates_; }
1512
1513 // Turbofan's string builder optimization can introduce SlicedString that are
1514 // less than SlicedString::kMinLength characters. Their live range and scope
1515 // are pretty limited, but they can be visible to the GC, which shouldn't
1516 // treat them as invalid. When such short SlicedString are introduced,
1517 // Turbofan will set has_turbofan_string_builders_ to true, which
1518 // SlicedString::SlicedStringVerify will check when verifying SlicedString to
1519 // decide if a too-short SlicedString is an issue or not.
1520 // See the compiler's StringBuilderOptimizer class for more details.
1521 bool has_turbofan_string_builders() { return has_turbofan_string_builders_; }
1522 void set_has_turbofan_string_builders() {
1523 has_turbofan_string_builders_ = true;
1524 }
1525#endif
1526
1528 // Upcast to the privately inherited base-class using c-style casts to avoid
1529 // undefined behavior (as static_cast cannot cast across private bases).
1530 return (v8::internal::Factory*)this;
1531 }
1532
1533 static const int kJSRegexpStaticOffsetsVectorSize = 128;
1534
1535 THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)
1536
1537 THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)
1538 THREAD_LOCAL_TOP_ACCESSOR(EmbedderState*, current_embedder_state)
1539
1540 void SetData(uint32_t slot, void* data) {
1541 DCHECK_LT(slot, Internals::kNumIsolateDataSlots);
1542 isolate_data_.embedder_data_[slot] = data;
1543 }
1544 void* GetData(uint32_t slot) const {
1545 DCHECK_LT(slot, Internals::kNumIsolateDataSlots);
1546 return isolate_data_.embedder_data_[slot];
1547 }
1548
1549 bool serializer_enabled() const { return serializer_enabled_; }
1550
1551 void enable_serializer() { serializer_enabled_ = true; }
1552
1553 bool snapshot_available() const {
1554 return snapshot_blob_ != nullptr && snapshot_blob_->raw_size != 0;
1555 }
1556
1557 bool IsDead() const { return has_fatal_error_; }
1558 void SignalFatalError() { has_fatal_error_ = true; }
1559
1560 bool use_optimizer();
1561
1562 bool initialized_from_snapshot() { return initialized_from_snapshot_; }
1563
1564 bool NeedsSourcePositions() const;
1565
1566 bool IsLoggingCodeCreation() const;
1567
1568 inline bool InFastCCall() const;
1569
1570 bool AllowsCodeCompaction() const;
1571
1572 bool NeedsDetailedOptimizedCodeLineInfo() const;
1573
1575 return code_coverage_mode() == debug::CoverageMode::kBestEffort;
1576 }
1577
1579 return code_coverage_mode() == debug::CoverageMode::kPreciseCount;
1580 }
1581
1583 return code_coverage_mode() == debug::CoverageMode::kPreciseBinary;
1584 }
1585
1587 return code_coverage_mode() == debug::CoverageMode::kBlockCount;
1588 }
1589
1591 return code_coverage_mode() == debug::CoverageMode::kBlockBinary;
1592 }
1593
1595 return is_block_count_code_coverage() || is_block_binary_code_coverage();
1596 }
1597
1599 return is_precise_binary_code_coverage() || is_block_binary_code_coverage();
1600 }
1601
1603 return is_precise_count_code_coverage() || is_block_count_code_coverage();
1604 }
1605
1606 // Collect feedback vectors with data for code coverage or type profile.
1607 // Reset the list, when both code coverage and type profile are not
1608 // needed anymore. This keeps many feedback vectors alive, but code
1609 // coverage or type profile are used for debugging only and increase in
1610 // memory usage is expected.
1611 void SetFeedbackVectorsForProfilingTools(Tagged<Object> value);
1612
1613 void MaybeInitializeVectorListFromHeap();
1614
1615 double time_millis_since_init() const {
1616 return heap_.MonotonicallyIncreasingTimeInMs() - time_millis_at_init_;
1617 }
1618
1619 DateCache* date_cache() const { return date_cache_; }
1620
1621 void set_date_cache(DateCache* date_cache);
1622
1623#ifdef V8_INTL_SUPPORT
1624
1625 const std::string& DefaultLocale();
1626
1627 void ResetDefaultLocale();
1628
1629 void set_default_locale(const std::string& locale) {
1630 DCHECK_EQ(default_locale_.length(), 0);
1631 default_locale_ = locale;
1632 }
1633
// Keys identifying the per-isolate cache of lazily created default ICU
// objects (see get_cached_icu_object / set_icu_object_in_cache).
enum class ICUObjectCacheType {
  kDefaultCollator,
  kDefaultNumberFormat,
  kDefaultSimpleDateFormat,
  kDefaultSimpleDateFormatForTime,
  kDefaultSimpleDateFormatForDate
};
// Number of enumerators in ICUObjectCacheType.
static constexpr int kICUObjectCacheTypeCount = 5;
1638
1639 icu::UMemory* get_cached_icu_object(ICUObjectCacheType cache_type,
1640 DirectHandle<Object> locales);
1641 void set_icu_object_in_cache(ICUObjectCacheType cache_type,
1642 DirectHandle<Object> locales,
1643 std::shared_ptr<icu::UMemory> obj);
1644 void clear_cached_icu_object(ICUObjectCacheType cache_type);
1645 void clear_cached_icu_objects();
1646
1647#endif // V8_INTL_SUPPORT
1648
1649 enum class KnownPrototype { kNone, kObject, kArray, kString };
1650
1651 KnownPrototype IsArrayOrObjectOrStringPrototype(Tagged<JSObject> object);
1652
1653 // On intent to set an element in object, make sure that appropriate
1654 // notifications occur if the set is on the elements of the array or
1655 // object prototype. Also ensure that changes to prototype chain between
1656 // Array and Object fire notifications.
1657 void UpdateNoElementsProtectorOnSetElement(DirectHandle<JSObject> object);
1659 UpdateNoElementsProtectorOnSetElement(object);
1660 }
1661
1662 void UpdateProtectorsOnSetPrototype(DirectHandle<JSObject> object,
1663 DirectHandle<Object> new_prototype);
1664
1666 UpdateNoElementsProtectorOnSetElement(object);
1667 }
1668 void UpdateTypedArrayLengthLookupChainProtectorOnSetPrototype(
1669 DirectHandle<JSObject> object);
1670 void UpdateTypedArraySpeciesLookupChainProtectorOnSetPrototype(
1671 DirectHandle<JSObject> object);
1672 void UpdateNumberStringNotRegexpLikeProtectorOnSetPrototype(
1673 DirectHandle<JSObject> object);
1675 DirectHandle<JSObject> object) {
1676 UpdateNoElementsProtectorOnSetElement(object);
1677 }
1678 void UpdateStringWrapperToPrimitiveProtectorOnSetPrototype(
1679 DirectHandle<JSObject> object, DirectHandle<Object> new_prototype);
1680
1681 // Returns true if array is the initial array prototype of its own creation
1682 // context.
1683 inline bool IsInitialArrayPrototype(Tagged<JSArray> array);
1684
1685 std::unique_ptr<PersistentHandles> NewPersistentHandles();
1686
1688 return persistent_handles_list_.get();
1689 }
1690
1691#ifdef V8_ENABLE_SPARKPLUG
1692 baseline::BaselineBatchCompiler* baseline_batch_compiler() const {
1693 DCHECK_NOT_NULL(baseline_batch_compiler_);
1694 return baseline_batch_compiler_;
1695 }
1696#endif // V8_ENABLE_SPARKPLUG
1697
1698#ifdef V8_ENABLE_MAGLEV
1699 maglev::MaglevConcurrentDispatcher* maglev_concurrent_dispatcher() {
1700 DCHECK_NOT_NULL(maglev_concurrent_dispatcher_);
1701 return maglev_concurrent_dispatcher_;
1702 }
1703#endif // V8_ENABLE_MAGLEV
1704
1706 // Thread is only available with flag enabled.
1707 DCHECK(optimizing_compile_dispatcher_ == nullptr ||
1708 v8_flags.concurrent_recompilation);
1709 return optimizing_compile_dispatcher_ != nullptr;
1710 }
1711
1712 void IncreaseConcurrentOptimizationPriority(
1714
1716 DCHECK_NOT_NULL(optimizing_compile_dispatcher_);
1717 return optimizing_compile_dispatcher_;
1718 }
1719
1720 OptimizingCompileDispatcher* SetOptimizingCompileDispatcherForTesting(
1721 OptimizingCompileDispatcher* dispatcher);
1722
1723 // Flushes all pending concurrent optimization jobs from the optimizing
1724 // compile dispatcher's queue.
1725 void AbortConcurrentOptimization(BlockingBehavior blocking_behavior);
1726
1727 int id() const { return id_; }
1728
1730 return was_locker_ever_used_.load(std::memory_order_relaxed);
1731 }
1733 was_locker_ever_used_.store(true, std::memory_order_relaxed);
1734 }
1735
1736 std::shared_ptr<CompilationStatistics> GetTurboStatistics();
1737#ifdef V8_ENABLE_MAGLEV
1738 std::shared_ptr<CompilationStatistics> GetMaglevStatistics();
1739#endif
1740 CodeTracer* GetCodeTracer();
1741
1742 void DumpAndResetStats();
1743 void DumpAndResetBuiltinsProfileData();
1744
1745 uint64_t* stress_deopt_count_address() { return &stress_deopt_count_; }
1746
1747 void set_force_slow_path(bool v) { force_slow_path_ = v; }
1748 bool force_slow_path() const { return force_slow_path_; }
1749 bool* force_slow_path_address() { return &force_slow_path_; }
1750
1751 bool jitless() const { return jitless_; }
1752
1753 void set_stack_size(size_t v) { stack_size_ = v; }
1754 size_t stack_size() { return stack_size_; }
1755
1756 base::RandomNumberGenerator* random_number_generator();
1757
1758 base::RandomNumberGenerator* fuzzer_rng();
1759
1760 // Generates a random number that is non-zero when masked
1761 // with the provided mask.
1762 int GenerateIdentityHash(uint32_t mask);
1763
1765 int id = next_optimization_id_.load();
1766 while (true) {
1767 int next_id = id + 1;
1768 if (!Smi::IsValid(next_id)) next_id = 0;
1769 if (next_optimization_id_.compare_exchange_strong(id, next_id)) {
1770 return id;
1771 }
1772 }
1773 }
1774
1775 // ES#sec-async-module-execution-fulfilled step 10
1776 //
1777 // According to the spec, modules that depend on async modules (i.e. modules
1778 // with top-level await) must be evaluated in order in which their
1779 // [[AsyncEvaluation]] flags were set to true. V8 tracks this global total
1780 // order with next_module_async_evaluation_ordinal_. Each module that sets its
1781 // [[AsyncEvaluation]] to true grabs the next ordinal.
1783 // For simplicity, V8 allows this ordinal to overflow. Overflow will result
1784 // in incorrect module loading behavior for module graphs with top-level
1785 // await.
1786 return next_module_async_evaluation_ordinal_++;
1787 }
1788
1789 void AddCallCompletedCallback(CallCompletedCallback callback);
1790 void RemoveCallCompletedCallback(CallCompletedCallback callback);
1792 if (!thread_local_top()->CallDepthIsZero()) return;
1793 FireCallCompletedCallbackInternal(microtask_queue);
1794 }
1795
1796 void AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback);
1797 void RemoveBeforeCallEnteredCallback(BeforeCallEnteredCallback callback);
1798 inline void FireBeforeCallEnteredCallback();
1799
1800 void SetPromiseRejectCallback(PromiseRejectCallback callback);
1801 void ReportPromiseReject(DirectHandle<JSPromise> promise,
1804
1805 void SetTerminationOnExternalTryCatch();
1806
1807 DirectHandle<Symbol> SymbolFor(RootIndex dictionary_index,
1808 Handle<String> name, bool private_symbol);
1809
1810 void SetUseCounterCallback(v8::Isolate::UseCounterCallback callback);
1811 void CountUsage(v8::Isolate::UseCounterFeature feature);
1812 // Count multiple usages at once; cheaper than calling the {CountUsage}
1813 // separately for each feature.
1815
1816 static std::string GetTurboCfgFileName(Isolate* isolate);
1817
1818 int GetNextScriptId();
1819
1820 uint32_t next_unique_sfi_id() const {
1821 return next_unique_sfi_id_.load(std::memory_order_relaxed);
1822 }
1824 return next_unique_sfi_id_.fetch_add(1, std::memory_order_relaxed);
1825 }
1826
1827#ifdef V8_ENABLE_JAVASCRIPT_PROMISE_HOOKS
1828 void SetHasContextPromiseHooks(bool context_promise_hook) {
1829 promise_hook_flags_ = PromiseHookFields::HasContextPromiseHook::update(
1830 promise_hook_flags_, context_promise_hook);
1831 PromiseHookStateUpdated();
1832 }
1833#endif // V8_ENABLE_JAVASCRIPT_PROMISE_HOOKS
1834
1836 return PromiseHookFields::HasContextPromiseHook::decode(
1837 promise_hook_flags_);
1838 }
1839
1841 return reinterpret_cast<Address>(&promise_hook_flags_);
1842 }
1843
1845 return reinterpret_cast<Address>(&promise_hook_);
1846 }
1847
1849 return reinterpret_cast<Address>(&async_event_delegate_);
1850 }
1851
1853 return reinterpret_cast<Address>(&javascript_execution_assert_);
1854 }
1855
1857 javascript_execution_counter_++;
1858 }
1859
1861 return reinterpret_cast<Address>(&handle_scope_implementer_);
1862 }
1863
1864 void SetReleaseCppHeapCallback(v8::Isolate::ReleaseCppHeapCallback callback);
1865
1866 void RunReleaseCppHeapCallback(std::unique_ptr<v8::CppHeap> cpp_heap);
1867
1868 void SetPromiseHook(PromiseHook hook);
1869 void RunPromiseHook(PromiseHookType type, DirectHandle<JSPromise> promise,
1870 DirectHandle<Object> parent);
1871 void RunAllPromiseHooks(PromiseHookType type, DirectHandle<JSPromise> promise,
1872 DirectHandle<Object> parent);
1873 void UpdatePromiseHookProtector();
1874 void PromiseHookStateUpdated();
1875
1876 void AddDetachedContext(DirectHandle<Context> context);
1877 void CheckDetachedContextsAfterGC();
1878
1879 // Detach the environment from its outer global object.
1880 void DetachGlobal(DirectHandle<Context> env);
1881
1882 std::vector<Tagged<Object>>* startup_object_cache() {
1883 return &startup_object_cache_;
1884 }
1885
1886 // With a shared heap, this cache is shared among all isolates. Otherwise this
1887 // object cache is per-Isolate like the startup object cache. TODO(372493838):
1888 // This cache can only contain strings. Update name to reflect this.
1889 std::vector<Tagged<Object>>* shared_heap_object_cache() {
1890 if (OwnsStringTables()) {
1891 return &shared_heap_object_cache_;
1892 } else {
1893 return &shared_space_isolate()->shared_heap_object_cache_;
1894 }
1895 }
1896
1898 return builtins_constants_table_builder() != nullptr;
1899 }
1900
1902 return builtins_constants_table_builder_;
1903 }
1904
1905 // Hashes bits of the Isolate that are relevant for embedded builtins. In
1906 // particular, the embedded blob requires builtin InstructionStream object
1907 // layout and the builtins constants table to remain unchanged from
1908 // build-time.
1909 size_t HashIsolateForEmbeddedBlob();
1910
1911 static const uint8_t* CurrentEmbeddedBlobCode();
1912 static uint32_t CurrentEmbeddedBlobCodeSize();
1913 static const uint8_t* CurrentEmbeddedBlobData();
1914 static uint32_t CurrentEmbeddedBlobDataSize();
1915 static bool CurrentEmbeddedBlobIsBinaryEmbedded();
1916
1917 // These always return the same result as static methods above, but don't
1918 // access the global atomic variable (and thus *might be* slightly faster).
1919 const uint8_t* embedded_blob_code() const;
1920 uint32_t embedded_blob_code_size() const;
1921 const uint8_t* embedded_blob_data() const;
1922 uint32_t embedded_blob_data_size() const;
1923
1924 // Returns true if short builtin calls optimization is enabled for the
1925 // Isolate.
1927 return V8_SHORT_BUILTIN_CALLS_BOOL && is_short_builtin_calls_enabled_;
1928 }
1929
1930 // Returns a region from which it's possible to make pc-relative (short)
1931 // calls/jumps to embedded builtins or empty region if there's no embedded
1932 // blob or if pc-relative calls are not supported.
1933 static base::AddressRegion GetShortBuiltinsCallRegion();
1934
1936 array_buffer_allocator_ = allocator;
1937 }
1939 return array_buffer_allocator_;
1940 }
1941
1943 std::shared_ptr<v8::ArrayBuffer::Allocator> allocator) {
1944 array_buffer_allocator_shared_ = std::move(allocator);
1945 }
1946 std::shared_ptr<v8::ArrayBuffer::Allocator> array_buffer_allocator_shared()
1947 const {
1948 return array_buffer_allocator_shared_;
1949 }
1950
1952 if (array_buffer_max_size_ == 0) {
1953 array_buffer_max_size_ = array_buffer_allocator_->MaxAllocationSize();
1954 }
1955 return &array_buffer_max_size_;
1956 }
1957
1958 FutexWaitListNode* futex_wait_list_node() { return &futex_wait_list_node_; }
1959
1961 return cancelable_task_manager_;
1962 }
1963
1965 return ast_string_constants_;
1966 }
1967
1968 interpreter::Interpreter* interpreter() const { return interpreter_; }
1969
1971 return compiler_cache_;
1972 }
1974 Zone* zone) {
1975 compiler_cache_ = cache;
1976 compiler_zone_ = zone;
1977 }
1978
1980
1982 return lazy_compile_dispatcher_.get();
1983 }
1984
1985 bool IsInCreationContext(Tagged<JSObject> object, uint32_t index);
1986
1987 void ClearKeptObjects();
1988
1989 void SetHostImportModuleDynamicallyCallback(
1991 void SetHostImportModuleWithPhaseDynamicallyCallback(
1993 MaybeDirectHandle<JSPromise> RunHostImportModuleDynamicallyCallback(
1994 MaybeDirectHandle<Script> maybe_referrer, Handle<Object> specifier,
1995 ModuleImportPhase phase,
1996 MaybeDirectHandle<Object> maybe_import_options_argument);
1997
1998 void SetHostInitializeImportMetaObjectCallback(
2000 MaybeHandle<JSObject> RunHostInitializeImportMetaObjectCallback(
2002
2003 void SetHostCreateShadowRealmContextCallback(
2005 MaybeDirectHandle<NativeContext> RunHostCreateShadowRealmContextCallback();
2006
2007 bool IsJSApiWrapperNativeError(DirectHandle<JSReceiver> obj);
2008
2010 embedded_file_writer_ = writer;
2011 }
2012
2013 int LookupOrAddExternallyCompiledFilename(const char* filename);
2014 const char* GetExternallyCompiledFilename(int index) const;
2015 int GetExternallyCompiledFilenameCount() const;
2016 // PrepareBuiltinSourcePositionMap is necessary in order to preserve the
2017 // builtin source positions before the corresponding code objects are
2018 // replaced with trampolines. Those source positions are used to
2019 // annotate the builtin blob with debugging information.
2020 void PrepareBuiltinSourcePositionMap();
2021
2022#if defined(V8_OS_WIN64)
2023 void SetBuiltinUnwindData(
2024 Builtin builtin,
2025 const win64_unwindinfo::BuiltinUnwindInfo& unwinding_info);
2026#endif // V8_OS_WIN64
2027
2028 void SetPrepareStackTraceCallback(PrepareStackTraceCallback callback);
2029 MaybeDirectHandle<Object> RunPrepareStackTraceCallback(
2031 DirectHandle<JSArray> sites);
2032 bool HasPrepareStackTraceCallback() const;
2033
2034 void SetAddCrashKeyCallback(AddCrashKeyCallback callback);
// Forwards a (crash-key id, value) annotation pair to the embedder's
// registered callback for postmortem debugging; silently a no-op when no
// callback has been installed via SetAddCrashKeyCallback().
2035 void AddCrashKey(CrashKeyId id, const std::string& value) {
2036 if (add_crash_key_callback_) {
2037 add_crash_key_callback_(id, value);
2038 }
2039 }
2040
2041#if defined(V8_ENABLE_ETW_STACK_WALKING)
2042 // Specifies the callback called when an ETW tracing session starts.
2043
2044 // Deprecated - to be deleted.
2045 void SetFilterETWSessionByURLCallback(FilterETWSessionByURLCallback callback);
2046
2047 void SetFilterETWSessionByURL2Callback(
2048 FilterETWSessionByURL2Callback callback);
2049 FilterETWSessionByURLResult RunFilterETWSessionByURLCallback(
2050 const std::string& payload);
2051
// Accessors for ETW (Event Tracing for Windows) stack-walk state.
2052 bool IsETWTracingEnabled() const { return etw_tracing_enabled_; }
2053 void SetETWTracingEnabled(bool enabled) { etw_tracing_enabled_ = enabled; }
2054
2055 bool ETWIsInRundown() const { return etw_in_rundown_; }
2056 void SetETWIsInRundown(bool is_rundown) { etw_in_rundown_ = is_rundown; }
2057
// Latches interpreted-frame tracing on; note there is deliberately no
// accessor to turn it back off.
2058 void set_etw_trace_interpreted_frames() {
2059 etw_trace_interpreted_frames_ = true;
2060 }
// True when either the --interpreted-frames-native-stack flag or an ETW
// session (via set_etw_trace_interpreted_frames()) has requested native
// stack frames for interpreted code.
2061 bool interpreted_frames_native_stack() const {
2062 return v8_flags.interpreted_frames_native_stack ||
2063 etw_trace_interpreted_frames_;
2064 }
2065#else // V8_ENABLE_ETW_STACK_WALKING
2067 return v8_flags.interpreted_frames_native_stack;
2068 }
2069#endif // V8_ENABLE_ETW_STACK_WALKING
2070
2071 void SetIsLoading(bool is_loading);
2072
2074 code_coverage_mode_.store(coverage_mode, std::memory_order_relaxed);
2075 }
2077 return code_coverage_mode_.load(std::memory_order_relaxed);
2078 }
2079
2080 void SetPriority(v8::Isolate::Priority priority);
2081
2082 v8::Isolate::Priority priority() { return priority_; }
2084 return priority_ == v8::Isolate::Priority::kBestEffort;
2085 }
2086
2087 // When efficiency mode is enabled we can favor single core throughput without
2088 // latency requirements. Any decision based on this flag must be quickly
2089 // reversible as we have to expect to migrate out of efficiency mode on short
2090 // notice. E.g., it would not be advisable to generate worse code in
2091 // efficiency mode. The decision when to enable efficiency mode is steered by
2092 // the embedder. Currently the only signal (potentially) being considered is
2093 // if an isolate is in foreground or background mode.
2095 if (V8_UNLIKELY(v8_flags.efficiency_mode.value().has_value())) {
2096 return *v8_flags.efficiency_mode.value();
2097 }
2098 return priority_ != v8::Isolate::Priority::kUserBlocking;
2099 }
2100
2101 // This is a temporary api until we use it by default.
2103 return v8_flags.efficiency_mode_for_tiering_heuristics &&
2104 EfficiencyModeEnabled();
2105 }
2106
2107 // In battery saver mode we optimize to reduce total cpu cycles spent. Battery
2108 // saver mode is opt-in by the embedder. As with efficiency mode we must
2109 // expect that the mode is toggled off again and we should be able to ramp up
2110 // quickly after that.
2112 if (V8_UNLIKELY(v8_flags.battery_saver_mode.value().has_value())) {
2113 return *v8_flags.battery_saver_mode.value();
2114 }
2115 return V8_UNLIKELY(battery_saver_mode_enabled_);
2116 }
2117
2119 if (v8_flags.optimize_for_size) {
2120 return true;
2121 }
2122 if (V8_UNLIKELY(v8_flags.memory_saver_mode.value().has_value())) {
2123 return *v8_flags.memory_saver_mode.value();
2124 }
2125 return V8_UNLIKELY(memory_saver_mode_enabled_);
2126 }
2127
2128 PRINTF_FORMAT(2, 3) void PrintWithTimestamp(const char* format, ...);
2129
2130 void set_allow_atomics_wait(bool set) { allow_atomics_wait_ = set; }
2131 bool allow_atomics_wait() { return allow_atomics_wait_; }
2132
2133 bool flush_denormals() const { return flush_denormals_; }
2134
2135 // Register a finalizer to be called at isolate teardown.
2136 void RegisterManagedPtrDestructor(ManagedPtrDestructor* finalizer);
2137
2138 // Removes a previously-registered shared object finalizer.
2139 void UnregisterManagedPtrDestructor(ManagedPtrDestructor* finalizer);
2140
2141 size_t elements_deletion_counter() { return elements_deletion_counter_; }
2143 elements_deletion_counter_ = value;
2144 }
2145
2146#if V8_ENABLE_WEBASSEMBLY
2147 void AddSharedWasmMemory(DirectHandle<WasmMemoryObject> memory_object);
2148#endif // V8_ENABLE_WEBASSEMBLY
2149
2151 return thread_local_top()->top_backup_incumbent_scope_;
2152 }
2154 const v8::Context::BackupIncumbentScope* top_backup_incumbent_scope) {
2155 thread_local_top()->top_backup_incumbent_scope_ =
2156 top_backup_incumbent_scope;
2157 }
2158
2159 void SetIdle(bool is_idle);
2160
2161 // Changing various modes can cause differences in generated bytecode which
2162 // interferes with lazy source positions, so this should be called immediately
2163 // before such a mode change to ensure that this cannot happen.
2164 void CollectSourcePositionsForAllBytecodeArrays();
2165
2166 void AddCodeMemoryChunk(MutablePageMetadata* chunk);
2167 void RemoveCodeMemoryChunk(MutablePageMetadata* chunk);
2168 void AddCodeRange(Address begin, size_t length_in_bytes);
2169
2170 bool RequiresCodeRange() const;
2171
2172 static Address load_from_stack_count_address(const char* function_name);
2173 static Address store_to_stack_count_address(const char* function_name);
2174
2175 v8::metrics::Recorder::ContextId GetOrRegisterRecorderContextId(
2177 MaybeLocal<v8::Context> GetContextFromRecorderContextId(
2179
2180 void UpdateLongTaskStats();
2181 v8::metrics::LongTaskStats* GetCurrentLongTaskStats();
2182
2184 return main_thread_local_isolate_.get();
2185 }
2186
2187 Isolate* AsIsolate() { return this; }
2188 LocalIsolate* AsLocalIsolate() { return main_thread_local_isolate(); }
2190
2191 LocalHeap* main_thread_local_heap();
2192 LocalHeap* CurrentLocalHeap();
2193
2194#ifdef V8_COMPRESS_POINTERS
2195 ExternalPointerTable& external_pointer_table() {
2196 return isolate_data_.external_pointer_table_;
2197 }
2198
2199 const ExternalPointerTable& external_pointer_table() const {
2200 return isolate_data_.external_pointer_table_;
2201 }
2202
2203 Address external_pointer_table_address() {
2204 return reinterpret_cast<Address>(&isolate_data_.external_pointer_table_);
2205 }
2206
2207 ExternalPointerTable& shared_external_pointer_table() {
2208 return *isolate_data_.shared_external_pointer_table_;
2209 }
2210
2211 const ExternalPointerTable& shared_external_pointer_table() const {
2212 return *isolate_data_.shared_external_pointer_table_;
2213 }
2214
2215 ExternalPointerTable::Space* shared_external_pointer_space() {
2216 return shared_external_pointer_space_;
2217 }
2218
2219 Address shared_external_pointer_table_address_address() {
2220 return reinterpret_cast<Address>(
2221 &isolate_data_.shared_external_pointer_table_);
2222 }
2223
2224 CppHeapPointerTable& cpp_heap_pointer_table() {
2225 return isolate_data_.cpp_heap_pointer_table_;
2226 }
2227
2228 const CppHeapPointerTable& cpp_heap_pointer_table() const {
2229 return isolate_data_.cpp_heap_pointer_table_;
2230 }
2231
2232#endif // V8_COMPRESS_POINTERS
2233
2234#ifdef V8_ENABLE_SANDBOX
2235 TrustedPointerTable& trusted_pointer_table() {
2236 return isolate_data_.trusted_pointer_table_;
2237 }
2238
2239 const TrustedPointerTable& trusted_pointer_table() const {
2240 return isolate_data_.trusted_pointer_table_;
2241 }
2242
2243 Address trusted_pointer_table_base_address() const {
2244 return isolate_data_.trusted_pointer_table_.base_address();
2245 }
2246
2247 TrustedPointerTable& shared_trusted_pointer_table() {
2248 return *isolate_data_.shared_trusted_pointer_table_;
2249 }
2250
2251 const TrustedPointerTable& shared_trusted_pointer_table() const {
2252 return *isolate_data_.shared_trusted_pointer_table_;
2253 }
2254
2255 TrustedPointerTable::Space* shared_trusted_pointer_space() {
2256 return shared_trusted_pointer_space_;
2257 }
2258
2259 Address shared_trusted_pointer_table_base_address() {
2260 return reinterpret_cast<Address>(
2261 &isolate_data_.shared_trusted_pointer_table_);
2262 }
2263
// Currently active scope tracking publication of trusted pointers, or
// nullptr when none is active.
2264 TrustedPointerPublishingScope* trusted_pointer_publishing_scope() const {
2265 return isolate_data_.trusted_pointer_publishing_scope_;
2266 }
2267 void set_trusted_pointer_publishing_scope(
2268 TrustedPointerPublishingScope* scope) {
// Enforce strict alternation: a scope may only be installed while none is
// active, and may only be cleared (scope == nullptr) while one is active.
2269 DCHECK_NE((trusted_pointer_publishing_scope() == nullptr),
2270 (scope == nullptr));
2271 isolate_data_.trusted_pointer_publishing_scope_ = scope;
2272 }
2273
2274 Address code_pointer_table_base_address() {
2275 return isolate_data_.code_pointer_table_base_address_;
2276 }
2277#endif // V8_ENABLE_SANDBOX
2278
2280 return reinterpret_cast<Address>(
2281 &isolate_data_.continuation_preserved_embedder_data_);
2282 }
2283
2290
2291 // Returns true when this isolate contains the shared spaces.
2292 bool is_shared_space_isolate() const { return is_shared_space_isolate_; }
2293
2294 // Returns the isolate that owns the shared spaces.
2296 DCHECK(has_shared_space());
2297 Isolate* isolate = shared_space_isolate_.value();
2298 DCHECK(has_shared_space());
2299 return isolate;
2300 }
2301
2302 // Returns true when this isolate supports allocation in shared spaces.
2303 bool has_shared_space() const { return shared_space_isolate_.value(); }
2304
2305 GlobalSafepoint* global_safepoint() const { return global_safepoint_.get(); }
2306
2307#if V8_ENABLE_DRUMBRAKE
2308 void initialize_wasm_execution_timer();
2309
2310 wasm::WasmExecutionTimer* wasm_execution_timer() const {
2311 return wasm_execution_timer_.get();
2312 }
2313#endif // V8_ENABLE_DRUMBRAKE
2314
2315 bool owns_shareable_data() { return owns_shareable_data_; }
2316
2317 bool log_object_relocation() const { return log_object_relocation_; }
2318
2319 // TODO(pthier): Unify with owns_shareable_data() once the flag
2320 // --shared-string-table is removed.
// With --shared-string-table, string tables live in (and are owned by) the
// shared-space isolate only; otherwise every isolate owns its own tables.
// string_table_ / string_forwarding_table_ are guaranteed empty when this
// returns false.
2321 bool OwnsStringTables() const {
2322 return !v8_flags.shared_string_table || is_shared_space_isolate();
2323 }
2324
2325#if USE_SIMULATOR
2326 SimulatorData* simulator_data() { return simulator_data_; }
2327#endif
2328
2329#ifdef V8_ENABLE_WEBASSEMBLY
2330 bool IsOnCentralStack();
// Mutable access to the isolate-owned wasm stack memories; entries are
// removed again by RetireWasmStack() when a continuation returns or throws.
2331 std::vector<std::unique_ptr<wasm::StackMemory>>& wasm_stacks() {
2332 return wasm_stacks_;
2333 }
2334
2335 // Updates the stack limit, parent pointer and central stack info.
2336 void SwitchStacks(wasm::StackMemory* from, wasm::StackMemory* to);
2337
2338 // Retires the stack owned by {continuation}, to be called when returning or
2339 // throwing from this continuation.
2340 // This updates the {StackMemory} state, removes it from the global
2341 // {wasm_stacks_} vector and nulls the EPT entry. This does not update the
2342 // {ActiveContinuation} root or the stack limit.
2343 void RetireWasmStack(wasm::StackMemory* stack);
2344#else
2345 bool IsOnCentralStack() { return true; }
2346#endif
2347
2348 // Access to the global "locals block list cache". Caches outer-stack
2349 // allocated variables per ScopeInfo for debug-evaluate.
2350 // We also store a strong reference to the outer ScopeInfo to keep all
2351 // blocklists along a scope chain alive.
2352 void LocalsBlockListCacheRehash();
2353 void LocalsBlockListCacheSet(DirectHandle<ScopeInfo> scope_info,
2354 DirectHandle<ScopeInfo> outer_scope_info,
2355 DirectHandle<StringSet> locals_blocklist);
2356 // Returns either `TheHole` or `StringSet`.
2357 Tagged<Object> LocalsBlockListCacheGet(DirectHandle<ScopeInfo> scope_info);
2358
2359 void VerifyStaticRoots();
2360
2362 public:
2364 : isolate_(isolate) {
2365 CHECK(!isolate_->enable_ro_allocation_for_snapshot_);
2366 isolate_->enable_ro_allocation_for_snapshot_ = true;
2367 }
2368
2370 CHECK(isolate_->enable_ro_allocation_for_snapshot_);
2371 isolate_->enable_ro_allocation_for_snapshot_ = false;
2372 }
2373
2374 private:
2376 };
2377
2379 return enable_ro_allocation_for_snapshot_;
2380 }
2381
2382 void set_battery_saver_mode_enabled(bool battery_saver_mode_enabled) {
2383 battery_saver_mode_enabled_ = battery_saver_mode_enabled;
2384 }
2385
2386 void set_memory_saver_mode_enabled(bool memory_saver_mode_enabled) {
2387 memory_saver_mode_enabled_ = memory_saver_mode_enabled;
2388 }
2389
2390 std::list<std::unique_ptr<detail::WaiterQueueNode>>&
2391 async_waiter_queue_nodes();
2392
2393 void ReportExceptionFunctionCallback(
2396 v8::ExceptionContext callback_kind);
2397 void ReportExceptionPropertyCallback(DirectHandle<JSReceiver> holder,
2398 DirectHandle<Name> name,
2399 v8::ExceptionContext callback_kind);
2400 void SetExceptionPropagationCallback(ExceptionPropagationCallback callback);
2401
2402#ifdef V8_ENABLE_WASM_SIMD256_REVEC
2403 void set_wasm_revec_verifier_for_test(
2405 wasm_revec_verifier_for_test_ = verifier;
2406 }
2407
2408 compiler::turboshaft::WasmRevecVerifier* wasm_revec_verifier_for_test()
2409 const {
2410 return wasm_revec_verifier_for_test_;
2411 }
2412#endif // V8_ENABLE_WASM_SIMD256_REVEC
2413
2414 bool IsFrozen() const { return is_frozen_; }
2415
// Records the embedder-signaled frozen state. When entering the frozen
// state with --memory-reducer-respects-frozen-state enabled, eagerly
// finishes any in-flight incremental marking and forces sweeping to
// complete, so a frozen isolate does not keep reclaimable memory alive.
2416 void Freeze(bool is_frozen) {
2417 is_frozen_ = is_frozen;
2418 if (v8_flags.memory_reducer_respects_frozen_state && IsFrozen()) {
2419 // We will either finalize an ongoing GC, or simply do a GC to reclaim
2420 // any unreachable memory.
2421 heap()->FinalizeIncrementalMarkingAtomicallyIfRunning(
2422 i::GarbageCollectionReason::kFrozen);
2423 heap()->EnsureSweepingCompleted(
2424 Heap::SweepingForcedFinalizationMode::kUnifiedHeap);
2425 }
2426 }
2427
2428 static void IterateRegistersAndStackOfSimulator(
2430
2431 std::shared_ptr<v8::TaskRunner> task_runner() const { return task_runner_; }
2432
2433 private:
2434 explicit Isolate(IsolateGroup* isolate_group);
2435 ~Isolate();
2436
2437 static Isolate* Allocate(IsolateGroup* isolate_group);
2438
2439 bool Init(SnapshotData* startup_snapshot_data,
2440 SnapshotData* read_only_snapshot_data,
2441 SnapshotData* shared_heap_snapshot_data, bool can_rehash);
2442
2443 void CheckIsolateLayout();
2444
2445 void InitializeCodeRanges();
2446 void AddCodeMemoryRange(MemoryRange range);
2447
2448 // See IsolateForSandbox.
2449 Isolate* ForSandbox() { return this; }
2450
2451 static void RemoveContextIdCallback(const v8::WeakCallbackInfo<void>& data);
2452
2453 void FireCallCompletedCallbackInternal(MicrotaskQueue* microtask_queue);
2454
2456 public:
2457 ThreadDataTable() = default;
2458
2459 PerIsolateThreadData* Lookup(ThreadId thread_id);
2460 void Insert(PerIsolateThreadData* data);
2461 void Remove(PerIsolateThreadData* data);
2462 void RemoveAllThreads();
2463
2464 private:
2465 struct Hasher {
2466 std::size_t operator()(const ThreadId& t) const {
2467 return std::hash<int>()(t.ToInteger());
2468 }
2469 };
2470
2471 std::unordered_map<ThreadId, PerIsolateThreadData*, Hasher> table_;
2472 };
2473
2474 // These items form a stack synchronously with threads Enter'ing and Exit'ing
2475 // the Isolate. The top of the stack points to a thread which is currently
2476 // running the Isolate. When the stack is empty, the Isolate is considered
2477 // not entered by any thread and can be Disposed.
2478 // If the same thread enters the Isolate more than once, the entry_count_
2479 // is incremented rather then a new item pushed to the stack.
2481 public:
2483 Isolate* previous_isolate, EntryStackItem* previous_item)
2484 : entry_count(1),
2485 previous_thread_data(previous_thread_data),
2486 previous_isolate(previous_isolate),
2487 previous_item(previous_item) {}
2490
2495 };
2496
2497 void Deinit();
2498
2499 static void SetIsolateThreadLocals(Isolate* isolate,
2500 PerIsolateThreadData* data);
2501
2503
2504 // Propagate exception message to the v8::TryCatch.
2505 // If there is no external try-catch or message was successfully propagated,
2506 // then return true.
2507 bool PropagateExceptionToExternalTryCatch(ExceptionHandlerType top_handler);
2508
2509 // Checks if the exception happened in any of the Api callback and call
2510 // the |exception_propagation_callback_|.
2511 void NotifyExceptionPropagationCallback();
2512
2514 return PromiseHookFields::HasIsolatePromiseHook::decode(
2515 promise_hook_flags_);
2516 }
2517
2519 return PromiseHookFields::HasAsyncEventDelegate::decode(
2520 promise_hook_flags_);
2521 }
2522
2523 void AddCrashKeysForIsolateAndHeapPointers();
2524
2525#if V8_ENABLE_WEBASSEMBLY
2526 bool IsOnCentralStack(Address addr);
2527#else
2528 bool IsOnCentralStack(Address addr) { return true; }
2529#endif
2530
2531 // This class contains a collection of data accessible from both C++ runtime
2532 // and compiled code (including assembly stubs, builtins, interpreter bytecode
2533 // handlers and optimized code).
2535
2536 // Set to true if this isolate is used as main isolate with a shared space.
2537 bool is_shared_space_isolate_{false};
2538
2541 ReadOnlyHeap* read_only_heap_ = nullptr;
2542
2543 // These are guaranteed empty when !OwnsStringTables().
2544 std::unique_ptr<StringTable> string_table_;
2545 std::unique_ptr<StringForwardingTable> string_forwarding_table_;
2546
2547 const int id_;
2548 std::atomic<EntryStackItem*> entry_stack_ = nullptr;
2549 int stack_trace_nesting_level_ = 0;
2550 std::atomic<bool> was_locker_ever_used_{false};
2551 StringStream* incomplete_message_ = nullptr;
2552 Address isolate_addresses_[kIsolateAddressCount + 1] = {};
2553 Bootstrapper* bootstrapper_ = nullptr;
2554 TieringManager* tiering_manager_ = nullptr;
2555 CompilationCache* compilation_cache_ = nullptr;
2556 std::shared_ptr<Counters> async_counters_;
2564 V8FileLogger* v8_file_logger_ = nullptr;
2565 StubCache* load_stub_cache_ = nullptr;
2566 StubCache* store_stub_cache_ = nullptr;
2567 StubCache* define_own_stub_cache_ = nullptr;
2568 Deoptimizer* current_deoptimizer_ = nullptr;
2569 bool deoptimizer_lazy_throw_ = false;
2570 MaterializedObjectStore* materialized_object_store_ = nullptr;
2571 bool capture_stack_trace_for_uncaught_exceptions_ = false;
2572 int stack_trace_for_uncaught_exceptions_frame_limit_ = 0;
2573 StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_ =
2574 StackTrace::kOverview;
2575 DescriptorLookupCache* descriptor_lookup_cache_ = nullptr;
2576 HandleScopeImplementer* handle_scope_implementer_ = nullptr;
2577 UnicodeCache* unicode_cache_ = nullptr;
2579 InnerPointerToCodeCache* inner_pointer_to_code_cache_ = nullptr;
2580 GlobalHandles* global_handles_ = nullptr;
2582 EternalHandles* eternal_handles_ = nullptr;
2583 ThreadManager* thread_manager_ = nullptr;
2584 bigint::Processor* bigint_processor_ = nullptr;
2587 SetupIsolateDelegate* setup_delegate_ = nullptr;
2588#if defined(DEBUG) || defined(VERIFY_HEAP)
2589 std::atomic<int> num_active_deserializers_;
2590#endif
2591#ifndef V8_INTL_SUPPORT
2596#endif // !V8_INTL_SUPPORT
2597 RegExpStack* regexp_stack_ = nullptr;
2598 std::vector<int> regexp_indices_;
2599 // Necessary in order to avoid memory leaks in the presence of
2600 // TerminateExecution exceptions.
2601 std::unordered_set<int32_t*> active_dynamic_regexp_result_vectors_;
2602 DateCache* date_cache_ = nullptr;
2603 base::RandomNumberGenerator* random_number_generator_ = nullptr;
2604 base::RandomNumberGenerator* fuzzer_rng_ = nullptr;
2605 v8::Isolate::ReleaseCppHeapCallback release_cpp_heap_callback_ = nullptr;
2606 PromiseHook promise_hook_ = nullptr;
2607 HostImportModuleDynamicallyCallback host_import_module_dynamically_callback_ =
2608 nullptr;
2610 host_import_module_with_phase_dynamically_callback_ = nullptr;
2611 std::atomic<debug::CoverageMode> code_coverage_mode_{
2612 debug::CoverageMode::kBestEffort};
2613
2614 std::atomic<bool> battery_saver_mode_enabled_ = false;
2615 std::atomic<bool> memory_saver_mode_enabled_ = false;
2616
2617 // Helper function for RunHostImportModuleDynamicallyCallback.
2618 // Unpacks import attributes, if present, from the second argument to dynamic
2619 // import() and returns them in a FixedArray, sorted by code point order of
2620 // the keys, in the form [key1, value1, key2, value2, ...]. Returns an empty
2621 // MaybeHandle if an error was thrown. In this case, the host callback should
2622 // not be called and instead the caller should use the exception to
2623 // reject the import() call's Promise.
2624 MaybeDirectHandle<FixedArray> GetImportAttributesFromArgument(
2625 MaybeDirectHandle<Object> maybe_import_options_argument);
2626
2628 host_initialize_import_meta_object_callback_ = nullptr;
2630 host_create_shadow_realm_context_callback_ = nullptr;
2631
2632#ifdef V8_INTL_SUPPORT
2633 std::string default_locale_;
2634
2635 // The cache stores the most recently accessed {locales,obj} pair for each
2636 // cache type.
2637 struct ICUObjectCacheEntry {
2638 std::string locales;
2639 std::shared_ptr<icu::UMemory> obj;
2640
2641 ICUObjectCacheEntry() = default;
2642 ICUObjectCacheEntry(std::string locales, std::shared_ptr<icu::UMemory> obj)
2643 : locales(locales), obj(std::move(obj)) {}
2644 };
2645
2646 ICUObjectCacheEntry icu_object_cache_[kICUObjectCacheTypeCount];
2647#endif // V8_INTL_SUPPORT
2648
2649 // Whether the isolate has been created for snapshotting.
2650 bool serializer_enabled_ = false;
2651
2652 // True if fatal error has been signaled for this isolate.
2653 bool has_fatal_error_ = false;
2654
2655 // True if this isolate was initialized from a snapshot.
2656 bool initialized_from_snapshot_ = false;
2657
2658 // True if short builtin calls optimization is enabled.
2659 bool is_short_builtin_calls_enabled_ = false;
2660
2661 // The isolate current's priority. This flag is used to prioritize
2662 // between memory usage and latency.
2663 std::atomic<v8::Isolate::Priority> priority_ =
2665
2666 // Indicates whether the isolate owns shareable data.
2667 // Only false for client isolates attached to a shared isolate.
2668 bool owns_shareable_data_ = true;
2669
2670 bool log_object_relocation_ = false;
2671
2672#ifdef V8_EXTERNAL_CODE_SPACE
2673 // Base address of the pointer compression cage containing external code
2674 // space, when external code space is enabled.
2675 Address code_cage_base_ = 0;
2676#endif
2677
2678 // Time stamp at initialization.
2679 double time_millis_at_init_ = 0;
2680
2681#ifdef DEBUG
2682 static std::atomic<size_t> non_disposed_isolates_;
2683
2684 JSObject::SpillInformation js_spill_information_;
2685
2686 std::atomic<bool> has_turbofan_string_builders_ = false;
2687#endif
2688
2689 Debug* debug_ = nullptr;
2690 Logger* logger_ = nullptr;
2691
2692 const AstStringConstants* ast_string_constants_ = nullptr;
2693
2694 interpreter::Interpreter* interpreter_ = nullptr;
2695
2696 compiler::PerIsolateCompilerCache* compiler_cache_ = nullptr;
2697 // The following zone is for compiler-related objects that should live
2698 // through all compilations (and thus all JSHeapBroker instances).
2699 Zone* compiler_zone_ = nullptr;
2700
2701 std::unique_ptr<LazyCompileDispatcher> lazy_compile_dispatcher_;
2702#ifdef V8_ENABLE_SPARKPLUG
2703 baseline::BaselineBatchCompiler* baseline_batch_compiler_ = nullptr;
2704#endif // V8_ENABLE_SPARKPLUG
2705#ifdef V8_ENABLE_MAGLEV
2706 maglev::MaglevConcurrentDispatcher* maglev_concurrent_dispatcher_ = nullptr;
2707#endif // V8_ENABLE_MAGLEV
2708
2709 using InterruptEntry = std::pair<InterruptCallback, void*>;
2710 std::queue<InterruptEntry> api_interrupts_queue_;
2711
2712#define GLOBAL_BACKING_STORE(type, name, initialvalue) type name##_;
2714#undef GLOBAL_BACKING_STORE
2715
2716#define GLOBAL_ARRAY_BACKING_STORE(type, name, length) type name##_[length];
2718#undef GLOBAL_ARRAY_BACKING_STORE
2719
2720#ifdef DEBUG
2721 // This class is huge and has a number of fields controlled by
2722 // preprocessor defines. Make sure the offsets of these fields agree
2723 // between compilation units.
2724#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
2725 static const intptr_t name##_debug_offset_;
2726 ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
2727 ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
2728#undef ISOLATE_FIELD_OFFSET
2729#endif
2730
2732 bool preprocessing_exception_ = false;
2733
2734 OptimizingCompileDispatcher* optimizing_compile_dispatcher_ = nullptr;
2735
2736 std::unique_ptr<PersistentHandlesList> persistent_handles_list_;
2737
2738 // Counts deopt points if deopt_every_n_times is enabled.
2739 uint64_t stress_deopt_count_ = 0;
2740
2741 bool force_slow_path_ = false;
2742
2743 // Certain objects may be allocated in RO space if suitable for the snapshot.
2744 bool enable_ro_allocation_for_snapshot_ = false;
2745
2746 bool initialized_ = false;
2747 bool jitless_ = false;
2748
2749 std::atomic<int> next_optimization_id_ = 0;
2750
// One-shot initialization of the unique SharedFunctionInfo id counter: the
// compare-exchange from the initial value 0 succeeds only if no id has been
// installed or handed out yet, so a second call — or a call after ids were
// already allocated — fails the CHECK. Relaxed ordering suffices since this
// runs during single-threaded isolate startup.
2751 void InitializeNextUniqueSfiId(uint32_t id) {
2752 uint32_t expected = 0; // Called at most once per Isolate on startup.
2753 bool successfully_exchanged = next_unique_sfi_id_.compare_exchange_strong(
2754 expected, id, std::memory_order_relaxed, std::memory_order_relaxed);
2755 CHECK(successfully_exchanged);
2756 }
2757 std::atomic<uint32_t> next_unique_sfi_id_;
2758
2760
2761 // Vector of callbacks before a Call starts execution.
2762 std::vector<BeforeCallEnteredCallback> before_call_entered_callbacks_;
2763
2764 // Vector of callbacks when a Call completes.
2765 std::vector<CallCompletedCallback> call_completed_callbacks_;
2766
2767 v8::Isolate::UseCounterCallback use_counter_callback_ = nullptr;
2768
2769 std::shared_ptr<CompilationStatistics> turbo_statistics_;
2770#ifdef V8_ENABLE_MAGLEV
2771 std::shared_ptr<CompilationStatistics> maglev_statistics_;
2772#endif
2773 std::shared_ptr<metrics::Recorder> metrics_recorder_;
2774 uintptr_t last_recorder_context_id_ = 0;
2775 std::unordered_map<uintptr_t, v8::Global<v8::Context>>
2777
2778 size_t last_long_task_stats_counter_ = 0;
2780
2781 std::vector<Tagged<Object>> startup_object_cache_;
2782
2783 // When sharing data among Isolates (e.g. v8_flags.shared_string_table), only
2784 // the shared Isolate populates this and client Isolates reference that copy.
2785 //
2786 // Otherwise this is populated for all Isolates.
2787 std::vector<Tagged<Object>> shared_heap_object_cache_;
2788
2789 // Used during builtins compilation to build the builtins constants table,
2790 // which is stored on the root list prior to serialization.
2791 BuiltinsConstantsTableBuilder* builtins_constants_table_builder_ = nullptr;
2792
2793 void InitializeDefaultEmbeddedBlob();
2794 void CreateAndSetEmbeddedBlob();
2795 void InitializeIsShortBuiltinCallsEnabled();
2796 void MaybeRemapEmbeddedBuiltinsIntoCodeRange();
2797 void TearDownEmbeddedBlob();
2798 void SetEmbeddedBlob(const uint8_t* code, uint32_t code_size,
2799 const uint8_t* data, uint32_t data_size);
2800 void ClearEmbeddedBlob();
2801
2802 void InitializeBuiltinJSDispatchTable();
2803
2804 const uint8_t* embedded_blob_code_ = nullptr;
2805 uint32_t embedded_blob_code_size_ = 0;
2806 const uint8_t* embedded_blob_data_ = nullptr;
2807 uint32_t embedded_blob_data_size_ = 0;
2808
2809 v8::ArrayBuffer::Allocator* array_buffer_allocator_ = nullptr;
2810 std::shared_ptr<v8::ArrayBuffer::Allocator> array_buffer_allocator_shared_;
2811 size_t array_buffer_max_size_ = 0;
2812
2813 std::shared_ptr<v8::TaskRunner> task_runner_;
2814
2816
2817 CancelableTaskManager* cancelable_task_manager_ = nullptr;
2818
2819 debug::ConsoleDelegate* console_delegate_ = nullptr;
2820
2821 debug::AsyncEventDelegate* async_event_delegate_ = nullptr;
2822 uint32_t promise_hook_flags_ = 0;
2823 uint32_t current_async_task_id_ = 0;
2824
2825 std::unique_ptr<LocalIsolate> main_thread_local_isolate_;
2826
2828 abort_on_uncaught_exception_callback_ = nullptr;
2829
2830 bool allow_atomics_wait_ = true;
2831 bool flush_denormals_ = false;
2832
2834 ManagedPtrDestructor* managed_ptr_destructors_head_ = nullptr;
2835
2836 size_t total_regexp_code_generated_ = 0;
2837
2838 size_t elements_deletion_counter_ = 0;
2839
2840 std::unique_ptr<TracingCpuProfilerImpl> tracing_cpu_profiler_;
2841
2842 EmbeddedFileWriterInterface* embedded_file_writer_ = nullptr;
2843
2844 PrepareStackTraceCallback prepare_stack_trace_callback_ = nullptr;
2845
2846#if defined(V8_ENABLE_ETW_STACK_WALKING)
2847 FilterETWSessionByURLCallback filter_etw_session_by_url_callback_ = nullptr;
2848 FilterETWSessionByURL2Callback filter_etw_session_by_url2_callback_ = nullptr;
// Fix: give the ETW state flags in-class initializers, consistent with
// every other member of this class. They are read by the const accessors
// IsETWTracingEnabled(), ETWIsInRundown() and
// interpreted_frames_native_stack(); without an initializer, a read before
// the first Set* call is an indeterminate-value read.
2849 bool etw_tracing_enabled_ = false;
2850 bool etw_trace_interpreted_frames_ = false;
2851 bool etw_in_rundown_ = false;
2852#endif // V8_ENABLE_ETW_STACK_WALKING
2853
2854 // TODO(kenton@cloudflare.com): This mutex can be removed if
2855 // thread_data_table_ is always accessed under the isolate lock. I do not
2856 // know if this is the case, so I'm preserving it for now.
2859
2860 // Stores the isolate containing the shared space.
2861 std::optional<Isolate*> shared_space_isolate_;
2862
2863 // Used to deduplicate registered SharedStructType shapes.
2864 //
2865 // This is guaranteed empty when !is_shared_space_isolate().
2866 std::unique_ptr<SharedStructTypeRegistry> shared_struct_type_registry_;
2867
2868#ifdef V8_COMPRESS_POINTERS
2869 // Stores the external pointer table space for the shared external pointer
2870 // table.
2871 ExternalPointerTable::Space* shared_external_pointer_space_ = nullptr;
2872#endif // V8_COMPRESS_POINTERS
2873
2874#ifdef V8_ENABLE_SANDBOX
2875 // Stores the trusted pointer table space for the shared trusted pointer
2876 // table.
2877 TrustedPointerTable::Space* shared_trusted_pointer_space_ = nullptr;
2878#endif // V8_ENABLE_SANDBOX
2879
2880 // List to manage the lifetime of the WaiterQueueNodes used to track async
2881 // waiters for JSSynchronizationPrimitives.
2882 std::list<std::unique_ptr<detail::WaiterQueueNode>> async_waiter_queue_nodes_;
2883
2884 // Used to track and safepoint all client isolates attached to this shared
2885 // isolate.
2886 std::unique_ptr<GlobalSafepoint> global_safepoint_;
2887 // Client isolates list managed by GlobalSafepoint.
2888 Isolate* global_safepoint_prev_client_isolate_ = nullptr;
2889 Isolate* global_safepoint_next_client_isolate_ = nullptr;
2890
2891 // A signal-safe vector of heap pages containing code. Used with the
2892 // v8::Unwinder API.
2893 std::atomic<std::vector<MemoryRange>*> code_pages_{nullptr};
2894 std::vector<MemoryRange> code_pages_buffer1_;
2895 std::vector<MemoryRange> code_pages_buffer2_;
2896 // The mutex only guards adding pages, the retrieval is signal safe.
2898
2899 // Stack size set with ResourceConstraints or Isolate::SetStackLimit, in
2900 // bytes. This is initialized with value of --stack-size.
2902#ifdef V8_ENABLE_WEBASSEMBLY
2903 wasm::WasmCodeLookupCache* wasm_code_look_up_cache_ = nullptr;
2904 std::vector<std::unique_ptr<wasm::StackMemory>> wasm_stacks_;
2905#if V8_ENABLE_DRUMBRAKE
2906 std::unique_ptr<wasm::WasmExecutionTimer> wasm_execution_timer_;
2907#endif // V8_ENABLE_DRUMBRAKE
2908 wasm::WasmOrphanedGlobalHandle* wasm_orphaned_handle_ = nullptr;
2909 wasm::StackPool stack_pool_;
2910#endif
2911
2912 // Enables the host application to provide a mechanism for recording a
2913 // predefined set of data as crash keys to be used in postmortem debugging
2914 // in case of a crash.
2915 AddCrashKeyCallback add_crash_key_callback_ = nullptr;
2916
2917#ifdef V8_ENABLE_WASM_SIMD256_REVEC
2918 compiler::turboshaft::WasmRevecVerifier* wasm_revec_verifier_for_test_ =
2919 nullptr;
2920#endif // V8_ENABLE_WASM_SIMD256_REVEC
2921
2922 // Delete new/delete operators to ensure that Isolate::New() and
2923 // Isolate::Delete() are used for Isolate creation and deletion.
2924 void* operator new(size_t, void* ptr) { return ptr; }
2925
2926#if USE_SIMULATOR
2927 SimulatorData* simulator_data_ = nullptr;
2928#endif
2929
2930#ifdef V8_ENABLE_CHECKS
2931 ThreadId current_thread_id_;
2932 int current_thread_counter_ = 0;
2933#endif
2934
2935 bool is_frozen_ = false;
2936
2937 friend class GlobalSafepoint;
2938 friend class heap::HeapTester;
2940 friend class IsolateForSandbox;
2941 friend class IsolateGroup;
2942 friend class TestSerializer;
2943 friend class SharedHeapNoClientsTest;
2944};
2945
2946// The current entered Isolate and its thread data. Do not access these
2947// directly! Use Isolate::CurrentPerIsolateThreadData instead.
2948//
2949// This is outside the Isolate class with extern storage because in clang-cl,
2950// thread_local is incompatible with dllexport linkage caused by
2951// V8_EXPORT_PRIVATE being applied to Isolate.
2952extern thread_local Isolate::PerIsolateThreadData*
2953 g_current_per_isolate_thread_data_ V8_CONSTINIT;
2954
2955#undef FIELD_ACCESSOR
2956#undef THREAD_LOCAL_TOP_ACCESSOR
2957#undef THREAD_LOCAL_TOP_ADDRESS
2958
2959// SaveContext scopes save the current context on the Isolate on creation, and
2960// restore it on destruction.
2972
2973// Like SaveContext, but also switches the Context to a new one in the
2974// constructor.
2976 public:
2977 SaveAndSwitchContext(Isolate* isolate, Tagged<Context> new_context);
2978};
2979
2980// A scope which sets the given isolate's context to null for its lifetime to
2981// ensure that code does not make assumptions on a context being available.
2983 public:
2984 explicit NullContextScope(Isolate* isolate)
2985 : SaveAndSwitchContext(isolate, Context()) {}
2986};
2987
2989#ifdef DEBUG
2990 public:
2991 explicit AssertNoContextChange(Isolate* isolate);
2993 CHECK_EQ(isolate_->context(), *context_);
2994 // The caller context is either cleared or not modified.
2995 if (!isolate_->topmost_script_having_context().is_null()) {
2996 CHECK_EQ(isolate_->topmost_script_having_context(),
2997 *topmost_script_having_context_);
2998 }
2999 }
3000
3001 private:
3004 Handle<Context> topmost_script_having_context_;
3005#else
3006 public:
3007 explicit AssertNoContextChange(Isolate* isolate) {}
3008#endif
3009};
3010
3012 public:
3013 explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
3014 Lock(isolate);
3015 }
3017
3018 static void Lock(Isolate* isolate) { isolate->break_access()->Lock(); }
3019 static void Unlock(Isolate* isolate) { isolate->break_access()->Unlock(); }
3020
3021 static bool TryLock(Isolate* isolate) {
3022 return isolate->break_access()->TryLock();
3023 }
3024
3025 private:
3027};
3028
3029// Support for checking for stack-overflows.
3031 public:
3032 explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) {}
3033
3034 // Use this to check for stack-overflows in C++ code.
3035 bool HasOverflowed() const {
3036 StackGuard* stack_guard = isolate_->stack_guard();
3037 return GetCurrentStackPosition() < stack_guard->real_climit();
3038 }
3039 static bool HasOverflowed(LocalIsolate* local_isolate);
3040
3041 // Use this to check for stack-overflow when entering runtime from JS code.
3042 bool JsHasOverflowed(uintptr_t gap = 0) const;
3043
3044 // Use this to check for stack-overflow when entering runtime from Wasm code.
3045 // If it is called from the central stack, while a switch was performed,
3046 // it checks logical stack limit of a secondary stack stored in the isolate,
3047 // instead checking actual one.
3048 bool WasmHasOverflowed(uintptr_t gap = 0) const;
3049
3050 // Use this to check for interrupt request in C++ code.
3052 StackGuard* stack_guard = isolate_->stack_guard();
3053 return GetCurrentStackPosition() < stack_guard->climit();
3054 }
3055
3056 // Precondition: InterruptRequested == true.
3057 // Returns true if any interrupt (overflow or termination) was handled, in
3058 // which case the caller must prevent further JS execution.
3060
3061 private:
3063};
3064
3065// This macro may be used in context that disallows JS execution.
3066// That is why it checks only for a stack overflow and termination.
3067#define STACK_CHECK(isolate, result_value) \
3068 do { \
3069 StackLimitCheck stack_check(isolate); \
3070 if (V8_UNLIKELY(stack_check.InterruptRequested()) && \
3071 V8_UNLIKELY(stack_check.HandleStackOverflowAndTerminationRequest())) { \
3072 return result_value; \
3073 } \
3074 } while (false)
3075
3077 public:
3079
3080 explicit StackTraceFailureMessage(Isolate* isolate, StackTraceMode mode,
3081 const Address* ptrs, size_t ptrs_count);
3082
3084 std::initializer_list<Address> ptrs)
3085 : StackTraceFailureMessage(isolate, mode, ptrs.begin(), ptrs.size()) {}
3086
3088 std::initializer_list<void*> ptrs)
3089 : StackTraceFailureMessage(isolate, mode,
3090 reinterpret_cast<const Address*>(ptrs.begin()),
3091 ptrs.size()) {}
3092
3093 V8_NOINLINE void Print() volatile;
3094
3095 static const uintptr_t kStartMarker = 0xdecade30;
3096 static const uintptr_t kMiddleMarker = 0xdecade33;
3097 static const uintptr_t kEndMarker = 0xdecade36;
3098 static const int kStacktraceBufferSize = 32 * KB;
3099
3102 Address ptrs_[64] = {};
3107};
3108
3109template <>
3111 public:
3114 DCHECK_NOT_NULL(isolate);
3115 DCHECK_EQ(ThreadId::Current(), isolate->thread_id());
3116 }
3117
3120};
3121
3122// Set the current isolate for the thread *without* entering the isolate. Used
3123// e.g. by background GC threads to be able to access pointer tables.
3124// This subsumes a `PtrComprCageAccessScope` which is needed in the same
3125// contexts in order to be able to access on-heap objects.
3136
3137} // namespace internal
3138} // namespace v8
3139
3140#endif // V8_EXECUTION_ISOLATE_H_
Isolate * isolate_
RegisterAllocator * allocator_
union v8::internal::@341::BuiltinMetadata::KindSpecificData data
Builtins::Kind kind
Definition builtins.cc:40
void(*)(std::unique_ptr< CppHeap >) ReleaseCppHeapCallback
bool(*)(Isolate *) AbortOnUncaughtExceptionCallback
Definition v8-isolate.h:756
void(*)(Isolate *isolate, UseCounterFeature feature) UseCounterCallback
Definition v8-isolate.h:669
AssertNoContextChange(Isolate *isolate)
Definition isolate.h:3007
static void Lock(Isolate *isolate)
Definition isolate.h:3018
static bool TryLock(Isolate *isolate)
Definition isolate.h:3021
ExecutionAccess(Isolate *isolate)
Definition isolate.h:3013
static void Unlock(Isolate *isolate)
Definition isolate.h:3019
PerIsolateThreadData * previous_thread_data
Definition isolate.h:2492
EntryStackItem & operator=(const EntryStackItem &)=delete
EntryStackItem(const EntryStackItem &)=delete
EntryStackItem(PerIsolateThreadData *previous_thread_data, Isolate *previous_isolate, EntryStackItem *previous_item)
Definition isolate.h:2482
PerIsolateThreadData(const PerIsolateThreadData &)=delete
PerIsolateThreadData & operator=(const PerIsolateThreadData &)=delete
PerIsolateThreadData(Isolate *isolate, ThreadId thread_id)
Definition isolate.h:605
bool Matches(Isolate *isolate, ThreadId thread_id) const
Definition isolate.h:628
std::unordered_map< ThreadId, PerIsolateThreadData *, Hasher > table_
Definition isolate.h:2471
Address continuation_preserved_embedder_data_address()
Definition isolate.h:2279
bool deoptimizer_lazy_throw() const
Definition isolate.h:1326
Isolate(const Isolate &)=delete
std::vector< CallCompletedCallback > call_completed_callbacks_
Definition isolate.h:2765
V8_INLINE Address * builtin_table()
Definition isolate.h:1305
GlobalHandles * global_handles() const
Definition isolate.h:1416
const AstStringConstants * ast_string_constants() const
Definition isolate.h:1964
std::vector< Tagged< Object > > shared_heap_object_cache_
Definition isolate.h:2787
DebugInfo::ExecutionMode debug_execution_mode() const
Definition isolate.h:1498
static constexpr uint32_t thread_in_wasm_flag_address_offset()
Definition isolate.h:1338
void set_stack_size(size_t v)
Definition isolate.h:1753
static constexpr uint32_t central_stack_sp_offset()
Definition isolate.h:1359
StubCache * load_stub_cache() const
Definition isolate.h:1312
std::shared_ptr< CompilationStatistics > turbo_statistics_
Definition isolate.h:2769
base::Mutex managed_ptr_destructors_mutex_
Definition isolate.h:2833
std::shared_ptr< v8::TaskRunner > task_runner_
Definition isolate.h:2813
Isolate * AsIsolate()
Definition isolate.h:2187
Tagged< Context > * context_address()
Definition isolate.h:802
ReadOnlyHeap * read_only_heap() const
Definition isolate.h:1201
std::queue< InterruptEntry > api_interrupts_queue_
Definition isolate.h:2710
bool HasIsolatePromiseHooks() const
Definition isolate.h:2513
bool MemorySaverModeEnabled()
Definition isolate.h:2118
const Heap * heap() const
Definition isolate.h:1200
void set_debug_execution_mode(DebugInfo::ExecutionMode debug_execution_mode)
Definition isolate.h:1502
ReadOnlyArtifacts * read_only_artifacts() const
Definition isolate.h:774
Deoptimizer * GetAndClearCurrentDeoptimizer()
Definition isolate.h:1315
bool serializer_enabled() const
Definition isolate.h:1549
bool is_binary_code_coverage() const
Definition isolate.h:1598
AccountingAllocator * allocator()
Definition isolate.h:1979
static constexpr size_t isolate_root_bias()
Definition isolate.h:1243
std::list< std::unique_ptr< detail::WaiterQueueNode > > async_waiter_queue_nodes_
Definition isolate.h:2882
static constexpr uint32_t context_offset()
Definition isolate.h:1351
debug::ConsoleDelegate * console_delegate()
Definition isolate.h:1045
Address address_of_regexp_static_result_offsets_vector() const
Definition isolate.h:1457
bool OwnsStringTables() const
Definition isolate.h:2321
std::atomic< uint32_t > next_unique_sfi_id_
Definition isolate.h:2757
bool IsCompileHintsMagicEnabled(Handle< NativeContext > context)
void InitializeNextUniqueSfiId(uint32_t id)
Definition isolate.h:2751
PRINTF_FORMAT(2, 3) void PrintWithTimestamp(const char *format
std::unique_ptr< LazyCompileDispatcher > lazy_compile_dispatcher_
Definition isolate.h:2701
bool is_shared_space_isolate() const
Definition isolate.h:2292
static Isolate * FromHeap(const Heap *heap)
Definition isolate.h:1202
static Address c_entry_fp(ThreadLocalTop *thread)
Definition isolate.h:889
std::unique_ptr< PersistentHandlesList > persistent_handles_list_
Definition isolate.h:2736
Address code_cage_base() const
Definition isolate.h:1222
Counters * counters()
Definition isolate.h:1180
CompilationCache * compilation_cache()
Definition isolate.h:1191
const std::shared_ptr< Counters > & async_counters()
Definition isolate.h:1182
bool is_best_effort_code_coverage() const
Definition isolate.h:1574
void UpdateNoElementsProtectorOnNormalizeElements(DirectHandle< JSObject > object)
Definition isolate.h:1674
DateCache * date_cache() const
Definition isolate.h:1619
void set_thread_id(ThreadId id)
Definition isolate.h:818
bool flush_denormals() const
Definition isolate.h:2133
Isolate * ForSandbox()
Definition isolate.h:2449
const IsolateData * isolate_data() const
Definition isolate.h:1207
EternalHandles * eternal_handles() const
Definition isolate.h:1420
const v8::Context::BackupIncumbentScope * top_backup_incumbent_scope() const
Definition isolate.h:2150
void set_current_deoptimizer(Deoptimizer *deoptimizer)
Definition isolate.h:1321
BuiltinsConstantsTableBuilder * builtins_constants_table_builder() const
Definition isolate.h:1901
RegExpStack * regexp_stack() const
Definition isolate.h:1445
void set_async_event_delegate(debug::AsyncEventDelegate *delegate)
Definition isolate.h:1047
void set_memory_saver_mode_enabled(bool memory_saver_mode_enabled)
Definition isolate.h:2386
std::shared_ptr< v8::ArrayBuffer::Allocator > array_buffer_allocator_shared_
Definition isolate.h:2810
size_t elements_deletion_counter()
Definition isolate.h:2141
Logger * logger() const
Definition isolate.h:1508
HandleScopeImplementer * handle_scope_implementer() const
Definition isolate.h:1397
bool was_locker_ever_used() const
Definition isolate.h:1729
base::RecursiveMutex * break_access()
Definition isolate.h:736
uint32_t next_unique_sfi_id() const
Definition isolate.h:1820
LocalIsolate * main_thread_local_isolate()
Definition isolate.h:2183
Bootstrapper * bootstrapper()
Definition isolate.h:1178
bool is_count_code_coverage() const
Definition isolate.h:1602
std::vector< Tagged< Object > > * startup_object_cache()
Definition isolate.h:1882
std::optional< Isolate * > shared_space_isolate_
Definition isolate.h:2861
StubCache * store_stub_cache() const
Definition isolate.h:1313
bool is_precise_binary_code_coverage() const
Definition isolate.h:1582
base::Mutex feedback_vector_access_
Definition isolate.h:2558
GlobalSafepoint * global_safepoint() const
Definition isolate.h:2305
Tagged< Context > context() const
Definition isolate.h:800
Isolate * GetMainThreadIsolateUnsafe()
Definition isolate.h:2189
bool force_slow_path() const
Definition isolate.h:1748
void set_compiler_utils(compiler::PerIsolateCompilerCache *cache, Zone *zone)
Definition isolate.h:1973
base::Mutex map_updater_access_
Definition isolate.h:2562
base::Mutex thread_data_table_mutex_
Definition isolate.h:2857
unibrow::Mapping< unibrow::CanonicalizationRange > jsregexp_canonrange_
Definition isolate.h:2593
OptimizingCompileDispatcher * optimizing_compile_dispatcher()
Definition isolate.h:1715
void set_deoptimizer_lazy_throw(bool value)
Definition isolate.h:1327
Address promise_hook_address()
Definition isolate.h:1844
Address promise_hook_flags_address()
Definition isolate.h:1840
bigint::Processor * bigint_processor()
Definition isolate.h:1424
base::Mutex * internalized_string_access()
Definition isolate.h:743
void RegisterDeserializerStarted()
Definition isolate.h:925
int32_t * regexp_static_result_offsets_vector() const
Definition isolate.h:1449
Address handle_scope_implementer_address()
Definition isolate.h:1860
void AddCrashKey(CrashKeyId id, const std::string &value)
Definition isolate.h:2035
std::unordered_set< int32_t * > active_dynamic_regexp_result_vectors_
Definition isolate.h:2601
std::vector< MemoryRange > code_pages_buffer1_
Definition isolate.h:2894
uint8_t error_message_param()
Definition isolate.h:1379
base::Mutex * feedback_vector_access()
Definition isolate.h:739
Address * handler_address()
Definition isolate.h:904
TracedHandles traced_handles_
Definition isolate.h:2581
bool IsFrozen() const
Definition isolate.h:2414
TieringManager * tiering_manager()
Definition isolate.h:1190
SharedStructTypeRegistry * shared_struct_type_registry() const
Definition isolate.h:791
size_t total_regexp_code_generated() const
Definition isolate.h:1469
TracedHandles * traced_handles()
Definition isolate.h:1418
void UpdateNoElementsProtectorOnSetPrototype(DirectHandle< JSObject > object)
Definition isolate.h:1665
std::pair< InterruptCallback, void * > InterruptEntry
Definition isolate.h:2709
base::Mutex internalized_string_access_
Definition isolate.h:2559
bool EfficiencyModeEnabledForTiering()
Definition isolate.h:2102
StubCache * define_own_stub_cache() const
Definition isolate.h:1314
Address * c_entry_fp_address()
Definition isolate.h:895
bool is_profiling() const
Definition isolate.h:1476
bool initialized_from_snapshot()
Definition isolate.h:1562
IsolateData isolate_data_
Definition isolate.h:2534
Address * js_entry_sp_address()
Definition isolate.h:932
static uint32_t error_message_param_offset()
Definition isolate.h:1373
std::unique_ptr< StringTable > string_table_
Definition isolate.h:2544
bool jitless() const
Definition isolate.h:1751
base::Mutex * full_transition_array_access()
Definition isolate.h:749
void RegisterDeserializerFinished()
Definition isolate.h:926
static int ArchiveSpacePerThread()
Definition isolate.h:947
std::unique_ptr< LocalIsolate > main_thread_local_isolate_
Definition isolate.h:2825
bool HasContextPromiseHooks() const
Definition isolate.h:1835
V8_INLINE HandleScopeData * handle_scope_data()
Definition isolate.h:1393
v8::ArrayBuffer::Allocator * array_buffer_allocator() const
Definition isolate.h:1938
InnerPointerToCodeCache * inner_pointer_to_code_cache()
Definition isolate.h:1404
Address js_entry_sp()
Definition isolate.h:931
ThreadManager * thread_manager() const
Definition isolate.h:1422
Isolate & operator=(const Isolate &)=delete
void FreeThreadResources()
Definition isolate.h:948
ThreadId thread_id() const
Definition isolate.h:821
bool has_shared_space() const
Definition isolate.h:2303
void * GetData(uint32_t slot) const
Definition isolate.h:1544
base::Mutex boilerplate_migration_access_
Definition isolate.h:2563
bool * force_slow_path_address()
Definition isolate.h:1749
Address javascript_execution_assert_address()
Definition isolate.h:1852
const RootsTable & roots_table() const
Definition isolate.h:1251
std::unordered_map< uintptr_t, v8::Global< v8::Context > > recorder_context_id_map_
Definition isolate.h:2776
Tagged< Context > * topmost_script_having_context_address()
Definition isolate.h:813
void set_read_only_heap(ReadOnlyHeap *ro_heap)
Definition isolate.h:667
Address isolate_root() const
Definition isolate.h:1242
std::vector< int > regexp_indices_
Definition isolate.h:2598
ExternalReferenceTable * external_reference_table()
Definition isolate.h:1273
unsigned next_module_async_evaluation_ordinal_
Definition isolate.h:2759
PersistentHandlesList * persistent_handles_list() const
Definition isolate.h:1687
StringTable * string_table() const
Definition isolate.h:781
bool HasAsyncEventDelegate() const
Definition isolate.h:2518
unibrow::Mapping< unibrow::Ecma262UnCanonicalize > * jsregexp_uncanonicalize()
Definition isolate.h:1427
static uint32_t c_entry_fp_offset()
Definition isolate.h:898
FutexWaitListNode futex_wait_list_node_
Definition isolate.h:2815
std::unique_ptr< SharedStructTypeRegistry > shared_struct_type_registry_
Definition isolate.h:2866
bool detailed_source_positions_for_profiling() const
Definition isolate.h:1160
unibrow::Mapping< unibrow::Ecma262Canonicalize > * regexp_macro_assembler_canonicalize()
Definition isolate.h:1436
Address async_event_delegate_address()
Definition isolate.h:1848
void Freeze(bool is_frozen)
Definition isolate.h:2416
bool is_block_count_code_coverage() const
Definition isolate.h:1586
ThreadDataTable thread_data_table_
Definition isolate.h:2858
FutexWaitListNode * futex_wait_list_node()
Definition isolate.h:1958
static constexpr uint32_t central_stack_limit_offset()
Definition isolate.h:1366
std::unique_ptr< StringForwardingTable > string_forwarding_table_
Definition isolate.h:2545
void set_force_slow_path(bool v)
Definition isolate.h:1747
StackGuard * stack_guard()
Definition isolate.h:1198
unibrow::Mapping< unibrow::Ecma262UnCanonicalize > jsregexp_uncanonicalize_
Definition isolate.h:2592
IsolateData * isolate_data()
Definition isolate.h:1208
bool snapshot_available() const
Definition isolate.h:1553
base::Mutex shared_function_info_access_
Definition isolate.h:2561
base::Mutex * map_updater_access()
Definition isolate.h:760
void FireCallCompletedCallback(MicrotaskQueue *microtask_queue)
Definition isolate.h:1791
V8_INLINE Address * builtin_tier0_table()
Definition isolate.h:1306
void set_code_coverage_mode(debug::CoverageMode coverage_mode)
Definition isolate.h:2073
UnicodeCache * unicode_cache() const
Definition isolate.h:1402
bool EfficiencyModeEnabled()
Definition isolate.h:2094
IsolateGroup * isolate_group() const
Definition isolate.h:1230
ThreadLocalTop const * thread_local_top() const
Definition isolate.h:1334
IsolateGroup * isolate_group_
Definition isolate.h:2539
CancelableTaskManager * cancelable_task_manager()
Definition isolate.h:1960
std::shared_ptr< v8::TaskRunner > task_runner() const
Definition isolate.h:2431
void set_array_buffer_allocator_shared(std::shared_ptr< v8::ArrayBuffer::Allocator > allocator)
Definition isolate.h:1942
unibrow::Mapping< unibrow::Ecma262Canonicalize > regexp_macro_assembler_canonicalize_
Definition isolate.h:2595
bool interpreted_frames_native_stack() const
Definition isolate.h:2066
static Isolate * FromRootAddress(Address isolate_root)
Definition isolate.h:1246
void RegisterEmbeddedFileWriter(EmbeddedFileWriterInterface *writer)
Definition isolate.h:2009
LazyCompileDispatcher * lazy_compile_dispatcher() const
Definition isolate.h:1981
std::unique_ptr< TracingCpuProfilerImpl > tracing_cpu_profiler_
Definition isolate.h:2840
Builtins * builtins()
Definition isolate.h:1443
bool IsDead() const
Definition isolate.h:1557
V8FileLogger * v8_file_logger() const
Definition isolate.h:1192
DescriptorLookupCache * descriptor_lookup_cache() const
Definition isolate.h:1389
RuntimeState * runtime_state()
Definition isolate.h:1441
void set_battery_saver_mode_enabled(bool battery_saver_mode_enabled)
Definition isolate.h:2382
v8::internal::Factory * factory()
Definition isolate.h:1527
base::AddressRegion root_register_addressable_region() const
Definition isolate.h:1260
interpreter::Interpreter * interpreter() const
Definition isolate.h:1968
unibrow::Mapping< unibrow::CanonicalizationRange > * jsregexp_canonrange()
Definition isolate.h:1431
bool is_block_binary_code_coverage() const
Definition isolate.h:1590
void set_was_locker_ever_used()
Definition isolate.h:1732
void set_top_backup_incumbent_scope(const v8::Context::BackupIncumbentScope *top_backup_incumbent_scope)
Definition isolate.h:2153
void set_console_delegate(debug::ConsoleDelegate *delegate)
Definition isolate.h:1042
bool is_precise_count_code_coverage() const
Definition isolate.h:1578
ExternalReferenceTable * external_reference_table_unsafe()
Definition isolate.h:1278
Tagged< Context > topmost_script_having_context() const
Definition isolate.h:808
double time_millis_since_init() const
Definition isolate.h:1615
base::Mutex * shared_function_info_access()
Definition isolate.h:755
bool should_check_side_effects() const
Definition isolate.h:1493
bool detailed_source_positions_for_profiling_
Definition isolate.h:2731
static Address handler(ThreadLocalTop *thread)
Definition isolate.h:892
void SetDetailedSourcePositionsForProfiling(bool value)
Definition isolate.h:1153
bool has_active_deserializer() const
Definition isolate.h:927
std::vector< int > * regexp_indices()
Definition isolate.h:1467
std::unique_ptr< GlobalSafepoint > global_safepoint_
Definition isolate.h:2886
base::Mutex full_transition_array_access_
Definition isolate.h:2560
LocalIsolate * AsLocalIsolate()
Definition isolate.h:2188
bool is_short_builtin_calls_enabled() const
Definition isolate.h:1926
Debug * debug() const
Definition isolate.h:1474
void set_elements_deletion_counter(size_t value)
Definition isolate.h:2142
compiler::PerIsolateCompilerCache * compiler_cache() const
Definition isolate.h:1970
Address cage_base() const
Definition isolate.h:1213
Isolate * shared_space_isolate() const
Definition isolate.h:2295
void IncrementJavascriptExecutionCounter()
Definition isolate.h:1856
Address * builtin_entry_table()
Definition isolate.h:1283
std::vector< BeforeCallEnteredCallback > before_call_entered_callbacks_
Definition isolate.h:2762
bool concurrent_recompilation_enabled()
Definition isolate.h:1705
base::Mutex * boilerplate_migration_access()
Definition isolate.h:770
base::RecursiveMutex break_access_
Definition isolate.h:2557
bool is_block_code_coverage() const
Definition isolate.h:1594
bool BatterySaverModeEnabled()
Definition isolate.h:2111
Handle< Object > root_handle(RootIndex index)
Definition isolate.h:1269
void UpdateNoElementsProtectorOnSetLength(DirectHandle< JSObject > object)
Definition isolate.h:1658
std::vector< Tagged< Object > > startup_object_cache_
Definition isolate.h:2781
std::shared_ptr< Counters > async_counters_
Definition isolate.h:2556
bool enable_ro_allocation_for_snapshot() const
Definition isolate.h:2378
void FatalProcessOutOfHeapMemory(const char *location)
Definition isolate.h:1038
bool log_object_relocation() const
Definition isolate.h:2317
v8::metrics::LongTaskStats long_task_stats_
Definition isolate.h:2779
void SetIsProfiling(bool enabled)
Definition isolate.h:1481
const std::shared_ptr< metrics::Recorder > & metrics_recorder()
Definition isolate.h:1187
void set_regexp_static_result_offsets_vector(int32_t *value)
Definition isolate.h:1452
RootsTable & roots_table()
Definition isolate.h:1250
ThreadLocalTop * thread_local_top()
Definition isolate.h:1331
base::Mutex code_pages_mutex_
Definition isolate.h:2897
Address c_function()
Definition isolate.h:893
void set_array_buffer_allocator(v8::ArrayBuffer::Allocator *allocator)
Definition isolate.h:1935
bool IsGeneratingEmbeddedBuiltins() const
Definition isolate.h:1897
uint64_t * stress_deopt_count_address()
Definition isolate.h:1745
uint32_t GetAndIncNextUniqueSfiId()
Definition isolate.h:1823
v8::Isolate::Priority priority()
Definition isolate.h:2082
size_t * array_buffer_max_size_address()
Definition isolate.h:1951
std::shared_ptr< v8::ArrayBuffer::Allocator > array_buffer_allocator_shared() const
Definition isolate.h:1946
Tagged< Object > root(RootIndex index) const
Definition isolate.h:1265
std::shared_ptr< metrics::Recorder > metrics_recorder_
Definition isolate.h:2773
bool IsOnCentralStack(Address addr)
Definition isolate.h:2528
Address * c_function_address()
Definition isolate.h:905
std::vector< Tagged< Object > > * shared_heap_object_cache()
Definition isolate.h:1889
unsigned NextModuleAsyncEvaluationOrdinal()
Definition isolate.h:1782
StringForwardingTable * string_forwarding_table() const
Definition isolate.h:785
std::unordered_set< int32_t * > & active_dynamic_regexp_result_vectors()
Definition isolate.h:1461
std::vector< MemoryRange > code_pages_buffer2_
Definition isolate.h:2895
RuntimeState runtime_state_
Definition isolate.h:2585
debug::CoverageMode code_coverage_mode() const
Definition isolate.h:2076
MutexGuardIfOffThread(const MutexGuardIfOffThread &)=delete
MutexGuardIfOffThread(base::Mutex *mutex, Isolate *isolate)
Definition isolate.h:3112
MutexGuardIfOffThread & operator=(const MutexGuardIfOffThread &)=delete
NullContextScope(Isolate *isolate)
Definition isolate.h:2984
Handle< Context > context_
Definition isolate.h:2969
Handle< Context > topmost_script_having_context_
Definition isolate.h:2970
Isolate *const isolate_
Definition isolate.h:2968
V8_NO_UNIQUE_ADDRESS PtrComprCageAccessScope ptr_compr_cage_access_scope_
Definition isolate.h:3133
bool JsHasOverflowed(uintptr_t gap=0) const
Definition isolate.cc:7296
StackLimitCheck(Isolate *isolate)
Definition isolate.h:3032
V8_INLINE bool InterruptRequested()
Definition isolate.h:3051
V8_EXPORT_PRIVATE bool HandleStackOverflowAndTerminationRequest()
Definition isolate.cc:7283
bool WasmHasOverflowed(uintptr_t gap=0) const
Definition isolate.cc:7307
StackTraceFailureMessage(Isolate *isolate, StackTraceMode mode, const Address *ptrs, size_t ptrs_count)
Definition isolate.cc:739
V8_NOINLINE void Print() volatile
Definition isolate.cc:726
char js_stack_trace_[kStacktraceBufferSize]
Definition isolate.h:3105
StackTraceFailureMessage(Isolate *isolate, StackTraceMode mode, std::initializer_list< Address > ptrs)
Definition isolate.h:3083
static const uintptr_t kStartMarker
Definition isolate.h:3095
static const uintptr_t kEndMarker
Definition isolate.h:3097
StackTraceFailureMessage(Isolate *isolate, StackTraceMode mode, std::initializer_list< void * > ptrs)
Definition isolate.h:3087
static const uintptr_t kMiddleMarker
Definition isolate.h:3096
#define COMPRESS_POINTERS_BOOL
Definition globals.h:99
#define NATIVE_CONTEXT_FIELDS(V)
Definition contexts.h:46
TracedHandles & traced_handles_
Definition cpp-heap.cc:543
Handle< Context > context_
#define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name)
#define GLOBAL_ARRAY_BACKING_STORE(type, name, length)
Definition isolate.h:2716
#define THREAD_LOCAL_TOP_ADDRESS(type, name)
Definition isolate.h:574
#define ISOLATE_INIT_ARRAY_LIST(V)
Definition isolate.h:503
#define GLOBAL_ARRAY_ACCESSOR(type, name, length)
Definition isolate.h:1164
#define GLOBAL_ACCESSOR(type, name, initialvalue)
Definition isolate.h:1141
#define THREAD_LOCAL_TOP_ACCESSOR(type, name)
Definition isolate.h:570
#define FIELD_ACCESSOR(type, name)
Definition isolate.h:484
#define ISOLATE_INIT_LIST(V)
Definition isolate.h:513
#define GLOBAL_BACKING_STORE(type, name, initialvalue)
Definition isolate.h:2712
MicrotaskQueue * microtask_queue
Definition execution.cc:77
#define V8_SHORT_BUILTIN_CALLS_BOOL
std::string filename
Isolate * isolate
TNode< Object > receiver
TNode< Object > callback
ZoneVector< RpoNumber > & result
uint32_t const mask
size_t priority
base::Mutex mutex
std::shared_ptr< metrics::Recorder > metrics_recorder_
const std::shared_ptr< Counters > async_counters_
uintptr_t Address
Definition memory.h:13
void FreeCurrentEmbeddedBlob()
Definition isolate.cc:286
thread_local Isolate::PerIsolateThreadData *g_current_per_isolate_thread_data_ V8_CONSTINIT
Definition isolate.cc:522
uintptr_t GetCurrentStackPosition()
Definition utils.cc:222
refactor address components for immediate indexing make OptimizeMaglevOnNextCall optimize to turbofan instead of maglev filter for tracing turbofan compilation trace turbo cfg trace TurboFan s graph trimmer trace TurboFan s control equivalence trace TurboFan s register allocator trace stack load store counters for optimized code in run fuzzing &&concurrent_recompilation trace_turbo trace_turbo_scheduled trace_turbo_stack_accesses verify TurboFan machine graph of code stubs enable FixedArray bounds checks print TurboFan statistics of wasm compilations maximum cumulative size of bytecode considered for inlining scale factor of bytecode size used to calculate the inlining budget * KB
Definition flags.cc:1366
void DefaultWasmAsyncResolvePromiseCallback(v8::Isolate *isolate, v8::Local< v8::Context > context, v8::Local< v8::Promise::Resolver > resolver, v8::Local< v8::Value > result, WasmAsyncSuccess success)
Definition isolate.cc:7615
@ kIsolateAddressCount
Definition globals.h:2646
V8_EXPORT_PRIVATE FlagValues v8_flags
void DisableEmbeddedBlobRefcounting()
Definition isolate.cc:281
uint64_t ObjectPair
__attribute__((tls_model(V8_TLS_MODEL))) extern thread_local Isolate *g_current_isolate_ V8_CONSTINIT
std::vector< Handle< HeapObject > > DebugObjectCache
Definition isolate.h:511
!IsContextMap !IsContextMap native_context
Definition map-inl.h:877
void(*)(CrashKeyId id, const std::string &value) AddCrashKeyCallback
PromiseRejectEvent
Definition v8-promise.h:147
void(*)(ExceptionPropagationMessage message) ExceptionPropagationCallback
void(*)(PromiseRejectMessage message) PromiseRejectCallback
Definition v8-promise.h:170
void(*)(Isolate *) CallCompletedCallback
void(*)(Local< Object > target, AccessType type, Local< Value > data) FailedAccessCheckCallback
ExceptionContext
void(*)(Local< Context > context, Local< Module > module, Local< Object > meta) HostInitializeImportMetaObjectCallback
void(*)(Isolate *isolate, void *data) InterruptCallback
WasmAsyncSuccess
void(*)(PromiseHookType type, Local< Promise > promise, Local< Value > parent) PromiseHook
Definition v8-promise.h:143
bool(*)(Isolate *isolate, Local< String > script_name) PrintCurrentStackTraceFilterCallback
void(*)(Isolate *) BeforeCallEnteredCallback
ModuleImportPhase
StateTag
Definition v8-unwinder.h:36
PromiseHookType
Definition v8-promise.h:141
Definition c-api.cc:87
const uintptr_t stack_limit_
#define UNREACHABLE()
Definition logging.h:67
#define CHECK_GE(lhs, rhs)
#define DCHECK_NULL(val)
Definition logging.h:491
#define CHECK(condition)
Definition logging.h:124
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define CHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define V8_EXPORT_PRIVATE
Definition macros.h:460
#define OFFSET_OF(type, field)
Definition macros.h:57
Tagged< SharedFunctionInfo > function_info
Definition isolate.h:649
std::size_t operator()(const ThreadId &t) const
Definition isolate.h:2466
#define V8_TLS_MODEL
#define V8_TLS_DECLARE_GETTER(Name, Type, Member)
Heap * heap_
#define V8_NO_UNIQUE_ADDRESS
Definition v8config.h:722
#define V8_INLINE
Definition v8config.h:500
#define V8_WARN_UNUSED_RESULT
Definition v8config.h:671
#define V8_UNLIKELY(condition)
Definition v8config.h:660
#define V8_NOINLINE
Definition v8config.h:586
#define V8_NODISCARD
Definition v8config.h:693
std::unique_ptr< ValueMirror > value
Node * thread_in_wasm_flag_address_