v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
isolate.cc
Go to the documentation of this file.
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
7#include <stdlib.h>
8
9#include <atomic>
10#include <cinttypes>
11#include <cstdint>
12#include <fstream>
13#include <memory>
14#include <optional>
15#include <sstream>
16#include <string>
17#include <unordered_map>
18#include <utility>
19
20#include "include/v8-template.h"
22#include "src/api/api-inl.h"
24#include "src/ast/scopes.h"
25#include "src/base/fpu.h"
26#include "src/base/hashmap.h"
27#include "src/base/logging.h"
31#include "src/base/sys-info.h"
34#include "src/bigint/bigint.h"
42#include "src/common/globals.h"
47#include "src/date/date.h"
49#include "src/debug/debug.h"
65#include "src/flags/flags.h"
68#include "src/heap/heap-inl.h"
73#include "src/heap/safepoint.h"
74#include "src/ic/stub-cache.h"
77#include "src/init/v8.h"
83#include "src/logging/log.h"
84#include "src/logging/metrics.h"
106#include "src/objects/slots.h"
107#include "src/objects/smi.h"
110#include "src/objects/visitors.h"
115#include "src/roots/roots.h"
127
128#if defined(V8_USE_PERFETTO)
130#endif // defined(V8_USE_PERFETTO)
131
134#include "src/utils/ostreams.h"
135#include "src/utils/version.h"
137#include "src/zone/type-stats.h"
138#ifdef V8_INTL_SUPPORT
140#include "unicode/locid.h"
141#include "unicode/uobject.h"
142#endif // V8_INTL_SUPPORT
143
144#if V8_ENABLE_MAGLEV
146#endif // V8_ENABLE_MAGLEV
147
148#if V8_ENABLE_WEBASSEMBLY
152#include "src/wasm/stacks.h"
155#include "src/wasm/wasm-engine.h"
156#include "src/wasm/wasm-module.h"
158
159#if V8_ENABLE_DRUMBRAKE
161#endif // V8_ENABLE_DRUMBRAKE
162#endif // V8_ENABLE_WEBASSEMBLY
163
164#if defined(V8_ENABLE_ETW_STACK_WALKING)
166#endif // V8_ENABLE_ETW_STACK_WALKING
167
168#if defined(V8_OS_WIN64)
170#endif // V8_OS_WIN64
171
172#if USE_SIMULATOR
174#endif
175
176extern "C" const uint8_t v8_Default_embedded_blob_code_[];
178extern "C" const uint8_t v8_Default_embedded_blob_data_[];
180
181namespace v8 {
182namespace internal {
183
#ifdef DEBUG
// Debug-only tracing of isolate lifecycle events: prints the isolate's
// address, its id, and the given tag when --trace-isolates is enabled.
// Expands to nothing in release builds. Relies on `this`, so it may only be
// used inside Isolate member functions.
#define TRACE_ISOLATE(tag)                                                  \
  do {                                                                      \
    if (v8_flags.trace_isolates) {                                          \
      PrintF("Isolate %p (id %d)" #tag "\n", reinterpret_cast<void*>(this), \
             id());                                                         \
    }                                                                       \
  } while (false)
#else
#define TRACE_ISOLATE(tag)
#endif
195
196const uint8_t* DefaultEmbeddedBlobCode() {
198}
202const uint8_t* DefaultEmbeddedBlobData() {
204}
208
209namespace {
210// These variables provide access to the current embedded blob without requiring
211// an isolate instance. This is needed e.g. by
212// InstructionStream::InstructionStart, which may not have access to an isolate
213// but still needs to access the embedded blob. The variables are initialized by
214// each isolate in Init(). Writes and reads are relaxed since we can guarantee
215// that the current thread has initialized these variables before accessing
216// them. Different threads may race, but this is fine since they all attempt to
217// set the same values of the blob pointer and size.
218
219std::atomic<const uint8_t*> current_embedded_blob_code_(nullptr);
220std::atomic<uint32_t> current_embedded_blob_code_size_(0);
221std::atomic<const uint8_t*> current_embedded_blob_data_(nullptr);
222std::atomic<uint32_t> current_embedded_blob_data_size_(0);
223
224// The various workflows around embedded snapshots are fairly complex. We need
225// to support plain old snapshot builds, nosnap builds, and the requirements of
226// subtly different serialization tests. There's two related knobs to twiddle:
227//
228// - The default embedded blob may be overridden by setting the sticky embedded
229// blob. This is set automatically whenever we create a new embedded blob.
230//
231// - Lifecycle management can be either manual or set to refcounting.
232//
233// A few situations to demonstrate their use:
234//
235// - A plain old snapshot build neither overrides the default blob nor
236// refcounts.
237//
238// - mksnapshot sets the sticky blob and manually frees the embedded
239// blob once done.
240//
241// - Most serializer tests do the same.
242//
243// - Nosnapshot builds set the sticky blob and enable refcounting.
244
245// This mutex protects access to the following variables:
246// - sticky_embedded_blob_code_
247// - sticky_embedded_blob_code_size_
248// - sticky_embedded_blob_data_
249// - sticky_embedded_blob_data_size_
250// - enable_embedded_blob_refcounting_
251// - current_embedded_blob_refs_
252base::LazyMutex current_embedded_blob_refcount_mutex_ = LAZY_MUTEX_INITIALIZER;
253
254const uint8_t* sticky_embedded_blob_code_ = nullptr;
255uint32_t sticky_embedded_blob_code_size_ = 0;
256const uint8_t* sticky_embedded_blob_data_ = nullptr;
257uint32_t sticky_embedded_blob_data_size_ = 0;
258
259bool enable_embedded_blob_refcounting_ = true;
260int current_embedded_blob_refs_ = 0;
261
262const uint8_t* StickyEmbeddedBlobCode() { return sticky_embedded_blob_code_; }
263uint32_t StickyEmbeddedBlobCodeSize() {
264 return sticky_embedded_blob_code_size_;
265}
266const uint8_t* StickyEmbeddedBlobData() { return sticky_embedded_blob_data_; }
267uint32_t StickyEmbeddedBlobDataSize() {
268 return sticky_embedded_blob_data_size_;
269}
270
271void SetStickyEmbeddedBlob(const uint8_t* code, uint32_t code_size,
272 const uint8_t* data, uint32_t data_size) {
273 sticky_embedded_blob_code_ = code;
274 sticky_embedded_blob_code_size_ = code_size;
275 sticky_embedded_blob_data_ = data;
276 sticky_embedded_blob_data_size_ = data_size;
277}
278
279} // namespace
280
282 base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
283 enable_embedded_blob_refcounting_ = false;
284}
285
287 CHECK(!enable_embedded_blob_refcounting_);
288 base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
289
290 if (StickyEmbeddedBlobCode() == nullptr) return;
291
292 CHECK_EQ(StickyEmbeddedBlobCode(), Isolate::CurrentEmbeddedBlobCode());
293 CHECK_EQ(StickyEmbeddedBlobData(), Isolate::CurrentEmbeddedBlobData());
294
296 const_cast<uint8_t*>(Isolate::CurrentEmbeddedBlobCode()),
298 const_cast<uint8_t*>(Isolate::CurrentEmbeddedBlobData()),
300
301 current_embedded_blob_code_.store(nullptr, std::memory_order_relaxed);
302 current_embedded_blob_code_size_.store(0, std::memory_order_relaxed);
303 current_embedded_blob_data_.store(nullptr, std::memory_order_relaxed);
304 current_embedded_blob_data_size_.store(0, std::memory_order_relaxed);
305 sticky_embedded_blob_code_ = nullptr;
306 sticky_embedded_blob_code_size_ = 0;
307 sticky_embedded_blob_data_ = nullptr;
308 sticky_embedded_blob_data_size_ = 0;
309}
310
311// static
313 // In some situations, we must be able to rely on the embedded blob being
314 // immortal immovable. This is the case if the blob is binary-embedded.
315 // See blob lifecycle controls above for descriptions of when the current
316 // embedded blob may change (e.g. in tests or mksnapshot). If the blob is
317 // binary-embedded, it is immortal immovable.
318 const uint8_t* code =
319 current_embedded_blob_code_.load(std::memory_order_relaxed);
320 if (code == nullptr) return false;
321 return code == DefaultEmbeddedBlobCode();
322}
323
324void Isolate::SetEmbeddedBlob(const uint8_t* code, uint32_t code_size,
325 const uint8_t* data, uint32_t data_size) {
326 CHECK_NOT_NULL(code);
327 CHECK_NOT_NULL(data);
328
330 embedded_blob_code_size_ = code_size;
332 embedded_blob_data_size_ = data_size;
333 current_embedded_blob_code_.store(code, std::memory_order_relaxed);
334 current_embedded_blob_code_size_.store(code_size, std::memory_order_relaxed);
335 current_embedded_blob_data_.store(data, std::memory_order_relaxed);
336 current_embedded_blob_data_size_.store(data_size, std::memory_order_relaxed);
337
338#ifdef DEBUG
339 // Verify that the contents of the embedded blob are unchanged from
340 // serialization-time, just to ensure the compiler isn't messing with us.
342 if (d.EmbeddedBlobDataHash() != d.CreateEmbeddedBlobDataHash()) {
343 FATAL(
344 "Embedded blob data section checksum verification failed. This "
345 "indicates that the embedded blob has been modified since compilation "
346 "time.");
347 }
348 if (v8_flags.text_is_readable) {
349 if (d.EmbeddedBlobCodeHash() != d.CreateEmbeddedBlobCodeHash()) {
350 FATAL(
351 "Embedded blob code section checksum verification failed. This "
352 "indicates that the embedded blob has been modified since "
353 "compilation time. A common cause is a debugging breakpoint set "
354 "within builtin code.");
355 }
356 }
357#endif // DEBUG
358}
359
361 CHECK(enable_embedded_blob_refcounting_);
363 CHECK_EQ(embedded_blob_code_, StickyEmbeddedBlobCode());
365 CHECK_EQ(embedded_blob_data_, StickyEmbeddedBlobData());
366
367 embedded_blob_code_ = nullptr;
369 embedded_blob_data_ = nullptr;
371 current_embedded_blob_code_.store(nullptr, std::memory_order_relaxed);
372 current_embedded_blob_code_size_.store(0, std::memory_order_relaxed);
373 current_embedded_blob_data_.store(nullptr, std::memory_order_relaxed);
374 current_embedded_blob_data_size_.store(0, std::memory_order_relaxed);
375 sticky_embedded_blob_code_ = nullptr;
376 sticky_embedded_blob_code_size_ = 0;
377 sticky_embedded_blob_data_ = nullptr;
378 sticky_embedded_blob_data_size_ = 0;
379}
380
381const uint8_t* Isolate::embedded_blob_code() const {
382 return embedded_blob_code_;
383}
387const uint8_t* Isolate::embedded_blob_data() const {
388 return embedded_blob_data_;
389}
393
394// static
396 return current_embedded_blob_code_.load(std::memory_order_relaxed);
397}
398
399// static
401 return current_embedded_blob_code_size_.load(std::memory_order_relaxed);
402}
403
404// static
406 return current_embedded_blob_data_.load(std::memory_order_relaxed);
407}
408
409// static
411 return current_embedded_blob_data_size_.load(std::memory_order_relaxed);
412}
413
414// static
416 // Update calculations below if the assert fails.
417 static_assert(kMaxPCRelativeCodeRangeInMB <= 4096);
419 // Return empty region if pc-relative calls/jumps are not supported.
421 }
422 constexpr size_t max_size = std::numeric_limits<size_t>::max();
423 if (uint64_t{kMaxPCRelativeCodeRangeInMB} * MB > max_size) {
424 // The whole addressable space is reachable with pc-relative calls/jumps.
425 return base::AddressRegion(kNullAddress, max_size);
426 }
427 constexpr size_t radius = kMaxPCRelativeCodeRangeInMB * MB;
428
430 Address embedded_blob_code_start =
431 reinterpret_cast<Address>(CurrentEmbeddedBlobCode());
432 if (embedded_blob_code_start == kNullAddress) {
433 // Return empty region if there's no embedded blob.
435 }
436 Address embedded_blob_code_end =
437 embedded_blob_code_start + CurrentEmbeddedBlobCodeSize();
438 Address region_start =
439 (embedded_blob_code_end > radius) ? (embedded_blob_code_end - radius) : 0;
440 Address region_end = embedded_blob_code_start + radius;
441 if (region_end < embedded_blob_code_start) {
442 region_end = static_cast<Address>(-1);
443 }
444 return base::AddressRegion(region_start, region_end - region_start);
445}
446
450
452
453 static constexpr size_t kSeed = 0;
454 size_t hash = kSeed;
455
456 // Hash static entries of the roots table.
458#if V8_STATIC_ROOTS_BOOL
459 hash = base::hash_combine(hash,
460 static_cast<int>(RootIndex::kReadOnlyRootsCount));
462 for (auto ptr : StaticReadOnlyRootsPointerTable) {
463 hash = base::hash_combine(ptr, hash);
464 ++i;
465 }
466#endif // V8_STATIC_ROOTS_BOOL
467
468 // Hash data sections of builtin code objects.
469 for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
470 ++builtin) {
471 Tagged<Code> code = builtins()->code(builtin);
472
474 uint8_t* const code_ptr = reinterpret_cast<uint8_t*>(code.address());
475
476 // These static asserts ensure we don't miss relevant fields. We don't hash
477 // instruction_start, but other data fields must remain the same.
478 static_assert(Code::kEndOfStrongFieldsOffset ==
479 Code::kInstructionStartOffset);
480#ifndef V8_ENABLE_SANDBOX
481 static_assert(
482 Code::kInstructionStartOffsetEnd + 1 +
484 Code::kFlagsOffset);
485#endif
486 static_assert(Code::kFlagsOffsetEnd + 1 == Code::kInstructionSizeOffset);
487 static_assert(Code::kInstructionSizeOffsetEnd + 1 ==
488 Code::kMetadataSizeOffset);
489 static_assert(Code::kMetadataSizeOffsetEnd + 1 ==
490 Code::kInlinedBytecodeSizeOffset);
491 static_assert(Code::kInlinedBytecodeSizeOffsetEnd + 1 ==
492 Code::kOsrOffsetOffset);
493 static_assert(Code::kOsrOffsetOffsetEnd + 1 ==
494 Code::kHandlerTableOffsetOffset);
495 static_assert(Code::kHandlerTableOffsetOffsetEnd + 1 ==
496 Code::kUnwindingInfoOffsetOffset);
497 static_assert(Code::kUnwindingInfoOffsetOffsetEnd + 1 ==
498 Code::kConstantPoolOffsetOffset);
499 static_assert(Code::kConstantPoolOffsetOffsetEnd + 1 ==
500 Code::kCodeCommentsOffsetOffset);
501 static_assert(Code::kCodeCommentsOffsetOffsetEnd + 1 ==
502 Code::kBuiltinJumpTableInfoOffsetOffset);
503 static_assert(Code::kBuiltinJumpTableInfoOffsetOffsetEnd + 1 ==
504 Code::kParameterCountOffset);
505 static_assert(Code::kParameterCountOffsetEnd + 1 == Code::kBuiltinIdOffset);
506 static_assert(Code::kBuiltinIdOffsetEnd + 1 == Code::kUnalignedSize);
507 static constexpr int kStartOffset = Code::kFlagsOffset;
508
509 for (int j = kStartOffset; j < Code::kUnalignedSize; j++) {
510 hash = base::hash_combine(hash, size_t{code_ptr[j]});
511 }
512 }
513
514 // The builtins constants table is also tightly tied to embedded builtins.
515 hash = base::hash_combine(
516 hash, static_cast<size_t>(heap_.builtins_constants_table()->length()));
517
518 return hash;
519}
520
521thread_local Isolate::PerIsolateThreadData* g_current_per_isolate_thread_data_
522 V8_CONSTINIT = nullptr;
523thread_local Isolate* g_current_isolate_ V8_CONSTINIT = nullptr;
524
526
527// static
528void Isolate::SetCurrent(Isolate* isolate) { g_current_isolate_ = isolate; }
529
namespace {
// A global counter for all generated Isolates, might overflow.
std::atomic<int> isolate_counter{0};
}  // namespace
534
535Isolate::PerIsolateThreadData*
538 PerIsolateThreadData* per_thread = nullptr;
539 {
541 per_thread = thread_data_table_.Lookup(thread_id);
542 if (per_thread == nullptr) {
543 if (v8_flags.adjust_os_scheduling_parameters) {
545 }
546 per_thread = new PerIsolateThreadData(this, thread_id);
547 thread_data_table_.Insert(per_thread);
548 }
550 }
551 return per_thread;
552}
553
556 if (thread_id.IsValid()) {
557 DCHECK_NE(thread_manager_->mutex_owner_.load(std::memory_order_relaxed),
558 thread_id);
561 if (per_thread) {
562 DCHECK(!per_thread->thread_state_);
563 thread_data_table_.Remove(per_thread);
564 }
565 }
566}
567
572
574 ThreadId thread_id) {
575 PerIsolateThreadData* per_thread = nullptr;
576 {
578 per_thread = thread_data_table_.Lookup(thread_id);
579 }
580 return per_thread;
581}
582
583void Isolate::InitializeOncePerProcess() { Heap::InitializeOncePerProcess(); }
584
588
589char* Isolate::Iterate(RootVisitor* v, char* thread_storage) {
590 ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
591 Iterate(v, thread);
592 // Normally, ThreadLocalTop::topmost_script_having_context_ is visited weakly
593 // but in order to simplify handling of frozen threads we just clear it.
594 // Otherwise, we'd need to traverse the thread_storage again just to find this
595 // one field.
596 thread->topmost_script_having_context_ = Context();
597 return thread_storage + sizeof(ThreadLocalTop);
598}
599
601 ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t);
602 v->VisitThread(this, thread);
603}
604
606 // Visit the roots from the top for a given thread.
607 v->VisitRootPointer(Root::kStackRoots, nullptr,
608 FullObjectSlot(&thread->exception_));
609 v->VisitRootPointer(Root::kStackRoots, nullptr,
610 FullObjectSlot(&thread->pending_message_));
611 v->VisitRootPointer(Root::kStackRoots, nullptr,
612 FullObjectSlot(&thread->context_));
613
614 for (v8::TryCatch* block = thread->try_catch_handler_; block != nullptr;
615 block = block->next_) {
616 // TODO(3770): Make TryCatch::exception_ an Address (and message_obj_ too).
618 Root::kStackRoots, nullptr,
619 FullObjectSlot(reinterpret_cast<Address>(&(block->exception_))));
621 Root::kStackRoots, nullptr,
622 FullObjectSlot(reinterpret_cast<Address>(&(block->message_obj_))));
623 }
624
626 Root::kStackRoots, nullptr,
628
629 // Iterate over pointers on native execution stack.
630#if V8_ENABLE_WEBASSEMBLY
631 wasm::WasmCodeRefScope wasm_code_ref_scope;
632
633 for (const std::unique_ptr<wasm::StackMemory>& stack : wasm_stacks_) {
634 if (stack->IsActive()) {
635 continue;
636 }
637 for (StackFrameIterator it(this, stack.get()); !it.done(); it.Advance()) {
638 it.frame()->Iterate(v);
639 }
640 }
641 StackFrameIterator it(this, thread, StackFrameIterator::FirstStackOnly{});
642#else
643 StackFrameIterator it(this, thread);
644#endif
645 for (; !it.done(); it.Advance()) {
646 it.frame()->Iterate(v);
647 }
648}
649
651 ThreadLocalTop* current_t = thread_local_top();
652 Iterate(v, current_t);
653}
654
658
664
670 StringStream accumulator(&allocator);
671 incomplete_message_ = &accumulator;
672 PrintStack(&accumulator);
673 DirectHandle<String> stack_trace = accumulator.ToString(this);
674 incomplete_message_ = nullptr;
676 return stack_trace;
677 } else if (stack_trace_nesting_level_ == 1) {
679 base::OS::PrintError(
680 "\n\nAttempt to print stack while printing stack (double fault)\n");
681 base::OS::PrintError(
682 "If you are lucky you may find a partial stack dump on stdout.\n\n");
684 return factory()->empty_string();
685 } else {
687 }
688}
689
690void Isolate::PushStackTraceAndDie(void* ptr1, void* ptr2, void* ptr3,
691 void* ptr4, void* ptr5, void* ptr6) {
692 StackTraceFailureMessage message(this,
694 {ptr1, ptr2, ptr3, ptr4, ptr5, ptr6});
695 message.Print();
697}
698
699void Isolate::PushParamsAndDie(void* ptr1, void* ptr2, void* ptr3, void* ptr4,
700 void* ptr5, void* ptr6) {
703 {ptr1, ptr2, ptr3, ptr4, ptr5, ptr6});
704 message.Print();
706}
707
708void Isolate::PushStackTraceAndContinue(void* ptr1, void* ptr2, void* ptr3,
709 void* ptr4, void* ptr5, void* ptr6) {
710 StackTraceFailureMessage message(this,
712 {ptr1, ptr2, ptr3, ptr4, ptr5, ptr6});
713 message.Print();
715}
716
717void Isolate::PushParamsAndContinue(void* ptr1, void* ptr2, void* ptr3,
718 void* ptr4, void* ptr5, void* ptr6) {
721 {ptr1, ptr2, ptr3, ptr4, ptr5, ptr6});
722 message.Print();
724}
725
727 // Print the details of this failure message object, including its own address
728 // to force stack allocation.
729 static_assert(arraysize(ptrs_) >= 6);
730 base::OS::PrintError(
731 "Stacktrace:\n ptr0=%p\n ptr1=%p\n ptr2=%p\n ptr3=%p\n "
732 "ptr4=%p\n ptr5=%p\n failure_message_object=%p\n%s",
733 reinterpret_cast<void*>(ptrs_[0]), reinterpret_cast<void*>(ptrs_[1]),
734 reinterpret_cast<void*>(ptrs_[2]), reinterpret_cast<void*>(ptrs_[3]),
735 reinterpret_cast<void*>(ptrs_[4]), reinterpret_cast<void*>(ptrs_[5]),
736 this, &js_stack_trace_[0]);
737}
738
741 const Address* ptrs, size_t ptrs_count)
742 : isolate_(isolate) {
743 size_t ptrs_size = std::min(arraysize(ptrs_), ptrs_count);
744 std::copy(ptrs, ptrs + ptrs_size, &ptrs_[0]);
745
746 if (mode == kIncludeStackTrace) {
747 // Write a stracktrace into the {js_stack_trace_} buffer.
748 const size_t buffer_length = arraysize(js_stack_trace_);
749 FixedStringAllocator fixed(&js_stack_trace_[0], buffer_length - 1);
752 // Keeping a reference to the last code objects to increase likelihood that
753 // they get included in the minidump.
754 const size_t code_objects_length = arraysize(code_objects_);
755 size_t i = 0;
757 for (; !it.done() && i < code_objects_length; it.Advance()) {
758 code_objects_[i++] = it.frame()->unchecked_code().ptr();
759 }
760 }
761}
762
764
765namespace {
766
767bool IsBuiltinFunction(Isolate* isolate, Tagged<HeapObject> object,
768 Builtin builtin) {
769 if (!IsJSFunction(object)) return false;
770 Tagged<JSFunction> const function = Cast<JSFunction>(object);
771 // Currently we have to use full pointer comparison here as builtin Code
772 // objects are still inside the sandbox while runtime-generated Code objects
773 // are in trusted space.
774 static_assert(!kAllCodeObjectsLiveInTrustedSpace);
775 return function->code(isolate).SafeEquals(isolate->builtins()->code(builtin));
776}
777
778// Check if the function is one of the known async function or
779// async generator fulfill handlers.
780bool IsBuiltinAsyncFulfillHandler(Isolate* isolate, Tagged<HeapObject> object) {
781 return IsBuiltinFunction(isolate, object,
782 Builtin::kAsyncFunctionAwaitResolveClosure) ||
783 IsBuiltinFunction(isolate, object,
784 Builtin::kAsyncGeneratorAwaitResolveClosure) ||
785 IsBuiltinFunction(
786 isolate, object,
787 Builtin::kAsyncGeneratorYieldWithAwaitResolveClosure);
788}
789
790// Check if the function is one of the known async function or
791// async generator fulfill handlers.
792bool IsBuiltinAsyncRejectHandler(Isolate* isolate, Tagged<HeapObject> object) {
793 return IsBuiltinFunction(isolate, object,
794 Builtin::kAsyncFunctionAwaitRejectClosure) ||
795 IsBuiltinFunction(isolate, object,
796 Builtin::kAsyncGeneratorAwaitRejectClosure);
797}
798
799// Check if the function is one of the known builtin rejection handlers that
800// rethrows the exception instead of catching it.
801bool IsBuiltinForwardingRejectHandler(Isolate* isolate,
802 Tagged<HeapObject> object) {
803 return IsBuiltinFunction(isolate, object, Builtin::kPromiseCatchFinally) ||
804 IsBuiltinFunction(isolate, object,
805 Builtin::kAsyncFromSyncIteratorCloseSyncAndRethrow);
806}
807
808MaybeHandle<JSGeneratorObject> TryGetAsyncGenerator(
809 Isolate* isolate, DirectHandle<PromiseReaction> reaction) {
810 // Check if the {reaction} has one of the known async function or
811 // async generator continuations as its fulfill handler.
812 if (IsBuiltinAsyncFulfillHandler(isolate, reaction->fulfill_handler())) {
813 // Now peek into the handlers' AwaitContext to get to
814 // the JSGeneratorObject for the async function.
815 DirectHandle<Context> context(
816 Cast<JSFunction>(reaction->fulfill_handler())->context(), isolate);
817 Handle<JSGeneratorObject> generator_object(
818 Cast<JSGeneratorObject>(context->extension()), isolate);
819 return generator_object;
820 }
821 return MaybeHandle<JSGeneratorObject>();
822}
823
824#if V8_ENABLE_WEBASSEMBLY
825MaybeDirectHandle<WasmSuspenderObject> TryGetWasmSuspender(
826 Isolate* isolate, Tagged<HeapObject> handler) {
827 // Check if the {handler} is WasmResume.
828 if (IsBuiltinFunction(isolate, handler, Builtin::kWasmResume)) {
829 // Now peek into the handlers' AwaitContext to get to
830 // the JSGeneratorObject for the async function.
831 Tagged<SharedFunctionInfo> shared = Cast<JSFunction>(handler)->shared();
832 if (shared->HasWasmResumeData()) {
833 return direct_handle(shared->wasm_resume_data()->suspender(), isolate);
834 }
835 }
836 return MaybeDirectHandle<WasmSuspenderObject>();
837}
838#endif // V8_ENABLE_WEBASSEMBLY
839
840int GetGeneratorBytecodeOffset(
841 DirectHandle<JSGeneratorObject> generator_object) {
842 // The stored bytecode offset is relative to a different base than what
843 // is used in the source position table, hence the subtraction.
844 return Smi::ToInt(generator_object->input_or_debug_pos()) -
846}
847
848class CallSiteBuilder {
849 public:
850 CallSiteBuilder(Isolate* isolate, FrameSkipMode mode, int limit,
851 Handle<Object> caller)
852 : isolate_(isolate),
853 mode_(mode),
854 limit_(limit),
855 caller_(caller),
856 skip_next_frame_(mode != SKIP_NONE) {
857 DCHECK_IMPLIES(mode_ == SKIP_UNTIL_SEEN, IsJSFunction(*caller_));
858 // Modern web applications are usually built with multiple layers of
859 // framework and library code, and stack depth tends to be more than
860 // a dozen frames, so we over-allocate a bit here to avoid growing
861 // the elements array in the common case.
862 elements_ = isolate->factory()->NewFixedArray(std::min(64, limit));
863 }
864
865 void SetPrevFrameAsConstructCall() {
866 if (skipped_prev_frame_) return;
867 DCHECK_GT(index_, 0);
869 Tagged<CallSiteInfo>::cast(elements_->get(index_ - 1));
870 info->set_flags(info->flags() | CallSiteInfo::kIsConstructor);
871 }
872
873 bool Visit(FrameSummary const& summary) {
874 if (Full()) return false;
875#if V8_ENABLE_WEBASSEMBLY
876#if V8_ENABLE_DRUMBRAKE
877 if (summary.IsWasmInterpreted()) {
878 AppendWasmInterpretedFrame(summary.AsWasmInterpreted());
879 return true;
880 // FrameSummary::IsWasm() should be renamed FrameSummary::IsWasmCompiled
881 // to be more precise, but we'll leave it as it is to try to reduce merge
882 // churn.
883 } else {
884#endif // V8_ENABLE_DRUMBRAKE
885 if (summary.IsWasm()) {
886 AppendWasmFrame(summary.AsWasm());
887 return true;
888 }
889#if V8_ENABLE_DRUMBRAKE
890 }
891#endif // V8_ENABLE_DRUMBRAKE
892 if (summary.IsWasmInlined()) {
893 AppendWasmInlinedFrame(summary.AsWasmInlined());
894 return true;
895 }
896 if (summary.IsBuiltin()) {
897 AppendBuiltinFrame(summary.AsBuiltin());
898 return true;
899 }
900#endif // V8_ENABLE_WEBASSEMBLY
901 AppendJavaScriptFrame(summary.AsJavaScript());
902 return true;
903 }
904
905 void AppendAsyncFrame(DirectHandle<JSGeneratorObject> generator_object) {
906 DirectHandle<JSFunction> function(generator_object->function(), isolate_);
907 if (!IsVisibleInStackTrace(function)) {
908 skipped_prev_frame_ = true;
909 return;
910 }
911 int flags = CallSiteInfo::kIsAsync;
912 if (IsStrictFrame(function)) flags |= CallSiteInfo::kIsStrict;
913
914 DirectHandle<JSAny> receiver(generator_object->receiver(), isolate_);
915 DirectHandle<BytecodeArray> code(
916 function->shared()->GetBytecodeArray(isolate_), isolate_);
917 int offset = GetGeneratorBytecodeOffset(generator_object);
918
919 DirectHandle<FixedArray> parameters =
920 isolate_->factory()->empty_fixed_array();
921 if (V8_UNLIKELY(v8_flags.detailed_error_stack_trace)) {
922 parameters = isolate_->factory()->CopyFixedArrayUpTo(
923 direct_handle(generator_object->parameters_and_registers(), isolate_),
924 function->shared()
925 ->internal_formal_parameter_count_without_receiver());
926 }
927
928 AppendFrame(receiver, function, code, offset, flags, parameters);
929 }
930
931 void AppendPromiseCombinatorFrame(DirectHandle<JSFunction> element_function,
932 DirectHandle<JSFunction> combinator) {
933 if (!IsVisibleInStackTrace(combinator)) {
934 skipped_prev_frame_ = true;
935 return;
936 }
937 int flags =
938 CallSiteInfo::kIsAsync | CallSiteInfo::kIsSourcePositionComputed;
939
940 DirectHandle<JSFunction> receiver(
941 combinator->native_context()->promise_function(), isolate_);
942 DirectHandle<Code> code(combinator->code(isolate_), isolate_);
943
944 // TODO(mmarchini) save Promises list from the Promise combinator
945 DirectHandle<FixedArray> parameters =
946 isolate_->factory()->empty_fixed_array();
947
948 // We store the offset of the promise into the element function's
949 // hash field for element callbacks.
950 int promise_index = Smi::ToInt(element_function->GetIdentityHash()) - 1;
951
952 AppendFrame(receiver, combinator, code, promise_index, flags, parameters);
953 }
954
955 void AppendJavaScriptFrame(
956 FrameSummary::JavaScriptFrameSummary const& summary) {
957 // Filter out internal frames that we do not want to show.
958 if (!IsVisibleInStackTrace(summary.function())) {
959 skipped_prev_frame_ = true;
960 return;
961 }
962
963 int flags = 0;
964 DirectHandle<JSFunction> function = summary.function();
965 if (IsStrictFrame(function)) flags |= CallSiteInfo::kIsStrict;
966 if (summary.is_constructor()) flags |= CallSiteInfo::kIsConstructor;
967
968 AppendFrame(Cast<UnionOf<JSAny, Hole>>(summary.receiver()), function,
969 summary.abstract_code(), summary.code_offset(), flags,
970 summary.parameters());
971 }
972
973#if V8_ENABLE_WEBASSEMBLY
974 void AppendWasmFrame(FrameSummary::WasmFrameSummary const& summary) {
975 if (summary.code()->kind() != wasm::WasmCode::kWasmFunction) return;
976 DirectHandle<WasmInstanceObject> instance = summary.wasm_instance();
977 int flags = CallSiteInfo::kIsWasm;
978 if (instance->module_object()->is_asm_js()) {
979 flags |= CallSiteInfo::kIsAsmJsWasm;
980 if (summary.at_to_number_conversion()) {
981 flags |= CallSiteInfo::kIsAsmJsAtNumberConversion;
982 }
983 }
984
985 DirectHandle<HeapObject> code = isolate_->factory()->undefined_value();
986 AppendFrame(instance,
987 direct_handle(Smi::FromInt(summary.function_index()), isolate_),
988 code, summary.code_offset(), flags,
989 isolate_->factory()->empty_fixed_array());
990 }
991
992#if V8_ENABLE_DRUMBRAKE
993 void AppendWasmInterpretedFrame(
994 FrameSummary::WasmInterpretedFrameSummary const& summary) {
995 Handle<WasmInstanceObject> instance = summary.wasm_instance();
996 int flags = CallSiteInfo::kIsWasm | CallSiteInfo::kIsWasmInterpretedFrame;
997 DCHECK(!instance->module_object()->is_asm_js());
998 // We don't have any code object in the interpreter, so we pass 'undefined'.
999 auto code = isolate_->factory()->undefined_value();
1000 AppendFrame(instance,
1001 handle(Smi::FromInt(summary.function_index()), isolate_), code,
1002 summary.byte_offset(), flags,
1003 isolate_->factory()->empty_fixed_array());
1004 }
1005#endif // V8_ENABLE_DRUMBRAKE
1006
1007 void AppendWasmInlinedFrame(
1008 FrameSummary::WasmInlinedFrameSummary const& summary) {
1009 DirectHandle<HeapObject> code = isolate_->factory()->undefined_value();
1010 int flags = CallSiteInfo::kIsWasm;
1011 AppendFrame(summary.wasm_instance(),
1012 direct_handle(Smi::FromInt(summary.function_index()), isolate_),
1013 code, summary.code_offset(), flags,
1014 isolate_->factory()->empty_fixed_array());
1015 }
1016
1017 void AppendBuiltinFrame(FrameSummary::BuiltinFrameSummary const& summary) {
1018 Builtin builtin = summary.builtin();
1019 DirectHandle<Code> code = isolate_->builtins()->code_handle(builtin);
1020 DirectHandle<Smi> function(Smi::FromInt(static_cast<int>(builtin)),
1021 isolate_);
1022 int flags = CallSiteInfo::kIsBuiltin;
1023 AppendFrame(Cast<UnionOf<JSAny, Hole>>(summary.receiver()), function, code,
1024 summary.code_offset(), flags,
1025 isolate_->factory()->empty_fixed_array());
1026 }
1027#endif // V8_ENABLE_WEBASSEMBLY
1028
1029 bool Full() { return index_ >= limit_; }
1030
1031 Handle<FixedArray> Build() {
1032 return FixedArray::RightTrimOrEmpty(isolate_, elements_, index_);
1033 }
1034
1035 private:
1036 // Poison stack frames below the first strict mode frame.
1037 // The stack trace API should not expose receivers and function
1038 // objects on frames deeper than the top-most one with a strict mode
1039 // function.
1040 bool IsStrictFrame(DirectHandle<JSFunction> function) {
1041 if (!encountered_strict_function_) {
1043 is_strict(function->shared()->language_mode());
1044 }
1046 }
1047
1048 // Determines whether the given stack frame should be displayed in a stack
1049 // trace.
1050 bool IsVisibleInStackTrace(DirectHandle<JSFunction> function) {
1051 return ShouldIncludeFrame(function) && IsNotHidden(function);
1052 }
1053
1054 // This mechanism excludes a number of uninteresting frames from the stack
1055 // trace. This can be be the first frame (which will be a builtin-exit frame
1056 // for the error constructor builtin) or every frame until encountering a
1057 // user-specified function.
1058 bool ShouldIncludeFrame(DirectHandle<JSFunction> function) {
1059 switch (mode_) {
1060 case SKIP_NONE:
1061 return true;
1062 case SKIP_FIRST:
1063 if (!skip_next_frame_) return true;
1064 skip_next_frame_ = false;
1065 return false;
1066 case SKIP_UNTIL_SEEN:
1067 if (skip_next_frame_ && (*function == *caller_)) {
1068 skip_next_frame_ = false;
1069 return false;
1070 }
1071 return !skip_next_frame_;
1072 }
1073 UNREACHABLE();
1074 }
1075
1076 bool IsNotHidden(DirectHandle<JSFunction> function) {
1077 // TODO(szuend): Remove this check once the flag is enabled
1078 // by default.
1079 if (!v8_flags.experimental_stack_trace_frames &&
1080 function->shared()->IsApiFunction()) {
1081 return false;
1082 }
1083 // Functions defined not in user scripts are not visible unless directly
1084 // exposed, in which case the native flag is set.
1085 // The --builtins-in-stack-traces command line flag allows including
1086 // internal call sites in the stack trace for debugging purposes.
1087 if (!v8_flags.builtins_in_stack_traces &&
1088 !function->shared()->IsUserJavaScript()) {
1089 return function->shared()->native() ||
1090 function->shared()->IsApiFunction();
1091 }
1092 return true;
1093 }
1094
  // Materializes a CallSiteInfo for one frame and appends it to elements_.
  // Normalizes a hole receiver to undefined before allocation.
  void AppendFrame(DirectHandle<UnionOf<JSAny, Hole>> receiver_or_instance,
                   DirectHandle<UnionOf<Smi, JSFunction>> function,
                   DirectHandle<HeapObject> code, int offset, int flags,
                   DirectHandle<FixedArray> parameters) {
    if (IsTheHole(*receiver_or_instance, isolate_)) {
      // TODO(jgruber): Fix all cases in which frames give us a hole value
      // (e.g. the receiver in RegExp constructor frames).
      receiver_or_instance = isolate_->factory()->undefined_value();
    }
    auto info = isolate_->factory()->NewCallSiteInfo(
        Cast<JSAny>(receiver_or_instance), function, code, offset, flags,
        parameters);
    // SetAndGrow reallocates the backing store as needed.
    elements_ = FixedArray::SetAndGrow(isolate_, elements_, index_++, info);
    skipped_prev_frame_ = false;
  }
1110
  Isolate* isolate_;             // Owning isolate, used for allocation.
  int index_ = 0;                // Next slot to write in the elements array.
  const int limit_;              // Maximum number of frames to collect.
  const Handle<Object> caller_;  // Sentinel function for SKIP_UNTIL_SEEN.
  // NOTE(review): additional data members (e.g. mode_, skip flags, the
  // elements_ backing array) are elided in this view.
};
1121
// Walks a chain of pending native promises starting at {promise}, appending
// async frames to {builder} until the builder is full or the chain can no
// longer be followed (settled promise, multiple reactions, or a non-native
// handler).
void CaptureAsyncStackTrace(Isolate* isolate, DirectHandle<JSPromise> promise,
                            CallSiteBuilder* builder) {
  while (!builder->Full()) {
    // Check that the {promise} is not settled.
    if (promise->status() != Promise::kPending) return;

    // Check that we have exactly one PromiseReaction on the {promise}.
    if (!IsPromiseReaction(promise->reactions())) return;
    DirectHandle<PromiseReaction> reaction(
        Cast<PromiseReaction>(promise->reactions()), isolate);
    if (!IsSmi(reaction->next())) return;

    Handle<JSGeneratorObject> generator_object;

    if (TryGetAsyncGenerator(isolate, reaction).ToHandle(&generator_object)) {
      CHECK(generator_object->is_suspended());

      // Append async frame corresponding to the {generator_object}.
      builder->AppendAsyncFrame(generator_object);

      // Try to continue from here.
      if (IsJSAsyncFunctionObject(*generator_object)) {
        auto async_function_object =
            Cast<JSAsyncFunctionObject>(generator_object);
        promise = direct_handle(async_function_object->promise(), isolate);
      } else {
        auto async_generator_object =
            Cast<JSAsyncGeneratorObject>(generator_object);
        if (IsUndefined(async_generator_object->queue(), isolate)) return;
        DirectHandle<AsyncGeneratorRequest> async_generator_request(
            Cast<AsyncGeneratorRequest>(async_generator_object->queue()),
            isolate);
        promise = direct_handle(
            Cast<JSPromise>(async_generator_request->promise()), isolate);
      }
    } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                                 Builtin::kPromiseAllResolveElementClosure)) {
      DirectHandle<JSFunction> function(
          Cast<JSFunction>(reaction->fulfill_handler()), isolate);
      DirectHandle<Context> context(function->context(), isolate);
      DirectHandle<JSFunction> combinator(
          context->native_context()->promise_all(), isolate);
      builder->AppendPromiseCombinatorFrame(function, combinator);

      if (IsNativeContext(*context)) {
        // NativeContext is used as a marker that the closure was already
        // called. We can't access the reject element context any more.
        return;
      }

      // Now peek into the Promise.all() resolve element context to
      // find the promise capability that's being resolved when all
      // the concurrent promises resolve.
      int const index =
          // NOTE(review): the context-slot index expression is elided in
          // this view.
      DirectHandle<PromiseCapability> capability(
          Cast<PromiseCapability>(context->get(index)), isolate);
      if (!IsJSPromise(capability->promise())) return;
      promise = direct_handle(Cast<JSPromise>(capability->promise()), isolate);
    } else if (IsBuiltinFunction(
                   isolate, reaction->fulfill_handler(),
                   Builtin::kPromiseAllSettledResolveElementClosure)) {
      DirectHandle<JSFunction> function(
          Cast<JSFunction>(reaction->fulfill_handler()), isolate);
      DirectHandle<Context> context(function->context(), isolate);
      DirectHandle<JSFunction> combinator(
          context->native_context()->promise_all_settled(), isolate);
      builder->AppendPromiseCombinatorFrame(function, combinator);

      if (IsNativeContext(*context)) {
        // NativeContext is used as a marker that the closure was already
        // called. We can't access the reject element context any more.
        return;
      }

      // Now peek into the Promise.allSettled() resolve element context to
      // find the promise capability that's being resolved when all
      // the concurrent promises resolve.
      int const index =
          // NOTE(review): the context-slot index expression is elided in
          // this view.
      DirectHandle<PromiseCapability> capability(
          Cast<PromiseCapability>(context->get(index)), isolate);
      if (!IsJSPromise(capability->promise())) return;
      promise = direct_handle(Cast<JSPromise>(capability->promise()), isolate);
    } else if (IsBuiltinFunction(isolate, reaction->reject_handler(),
                                 Builtin::kPromiseAnyRejectElementClosure)) {
      DirectHandle<JSFunction> function(
          Cast<JSFunction>(reaction->reject_handler()), isolate);
      DirectHandle<Context> context(function->context(), isolate);
      DirectHandle<JSFunction> combinator(
          context->native_context()->promise_any(), isolate);
      builder->AppendPromiseCombinatorFrame(function, combinator);

      if (IsNativeContext(*context)) {
        // NativeContext is used as a marker that the closure was already
        // called. We can't access the reject element context any more.
        return;
      }

      // Now peek into the Promise.any() reject element context to
      // find the promise capability that's being resolved when any of
      // the concurrent promises resolve.
      // NOTE(review): the declaration of `index` is elided in this view.
      DirectHandle<PromiseCapability> capability(
          Cast<PromiseCapability>(context->get(index)), isolate);
      if (!IsJSPromise(capability->promise())) return;
      promise = direct_handle(Cast<JSPromise>(capability->promise()), isolate);
    } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                                 Builtin::kPromiseCapabilityDefaultResolve)) {
      DirectHandle<JSFunction> function(
          Cast<JSFunction>(reaction->fulfill_handler()), isolate);
      DirectHandle<Context> context(function->context(), isolate);
      promise = direct_handle(
          // NOTE(review): the expression for the promise read from the
          // context is elided in this view.
          isolate);
    } else {
      // We have some generic promise chain here, so try to
      // continue with the chained promise on the reaction
      // (only works for native promise chains).
      Handle<HeapObject> promise_or_capability(
          reaction->promise_or_capability(), isolate);
      if (IsJSPromise(*promise_or_capability)) {
        promise = Cast<JSPromise>(promise_or_capability);
      } else if (IsPromiseCapability(*promise_or_capability)) {
        auto capability = Cast<PromiseCapability>(promise_or_capability);
        if (!IsJSPromise(capability->promise())) return;
        promise =
            direct_handle(Cast<JSPromise>(capability->promise()), isolate);
      } else {
        // Otherwise the {promise_or_capability} must be undefined here.
        CHECK(IsUndefined(*promise_or_capability, isolate));
        return;
      }
    }
  }
}
1258
1259MaybeDirectHandle<JSPromise> TryGetCurrentTaskPromise(Isolate* isolate) {
1260 Handle<Object> current_microtask = isolate->factory()->current_microtask();
1261 if (IsPromiseReactionJobTask(*current_microtask)) {
1262 auto promise_reaction_job_task =
1263 Cast<PromiseReactionJobTask>(current_microtask);
1264 // Check if the {reaction} has one of the known async function or
1265 // async generator continuations as its fulfill handler.
1266 if (IsBuiltinAsyncFulfillHandler(isolate,
1267 promise_reaction_job_task->handler()) ||
1268 IsBuiltinAsyncRejectHandler(isolate,
1269 promise_reaction_job_task->handler())) {
1270 // Now peek into the handlers' AwaitContext to get to
1271 // the JSGeneratorObject for the async function.
1272 DirectHandle<Context> context(
1273 Cast<JSFunction>(promise_reaction_job_task->handler())->context(),
1274 isolate);
1275 Handle<JSGeneratorObject> generator_object(
1276 Cast<JSGeneratorObject>(context->extension()), isolate);
1277 if (generator_object->is_executing()) {
1278 if (IsJSAsyncFunctionObject(*generator_object)) {
1279 auto async_function_object =
1280 Cast<JSAsyncFunctionObject>(generator_object);
1281 DirectHandle<JSPromise> promise(async_function_object->promise(),
1282 isolate);
1283 return promise;
1284 } else {
1285 auto async_generator_object =
1286 Cast<JSAsyncGeneratorObject>(generator_object);
1287 DirectHandle<Object> queue(async_generator_object->queue(), isolate);
1288 if (!IsUndefined(*queue, isolate)) {
1289 auto async_generator_request = Cast<AsyncGeneratorRequest>(queue);
1290 DirectHandle<JSPromise> promise(
1291 Cast<JSPromise>(async_generator_request->promise()), isolate);
1292 return promise;
1293 }
1294 }
1295 }
1296 } else {
1297#if V8_ENABLE_WEBASSEMBLY
1298 DirectHandle<WasmSuspenderObject> suspender;
1299 if (TryGetWasmSuspender(isolate, promise_reaction_job_task->handler())
1300 .ToHandle(&suspender)) {
1301 // The {promise_reaction_job_task} belongs to a suspended Wasm stack
1302 return direct_handle(suspender->promise(), isolate);
1303 }
1304#endif // V8_ENABLE_WEBASSEMBLY
1305
1306 // The {promise_reaction_job_task} doesn't belong to an await (or
1307 // yield inside an async generator) or a suspended Wasm stack,
1308 // but we might still be able to find an async frame if we follow
1309 // along the chain of promises on the {promise_reaction_job_task}.
1310 DirectHandle<HeapObject> promise_or_capability(
1311 promise_reaction_job_task->promise_or_capability(), isolate);
1312 if (IsJSPromise(*promise_or_capability)) {
1313 DirectHandle<JSPromise> promise =
1314 Cast<JSPromise>(promise_or_capability);
1315 return promise;
1316 }
1317 }
1318 }
1319 return MaybeDirectHandle<JSPromise>();
1320}
1321
1322void CaptureAsyncStackTrace(Isolate* isolate, CallSiteBuilder* builder) {
1323 DirectHandle<JSPromise> promise;
1324 if (TryGetCurrentTaskPromise(isolate).ToHandle(&promise)) {
1325 CaptureAsyncStackTrace(isolate, promise, builder);
1326 }
1327}
1328
// Walks the current stack and feeds each summarized frame (innermost of an
// inlined group last) to {visitor}. Stops early when the visitor's Visit()
// returns false.
template <typename Visitor>
void VisitStack(Isolate* isolate, Visitor* visitor,
                // NOTE(review): the StackTrace::StackTraceOptions parameter
                // (and its default value) is elided in this view.
  DisallowJavascriptExecution no_js(isolate);
  // Keep track if we visited a stack frame, but did not visit any summarized
  // frames. Either because the stack frame didn't create any summarized frames
  // or due to security origin.
  bool skipped_last_frame = true;
  for (StackFrameIterator it(isolate); !it.done(); it.Advance()) {
    StackFrame* frame = it.frame();
    switch (frame->type()) {
      case StackFrame::API_CALLBACK_EXIT:
      case StackFrame::BUILTIN_EXIT:
      case StackFrame::JAVASCRIPT_BUILTIN_CONTINUATION:
      case StackFrame::JAVASCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
      case StackFrame::TURBOFAN_JS:
      case StackFrame::MAGLEV:
      case StackFrame::INTERPRETED:
      case StackFrame::BASELINE:
      case StackFrame::BUILTIN:
#if V8_ENABLE_WEBASSEMBLY
      case StackFrame::STUB:
      case StackFrame::WASM:
      case StackFrame::WASM_SEGMENT_START:
#if V8_ENABLE_DRUMBRAKE
      case StackFrame::WASM_INTERPRETER_ENTRY:
#endif  // V8_ENABLE_DRUMBRAKE
#endif  // V8_ENABLE_WEBASSEMBLY
      {
        // A standard frame may include many summarized frames (due to
        // inlining).
        FrameSummaries summaries = CommonFrame::cast(frame)->Summarize();
        if (summaries.top_frame_is_construct_call && !skipped_last_frame) {
          visitor->SetPrevFrameAsConstructCall();
        }
        skipped_last_frame = true;
        for (auto summary = summaries.frames.rbegin();
             summary != summaries.frames.rend(); ++summary) {
          // Skip frames from other origins when asked to do so.
          // NOTE(review): the first half of this condition (checking the
          // cross-origin option) is elided in this view.
              !summary->native_context()->HasSameSecurityTokenAs(
                  isolate->context())) {
            continue;
          }
          if (!visitor->Visit(*summary)) return;
          skipped_last_frame = false;
        }
        break;
      }

      default:
        break;
    }
  }
}
1384
1385Handle<FixedArray> CaptureSimpleStackTrace(Isolate* isolate, int limit,
1386 FrameSkipMode mode,
1387 Handle<Object> caller) {
1388 TRACE_EVENT_BEGIN1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__,
1389 "maxFrameCount", limit);
1390
1391#if V8_ENABLE_WEBASSEMBLY
1392 wasm::WasmCodeRefScope code_ref_scope;
1393#endif // V8_ENABLE_WEBASSEMBLY
1394
1395 CallSiteBuilder builder(isolate, mode, limit, caller);
1396 VisitStack(isolate, &builder);
1397
1398 // If --async-stack-traces are enabled and the "current microtask" is a
1399 // PromiseReactionJobTask, we try to enrich the stack trace with async
1400 // frames.
1401 if (v8_flags.async_stack_traces) {
1402 CaptureAsyncStackTrace(isolate, &builder);
1403 }
1404
1405 Handle<FixedArray> stack_trace = builder.Build();
1406 TRACE_EVENT_END1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__,
1407 "frameCount", stack_trace->length());
1408 return stack_trace;
1409}
1410
1411DirectHandle<StackTraceInfo> GetDetailedStackTraceFromCallSiteInfos(
1412 Isolate* isolate, DirectHandle<FixedArray> call_site_infos, int limit) {
1413 auto frames = isolate->factory()->NewFixedArray(
1414 std::min(limit, call_site_infos->length()));
1415 int index = 0;
1416 for (int i = 0; i < call_site_infos->length() && index < limit; ++i) {
1417 DirectHandle<CallSiteInfo> call_site_info(
1418 Cast<CallSiteInfo>(call_site_infos->get(i)), isolate);
1419 if (call_site_info->IsAsync()) {
1420 break;
1421 }
1422 DirectHandle<Script> script;
1423 if (!CallSiteInfo::GetScript(isolate, call_site_info).ToHandle(&script) ||
1424 !script->IsSubjectToDebugging()) {
1425 continue;
1426 }
1427 DirectHandle<StackFrameInfo> stack_frame_info =
1428 isolate->factory()->NewStackFrameInfo(
1429 script, CallSiteInfo::GetSourcePosition(call_site_info),
1430 CallSiteInfo::GetFunctionDebugName(call_site_info),
1431 IsConstructor(*call_site_info));
1432 frames->set(index++, *stack_frame_info);
1433 }
1434 frames = FixedArray::RightTrimOrEmpty(isolate, frames, index);
1435 return isolate->factory()->NewStackTraceInfo(frames);
1436}
1437
1438} // namespace
1439
// NOTE(review): the signature line of this Isolate member function is elided
// in this view; the body installs the error_stack_symbol property below.
    DirectHandle<JSObject> error_object, FrameSkipMode mode,
    Handle<Object> caller) {
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__);
  Handle<UnionOf<Undefined, FixedArray>> call_site_infos_or_formatted_stack =
      factory()->undefined_value();

  // Capture the "simple stack trace" for the error.stack property,
  // which can be disabled by setting Error.stackTraceLimit to a non
  // number value or simply deleting the property. If the inspector
  // is active, and requests more stack frames than the JavaScript
  // program itself, we collect up to the maximum.
  int stack_trace_limit = 0;
  if (GetStackTraceLimit(this, &stack_trace_limit)) {
    int limit = stack_trace_limit;
    // NOTE(review): several lines are elided in this view.
    // Collect up to the maximum of what the JavaScript program and
    // the inspector want. There's a special case here where the API
    // can ask the stack traces to also include cross-origin frames,
    // in which case we collect a separate trace below. Note that
    // the inspector doesn't use this option, so we could as well
    // just deprecate this in the future.
      }
    }
    call_site_infos_or_formatted_stack =
        CaptureSimpleStackTrace(this, limit, mode, caller);
  }
  DirectHandle<Object> error_stack = call_site_infos_or_formatted_stack;

  // Next is the inspector part: Depending on whether we got a "simple
  // stack trace" above and whether that's usable (meaning the API
  // didn't request to include cross-origin frames), we remember the
  // cap for the stack trace (either a positive limit indicating that
  // the Error.stackTraceLimit value was below what was requested via
  // the API, or a negative limit to indicate the opposite), or we
  // collect a "detailed stack trace" eagerly and stash that away.
  // NOTE(review): the guarding condition for this section is elided.
    DirectHandle<StackTraceInfo> stack_trace;
    if (IsUndefined(*call_site_infos_or_formatted_stack, this) ||
      // NOTE(review): the remainder of this condition and the arguments to
      // CaptureDetailedStackTrace are elided in this view.
      stack_trace = CaptureDetailedStackTrace(
    } else {
      auto call_site_infos =
          Cast<FixedArray>(call_site_infos_or_formatted_stack);
      stack_trace = GetDetailedStackTraceFromCallSiteInfos(
          this, call_site_infos,
      if (stack_trace_limit < call_site_infos->length()) {
        call_site_infos_or_formatted_stack = FixedArray::RightTrimOrEmpty(
            this, call_site_infos, stack_trace_limit);
      }
      // Notify the debugger.
      OnStackTraceCaptured(stack_trace);
    }
    error_stack = factory()->NewErrorStackData(
        call_site_infos_or_formatted_stack, stack_trace);
  }

  // Store the computed stack on the error object under the private symbol.
      this,
      Object::SetProperty(this, error_object, factory()->error_stack_symbol(),
                          error_stack, StoreOrigin::kMaybeKeyed,
  return error_object;
}
1512
// NOTE(review): the signature line is elided in this view; the body returns
// the stashed detailed stack trace for an error object, if any.
    DirectHandle<JSReceiver> maybe_error_object) {
  // NOTE(review): the lookup declaration line is elided in this view.
      ErrorUtils::GetErrorStackProperty(this, maybe_error_object);
  if (!IsErrorStackData(*lookup.error_stack)) return {};
  return handle(Cast<ErrorStackData>(lookup.error_stack)->stack_trace(), this);
}
1520
// NOTE(review): the signature line is elided in this view; the body returns
// the call-site-info array stashed on an error object, or the empty array.
    DirectHandle<JSReceiver> maybe_error_object) {
  // NOTE(review): the lookup declaration line is elided in this view.
      ErrorUtils::GetErrorStackProperty(this, maybe_error_object);

  if (IsFixedArray(*lookup.error_stack)) {
    return Cast<FixedArray>(lookup.error_stack);
  }
  if (!IsErrorStackData(*lookup.error_stack)) {
    return factory()->empty_fixed_array();
  }
  auto error_stack_data = Cast<ErrorStackData>(lookup.error_stack);
  if (!error_stack_data->HasCallSiteInfos()) {
    return factory()->empty_fixed_array();
  }
  return handle(error_stack_data->call_site_infos(), this);
}
1538
// Returns an "abstract PC" for the topmost JavaScript frame — the bytecode
// address for unoptimized frames, the machine pc otherwise — and fills in
// 1-based *line / *column (or -1 when unavailable).
Address Isolate::GetAbstractPC(int* line, int* column) {
  // NOTE(review): the frame-iterator declaration (used as `it` below) is
  // elided in this view.

  if (it.done()) {
    *line = -1;
    *column = -1;
    return kNullAddress;
  }
  JavaScriptFrame* frame = it.frame();
  DCHECK(!frame->is_builtin());

  DirectHandle<SharedFunctionInfo> shared(frame->function()->shared(), this);
  // NOTE(review): a line is elided here — presumably ensuring source
  // positions are available on {shared}.
  int position = frame->position();

  Tagged<Object> maybe_script = frame->function()->shared()->script();
  if (IsScript(maybe_script)) {
    DirectHandle<Script> script(Cast<Script>(maybe_script), this);
    // NOTE(review): the declaration of `info` is elided in this view.
    Script::GetPositionInfo(script, position, &info);
    *line = info.line + 1;
    *column = info.column + 1;
  } else {
    // No script: report the raw position as the line.
    *line = position;
    *column = -1;
  }

  if (frame->is_unoptimized()) {
    // For interpreter/baseline frames, report the bytecode address.
    UnoptimizedJSFrame* iframe = static_cast<UnoptimizedJSFrame*>(frame);
    Address bytecode_start =
        iframe->GetBytecodeArray()->GetFirstBytecodeAddress();
    return bytecode_start + iframe->GetBytecodeOffset();
  }

  return frame->pc();
}
1575
1576namespace {
1577
// Stack visitor that collects up to {limit} StackFrameInfo objects for
// debuggable frames; used by CaptureDetailedStackTrace.
class StackFrameBuilder {
 public:
  StackFrameBuilder(Isolate* isolate, int limit)
      : isolate_(isolate),
        frames_(isolate_->factory()->empty_fixed_array()),
        index_(0),
        limit_(limit) {}

  void SetPrevFrameAsConstructCall() {
    // Nothing to do.
  }

  // Returns false (stop walking) once the limit is reached; skips frames
  // that aren't subject to debugging.
  bool Visit(FrameSummary& summary) {
    // Check if we have enough capacity left.
    if (index_ >= limit_) return false;
    // Skip frames that aren't subject to debugging.
    if (!summary.is_subject_to_debugging()) return true;
    DirectHandle<StackFrameInfo> frame = summary.CreateStackFrameInfo();
    frames_ = FixedArray::SetAndGrow(isolate_, frames_, index_++, frame);
    return true;
  }

  // Returns the collected frames, trimmed to the number actually written.
  Handle<FixedArray> Build() {
    return FixedArray::RightTrimOrEmpty(isolate_, frames_, index_);
  }

 private:
  Isolate* isolate_;
  // NOTE(review): the declaration of the frames_ backing array is elided in
  // this view.
  int index_;
  int limit_;
};
1610
1611} // namespace
1612
// NOTE(review): the signature line of this Isolate member function
// (presumably Isolate::CaptureDetailedStackTrace) is elided in this view.
    int limit, StackTrace::StackTraceOptions options) {
  TRACE_EVENT_BEGIN1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__,
                     "maxFrameCount", limit);
  // Collect StackFrameInfo objects for up to {limit} debuggable frames.
  StackFrameBuilder builder(this, limit);
  VisitStack(this, &builder, options);
  auto frames = builder.Build();
  TRACE_EVENT_END1(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__,
                   "frameCount", frames->length());
  auto stack_trace = factory()->NewStackTraceInfo(frames);
  // Notify the debugger.
  OnStackTraceCaptured(stack_trace);
  return stack_trace;
}
1626
1627namespace {
1628
1629class CurrentScriptNameStackVisitor {
1630 public:
1631 explicit CurrentScriptNameStackVisitor(Isolate* isolate)
1632 : isolate_(isolate) {}
1633
1634 void SetPrevFrameAsConstructCall() {
1635 // Nothing to do.
1636 }
1637
1638 bool Visit(FrameSummary& summary) {
1639 // Skip frames that aren't subject to debugging. Keep this in sync with
1640 // StackFrameBuilder::Visit so both visitors visit the same frames.
1641 if (!summary.is_subject_to_debugging()) return true;
1642
1643 // Frames that are subject to debugging always have a valid script object.
1644 auto script = Cast<Script>(summary.script());
1645 Handle<Object> name_or_url_obj(script->GetNameOrSourceURL(), isolate_);
1646 if (!IsString(*name_or_url_obj)) return true;
1647
1648 auto name_or_url = Cast<String>(name_or_url_obj);
1649 if (!name_or_url->length()) return true;
1650
1651 name_or_url_ = name_or_url;
1652 return false;
1653 }
1654
1655 DirectHandle<String> CurrentScriptNameOrSourceURL() const {
1656 return name_or_url_;
1657 }
1658
1659 private:
1660 Isolate* const isolate_;
1661 Handle<String> name_or_url_;
1662};
1663
1664class CurrentScriptStackVisitor {
1665 public:
1666 void SetPrevFrameAsConstructCall() {
1667 // Nothing to do.
1668 }
1669
1670 bool Visit(FrameSummary& summary) {
1671 // Skip frames that aren't subject to debugging. Keep this in sync with
1672 // StackFrameBuilder::Visit so both visitors visit the same frames.
1673 if (!summary.is_subject_to_debugging()) return true;
1674
1675 // Frames that are subject to debugging always have a valid script object.
1676 current_script_ = Cast<Script>(summary.script());
1677 return false;
1678 }
1679
1680 MaybeDirectHandle<Script> CurrentScript() const { return current_script_; }
1681
1682 private:
1683 MaybeHandle<Script> current_script_;
1684};
1685
1686} // namespace
1687
// NOTE(review): the signature line is elided in this view — presumably
// Isolate::CurrentScriptNameOrSourceURL(), given the visitor used below.
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__);
  // Walk the stack until the first debuggable frame with a named script.
  CurrentScriptNameStackVisitor visitor(this);
  VisitStack(this, &visitor);
  return visitor.CurrentScriptNameOrSourceURL();
}
1694
// NOTE(review): the signature line is elided in this view; the body returns
// the eval origin of the current script.
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.stack_trace"), __func__);
  // Find the script of the topmost debuggable frame.
  CurrentScriptStackVisitor visitor{};
  VisitStack(this, &visitor);
  // NOTE(review): the declaration of `script` is elided in this view.
  if (!visitor.CurrentScript().ToHandle(&script)) {
    // NOTE(review): the early-return value for "no script" is elided.
  }
  return direct_handle(script->GetEvalOrigin(), this);
}
1705
// NOTE(review): the signature line is elided in this view — presumably
// bool GetStackTraceLimit(Isolate* isolate, int* result), given the body.
// Reads Error.stackTraceLimit; returns false (no capture) when the property
// is missing/non-numeric or under correctness fuzzing.
  if (v8_flags.correctness_fuzzer_suppressions) return false;
  DirectHandle<JSObject> error = isolate->error_function();

  DirectHandle<String> key = isolate->factory()->stackTraceLimit_string();
  DirectHandle<Object> stack_trace_limit =
      JSReceiver::GetDataProperty(isolate, error, key);
  if (!IsNumber(*stack_trace_limit)) return false;

  // Ensure that limit is not negative.
  *result = std::max(
      FastD2IChecked(Object::NumberValue(Cast<Number>(*stack_trace_limit))), 0);

  // Count usages where the user overrode the default flag value.
  if (*result != v8_flags.stack_trace_limit) {
    isolate->CountUsage(v8::Isolate::kErrorStackTraceLimit);
  }

  return true;
}
1725
// NOTE(review): the signature line is elided in this view — presumably
// Isolate::PrintStack(FILE* out, PrintStackMode mode). Guards against
// re-entrant printing via stack_trace_nesting_level_.
  if (stack_trace_nesting_level_ == 0) {
    // NOTE(review): lines are elided here — including the declaration of
    // `allocator` used below and the nesting-level bookkeeping.
    StringStream accumulator(&allocator);
    incomplete_message_ = &accumulator;
    PrintStack(&accumulator, mode);
    accumulator.OutputToFile(out);
    accumulator.Log(this);
    incomplete_message_ = nullptr;
  } else if (stack_trace_nesting_level_ == 1) {
    // Re-entered while already printing: report the double fault instead.
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
  }
}
1748
1749static void PrintFrames(Isolate* isolate, StringStream* accumulator,
1750 StackFrame::PrintMode mode) {
1751 StackFrameIterator it(isolate);
1752 for (int i = 0; !it.done(); it.Advance()) {
1753 it.frame()->Print(accumulator, mode, i++);
1754 }
1755}
1756
// NOTE(review): the signature line is elided in this view — presumably
// Isolate::PrintStack(StringStream* accumulator, PrintStackMode mode).
  HandleScope scope(this);
  DCHECK(accumulator->IsMentionedObjectCacheClear(this));

  // Avoid printing anything if there are no frames.
  if (c_entry_fp(thread_local_top()) == 0) return;

  accumulator->Add(
      "\n==== JS stack trace =========================================\n\n");
  PrintFrames(this, accumulator, StackFrame::OVERVIEW);
  if (mode == kPrintStackVerbose) {
    accumulator->Add(
        "\n==== Details ================================================\n\n");
    PrintFrames(this, accumulator, StackFrame::DETAILS);
    accumulator->PrintMentionedObjectCache(this);
  }
  accumulator->Add("=====================\n\n");
}
1775
1780
// NOTE(review): the signature lines of this Isolate member function
// (presumably Isolate::ReportFailedAccessCheck) are elided in this view.
  if (!thread_local_top()->failed_access_check_callback_) {
    THROW_NEW_ERROR(this, NewTypeError(MessageTemplate::kNoAccess));
  }

  DCHECK(IsAccessCheckNeeded(*receiver));
  DCHECK(!context().is_null());

  // Get the data object from access check info.
  HandleScope scope(this);
  {
    // NOTE(review): declarations for `data` and `no_gc` appear to be elided
    // in this view.
    Tagged<AccessCheckInfo> access_check_info =
    // NOTE(review): the initializer expression is elided in this view.
    if (access_check_info.is_null()) {
      no_gc.Release();
      THROW_NEW_ERROR(this, NewTypeError(MessageTemplate::kNoAccess));
    }
    data = direct_handle(access_check_info->data(), this);
  }

  {
    // Leaving JavaScript.
    // NOTE(review): the callback invocation lines are elided in this view.
        v8::Utils::ToLocal(receiver), v8::ACCESS_HAS, v8::Utils::ToLocal(data));
  }
  RETURN_VALUE_IF_EXCEPTION(this, {});
  // Throw an exception even if the callback forgot to do so.
  THROW_NEW_ERROR(this, NewTypeError(MessageTemplate::kNoAccess));
}
1814
// NOTE(review): the signature lines of this Isolate member function
// (presumably Isolate::MayAccess) are elided in this view.
  DCHECK(IsJSGlobalProxy(*receiver) || IsAccessCheckNeeded(*receiver));

  // Check for compatibility between the security tokens in the
  // current lexical context and the accessed object.

  // During bootstrapping, callback functions are not enabled yet.
  if (bootstrapper()->IsActive()) return true;
  {
    // NOTE(review): a scope-opening declaration is elided in this view.

    if (IsJSGlobalProxy(*receiver)) {
      std::optional<Tagged<Object>> receiver_context =
          Cast<JSGlobalProxy>(*receiver)->GetCreationContext();
      if (!receiver_context) return false;

      // Fast path: same context, or matching security tokens.
      if (*receiver_context == *accessing_context) return true;

      if (Cast<Context>(*receiver_context)->security_token() ==
          accessing_context->security_token())
        return true;
    }
  }

  HandleScope scope(this);
  // NOTE(review): declarations (e.g. `data`, `callback`) are elided here.
  {
    Tagged<AccessCheckInfo> access_check_info =
    // NOTE(review): the initializer expression is elided in this view.
    if (access_check_info.is_null()) return false;
    Tagged<Object> fun_obj = access_check_info->callback();
    // NOTE(review): the conversion of fun_obj to `callback` is elided here.
        this, fun_obj);
    data = direct_handle(access_check_info->data(), this);
  }

  {
    // Leaving JavaScript.
    return callback(v8::Utils::ToLocal(accessing_context),
                    v8::Utils::ToLocal(receiver), v8::Utils::ToLocal(data));
  }
}
1861
// NOTE(review): the signature line of this Isolate member function
// (presumably Isolate::StackOverflow) is elided in this view.
  // Whoever calls this method should not have overflown the stack limit by too
  // much. Otherwise we risk actually running out of stack space.
  // We allow for up to 8kB overflow, because we typically allow up to 4KB
  // overflow per frame in generated code, but might call through more smaller
  // frames until we reach this method.
  // If this DCHECK fails, one of the frames on the stack should be augmented by
  // an additional stack check.
#if defined(V8_USE_ADDRESS_SANITIZER) || defined(MEMORY_SANITIZER)
  // Allow for a bit more overflow in sanitizer builds, because C++ frames take
  // significantly more space there.
  DCHECK_GE(GetCurrentStackPosition(), stack_guard()->real_climit() - 64 * KB);
#elif (defined(V8_TARGET_ARCH_RISCV64) || defined(V8_TARGET_ARCH_RISCV32)) && \
    defined(USE_SIMULATOR)
  // Allow for more overflow on riscv simulator, because C++ frames take more
  // there.
  DCHECK_GE(GetCurrentStackPosition(), stack_guard()->real_climit() - 12 * KB);
#elif defined(ENABLE_SLOW_DCHECKS) && V8_HAS_ATTRIBUTE_TRIVIAL_ABI
  // In this configuration, direct handles are not trivially copyable. This
  // prevents some C++ compiler optimizations and uses more stack space.
  DCHECK_GE(GetCurrentStackPosition(), stack_guard()->real_climit() - 10 * KB);
#else
  DCHECK_GE(GetCurrentStackPosition(), stack_guard()->real_climit() - 8 * KB);
#endif

  if (v8_flags.correctness_fuzzer_suppressions) {
    FATAL("Aborting on stack overflow");
  }

#if USE_SIMULATOR
  // Adjust the stack limit back to the real limit in case it was temporarily
  // modified to reflect an overflow in the C stack (see
  // AdjustStackLimitForSimulator).
  stack_guard()->ResetStackLimitForSimulator();
#endif
  DisallowJavascriptExecution no_js(this);
  HandleScope scope(this);

  // Construct a RangeError with the kStackOverflow message.
  DirectHandle<JSFunction> fun = range_error_function();
  // NOTE(review): the declaration receiving this message string is elided.
      MessageFormatter::TemplateString(MessageTemplate::kStackOverflow));
  DirectHandle<Object> options = factory()->undefined_value();
  DirectHandle<Object> no_caller;
  // NOTE(review): the assignment macro lines around Construct are elided.
      this, exception,
      ErrorUtils::Construct(this, fun, fun, msg, options, SKIP_NONE, no_caller,
  // Mark the error as uncatchable by Wasm.
  JSObject::AddProperty(this, exception, factory()->wasm_uncatchable_symbol(),
                        factory()->true_value(), NONE);

  Throw(*exception);

#ifdef VERIFY_HEAP
  if (v8_flags.verify_heap && v8_flags.stress_compaction) {
    // NOTE(review): the heap-verification call lines are elided in this view.
  }
#endif  // VERIFY_HEAP

  return ReadOnlyRoots(heap()).exception();
}
1924
// NOTE(review): the first signature line of this Isolate member function
// (presumably Isolate::ThrowAt(exception, location)) is elided in this view.
// Attaches start/end positions and the script to the exception via private
// symbols, then throws it at the given location.
    MessageLocation* location) {
  DirectHandle<Name> key_start_pos = factory()->error_start_pos_symbol();
  Object::SetProperty(this, exception, key_start_pos,
                      direct_handle(Smi::FromInt(location->start_pos()), this),
  // NOTE(review): trailing SetProperty arguments are elided in this view.
      .Check();

  DirectHandle<Name> key_end_pos = factory()->error_end_pos_symbol();
  Object::SetProperty(this, exception, key_end_pos,
                      direct_handle(Smi::FromInt(location->end_pos()), this),
  // NOTE(review): trailing SetProperty arguments are elided in this view.
      .Check();

  DirectHandle<Name> key_script = factory()->error_script_symbol();
  Object::SetProperty(this, exception, key_script, location->script(),
  // NOTE(review): trailing SetProperty arguments are elided in this view.
      .Check();

  return Throw(*exception, location);
}
1949
// NOTE(review): the signature line is elided in this view — presumably
// Isolate::TerminateExecution(); throws the special termination exception.
  return Throw(ReadOnlyRoots(this).termination_exception());
}
1953
1959
// NOTE(review): this is the tail of an Isolate member function whose
// signature (and the enqueueing of the interrupt entry) is elided in this
// view; it requests an API interrupt under the execution lock.
  ExecutionAccess access(this);
  stack_guard()->RequestApiInterrupt();
}
1965
// NOTE(review): the signature line is elided in this view — presumably
// Isolate::InvokeApiInterruptCallbacks(). Drains the API interrupt queue,
// invoking each callback outside the execution access lock.
  RCS_SCOPE(this, RuntimeCallCounterId::kInvokeApiInterruptCallbacks);
  // Note: callback below should be called outside of execution access lock.
  while (true) {
    InterruptEntry entry;
    {
      ExecutionAccess access(this);
      if (api_interrupts_queue_.empty()) return;
      entry = api_interrupts_queue_.front();
      // NOTE(review): the queue pop line is elided in this view.
    }
    // NOTE(review): a line (presumably a VMState scope) is elided here.
    HandleScope handle_scope(this);
    entry.first(reinterpret_cast<v8::Isolate*>(this), entry.second);
  }
}
1982
// NOTE(review): the signature line is elided in this view; the body posts an
// API interrupt that invalidates the NoProfiling protector on the main
// thread.
  // This request might be triggered from arbitrary thread but protector
  // invalidation must happen on the main thread, so use Api interrupt
  // to achieve that.
  // NOTE(review): the RequestInterrupt(...) call opening is elided here.
      [](v8::Isolate* isolate, void*) {
        Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
        if (Protectors::IsNoProfilingIntact(i_isolate)) {
          Protectors::InvalidateNoProfiling(i_isolate);
        }
      },
      nullptr);
}
1996
1997namespace {
1998
1999void ReportBootstrappingException(DirectHandle<Object> exception,
2000 MessageLocation* location) {
2001 base::OS::PrintError("Exception thrown during bootstrapping\n");
2002 if (location == nullptr || location->script().is_null()) return;
2003 // We are bootstrapping and caught an error where the location is set
2004 // and we have a script for the location.
2005 // In this case we could have an extension (or an internal error
2006 // somewhere) and we print out the line number at which the error occurred
2007 // to the console for easier debugging.
2008 int line_number =
2009 location->script()->GetLineNumber(location->start_pos()) + 1;
2010 if (IsString(*exception) && IsString(location->script()->name())) {
2011 base::OS::PrintError(
2012 "Extension or internal compilation error: %s in %s at line %d.\n",
2013 Cast<String>(*exception)->ToCString().get(),
2014 Cast<String>(location->script()->name())->ToCString().get(),
2015 line_number);
2016 } else if (IsString(location->script()->name())) {
2017 base::OS::PrintError(
2018 "Extension or internal compilation error in %s at line %d.\n",
2019 Cast<String>(location->script()->name())->ToCString().get(),
2020 line_number);
2021 } else if (IsString(*exception)) {
2022 base::OS::PrintError("Extension or internal compilation error: %s.\n",
2023 Cast<String>(*exception)->ToCString().get());
2024 } else {
2025 base::OS::PrintError("Extension or internal compilation error.\n");
2026 }
2027#ifdef OBJECT_PRINT
2028 // Since comments and empty lines have been stripped from the source of
2029 // builtins, print the actual source here so that line numbers match.
2030 if (IsString(location->script()->source())) {
2031 DirectHandle<String> src(Cast<String>(location->script()->source()),
2032 location->script()->GetIsolate());
2033 PrintF("Failing script:");
2034 int len = src->length();
2035 if (len == 0) {
2036 PrintF(" <not available>\n");
2037 } else {
2038 PrintF("\n");
2039 line_number = 1;
2040 PrintF("%5d: ", line_number);
2041 for (int i = 0; i < len; i++) {
2042 uint16_t character = src->Get(i);
2043 PrintF("%c", character);
2044 if (character == '\n' && i < len - 2) {
2045 PrintF("%5d: ", ++line_number);
2046 }
2047 }
2048 PrintF("\n");
2049 }
2050 }
2051#endif
2052}
2053
2054} // anonymous namespace
2055
2057 DirectHandle<Object> exception, MessageLocation* location) {
2058 DirectHandle<JSMessageObject> message_obj =
2059 CreateMessage(exception, location);
2060
2061 // If the abort-on-uncaught-exception flag is specified, and if the
2062 // embedder didn't specify a custom uncaught exception callback,
2063 // or if the custom callback determined that V8 should abort, then
2064 // abort.
2065 // Cache the flag on a static so that we can modify the value looked up below
2066 // in the presence of read-only flags.
2067 static bool abort_on_uncaught_exception =
2068 v8_flags.abort_on_uncaught_exception;
2069 if (abort_on_uncaught_exception) {
2070 CatchType prediction = PredictExceptionCatcher();
2071 if ((prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) &&
2074 reinterpret_cast<v8::Isolate*>(this)))) {
2075 // Prevent endless recursion.
2076 abort_on_uncaught_exception = false;
2077 // This flag is intended for use by JavaScript developers, so
2078 // print a user-friendly stack trace (not an internal one).
2079 PrintF(stderr, "%s\n\nFROM\n",
2080 MessageHandler::GetLocalizedMessage(this, message_obj).get());
2081 std::ostringstream stack_trace_stream;
2082 PrintCurrentStackTrace(stack_trace_stream);
2083 PrintF(stderr, "%s", stack_trace_stream.str().c_str());
2085 }
2086 }
2087
2088 return message_obj;
2089}
2090
2092 MessageLocation* location) {
2094 DCHECK_IMPLIES(IsHole(raw_exception),
2095 raw_exception == ReadOnlyRoots{this}.termination_exception());
2097#if V8_ENABLE_WEBASSEMBLY
2099#endif
2100
2101 HandleScope scope(this);
2102 DirectHandle<Object> exception(raw_exception, this);
2103
2104 if (v8_flags.print_all_exceptions) {
2105 PrintF("=========================================================\n");
2106 PrintF("Exception thrown:\n");
2107 if (location) {
2108 DirectHandle<Script> script = location->script();
2109 DirectHandle<Object> name(script->GetNameOrSourceURL(), this);
2110 PrintF("at ");
2111 if (IsString(*name) && Cast<String>(*name)->length() > 0) {
2112 Cast<String>(*name)->PrintOn(stdout);
2113 } else {
2114 PrintF("<anonymous>");
2115 }
2116// Script::GetLineNumber and Script::GetColumnNumber can allocate on the heap to
2117// initialize the line_ends array, so be careful when calling them.
2118#ifdef DEBUG
2119 if (AllowGarbageCollection::IsAllowed()) {
2120#else
2121 if ((false)) {
2122#endif
2123 Script::PositionInfo start_pos;
2124 Script::PositionInfo end_pos;
2125 Script::GetPositionInfo(script, location->start_pos(), &start_pos);
2126 Script::GetPositionInfo(script, location->end_pos(), &end_pos);
2127 PrintF(", %d:%d - %d:%d\n", start_pos.line + 1, start_pos.column + 1,
2128 end_pos.line + 1, end_pos.column + 1);
2129 } else {
2130 PrintF(", line %d\n", script->GetLineNumber(location->start_pos()) + 1);
2131 }
2132 }
2133 Print(*exception);
2134 PrintF("Stack Trace:\n");
2135 PrintStack(stdout);
2136 PrintF("=========================================================\n");
2137 }
2138
2139 // Determine whether a message needs to be created for the given exception
2140 // depending on the following criteria:
2141 // 1) External v8::TryCatch missing: Always create a message because any
2142 // JavaScript handler for a finally-block might re-throw to top-level.
2143 // 2) External v8::TryCatch exists: Only create a message if the handler
2144 // captures messages or is verbose (which reports despite the catch).
2145 // 3) ReThrow from v8::TryCatch: The message from a previous throw still
2146 // exists and we preserve it instead of creating a new message.
2147 bool requires_message = try_catch_handler() == nullptr ||
2150 bool rethrowing_message = thread_local_top()->rethrowing_message_;
2151
2153
2154 // Notify debugger of exception.
2155 if (is_catchable_by_javascript(*exception)) {
2156 DirectHandle<Object> message(pending_message(), this);
2157 std::optional<Tagged<Object>> maybe_exception = debug()->OnThrow(exception);
2158 if (maybe_exception.has_value()) {
2159 return *maybe_exception;
2160 }
2161 // Restore the message in case it was clobbered by debugger.
2162 set_pending_message(*message);
2163 }
2164
2165 // Generate the message if required.
2166 if (requires_message && !rethrowing_message) {
2167 MessageLocation computed_location;
2168 // If no location was specified we try to use a computed one instead.
2169 if (location == nullptr && ComputeLocation(&computed_location)) {
2170 location = &computed_location;
2171 }
2172 if (bootstrapper()->IsActive()) {
2173 // It's not safe to try to make message objects or collect stack traces
2174 // while the bootstrapper is active since the infrastructure may not have
2175 // been properly initialized.
2176 ReportBootstrappingException(exception, location);
2177 } else {
2178 DirectHandle<Object> message_obj =
2179 CreateMessageOrAbort(exception, location);
2180 set_pending_message(*message_obj);
2181 }
2182 }
2183
2184 // Set the exception being thrown.
2185 set_exception(*exception);
2187
2188 if (v8_flags.experimental_report_exceptions_from_callbacks &&
2189 exception_propagation_callback_ && !rethrowing_message &&
2191 // Don't preprocess exceptions that might happen inside
2192 // |exception_propagation_callback_|.
2196 }
2197 return ReadOnlyRoots(heap()).exception();
2198}
2199
2202
2203 // Set the exception being re-thrown.
2204 set_exception(exception);
2205 return ReadOnlyRoots(heap()).exception();
2206}
2207
2209 Tagged<Object> message) {
2212
2213 set_pending_message(message);
2214 return ReThrow(exception);
2215}
2216
2217namespace {
2218#if V8_ENABLE_WEBASSEMBLY
2219// This scope will set the thread-in-wasm flag after the execution of all
2220// destructors. The thread-in-wasm flag is only set when the scope gets enabled.
2221class SetThreadInWasmFlagScope {
2222 public:
2223 SetThreadInWasmFlagScope() { trap_handler::AssertThreadNotInWasm(); }
2224
2225 ~SetThreadInWasmFlagScope() {
2226 if (enabled_) trap_handler::SetThreadInWasm();
2227 }
2228
2229 void Enable() { enabled_ = true; }
2230
2231 private:
2232 bool enabled_ = false;
2233};
2234#endif // V8_ENABLE_WEBASSEMBLY
2235} // namespace
2236
2238 // TODO(v8:12676): Fix gcmole failures in this function.
2239 DisableGCMole no_gcmole;
2241
2242 // The topmost_script_having_context value becomes outdated after frames
2243 // unwinding.
2245
2246#if V8_ENABLE_WEBASSEMBLY
2247 // Create the {SetThreadInWasmFlagScope} first in this function so that its
2248 // destructor gets called after all the other destructors. It is important
2249 // that the destructor sets the thread-in-wasm flag after all other
2250 // destructors. The other destructors may cause exceptions, e.g. ASan on
2251 // Windows, which would invalidate the thread-in-wasm flag when the wasm trap
2252 // handler handles such non-wasm exceptions.
2253 SetThreadInWasmFlagScope set_thread_in_wasm_flag_scope;
2254#endif // V8_ENABLE_WEBASSEMBLY
2255 Tagged<Object> exception = this->exception();
2256
2257 auto FoundHandler = [&](StackFrameIterator& iter, Tagged<Context> context,
2258 Address instruction_start, intptr_t handler_offset,
2259 Address constant_pool_address, Address handler_sp,
2260 Address handler_fp, int num_frames_above_handler) {
2261 // Store information to be consumed by the CEntry.
2264 instruction_start + handler_offset;
2265 thread_local_top()->pending_handler_constant_pool_ = constant_pool_address;
2266 thread_local_top()->pending_handler_fp_ = handler_fp;
2267 thread_local_top()->pending_handler_sp_ = handler_sp;
2269 num_frames_above_handler;
2270
2271#if V8_ENABLE_WEBASSEMBLY
2272 // If the exception was caught in a different stack, process all
2273 // intermediate stack switches.
2274 // This cannot be done during unwinding because the stack switching state
2275 // must stay consistent with the thread local top (see crbug.com/406053619).
2277 if (active_stack != nullptr) {
2278 wasm::StackMemory* parent = nullptr;
2279 while (active_stack != iter.wasm_stack()) {
2280 parent = active_stack->jmpbuf()->parent;
2282 SwitchStacks(active_stack, parent);
2284 RetireWasmStack(active_stack);
2285 active_stack = parent;
2286 }
2287 if (parent) {
2288 // We switched at least once, update the active continuation.
2289 isolate_data_.set_active_stack(active_stack);
2290#if USE_SIMULATOR_BOOL && V8_TARGET_ARCH_ARM64
2291 Simulator::current(this)->SetStackLimit(
2292 reinterpret_cast<uintptr_t>(active_stack->jmpbuf()->stack_limit));
2293#endif
2294 }
2295 }
2296 // The unwinder is running on the central stack. If the target frame is in a
2297 // secondary stack, update the central stack flags and the stack limit.
2298 Address stack_address =
2299 handler_sp != kNullAddress ? handler_sp : handler_fp;
2300 if (!IsOnCentralStack(stack_address)) {
2302 iter.wasm_stack()->ShrinkTo(stack_address);
2303 uintptr_t limit =
2304 reinterpret_cast<uintptr_t>(iter.wasm_stack()->jslimit());
2308#if USE_SIMULATOR_BOOL && V8_TARGET_ARCH_ARM64
2309 Simulator::current(this)->SetStackLimit(limit);
2310#endif
2311 iter.wasm_stack()->clear_stack_switch_info();
2312 }
2313#endif
2314
2315 // Return and clear exception. The contract is that:
2316 // (1) the exception is stored in one place (no duplication), and
2317 // (2) within generated-code land, that one place is the return register.
2318 // If/when we unwind back into C++ (returning to the JSEntry stub,
2319 // or to Execution::CallWasm), the returned exception will be sent
2320 // back to isolate->set_exception(...).
2322 return exception;
2323 };
2324
2325 // Special handling of termination exceptions, uncatchable by JavaScript and
2326 // Wasm code, we unwind the handlers until the top ENTRY handler is found.
2327 bool catchable_by_js = is_catchable_by_javascript(exception);
2328 if (!catchable_by_js && !context().is_null()) {
2329 // Because the array join stack will not pop the elements when throwing the
2330 // uncatchable terminate exception, we need to clear the array join stack to
2331 // avoid leaving the stack in an invalid state.
2332 // See also CycleProtectedArrayJoin.
2333 raw_native_context()->set_array_join_stack(
2334 ReadOnlyRoots(this).undefined_value());
2335 }
2336
2337 // Compute handler and stack unwinding information by performing a full walk
2338 // over the stack and dispatching according to the frame type.
2339 int visited_frames = 0;
2340 for (StackFrameIterator iter(this, thread_local_top());;
2341 iter.Advance(), visited_frames++) {
2342#if V8_ENABLE_WEBASSEMBLY
2343 if (iter.frame()->type() == StackFrame::STACK_SWITCH) {
2344 if (catchable_by_js && iter.frame()->LookupCode()->builtin_id() !=
2345 Builtin::kJSToWasmStressSwitchStacksAsm) {
2346 Tagged<Code> code =
2347 builtins()->code(Builtin::kWasmReturnPromiseOnSuspendAsm);
2348 HandlerTable table(code);
2349 Address instruction_start =
2350 code->InstructionStart(this, iter.frame()->pc());
2351 int handler_offset = table.LookupReturn(0);
2352 return FoundHandler(iter, Context(), instruction_start, handler_offset,
2353 kNullAddress, iter.frame()->sp(),
2354 iter.frame()->fp(), visited_frames);
2355 } else {
2356 // Just walk across the stack switch here. We only process it once we
2357 // have reached the handler.
2358 continue;
2359 }
2360 }
2361#endif
2362 // Handler must exist.
2363 DCHECK(!iter.done());
2364
2365 StackFrame* frame = iter.frame();
2366
2367 // The debugger implements the "restart frame" feature by throwing a
2368 // terminate exception. Check and if we need to restart `frame`,
2369 // jump into the `RestartFrameTrampoline` builtin instead of
2370 // a catch handler.
2371 // Optimized frames take a detour via the deoptimizer before also jumping
2372 // to the `RestartFrameTrampoline` builtin.
2373 if (debug()->ShouldRestartFrame(frame->id())) {
2375 CHECK(!catchable_by_js);
2376 CHECK(frame->is_javascript());
2377
2378 if (frame->is_optimized_js()) {
2379 Tagged<Code> code = frame->LookupCode();
2380 // The debugger triggers lazy deopt for the "to-be-restarted" frame
2381 // immediately when the CDP event arrives while paused.
2382 CHECK(code->marked_for_deoptimization());
2384
2385 // Jump directly to the optimized frames return, to immediately fall
2386 // into the deoptimizer.
2387 const int offset =
2388 static_cast<int>(frame->pc() - code->instruction_start());
2389
2390 // Compute the stack pointer from the frame pointer. This ensures that
2391 // argument slots on the stack are dropped as returning would.
2392 // Note: Needed by the deoptimizer to rematerialize frames.
2393 Address return_sp = frame->fp() +
2395 code->stack_slots() * kSystemPointerSize;
2396 return FoundHandler(iter, Context(), code->instruction_start(), offset,
2397 code->constant_pool(), return_sp, frame->fp(),
2398 visited_frames);
2399 }
2400
2402 Tagged<Code> code = *BUILTIN_CODE(this, RestartFrameTrampoline);
2403 return FoundHandler(iter, Context(), code->instruction_start(), 0,
2404 code->constant_pool(), kNullAddress, frame->fp(),
2405 visited_frames);
2406 }
2407
2408 switch (frame->type()) {
2409 case StackFrame::ENTRY:
2410 case StackFrame::CONSTRUCT_ENTRY: {
2411 // For JSEntry frames we always have a handler.
2412 StackHandler* handler = frame->top_handler();
2413
2414 // Restore the next handler.
2415 thread_local_top()->handler_ = handler->next_address();
2416
2417 // Gather information from the handler.
2418 Tagged<Code> code = frame->LookupCode();
2419 HandlerTable table(code);
2420 return FoundHandler(iter, Context(),
2421 code->InstructionStart(this, frame->pc()),
2422 table.LookupReturn(0), code->constant_pool(),
2423 handler->address() + StackHandlerConstants::kSize,
2424 0, visited_frames);
2425 }
2426
2427#if V8_ENABLE_WEBASSEMBLY
2428 case StackFrame::C_WASM_ENTRY: {
2429#if V8_ENABLE_DRUMBRAKE
2430 if (v8_flags.wasm_jitless) {
2431 StackHandler* handler = frame->top_handler();
2432 thread_local_top()->handler_ = handler->next_address();
2433 Tagged<Code> code =
2434 frame->LookupCode(); // WasmInterpreterCWasmEntry.
2435
2436 HandlerTable table(code);
2437 Address instruction_start = code->InstructionStart(this, frame->pc());
2438 // Compute the stack pointer from the frame pointer. This ensures that
2439 // argument slots on the stack are dropped as returning would.
2440 Address return_sp = *reinterpret_cast<Address*>(
2441 frame->fp() + WasmInterpreterCWasmEntryConstants::kSPFPOffset);
2442 const int handler_offset = table.LookupReturn(0);
2445 }
2446 return FoundHandler(iter, Context(), instruction_start,
2447 handler_offset, code->constant_pool(), return_sp,
2448 frame->fp(), visited_frames);
2449 }
2450#endif // V8_ENABLE_DRUMBRAKE
2451
2452 StackHandler* handler = frame->top_handler();
2453 thread_local_top()->handler_ = handler->next_address();
2454 Tagged<Code> code = frame->LookupCode();
2455 HandlerTable table(code);
2456 Address instruction_start = code->instruction_start();
2457 int return_offset = static_cast<int>(frame->pc() - instruction_start);
2458 int handler_offset = table.LookupReturn(return_offset);
2459 DCHECK_NE(-1, handler_offset);
2460 // Compute the stack pointer from the frame pointer. This ensures that
2461 // argument slots on the stack are dropped as returning would.
2462 Address return_sp = frame->fp() +
2464 code->stack_slots() * kSystemPointerSize;
2465 return FoundHandler(iter, Context(), instruction_start, handler_offset,
2466 code->constant_pool(), return_sp, frame->fp(),
2467 visited_frames);
2468 }
2469
2470#if V8_ENABLE_DRUMBRAKE
2471 case StackFrame::WASM_INTERPRETER_ENTRY: {
2474 }
2475 } break;
2476#endif // V8_ENABLE_DRUMBRAKE
2477
2478 case StackFrame::WASM:
2479 case StackFrame::WASM_SEGMENT_START: {
2480 if (!is_catchable_by_wasm(exception)) break;
2481
2482 WasmFrame* wasm_frame = static_cast<WasmFrame*>(frame);
2483 wasm::WasmCode* wasm_code =
2484 wasm::GetWasmCodeManager()->LookupCode(this, frame->pc());
2485 int offset = wasm_frame->LookupExceptionHandlerInTable();
2486 if (offset < 0) break;
2487 // Compute the stack pointer from the frame pointer. This ensures that
2488 // argument slots on the stack are dropped as returning would.
2489 // The stack slot count needs to be adjusted for Liftoff frames. It has
2490 // two components: the fixed frame slots, and the maximum number of
2491 // registers pushed on top of the frame in out-of-line code. We know
2492 // that we are not currently in an OOL call, because OOL calls don't
2493 // have exception handlers. So we subtract the OOL spill count from the
2494 // total stack slot count to compute the actual frame size:
2495 int stack_slots = wasm_code->stack_slots() - wasm_code->ool_spills();
2496 Address return_sp = frame->fp() +
2499
2500#if V8_ENABLE_DRUMBRAKE
2501 // Transitioning from JS To Wasm.
2502 if (v8_flags.wasm_enable_exec_time_histograms &&
2503 v8_flags.slow_histograms && !v8_flags.wasm_jitless) {
2504 // Start measuring the time spent running Wasm for jitted Wasm.
2505 wasm_execution_timer()->Start();
2506 }
2507#endif // V8_ENABLE_DRUMBRAKE
2508
2509 // This is going to be handled by WebAssembly, so we need to set the TLS
2510 // flag. The {SetThreadInWasmFlagScope} will set the flag after all
2511 // destructors have been executed.
2512 set_thread_in_wasm_flag_scope.Enable();
2513 return FoundHandler(iter, Context(), wasm_code->instruction_start(),
2514 offset, wasm_code->constant_pool(), return_sp,
2515 frame->fp(), visited_frames);
2516 }
2517
2518 case StackFrame::WASM_LIFTOFF_SETUP: {
2519 // The WasmLiftoffFrameSetup builtin doesn't throw, and doesn't call
2520 // out to user code that could throw.
2521 UNREACHABLE();
2522 }
2523#endif // V8_ENABLE_WEBASSEMBLY
2524
2525 case StackFrame::MAGLEV:
2526 case StackFrame::TURBOFAN_JS: {
2527 // For optimized frames we perform a lookup in the handler table.
2528 if (!catchable_by_js) break;
2529 OptimizedJSFrame* opt_frame = static_cast<OptimizedJSFrame*>(frame);
2530 int offset = opt_frame->LookupExceptionHandlerInTable(nullptr, nullptr);
2531 if (offset < 0) break;
2532 // The code might be an optimized code or a turbofanned builtin.
2533 Tagged<Code> code = frame->LookupCode();
2534 // Compute the stack pointer from the frame pointer. This ensures
2535 // that argument slots on the stack are dropped as returning would.
2536 Address return_sp = frame->fp() +
2538 code->stack_slots() * kSystemPointerSize;
2539
2540 // TODO(bmeurer): Turbofanned BUILTIN frames appear as TURBOFAN_JS,
2541 // but do not have a code kind of TURBOFAN_JS.
2542 if (CodeKindCanDeoptimize(code->kind()) &&
2543 code->marked_for_deoptimization()) {
2544 // If the target code is lazy deoptimized, we jump to the original
2545 // return address, but we make a note that we are throwing, so
2546 // that the deoptimizer can do the right thing.
2547 offset = static_cast<int>(frame->pc() - code->instruction_start());
2549 }
2550
2551 return FoundHandler(
2552 iter, Context(), code->InstructionStart(this, frame->pc()), offset,
2553 code->constant_pool(), return_sp, frame->fp(), visited_frames);
2554 }
2555
2556 case StackFrame::STUB: {
2557 // Some stubs are able to handle exceptions.
2558 if (!catchable_by_js) break;
2559 StubFrame* stub_frame = static_cast<StubFrame*>(frame);
2560#if V8_ENABLE_WEBASSEMBLY
2561 DCHECK_NULL(wasm::GetWasmCodeManager()->LookupCode(this, frame->pc()));
2562#endif // V8_ENABLE_WEBASSEMBLY
2563
2564 // The code might be a dynamically generated stub or a turbofanned
2565 // embedded builtin.
2566 Tagged<Code> code = stub_frame->LookupCode();
2567 if (!code->is_turbofanned() || !code->has_handler_table()) {
2568 break;
2569 }
2570
2571 int offset = stub_frame->LookupExceptionHandlerInTable();
2572 if (offset < 0) break;
2573
2574 // Compute the stack pointer from the frame pointer. This ensures
2575 // that argument slots on the stack are dropped as returning would.
2576 Address return_sp = frame->fp() +
2578 code->stack_slots() * kSystemPointerSize;
2579
2580 return FoundHandler(
2581 iter, Context(), code->InstructionStart(this, frame->pc()), offset,
2582 code->constant_pool(), return_sp, frame->fp(), visited_frames);
2583 }
2584
2585 case StackFrame::INTERPRETED:
2586 case StackFrame::BASELINE: {
2587 // For interpreted frame we perform a range lookup in the handler table.
2588 if (!catchable_by_js) break;
2591 js_frame->GetBytecodeArray()->register_count());
2592 int context_reg = 0; // Will contain register index holding context.
2593 int offset =
2594 js_frame->LookupExceptionHandlerInTable(&context_reg, nullptr);
2595 if (offset < 0) break;
2596 // Compute the stack pointer from the frame pointer. This ensures that
2597 // argument slots on the stack are dropped as returning would.
2598 // Note: This is only needed for interpreted frames that have been
2599 // materialized by the deoptimizer. If there is a handler frame
2600 // in between then {frame->sp()} would already be correct.
2601 Address return_sp = frame->fp() -
2603 register_slots * kSystemPointerSize;
2604
2605 // Patch the bytecode offset in the interpreted frame to reflect the
2606 // position of the exception handler. The special builtin below will
2607 // take care of continuing to dispatch at that position. Also restore
2608 // the correct context for the handler from the interpreter register.
2609 Tagged<Context> context =
2610 Cast<Context>(js_frame->ReadInterpreterRegister(context_reg));
2611 DCHECK(IsContext(context));
2612
2613 if (frame->is_baseline()) {
2614 BaselineFrame* sp_frame = BaselineFrame::cast(js_frame);
2615 Tagged<Code> code = sp_frame->LookupCode();
2616 intptr_t pc_offset = sp_frame->GetPCForBytecodeOffset(offset);
2617 // Patch the context register directly on the frame, so that we don't
2618 // need to have a context read + write in the baseline code.
2619 sp_frame->PatchContext(context);
2620 return FoundHandler(iter, Context(), code->instruction_start(),
2621 pc_offset, code->constant_pool(), return_sp,
2622 sp_frame->fp(), visited_frames);
2623 } else {
2625 static_cast<int>(offset));
2626
2627 Tagged<Code> code = *BUILTIN_CODE(this, InterpreterEnterAtBytecode);
2628 // We subtract a frame from visited_frames because otherwise the
2629 // shadow stack will drop the underlying interpreter entry trampoline
2630 // in which the handler runs.
2631 //
2632 // An interpreted frame cannot be the first frame we look at
2633 // because at a minimum, an exit frame into C++ has to separate
2634 // it and the context in which this C++ code runs.
2635 CHECK_GE(visited_frames, 1);
2636 return FoundHandler(iter, context, code->instruction_start(), 0,
2637 code->constant_pool(), return_sp, frame->fp(),
2638 visited_frames - 1);
2639 }
2640 }
2641
2642 case StackFrame::BUILTIN:
2643 // For builtin frames we are guaranteed not to find a handler.
2644 if (catchable_by_js) {
2645 CHECK_EQ(-1, BuiltinFrame::cast(frame)->LookupExceptionHandlerInTable(
2646 nullptr, nullptr));
2647 }
2648 break;
2649
2650 case StackFrame::JAVASCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
2651 // Builtin continuation frames with catch can handle exceptions.
2652 if (!catchable_by_js) break;
2655 js_frame->SetException(exception);
2656
2657 // Reconstruct the stack pointer from the frame pointer.
2658 Address return_sp = js_frame->fp() - js_frame->GetSPToFPDelta();
2659 Tagged<Code> code = js_frame->LookupCode();
2660 return FoundHandler(iter, Context(), code->instruction_start(), 0,
2661 code->constant_pool(), return_sp, frame->fp(),
2662 visited_frames);
2663 }
2664
2665 default:
2666 // All other types can not handle exception.
2667 break;
2668 }
2669
2670 if (frame->is_optimized_js()) {
2671 // Remove per-frame stored materialized objects.
2672 bool removed = materialized_object_store_->Remove(frame->fp());
2673 USE(removed);
2674 // If there were any materialized objects, the code should be
2675 // marked for deopt.
2676 DCHECK_IMPLIES(removed, frame->LookupCode()->marked_for_deoptimization());
2677 }
2678 }
2679
2680 UNREACHABLE();
2681} // namespace internal
2682
2683namespace {
2684
// Iterates over physical stack frames, additionally expanding each JavaScript
// frame into its individual FrameSummary entries (e.g. one per inlined
// function). Summaries are visited from the back of the summary list towards
// the front — presumably innermost inlined frame first; confirm against
// FrameSummaries ordering in frames.h.
class StackFrameSummaryIterator {
 public:
  explicit StackFrameSummaryIterator(Isolate* isolate)
      : stack_iterator_(isolate), summaries_(), index_(0) {
    InitSummaries();
  }
  // Steps to the next summary of the current frame, or — once the current
  // frame's summaries are exhausted (index_ == 0) — to the next stack frame.
  void Advance() {
    if (index_ == 0) {
      summaries_.frames.clear();
      stack_iterator_.Advance();
      InitSummaries();
    } else {
      index_--;
    }
  }
  bool done() const { return stack_iterator_.done(); }
  StackFrame* frame() const { return stack_iterator_.frame(); }
  // True only for JavaScript frames (for which InitSummaries populated
  // summaries_ and set index_ to the last valid slot).
  bool has_frame_summary() const { return index_ < summaries_.size(); }
  const FrameSummary& frame_summary() const {
    DCHECK(has_frame_summary());
    return summaries_.frames[index_];
  }
  Isolate* isolate() const { return stack_iterator_.isolate(); }

 private:
  // (Re-)computes the summary list for the current frame; non-JavaScript
  // frames keep summaries_ empty and index_ at 0.
  void InitSummaries() {
    if (!done() && frame()->is_javascript()) {
      summaries_ = JavaScriptFrame::cast(frame())->Summarize();
      DCHECK_GT(summaries_.size(), 0);
      index_ = summaries_.size() - 1;
    }
  }
  StackFrameIterator stack_iterator_;
  FrameSummaries summaries_;
  // Index of the summary to report next; counts down to 0 within a frame.
  int index_;
};
2721
// Maps a builtin id to a catch prediction: builtins in the promise-related
// list expand to cases returning HandlerTable::PROMISE via the CASE macro.
// NOTE(review): the rendered listing elided the macro-application line after
// the CASE definition and the default-return line — see upstream isolate.cc
// for the full switch body.
HandlerTable::CatchPrediction CatchPredictionFor(Builtin builtin_id) {
  switch (builtin_id) {
#define CASE(Name)       \
  case Builtin::k##Name: \
    return HandlerTable::PROMISE;
#undef CASE
  default:
  }
}
2733
2734HandlerTable::CatchPrediction PredictExceptionFromBytecode(
2735 Tagged<BytecodeArray> bytecode, int code_offset) {
2736 HandlerTable table(bytecode);
2737 int handler_index = table.LookupHandlerIndexForRange(code_offset);
2738 if (handler_index < 0) return HandlerTable::UNCAUGHT;
2739 return table.GetRangePrediction(handler_index);
2740}
2741
// Predicts whether an exception thrown at |summary|'s position would be
// caught, by inspecting either the builtin's known prediction or the
// bytecode handler table of the interpreted function.
HandlerTable::CatchPrediction PredictException(const FrameSummary& summary,
                                               Isolate* isolate) {
  if (!summary.IsJavaScript()) {
    // This can happen when WASM is inlined by TurboFan. For now we ignore
    // frames that are not JavaScript.
    // TODO(https://crbug.com/349588762): We should also check Wasm code
    // for exception handling.
    // NOTE(review): the rendered listing elided the return statement of this
    // branch (likely `return HandlerTable::UNCAUGHT;`) — confirm upstream.
  }
  PtrComprCageBase cage_base(isolate);
  DirectHandle<AbstractCode> code = summary.AsJavaScript().abstract_code();
  if (code->kind(cage_base) == CodeKind::BUILTIN) {
    return CatchPredictionFor(code->GetCode()->builtin_id());
  }

  // Must have been constructed from a bytecode array.
  CHECK_EQ(CodeKind::INTERPRETED_FUNCTION, code->kind(cage_base));
  return PredictExceptionFromBytecode(code->GetBytecodeArray(),
                                      summary.code_offset());
}
2762
2763HandlerTable::CatchPrediction PredictExceptionFromGenerator(
2764 DirectHandle<JSGeneratorObject> generator, Isolate* isolate) {
2765 return PredictExceptionFromBytecode(
2766 generator->function()->shared()->GetBytecodeArray(isolate),
2767 GetGeneratorBytecodeOffset(generator));
2768}
2769
// Translates a HandlerTable::CatchPrediction into the coarser
// Isolate::CatchType classification used by PredictExceptionCatcher().
// NOTE(review): the rendered listing elided the case labels of this switch
// (UNCAUGHT -> NOT_CAUGHT, plus the CAUGHT/PROMISE/ASYNC_AWAIT mappings) —
// see upstream isolate.cc for the complete mapping.
Isolate::CatchType ToCatchType(HandlerTable::CatchPrediction prediction) {
  switch (prediction) {
      return Isolate::NOT_CAUGHT;
    default:
      UNREACHABLE();
  }
}
2785
// Inspects a single frame (via the summary iterator) and classifies whether
// an exception unwinding through it would be caught there; returns NOT_CAUGHT
// when this frame cannot handle the exception.
Isolate::CatchType PredictExceptionCatchAtFrame(
    const StackFrameSummaryIterator& iterator) {
  const StackFrame* frame = iterator.frame();
  switch (frame->type()) {
    case StackFrame::ENTRY:
    case StackFrame::CONSTRUCT_ENTRY: {
      Address external_handler =
          iterator.isolate()->thread_local_top()->try_catch_handler_address();
      Address entry_handler = frame->top_handler()->next_address();
      // The exception has been externally caught if and only if there is an
      // external handler which is on top of the top-most JS_ENTRY handler.
      if (external_handler != kNullAddress &&
          !iterator.isolate()->try_catch_handler()->IsVerbose()) {
        if (entry_handler == kNullAddress || entry_handler > external_handler) {
          // NOTE(review): the rendered listing elided the return here (likely
          // `return Isolate::CAUGHT_BY_EXTERNAL;`) — confirm upstream.
        }
      }
    } break;

    // For JavaScript frames we perform a lookup in the handler table.
    case StackFrame::INTERPRETED:
    case StackFrame::BASELINE:
    case StackFrame::TURBOFAN_JS:
    case StackFrame::MAGLEV:
    case StackFrame::BUILTIN: {
      DCHECK(iterator.has_frame_summary());
      return ToCatchType(
          PredictException(iterator.frame_summary(), iterator.isolate()));
    }

    case StackFrame::STUB: {
      // Only turbofanned builtins with a handler table can catch here.
      Tagged<Code> code = *frame->LookupCode();
      if (code->kind() != CodeKind::BUILTIN || !code->has_handler_table() ||
          !code->is_turbofanned()) {
        break;
      }

      return ToCatchType(CatchPredictionFor(code->builtin_id()));
    }

    case StackFrame::JAVASCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
      // Builtin continuation-with-catch frames catch according to the
      // prediction of the continued builtin.
      Tagged<Code> code = *frame->LookupCode();
      return ToCatchType(CatchPredictionFor(code->builtin_id()));
    }

    default:
      // All other types can not handle exception.
      break;
  }
  return Isolate::NOT_CAUGHT;
}
2837} // anonymous namespace
2838
2842 return CAUGHT_BY_EXTERNAL;
2843 }
2844
2845 // Search for an exception handler by performing a full walk over the stack.
2846 for (StackFrameSummaryIterator iter(this); !iter.done(); iter.Advance()) {
2847 Isolate::CatchType prediction = PredictExceptionCatchAtFrame(iter);
2848 if (prediction != NOT_CAUGHT) return prediction;
2849 }
2850
2851 // Handler not found.
2852 return NOT_CAUGHT;
2853}
2854
2856 if (v8_flags.stack_trace_on_illegal) PrintStack(stdout);
2857 return Throw(ReadOnlyRoots(heap()).illegal_access_string());
2858}
2859
2861 std::ostream& out,
2862 PrintCurrentStackTraceFilterCallback should_include_frame_callback) {
2863 DirectHandle<FixedArray> frames = CaptureSimpleStackTrace(
2864 this, FixedArray::kMaxLength, SKIP_NONE, factory()->undefined_value());
2865
2866 IncrementalStringBuilder builder(this);
2867 for (int i = 0; i < frames->length(); ++i) {
2868 DirectHandle<CallSiteInfo> frame(Cast<CallSiteInfo>(frames->get(i)), this);
2869
2870 if (should_include_frame_callback) {
2871 Tagged<Object> raw_script_name = frame->GetScriptNameOrSourceURL();
2872 v8::Local<v8::String> script_name_local;
2873 v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(this);
2874
2875 if (IsString(raw_script_name)) {
2876 DirectHandle<String> script_name =
2877 Handle<String>(Cast<String>(raw_script_name), this);
2878 script_name_local = v8::Utils::ToLocal(script_name);
2879 } else {
2880 script_name_local = v8::String::Empty(v8_isolate);
2881 }
2882
2883 if (should_include_frame_callback(v8_isolate, script_name_local)) {
2884 SerializeCallSiteInfo(this, frame, &builder);
2885 } else {
2886 builder.AppendString("<redacted>");
2887 }
2888 } else {
2889 SerializeCallSiteInfo(this, frame, &builder);
2890 }
2891
2892 if (i != frames->length() - 1) builder.AppendCharacter('\n');
2893 }
2894
2895 DirectHandle<String> stack_trace = builder.Finish().ToHandleChecked();
2896 stack_trace->PrintOn(out);
2897}
2898
2901 if (it.done()) return false;
2902 // Compute the location from the function and the relocation info of the
2903 // baseline code. For optimized code this will use the deoptimization
2904 // information to get canonical location information.
2905#if V8_ENABLE_WEBASSEMBLY
2906 wasm::WasmCodeRefScope code_ref_scope;
2907#endif // V8_ENABLE_WEBASSEMBLY
2908 FrameSummary summary = it.GetTopValidFrame();
2910 Handle<Object> script = summary.script();
2911 if (!IsScript(*script) ||
2912 IsUndefined(Cast<Script>(*script)->source(), this)) {
2913 return false;
2914 }
2915
2916 if (summary.IsJavaScript()) {
2917 shared = handle(summary.AsJavaScript().function()->shared(), this);
2918 }
2919 if (summary.AreSourcePositionsAvailable()) {
2920 int pos = summary.SourcePosition();
2921 *target = MessageLocation(Cast<Script>(script), pos, pos + 1, shared);
2922 } else {
2923 *target =
2924 MessageLocation(Cast<Script>(script), shared, summary.code_offset());
2925 }
2926 return true;
2927}
2928
2930 DirectHandle<Object> exception) {
2931 if (!IsJSObject(*exception)) return false;
2932
2933 DirectHandle<Name> start_pos_symbol = factory()->error_start_pos_symbol();
2935 this, Cast<JSObject>(exception), start_pos_symbol);
2936 if (!IsSmi(*start_pos)) return false;
2937 int start_pos_value = Cast<Smi>(*start_pos).value();
2938
2939 DirectHandle<Name> end_pos_symbol = factory()->error_end_pos_symbol();
2941 this, Cast<JSObject>(exception), end_pos_symbol);
2942 if (!IsSmi(*end_pos)) return false;
2943 int end_pos_value = Cast<Smi>(*end_pos).value();
2944
2945 DirectHandle<Name> script_symbol = factory()->error_script_symbol();
2947 this, Cast<JSObject>(exception), script_symbol);
2948 if (!IsScript(*script)) return false;
2949
2950 Handle<Script> cast_script(Cast<Script>(*script), this);
2951 *target = MessageLocation(cast_script, start_pos_value, end_pos_value);
2952 return true;
2953}
2954
2956 MessageLocation* target, DirectHandle<Object> exception) {
2957 if (!IsJSReceiver(*exception)) {
2958 return false;
2959 }
2960 DirectHandle<FixedArray> call_site_infos =
2962 for (int i = 0; i < call_site_infos->length(); ++i) {
2963 DirectHandle<CallSiteInfo> call_site_info(
2964 Cast<CallSiteInfo>(call_site_infos->get(i)), this);
2965 if (CallSiteInfo::ComputeLocation(call_site_info, target)) {
2966 return true;
2967 }
2968 }
2969 return false;
2970}
2971
2973 MessageLocation* target, DirectHandle<Object> exception) {
2974 if (!IsJSReceiver(*exception)) return false;
2975
2976 DirectHandle<StackTraceInfo> stack_trace =
2978 if (stack_trace.is_null() || stack_trace->length() == 0) {
2979 return false;
2980 }
2981
2982 DirectHandle<StackFrameInfo> info(stack_trace->get(0), this);
2983 const int pos = StackFrameInfo::GetSourcePosition(info);
2984 *target = MessageLocation(handle(info->script(), this), pos, pos + 1);
2985 return true;
2986}
2987
2989 MessageLocation* location) {
2990 DirectHandle<StackTraceInfo> stack_trace;
2992 if (IsJSObject(*exception)) {
2993 // First, check whether a stack trace is already present on this object.
2994 // It maybe an Error, or the embedder may have stored a stack trace using
2995 // Exception::CaptureStackTrace().
2996 // If the lookup fails, we fall through and capture the stack trace
2997 // at this throw site.
2998 stack_trace = GetDetailedStackTrace(Cast<JSObject>(exception));
2999 }
3000 if (stack_trace.is_null()) {
3001 // Not an error object, we capture stack and location at throw site.
3002 stack_trace = CaptureDetailedStackTrace(
3005 }
3006 }
3007 MessageLocation computed_location;
3008 if (location == nullptr &&
3009 (ComputeLocationFromException(&computed_location, exception) ||
3010 ComputeLocationFromSimpleStackTrace(&computed_location, exception) ||
3011 ComputeLocation(&computed_location))) {
3012 location = &computed_location;
3013 }
3014
3016 MessageTemplate::kUncaughtException,
3017 location, exception, stack_trace);
3018}
3019
3021 DirectHandle<Object> exception) {
3022 DirectHandle<StackTraceInfo> stack_trace;
3023 if (IsJSError(*exception)) {
3024 stack_trace = GetDetailedStackTrace(Cast<JSObject>(exception));
3025 }
3026
3027 MessageLocation* location = nullptr;
3028 MessageLocation computed_location;
3029 if (ComputeLocationFromException(&computed_location, exception) ||
3030 ComputeLocationFromDetailedStackTrace(&computed_location, exception)) {
3031 location = &computed_location;
3032 }
3033
3035 MessageTemplate::kPlaceholderOnly,
3036 location, exception, stack_trace);
3037}
3038
3040 Tagged<Object> exception) {
3041 DCHECK_NE(ReadOnlyRoots(heap()).the_hole_value(), exception);
3042
3044 Address external_handler = thread_local_top()->try_catch_handler_address();
3045
3046 // A handler cannot be on top if it doesn't exist. For uncatchable exceptions,
3047 // the JavaScript handler cannot be on top.
3048 if (js_handler == kNullAddress || !is_catchable_by_javascript(exception)) {
3049 if (external_handler == kNullAddress) {
3051 }
3053 }
3054
3055 if (external_handler == kNullAddress) {
3057 }
3058
3059 // The exception has been externally caught if and only if there is an
3060 // external handler which is on top of the top-most JS_ENTRY handler.
3061 //
3062 // Note, that finally clauses would re-throw an exception unless it's aborted
3063 // by jumps in control flow (like return, break, etc.) and we'll have another
3064 // chance to set proper v8::TryCatch later.
3065 DCHECK_NE(kNullAddress, external_handler);
3066 DCHECK_NE(kNullAddress, js_handler);
3067 if (external_handler < js_handler) {
3069 }
3071}
3072
3073std::vector<MemoryRange>* Isolate::GetCodePages() const {
3074 return code_pages_.load(std::memory_order_acquire);
3075}
3076
3077void Isolate::SetCodePages(std::vector<MemoryRange>* new_code_pages) {
3078 code_pages_.store(new_code_pages, std::memory_order_release);
3079}
3080
3082 Tagged<Object> exception_obj = exception();
3083 ExceptionHandlerType top_handler = TopExceptionHandlerType(exception_obj);
3084
3085 // Try to propagate the exception to an external v8::TryCatch handler. If
3086 // propagation was unsuccessful, then we will get another chance at reporting
3087 // the pending message if the exception is re-thrown.
3088 bool has_been_propagated = PropagateExceptionToExternalTryCatch(top_handler);
3089 if (!has_been_propagated) return;
3090 if (!report) return;
3091
3092 DCHECK(AllowExceptions::IsAllowed(this));
3093
3094 // The embedder might run script in response to an exception.
3095 AllowJavascriptExecutionDebugOnly allow_script(this);
3096
3097 // Clear the pending message object early to avoid endless recursion.
3098 Tagged<Object> message_obj = pending_message();
3100
3101 // For uncatchable exceptions we do nothing. If needed, the exception and the
3102 // message have already been propagated to v8::TryCatch.
3103 if (!is_catchable_by_javascript(exception_obj)) return;
3104
3105 // Determine whether the message needs to be reported to all message handlers
3106 // depending on whether the topmost external v8::TryCatch is verbose. We know
3107 // there's no JavaScript handler on top; if there was, we would've returned
3108 // early.
3110
3111 bool should_report_exception;
3112 if (top_handler == ExceptionHandlerType::kExternalTryCatch) {
3113 should_report_exception = try_catch_handler()->is_verbose_;
3114 } else {
3115 should_report_exception = true;
3116 }
3117
3118 // Actually report the pending message to all message handlers.
3119 if (!IsTheHole(message_obj, this) && should_report_exception) {
3120 HandleScope scope(this);
3122 this);
3123 Handle<Script> script(message->script(), this);
3124 // Clear the exception and restore it afterwards, otherwise
3125 // CollectSourcePositions will abort.
3126 {
3127 ExceptionScope exception_scope(this);
3129 }
3130 int start_pos = message->GetStartPosition();
3131 int end_pos = message->GetEndPosition();
3132 MessageLocation location(script, start_pos, end_pos);
3133 MessageHandler::ReportMessage(this, &location, message);
3134 }
3135}
3136
3137namespace {
3138bool ReceiverIsForwardingHandler(Isolate* isolate,
3139 DirectHandle<JSReceiver> handler) {
3140 // Recurse to the forwarding Promise (e.g. return false) due to
3141 // - await reaction forwarding to the throwaway Promise, which has
3142 // a dependency edge to the outer Promise.
3143 // - PromiseIdResolveHandler forwarding to the output of .then
3144 // - Promise.all/Promise.race forwarding to a throwaway Promise, which
3145 // has a dependency edge to the generated outer Promise.
3146 // Otherwise, this is a real reject handler for the Promise.
3148 isolate->factory()->promise_forwarding_handler_symbol();
3149 DirectHandle<Object> forwarding_handler =
3150 JSReceiver::GetDataProperty(isolate, handler, key);
3151 return !IsUndefined(*forwarding_handler, isolate);
3152}
3153
3154bool WalkPromiseTreeInternal(
3155 Isolate* isolate, DirectHandle<JSPromise> promise,
3156 const std::function<void(Isolate::PromiseHandler)>& callback) {
3157 if (promise->status() != Promise::kPending) {
3158 // If a rejection reaches an exception that isn't pending, it will be
3159 // treated as caught.
3160 return true;
3161 }
3162
3163 bool any_caught = false;
3164 bool any_uncaught = false;
3165 DirectHandle<Object> current(promise->reactions(), isolate);
3166 while (!IsSmi(*current)) {
3167 auto reaction = Cast<PromiseReaction>(current);
3168 DirectHandle<HeapObject> promise_or_capability(
3169 reaction->promise_or_capability(), isolate);
3170 if (!IsUndefined(*promise_or_capability, isolate)) {
3171 if (!IsJSPromise(*promise_or_capability)) {
3172 promise_or_capability = direct_handle(
3173 Cast<PromiseCapability>(promise_or_capability)->promise(), isolate);
3174 }
3175 if (IsJSPromise(*promise_or_capability)) {
3176 DirectHandle<JSPromise> next_promise =
3177 Cast<JSPromise>(promise_or_capability);
3178 bool caught = false;
3179 DirectHandle<JSReceiver> reject_handler;
3180 if (!IsUndefined(reaction->reject_handler(), isolate)) {
3181 reject_handler = direct_handle(
3182 Cast<JSReceiver>(reaction->reject_handler()), isolate);
3183 if (!ReceiverIsForwardingHandler(isolate, reject_handler) &&
3184 !IsBuiltinForwardingRejectHandler(isolate, *reject_handler)) {
3185 caught = true;
3186 }
3187 }
3188 // Pass each handler to the callback
3189 DirectHandle<JSGeneratorObject> async_function;
3190 if (TryGetAsyncGenerator(isolate, reaction).ToHandle(&async_function)) {
3191 caught = caught ||
3192 PredictExceptionFromGenerator(async_function, isolate) ==
3194 // Look at the async function, not the individual handlers
3195 callback({async_function->function()->shared(), true});
3196 } else {
3197 // Not an async function, look at individual handlers
3198 if (!IsUndefined(reaction->fulfill_handler(), isolate)) {
3199 DirectHandle<JSReceiver> fulfill_handler(
3200 Cast<JSReceiver>(reaction->fulfill_handler()), isolate);
3201 if (!ReceiverIsForwardingHandler(isolate, fulfill_handler)) {
3202 if (IsBuiltinFunction(isolate, *fulfill_handler,
3203 Builtin::kPromiseThenFinally)) {
3204 // If this is the finally handler, get the wrapped callback
3205 // from the context to use instead
3206 DirectHandle<Context> context(
3207 Cast<JSFunction>(reaction->fulfill_handler())->context(),
3208 isolate);
3209 int const index =
3211 fulfill_handler = direct_handle(
3212 Cast<JSReceiver>(context->get(index)), isolate);
3213 }
3214 if (IsJSFunction(*fulfill_handler)) {
3215 callback({Cast<JSFunction>(fulfill_handler)->shared(), true});
3216 }
3217 }
3218 }
3219 if (caught) {
3220 // We've already checked that this isn't undefined or
3221 // a forwarding handler
3222 if (IsJSFunction(*reject_handler)) {
3223 callback({Cast<JSFunction>(reject_handler)->shared(), true});
3224 }
3225 }
3226 }
3227 caught =
3228 caught || WalkPromiseTreeInternal(isolate, next_promise, callback);
3229 any_caught = any_caught || caught;
3230 any_uncaught = any_uncaught || !caught;
3231 }
3232 } else {
3233#if V8_ENABLE_WEBASSEMBLY
3234 DirectHandle<WasmSuspenderObject> suspender;
3235 if (TryGetWasmSuspender(isolate, reaction->fulfill_handler())
3236 .ToHandle(&suspender)) {
3237 // If in the future we support Wasm exceptions or ignore listing in
3238 // Wasm, we will need to iterate through these frames. For now, we
3239 // only care about the resulting promise.
3240 DirectHandle<JSPromise> next_promise(suspender->promise(), isolate);
3241 bool caught = WalkPromiseTreeInternal(isolate, next_promise, callback);
3242 any_caught = any_caught || caught;
3243 any_uncaught = any_uncaught || !caught;
3244 }
3245#endif // V8_ENABLE_WEBASSEMBLY
3246 }
3247 current = direct_handle(reaction->next(), isolate);
3248 }
3249
3250 bool caught = any_caught && !any_uncaught;
3251
3252 if (!caught) {
3253 // If there is an outer promise, follow that to see if it is caught.
3254 DirectHandle<Symbol> key = isolate->factory()->promise_handled_by_symbol();
3255 DirectHandle<Object> outer_promise_obj =
3256 JSObject::GetDataProperty(isolate, promise, key);
3257 if (IsJSPromise(*outer_promise_obj)) {
3258 return WalkPromiseTreeInternal(
3259 isolate, Cast<JSPromise>(outer_promise_obj), callback);
3260 }
3261 }
3262 return caught;
3263}
3264
3265// Helper functions to scan for calls to .catch.
3267using interpreter::Bytecodes;
3268
3269enum PromiseMethod { kThen, kCatch, kFinally, kInvalid };
3270
3271// Requires the iterator to be on a GetNamedProperty instruction
3272PromiseMethod GetPromiseMethod(
3273 Isolate* isolate, const interpreter::BytecodeArrayIterator& iterator) {
3274 DirectHandle<Object> object = iterator.GetConstantForIndexOperand(1, isolate);
3275 if (!IsString(*object)) {
3276 return kInvalid;
3277 }
3278 auto str = Cast<String>(object);
3279 if (str->Equals(ReadOnlyRoots(isolate).then_string())) {
3280 return kThen;
3281 } else if (str->IsEqualTo(base::StaticCharVector("catch"))) {
3282 return kCatch;
3283 } else if (str->IsEqualTo(base::StaticCharVector("finally"))) {
3284 return kFinally;
3285 } else {
3286 return kInvalid;
3287 }
3288}
3289
3290bool TouchesRegister(const interpreter::BytecodeArrayIterator& iterator,
3291 int index) {
3292 Bytecode bytecode = iterator.current_bytecode();
3293 int num_operands = Bytecodes::NumberOfOperands(bytecode);
3294 const interpreter::OperandType* operand_types =
3295 Bytecodes::GetOperandTypes(bytecode);
3296
3297 for (int i = 0; i < num_operands; ++i) {
3298 if (Bytecodes::IsRegisterOperandType(operand_types[i])) {
3299 int base_index = iterator.GetRegisterOperand(i).index();
3300 int num_registers;
3301 if (Bytecodes::IsRegisterListOperandType(operand_types[i])) {
3302 num_registers = iterator.GetRegisterCountOperand(++i);
3303 } else {
3304 num_registers =
3305 Bytecodes::GetNumberOfRegistersRepresentedBy(operand_types[i]);
3306 }
3307
3308 if (base_index <= index && index < base_index + num_registers) {
3309 return true;
3310 }
3311 }
3312 }
3313
3314 if (Bytecodes::WritesImplicitRegister(bytecode)) {
3315 return iterator.GetStarTargetRegister().index() == index;
3316 }
3317
3318 return false;
3319}
3320
// Scans bytecode starting at |offset| (which is expected to be a call-like
// instruction whose result is the promise of interest) and returns true if
// that result provably has .catch() — or a two-argument .then() — invoked on
// it. Chained calls (p.then(...).catch(...)) are followed; scanning bails out
// conservatively (returns false) on any control flow or register clobbering
// it cannot reason about.
bool CallsCatchMethod(Isolate* isolate, Handle<BytecodeArray> bytecode_array,
                      int offset) {
  interpreter::BytecodeArrayIterator iterator(bytecode_array, offset);

  while (!iterator.done()) {
    // We should be on a call instruction of some kind. While we could check
    // this, it may be difficult to create an exhaustive list of instructions
    // that could call, such as property getters, but at a minimum this
    // instruction should write to the accumulator.
    if (!Bytecodes::WritesAccumulator(iterator.current_bytecode())) {
      return false;
    }

    iterator.Advance();
    // While usually the next instruction is a Star, sometimes we store and
    // reload from context first.
    if (iterator.done()) {
      return false;
    }
    if (iterator.current_bytecode() == Bytecode::kStaCurrentContextSlot) {
      // Step over patterns like:
      //     StaCurrentContextSlot [x]
      //     LdaImmutableCurrentContextSlot [x]
      // The reload must target the same slot, otherwise we lose track of the
      // promise value and give up.
      unsigned int slot = iterator.GetIndexOperand(0);
      iterator.Advance();
      if (!iterator.done() &&
          (iterator.current_bytecode() ==
               Bytecode::kLdaImmutableCurrentContextSlot ||
           iterator.current_bytecode() == Bytecode::kLdaCurrentContextSlot)) {
        if (iterator.GetIndexOperand(0) != slot) {
          return false;
        }
        iterator.Advance();
      }
    } else if (iterator.current_bytecode() == Bytecode::kStaContextSlot) {
      // Step over patterns like:
      //     StaContextSlot r_x [y] [z]
      //     LdaContextSlot r_x [y] [z]
      // Context register, slot index and depth must all match on reload.
      int context = iterator.GetRegisterOperand(0).index();
      unsigned int slot = iterator.GetIndexOperand(1);
      unsigned int depth = iterator.GetUnsignedImmediateOperand(2);
      iterator.Advance();
      if (!iterator.done() &&
          (iterator.current_bytecode() == Bytecode::kLdaImmutableContextSlot ||
           iterator.current_bytecode() == Bytecode::kLdaContextSlot)) {
        if (iterator.GetRegisterOperand(0).index() != context ||
            iterator.GetIndexOperand(1) != slot ||
            iterator.GetUnsignedImmediateOperand(2) != depth) {
          return false;
        }
        iterator.Advance();
      }
    } else if (iterator.current_bytecode() == Bytecode::kStaLookupSlot) {
      // Step over patterns like:
      //     StaLookupSlot [x] [_]
      //     LdaLookupSlot [x]
      unsigned int slot = iterator.GetIndexOperand(0);
      iterator.Advance();
      if (!iterator.done() &&
          (iterator.current_bytecode() == Bytecode::kLdaLookupSlot ||
           iterator.current_bytecode() ==
               Bytecode::kLdaLookupSlotInsideTypeof)) {
        if (iterator.GetIndexOperand(0) != slot) {
          return false;
        }
        iterator.Advance();
      }
    }

    // Next instruction should be a Star (store accumulator to register)
    if (iterator.done() || !Bytecodes::IsAnyStar(iterator.current_bytecode())) {
      return false;
    }
    // The register it stores to will be assumed to be our promise
    int promise_register = iterator.GetStarTargetRegister().index();

    // TODO(crbug/40283993): Should we loop over non-matching instructions here
    // to allow code like
    // `const promise = foo(); console.log(...); promise.catch(...);`?

    iterator.Advance();
    // We should be on a GetNamedProperty instruction.
    if (iterator.done() ||
        iterator.current_bytecode() != Bytecode::kGetNamedProperty ||
        iterator.GetRegisterOperand(0).index() != promise_register) {
      return false;
    }
    PromiseMethod method = GetPromiseMethod(isolate, iterator);
    if (method == kInvalid) {
      return false;
    }

    iterator.Advance();
    // Next instruction should be a Star (save immediate to register)
    if (iterator.done() || !Bytecodes::IsAnyStar(iterator.current_bytecode())) {
      return false;
    }
    // This register contains the method we will eventually invoke
    int method_register = iterator.GetStarTargetRegister().index();
    if (method_register == promise_register) {
      return false;
    }

    // Now we step over multiple instructions creating the arguments for the
    // method.
    while (true) {
      iterator.Advance();
      if (iterator.done()) {
        return false;
      }
      Bytecode bytecode = iterator.current_bytecode();
      if (bytecode == Bytecode::kCallProperty1 ||
          bytecode == Bytecode::kCallProperty2) {
        // This is a call property call of the right size, but is it a call of
        // the method and on the promise?
        if (iterator.GetRegisterOperand(0).index() == method_register &&
            iterator.GetRegisterOperand(1).index() == promise_register) {
          // This is our method call, but does it catch?
          // .catch(...) always catches; .then(onFulfilled, onRejected) only
          // catches when the two-argument form (CallProperty2) is used.
          if (method == kCatch ||
              (method == kThen && bytecode == Bytecode::kCallProperty2)) {
            return true;
          }
          // Break out of the inner loop, continuing the outer loop. We
          // will use the same procedure to check for chained method calls.
          break;
        }
      }

      // Check for some instructions that should make us give up scanning.
      if (Bytecodes::IsJump(bytecode) || Bytecodes::IsSwitch(bytecode) ||
          Bytecodes::Returns(bytecode) ||
          Bytecodes::UnconditionallyThrows(bytecode)) {
        // Stop scanning at control flow instructions that aren't calls
        return false;
      }

      if (TouchesRegister(iterator, promise_register) ||
          TouchesRegister(iterator, method_register)) {
        // Stop scanning at instruction that unexpectedly interacts with one of
        // the registers we care about.
        return false;
      }
    }
  }
  return false;
}
3467
3468bool CallsCatchMethod(const StackFrameSummaryIterator& iterator) {
3469 if (!iterator.frame()->is_javascript()) {
3470 return false;
3471 }
3472 if (iterator.frame_summary().IsJavaScript()) {
3473 auto& js_summary = iterator.frame_summary().AsJavaScript();
3474 if (IsBytecodeArray(*js_summary.abstract_code())) {
3475 if (CallsCatchMethod(iterator.isolate(),
3476 Cast<BytecodeArray>(js_summary.abstract_code()),
3477 js_summary.code_offset())) {
3478 return true;
3479 }
3480 }
3481 }
3482 return false;
3483}
3484
3485} // namespace
3486
3488 MaybeDirectHandle<JSPromise> rejected_promise,
3489 const std::function<void(PromiseHandler)>& callback) {
3490 bool is_promise_rejection = false;
3491
3493 if (rejected_promise.ToHandle(&promise)) {
3494 is_promise_rejection = true;
3495 // If the promise has reactions, follow them and assume we are done. If
3496 // it has no reactions, assume promise is returned up the call stack and
3497 // trace accordingly. If the promise is not pending, it has no reactions
3498 // and is probably the result of a call to Promise.reject().
3499 if (promise->status() != Promise::kPending) {
3500 // Ignore this promise; set to null
3501 rejected_promise = MaybeDirectHandle<JSPromise>();
3502 } else if (IsSmi(promise->reactions())) {
3503 // Also check that there is no outer promise
3504 DirectHandle<Symbol> key = factory()->promise_handled_by_symbol();
3505 if (!IsJSPromise(*JSObject::GetDataProperty(this, promise, key))) {
3506 // Ignore this promise; set to null
3507 rejected_promise = MaybeDirectHandle<JSPromise>();
3508 }
3509 }
3510 }
3511
3512 if (!is_promise_rejection && TopExceptionHandlerType(Tagged<Object>()) ==
3514 return true; // caught by external
3515 }
3516
3517 // Search for an exception handler by performing a full walk over the stack.
3518 for (StackFrameSummaryIterator iter(this); !iter.done(); iter.Advance()) {
3519 Isolate::CatchType prediction = PredictExceptionCatchAtFrame(iter);
3520
3521 bool caught;
3522 if (rejected_promise.is_null()) {
3523 switch (prediction) {
3524 case NOT_CAUGHT:
3525 // Uncaught unless this is a promise rejection and the code will call
3526 // .catch()
3527 caught = is_promise_rejection && CallsCatchMethod(iter);
3528 break;
3530 // Uncaught unless this is a promise rejection and the code will call
3531 // .catch()
3532 caught = is_promise_rejection && CallsCatchMethod(iter);
3533 // Exceptions turn into promise rejections here
3534 is_promise_rejection = true;
3535 break;
3536 case CAUGHT_BY_PROMISE:
3537 // Exceptions turn into promise rejections here
3538 // TODO(leese): Perhaps we can handle the case where the reject method
3539 // is called in the promise constructor and it is still on the stack
3540 // by ignoring all try/catches on the stack until we get to the right
3541 // CAUGHT_BY_PROMISE?
3542 is_promise_rejection = true;
3543 caught = false;
3544 break;
3545 case CAUGHT_BY_EXTERNAL:
3546 caught = !is_promise_rejection;
3547 break;
3549 caught = true;
3550 // Unless this is a promise rejection and the function is not async...
3551 DCHECK(iter.has_frame_summary());
3552 const FrameSummary& summary = iter.frame_summary();
3553 if (is_promise_rejection && summary.IsJavaScript()) {
3554 // If the catch happens in an async function, assume it will
3555 // await this promise. Alternately, if the code will call .catch,
3556 // assume it is on this promise.
3557 caught = IsAsyncFunction(iter.frame_summary()
3558 .AsJavaScript()
3559 .function()
3560 ->shared()
3561 ->kind()) ||
3562 CallsCatchMethod(iter);
3563 }
3564 break;
3565 }
3566 } else {
3567 // The frame that calls the reject handler will not catch that promise
3568 // regardless of what else it does. We will trace where this rejection
3569 // goes according to its reaction callbacks, but we first need to handle
3570 // the topmost debuggable frame just to ensure there is a debuggable
3571 // frame and to permit ignore listing there.
3572 caught = false;
3573 }
3574
3575 if (iter.frame()->is_javascript()) {
3576 bool debuggable = false;
3577 DCHECK(iter.has_frame_summary());
3578 const FrameSummary& summary = iter.frame_summary();
3579 if (summary.IsJavaScript()) {
3580 const auto& info = summary.AsJavaScript().function()->shared();
3581 if (info->IsSubjectToDebugging()) {
3582 callback({*info, false});
3583 debuggable = true;
3584 }
3585 }
3586
3587 // Ignore the rest of the call stack if this is a rejection and the
3588 // promise has handlers; we will trace where the rejection goes instead
3589 // of where it came from.
3590 if (debuggable && !rejected_promise.is_null()) {
3591 break;
3592 }
3593 }
3594
3595 if (caught) {
3596 return true;
3597 }
3598 }
3599
3600 if (rejected_promise.is_null()) {
3601 // Now follow promises if this is a promise reaction job.
3602 rejected_promise = TryGetCurrentTaskPromise(this);
3603 }
3604
3605 if (rejected_promise.ToHandle(&promise)) {
3606 return WalkPromiseTreeInternal(this, promise, callback);
3607 }
3608 // Nothing caught.
3609 return false;
3610}
3611
3618
3622
3627
3630 direct_handle(context->global_object(), this);
3631 // If some fuzzer decided to make the global object non-extensible, then
3632 // we can't install any features (and would CHECK-fail if we tried).
3633 if (!global->map()->is_extensible()) return;
3634 DirectHandle<String> sab_name = factory()->SharedArrayBuffer_string();
3636 if (!JSObject::HasRealNamedProperty(this, global, sab_name)
3637 .FromMaybe(true)) {
3638 JSObject::AddProperty(this, global, factory()->SharedArrayBuffer_string(),
3639 shared_array_buffer_fun(), DONT_ENUM);
3640 }
3641 }
3642}
3643
3646 if (!v8_flags.enable_sharedarraybuffer_per_context) return true;
3647
3648 if (sharedarraybuffer_constructor_enabled_callback()) {
3649 v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
3650 return sharedarraybuffer_constructor_enabled_callback()(api_context);
3651 }
3652 return false;
3653}
3654
3656#ifdef V8_ENABLE_WEBASSEMBLY
3657 // If Wasm imported strings are explicitly enabled via a callback, also enable
3658 // stringref.
3659 v8::WasmImportedStringsEnabledCallback callback_imported_strings =
3660 wasm_imported_strings_enabled_callback();
3661 if (callback_imported_strings) {
3662 v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
3663 if (callback_imported_strings(api_context)) return true;
3664 }
3665 // Otherwise use the runtime flag.
3666 return v8_flags.experimental_wasm_stringref;
3667#else
3668 return false;
3669#endif
3670}
3671
3673#ifdef V8_ENABLE_WEBASSEMBLY
3674 if (v8_flags.wasm_jitless) return false;
3675
3676 v8::WasmJSPIEnabledCallback jspi_callback = wasm_jspi_enabled_callback();
3677 if (jspi_callback) {
3678 v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
3679 if (jspi_callback(api_context)) return true;
3680 }
3681
3682 // Otherwise use the runtime flag.
3683 return v8_flags.experimental_wasm_jspi;
3684#else
3685 return false;
3686#endif
3687}
3688
3690#ifdef V8_ENABLE_WEBASSEMBLY
3691 return IsWasmJSPIRequested(context) &&
3692 context->is_wasm_jspi_installed() != Smi::zero();
3693#else
3694 return false;
3695#endif
3696}
3697
3700#ifdef V8_ENABLE_WEBASSEMBLY
3702 wasm_imported_strings_enabled_callback();
3703 if (callback) {
3704 v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
3705 if (callback(api_context)) return true;
3706 }
3707 return v8_flags.experimental_wasm_imported_strings;
3708#else
3709 return false;
3710#endif
3711}
3712
3715
3716 // 1st candidate: most-recently-entered author function's context
3717 // if it's newer than the last Context::BackupIncumbentScope entry.
3718 //
3719 // NOTE: This code assumes that the stack grows downward.
3720 Address top_backup_incumbent =
3722 ? top_backup_incumbent_scope()->JSStackComparableAddressPrivate()
3723 : 0;
3724 if (!it.done() &&
3725 (!top_backup_incumbent || it.frame()->sp() < top_backup_incumbent)) {
3726 Tagged<Context> context = Cast<Context>(it.frame()->context());
3727 // If the topmost_script_having_context is set then it must be correct.
3728 if (DEBUG_BOOL && !topmost_script_having_context().is_null()) {
3730 context->native_context());
3731 }
3732 return DirectHandle<NativeContext>(context->native_context(), this);
3733 }
3735
3736 // 2nd candidate: the last Context::Scope's incumbent context if any.
3738 v8::Local<v8::Context> incumbent_context =
3739 top_backup_incumbent_scope()->backup_incumbent_context_;
3740 return Utils::OpenDirectHandle(*incumbent_context);
3741 }
3742
3743 // Last candidate: the entered context or microtask context.
3744 // Given that there is no other author function is running, there must be
3745 // no cross-context function running, then the incumbent realm must match
3746 // the entry realm.
3747 v8::Local<v8::Context> entered_context =
3748 reinterpret_cast<v8::Isolate*>(this)->GetEnteredOrMicrotaskContext();
3749 return Utils::OpenDirectHandle(*entered_context);
3750}
3751
3752char* Isolate::ArchiveThread(char* to) {
3753 MemCopy(to, reinterpret_cast<char*>(thread_local_top()),
3754 sizeof(ThreadLocalTop));
3755 return to + sizeof(ThreadLocalTop);
3756}
3757
3758char* Isolate::RestoreThread(char* from) {
3759 MemCopy(reinterpret_cast<char*>(thread_local_top()), from,
3760 sizeof(ThreadLocalTop));
3761 DCHECK(context().is_null() || IsContext(context()));
3762 return from + sizeof(ThreadLocalTop);
3763}
3764
3769 ManagedPtrDestructor* n = nullptr;
3771 for (; l != nullptr; l = n) {
3772 l->external_memory_accounter_.Decrease(
3773 reinterpret_cast<v8::Isolate*>(this), l->estimated_size_);
3774 l->destructor_(l->shared_ptr_ptr_);
3775 n = l->next_;
3776 delete l;
3777 }
3778 }
3779}
3780
3782 FullObjectSlot location(handle_location);
3783 FullObjectSlot first_root(builtin_table());
3784 FullObjectSlot last_root(first_root + Builtins::kBuiltinCount);
3785 if (location >= last_root) return false;
3786 if (location < first_root) return false;
3787 return true;
3788}
3789
3800
3803 if (destructor->prev_) {
3804 destructor->prev_->next_ = destructor->next_;
3805 } else {
3808 }
3809 if (destructor->next_) destructor->next_->prev_ = destructor->prev_;
3810 destructor->prev_ = nullptr;
3811 destructor->next_ = nullptr;
3812}
3813
3814// static
3816 ::heap::base::StackVisitor* visitor) {
3817 Isolate* isolate = Isolate::TryGetCurrent();
3818 if (!isolate) return;
3820}
3821
3822#if V8_ENABLE_WEBASSEMBLY
3824 auto stack = SimulatorStack::GetCentralStackView(this);
3825 Address stack_top = reinterpret_cast<Address>(stack.begin());
3826 Address stack_base = reinterpret_cast<Address>(stack.end());
3827 return stack_top < addr && addr <= stack_base;
3828}
3829
3831#if USE_SIMULATOR
3832 return IsOnCentralStack(Simulator::current(this)->get_sp());
3833#else
3835#endif
3836}
3837
3838void Isolate::AddSharedWasmMemory(
3839 DirectHandle<WasmMemoryObject> memory_object) {
3840 DirectHandle<WeakArrayList> shared_wasm_memories =
3841 factory()->shared_wasm_memories();
3842 shared_wasm_memories = WeakArrayList::Append(
3843 this, shared_wasm_memories, MaybeObjectDirectHandle::Weak(memory_object));
3844 heap()->set_shared_wasm_memories(*shared_wasm_memories);
3845}
3846
void Isolate::SwitchStacks(wasm::StackMemory* from, wasm::StackMemory* to) {
  // Synchronize the stack limit with the active continuation for
  // stack-switching. This can be done before or after changing the stack
  // pointer itself, as long as we update both before the next stack check.
  // {StackGuard::SetStackLimitForStackSwitching} doesn't update the value of
  // the jslimit if it contains a sentinel value, and it is also thread-safe. So
  // if an interrupt is requested before, during or after this call, it will be
  // preserved and handled at the next stack check.

  // Optional debug tracing: "resume/start" means we are entering a suspended
  // stack, "suspend/return" means we are leaving one back to an inactive
  // parent.
  if (v8_flags.trace_wasm_stack_switching) {
    if (to->jmpbuf()->state == wasm::JumpBuffer::Suspended) {
      PrintF("Switch from stack %d to %d (resume/start)\n", from->id(),
             to->id());
    } else if (to->jmpbuf()->state == wasm::JumpBuffer::Inactive) {
      PrintF("Switch from stack %d to %d (suspend/return)\n", from->id(),
             to->id());
    } else {
      UNREACHABLE();
    }
  }
  // Maintain the parent link: when resuming a suspended stack, the stack we
  // come from becomes its parent; when returning/suspending, the target must
  // already be our own parent.
  if (to->jmpbuf()->state == wasm::JumpBuffer::Suspended) {
    to->jmpbuf()->parent = from;
  } else {
    DCHECK_EQ(to->jmpbuf()->state, wasm::JumpBuffer::Inactive);
    // TODO(388533754): This check won't hold anymore with core stack-switching.
    // Instead, we will need to validate all the intermediate stacks and also
    // check that they don't hold central stack frames.
    DCHECK_EQ(from->jmpbuf()->parent, to);
  }
  // NOTE(review): |limit| is presumably passed to
  // {StackGuard::SetStackLimitForStackSwitching} (see the comment at the top
  // of this function) — confirm against the full source.
  uintptr_t limit = reinterpret_cast<uintptr_t>(to->jmpbuf()->stack_limit);
  // Update the central stack info.
  if (to->jmpbuf()->state == wasm::JumpBuffer::Inactive) {
    // When returning/suspending from a stack, the parent must be on
    // the central stack.
    // TODO(388533754): This assumption will not hold anymore with core
    // stack-switching, so we will need to revisit this.
    DCHECK(IsOnCentralStack(to->jmpbuf()->sp));
    thread_local_top()->central_stack_sp_ = to->jmpbuf()->sp;
        reinterpret_cast<Address>(to->jmpbuf()->stack_limit);
  } else {
    // A suspended stack cannot hold central stack frames.
    thread_local_top()->central_stack_sp_ = from->jmpbuf()->sp;
        reinterpret_cast<Address>(from->jmpbuf()->stack_limit);
  }
}
3898
3899void Isolate::RetireWasmStack(wasm::StackMemory* stack) {
3900 stack->jmpbuf()->state = wasm::JumpBuffer::Retired;
3901 size_t index = stack->index();
3902 // We can only return from a stack that was still in the global list.
3903 DCHECK_LT(index, wasm_stacks().size());
3904 std::unique_ptr<wasm::StackMemory> stack_ptr =
3905 std::move(wasm_stacks()[index]);
3906 DCHECK_EQ(stack_ptr.get(), stack);
3907 if (index != wasm_stacks().size() - 1) {
3908 wasm_stacks()[index] = std::move(wasm_stacks().back());
3909 wasm_stacks()[index]->set_index(index);
3910 }
3911 wasm_stacks().pop_back();
3912 for (size_t i = 0; i < wasm_stacks().size(); ++i) {
3913 SLOW_DCHECK(wasm_stacks()[i]->index() == i);
3914 }
3915 stack_pool().Add(std::move(stack_ptr));
3916}
3917
// Allocates a new orphaned global handle tracked via this isolate's
// |wasm_orphaned_handle_| list; the actual bookkeeping is delegated to
// WasmEngine.
wasm::WasmOrphanedGlobalHandle* Isolate::NewWasmOrphanedGlobalHandle() {
  return wasm::WasmEngine::NewOrphanedGlobalHandle(&wasm_orphaned_handle_);
}
3921
3922#endif // V8_ENABLE_WEBASSEMBLY
3923
3925#if defined(USE_SIMULATOR)
3926 delete simulator_;
3927#endif
3928}
3929
3932 auto t = table_.find(thread_id);
3933 if (t == table_.end()) return nullptr;
3934 return t->second;
3935}
3936
3938 bool inserted = table_.insert(std::make_pair(data->thread_id_, data)).second;
3939 CHECK(inserted);
3940}
3941
3943 table_.erase(data->thread_id_);
3944 delete data;
3945}
3946
3948 for (auto& x : table_) {
3949 delete x.second;
3950 }
3951 table_.clear();
3952}
3953
3955 public:
3956 explicit TracingAccountingAllocator(Isolate* isolate) : isolate_(isolate) {}
3958
3959 protected:
3961 base::MutexGuard lock(&mutex_);
3962 UpdateMemoryTrafficAndReportMemoryUsage(segment->total_size());
3963 }
3964
3965 void TraceZoneCreationImpl(const Zone* zone) override {
3966 base::MutexGuard lock(&mutex_);
3967 active_zones_.insert(zone);
3968 nesting_depth_++;
3969 }
3970
3971 void TraceZoneDestructionImpl(const Zone* zone) override {
3972 base::MutexGuard lock(&mutex_);
3973#ifdef V8_ENABLE_PRECISE_ZONE_STATS
3974 if (v8_flags.trace_zone_type_stats) {
3975 type_stats_.MergeWith(zone->type_stats());
3976 }
3977#endif
3978 UpdateMemoryTrafficAndReportMemoryUsage(zone->segment_bytes_allocated());
3979 active_zones_.erase(zone);
3980 nesting_depth_--;
3981
3982#ifdef V8_ENABLE_PRECISE_ZONE_STATS
3983 if (v8_flags.trace_zone_type_stats && active_zones_.empty()) {
3984 type_stats_.Dump();
3985 }
3986#endif
3987 }
3988
3989 private:
  // Accumulates |memory_traffic_delta| (counted for both allocations and
  // deallocations) and, once the accumulated traffic exceeds the configured
  // tolerance, dumps a JSON snapshot of the zone stats to stdout and/or the
  // tracing system, then resets the counter and the scratch buffer.
  void UpdateMemoryTrafficAndReportMemoryUsage(size_t memory_traffic_delta) {
    if (!v8_flags.trace_zone_stats &&
        !(TracingFlags::zone_stats.load(std::memory_order_relaxed) &
      // Don't print anything if the zone tracing was enabled only because of
      // v8_flags.trace_zone_type_stats.
      return;
    }

    memory_traffic_since_last_report_ += memory_traffic_delta;
    // Rate-limit reporting: only emit once enough traffic has accumulated.
    if (memory_traffic_since_last_report_ < v8_flags.zone_stats_tolerance)
      return;
    memory_traffic_since_last_report_ = 0;

    // Render the full (detailed) stats into the reusable buffer.
    Dump(buffer_, true);

    {
      std::string trace_str = buffer_.str();

      if (v8_flags.trace_zone_stats) {
        // Plain-text reporting: one JSON object per line on stdout.
        PrintF(
            "{"
            "\"type\": \"v8-zone-trace\", "
            "\"stats\": %s"
            "}\n",
            trace_str.c_str());
      }
      // Additionally report through the tracing system when enabled there.
      if (V8_UNLIKELY(
              TracingFlags::zone_stats.load(std::memory_order_relaxed) &
                             "V8.Zone_Stats", TRACE_EVENT_SCOPE_THREAD, "stats",
                             TRACE_STR_COPY(trace_str.c_str()));
      }
    }

    // Clear the buffer.
    buffer_.str(std::string());
  }
4029
4030 void Dump(std::ostringstream& out, bool dump_details) {
4031 // Note: Neither isolate nor zones are locked, so be careful with accesses
4032 // as the allocator is potentially used on a concurrent thread.
4033 double time = isolate_->time_millis_since_init();
4034 out << "{" << "\"isolate\": \"" << reinterpret_cast<void*>(isolate_)
4035 << "\", " << "\"time\": " << time << ", ";
4036 size_t total_segment_bytes_allocated = 0;
4037 size_t total_zone_allocation_size = 0;
4038 size_t total_zone_freed_size = 0;
4039
4040 if (dump_details) {
4041 // Print detailed zone stats if memory usage changes direction.
4042 out << "\"zones\": [";
4043 bool first = true;
4044 for (const Zone* zone : active_zones_) {
4045 size_t zone_segment_bytes_allocated = zone->segment_bytes_allocated();
4046 size_t zone_allocation_size = zone->allocation_size_for_tracing();
4047 size_t freed_size = zone->freed_size_for_tracing();
4048 if (first) {
4049 first = false;
4050 } else {
4051 out << ", ";
4052 }
4053 out << "{" << "\"name\": \"" << zone->name() << "\", "
4054 << "\"allocated\": " << zone_segment_bytes_allocated << ", "
4055 << "\"used\": " << zone_allocation_size << ", "
4056 << "\"freed\": " << freed_size << "}";
4057 total_segment_bytes_allocated += zone_segment_bytes_allocated;
4058 total_zone_allocation_size += zone_allocation_size;
4059 total_zone_freed_size += freed_size;
4060 }
4061 out << "], ";
4062 } else {
4063 // Just calculate total allocated/used memory values.
4064 for (const Zone* zone : active_zones_) {
4065 total_segment_bytes_allocated += zone->segment_bytes_allocated();
4066 total_zone_allocation_size += zone->allocation_size_for_tracing();
4067 total_zone_freed_size += zone->freed_size_for_tracing();
4068 }
4069 }
4070 out << "\"allocated\": " << total_segment_bytes_allocated << ", "
4071 << "\"used\": " << total_zone_allocation_size << ", "
4072 << "\"freed\": " << total_zone_freed_size << "}";
4073 }
4074
4076 std::atomic<size_t> nesting_depth_{0};
4077
4079 std::unordered_set<const Zone*> active_zones_;
4080#ifdef V8_ENABLE_PRECISE_ZONE_STATS
4081 TypeStats type_stats_;
4082#endif
4083 std::ostringstream buffer_;
4084 // This value is increased on both allocations and deallocations.
4085 size_t memory_traffic_since_last_report_ = 0;
4086};
4087
#ifdef DEBUG
// Debug-only global count of isolates that have been allocated but not yet
// disposed (incremented in Allocate, decremented in Delete).
std::atomic<size_t> Isolate::non_disposed_isolates_;
#endif  // DEBUG
4091
4092// static
4094
// static
// Creates a new Isolate owned by |group|; thin wrapper over Allocate().
Isolate* Isolate::New(IsolateGroup* group) { return Allocate(group); }
4097
4098// static
4100 // v8::V8::Initialize() must be called before creating any isolates.
4102 // Allocate Isolate itself on C++ heap, ensuring page alignment.
4103 void* isolate_ptr = base::AlignedAlloc(sizeof(Isolate), kMinimumOSPageSize);
4104 // IsolateAllocator manages the virtual memory resources for the Isolate.
4105 Isolate* isolate = new (isolate_ptr) Isolate(group);
4106
4107#ifdef DEBUG
4108 non_disposed_isolates_++;
4109#endif // DEBUG
4110
4111 return isolate;
4112}
4113
// static
// Tears down and frees |isolate|. The isolate is temporarily installed as the
// current isolate so that destructors can reach it, then the previous
// current-isolate state is restored.
void Isolate::Delete(Isolate* isolate) {
  DCHECK_NOT_NULL(isolate);
  // v8::V8::Dispose() must only be called after deleting all isolates.
  // Temporarily set this isolate as current so that various parts of
  // the isolate can access it in their destructors without having a
  // direct pointer. We don't use Enter/Exit here to avoid
  // initializing the thread data.
  PerIsolateThreadData* saved_data = isolate->CurrentPerIsolateThreadData();
  Isolate* saved_isolate = isolate->TryGetCurrent();
  SetIsolateThreadLocals(isolate, nullptr);
  isolate->set_thread_id(ThreadId::Current());
  isolate->heap()->SetStackStart();

  // Shut down all subsystems before running the destructor.
  isolate->Deinit();

#ifdef DEBUG
  non_disposed_isolates_--;
#endif  // DEBUG

  IsolateGroup* group = isolate->isolate_group();
  // Explicit destructor call: the storage was obtained via AlignedAlloc in
  // Allocate() with placement new, so it must be destroyed and freed manually.
  isolate->~Isolate();
  // Only release the group once all other Isolate members have been destroyed.
  group->Release();
  // Free the isolate itself.
  base::AlignedFree(isolate);

  // Restore the previous current isolate.
  SetIsolateThreadLocals(saved_isolate, saved_data);
}
4145
4155
4159
4163 id_(isolate_counter.fetch_add(1, std::memory_order_relaxed)),
4165 traced_handles_(this),
4166 builtins_(this),
4167#if defined(DEBUG) || defined(VERIFY_HEAP)
4168 num_active_deserializers_(0),
4169#endif
4170 logger_(new Logger()),
4173 jitless_(v8_flags.jitless),
4176 SourceTextModule::kFirstAsyncEvaluationOrdinal),
4178#if defined(V8_ENABLE_ETW_STACK_WALKING)
4179 etw_tracing_enabled_(false),
4180 etw_trace_interpreted_frames_(v8_flags.interpreted_frames_native_stack),
4181 etw_in_rundown_(false),
4182#endif // V8_ENABLE_ETW_STACK_WALKING
4183 stack_size_(v8_flags.stack_size * KB) {
4184 TRACE_ISOLATE(constructor);
4186
4187 // ThreadManager is initialized early to support locking an isolate
4188 // before it is entered.
4189 thread_manager_ = new ThreadManager(this);
4190
4192
4193#define ISOLATE_INIT_EXECUTE(type, name, initial_value) \
4194 name##_ = (initial_value);
4196#undef ISOLATE_INIT_EXECUTE
4197
4198#define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length) \
4199 memset(name##_, 0, sizeof(type) * length);
4201#undef ISOLATE_INIT_ARRAY_EXECUTE
4202
4204 debug_ = new Debug(this);
4205
4207
4208#if V8_ENABLE_WEBASSEMBLY
4209 // If we are in production V8 and not in mksnapshot we have to pass the
4210 // landing pad builtin to the WebAssembly TrapHandler.
4211 // TODO(ahaas): Isolate creation is the earliest point in time when builtins
4212 // are available, so we cannot set the landing pad earlier at the moment.
4213 // However, if builtins ever get loaded during process initialization time,
4214 // then the initialization of the trap handler landing pad should also go
4215 // there.
4216 // TODO(ahaas): The code of the landing pad does not have to be a builtin,
4217 // we could also just move it to the trap handler, and implement it e.g. with
4218 // inline assembly. It's not clear if that's worth it.
4220 EmbeddedData embedded_data = EmbeddedData::FromBlob();
4221 Address landing_pad =
4222 embedded_data.InstructionStartOf(Builtin::kWasmTrapHandlerLandingPad);
4223 i::trap_handler::SetLandingPad(landing_pad);
4224 }
4225#endif // V8_ENABLE_WEBASSEMBLY
4226
4228}
4229
4231#ifdef V8_ENABLE_SANDBOX
4232 static_assert(static_cast<int>(OFFSET_OF(ExternalPointerTable, base_)) ==
4234 static_assert(static_cast<int>(OFFSET_OF(TrustedPointerTable, base_)) ==
4236 static_assert(static_cast<int>(sizeof(ExternalPointerTable)) ==
4238 static_assert(static_cast<int>(sizeof(TrustedPointerTable)) ==
4240#endif
4241
4242 static_assert(OFFSET_OF(Isolate, isolate_data_) == 0);
4243 static_assert(
4244 static_cast<int>(OFFSET_OF(Isolate, isolate_data_.stack_guard_)) ==
4246 static_assert(
4247 static_cast<int>(OFFSET_OF(Isolate, isolate_data_.is_marking_flag_)) ==
4249 static_assert(static_cast<int>(
4252 static_assert(static_cast<int>(OFFSET_OF(
4255 static_assert(static_cast<int>(
4258 static_assert(static_cast<int>(
4261 static_assert(static_cast<int>(
4264 static_assert(static_cast<int>(
4267 static_assert(static_cast<int>(
4270 static_assert(
4271 static_cast<int>(OFFSET_OF(Isolate, isolate_data_.cage_base_)) ==
4273 static_assert(static_cast<int>(OFFSET_OF(
4276 static_assert(
4277 static_cast<int>(OFFSET_OF(Isolate, isolate_data_.stack_guard_)) ==
4279
4280 static_assert(
4281 static_cast<int>(OFFSET_OF(Isolate, isolate_data_.thread_local_top_)) ==
4283 static_assert(
4284 static_cast<int>(OFFSET_OF(Isolate, isolate_data_.handle_scope_data_)) ==
4286 static_assert(
4287 static_cast<int>(OFFSET_OF(Isolate, isolate_data_.embedder_data_)) ==
4289#ifdef V8_COMPRESS_POINTERS
4290 static_assert(static_cast<int>(OFFSET_OF(
4291 Isolate, isolate_data_.external_pointer_table_)) ==
4292 Internals::kIsolateExternalPointerTableOffset);
4293
4294 static_assert(static_cast<int>(OFFSET_OF(
4295 Isolate, isolate_data_.shared_external_pointer_table_)) ==
4296 Internals::kIsolateSharedExternalPointerTableAddressOffset);
4297#endif
4298#ifdef V8_ENABLE_SANDBOX
4299 static_assert(
4300 static_cast<int>(OFFSET_OF(Isolate, isolate_data_.trusted_cage_base_)) ==
4301 Internals::kIsolateTrustedCageBaseOffset);
4302
4303 static_assert(static_cast<int>(
4304 OFFSET_OF(Isolate, isolate_data_.trusted_pointer_table_)) ==
4305 Internals::kIsolateTrustedPointerTableOffset);
4306
4307 static_assert(static_cast<int>(OFFSET_OF(
4308 Isolate, isolate_data_.shared_trusted_pointer_table_)) ==
4309 Internals::kIsolateSharedTrustedPointerTableAddressOffset);
4310
4311 static_assert(static_cast<int>(OFFSET_OF(
4312 Isolate, isolate_data_.code_pointer_table_base_address_)) ==
4313 Internals::kIsolateCodePointerTableBaseAddressOffset);
4314#endif
4315 static_assert(static_cast<int>(OFFSET_OF(
4318 static_assert(
4319 static_cast<int>(OFFSET_OF(
4322
4323 static_assert(
4324 static_cast<int>(OFFSET_OF(Isolate, isolate_data_.roots_table_)) ==
4326
4327 CHECK(IsAligned(reinterpret_cast<Address>(&isolate_data_),
4329
4330 static_assert(Internals::kStackGuardSize == sizeof(StackGuard));
4331 static_assert(Internals::kBuiltinTier0TableSize ==
4335
4336 // Ensure that certain hot IsolateData fields fall into the same CPU cache
4337 // line.
4338 constexpr size_t kCacheLineSize = 64;
4339 static_assert(OFFSET_OF(Isolate, isolate_data_) == 0);
4340
4341 // Fields written on every CEntry/CallApiCallback/CallApiGetter call.
4342 // See MacroAssembler::EnterExitFrame/LeaveExitFrame.
4343 constexpr size_t kCEntryFPCacheLine = RoundDown<kCacheLineSize>(
4344 OFFSET_OF(IsolateData, thread_local_top_.c_entry_fp_));
4345 static_assert(kCEntryFPCacheLine ==
4347 OFFSET_OF(IsolateData, thread_local_top_.c_function_)));
4348 static_assert(kCEntryFPCacheLine ==
4350 OFFSET_OF(IsolateData, thread_local_top_.context_)));
4351 static_assert(
4352 kCEntryFPCacheLine ==
4354 IsolateData, thread_local_top_.topmost_script_having_context_)));
4355 static_assert(kCEntryFPCacheLine ==
4357 OFFSET_OF(IsolateData, thread_local_top_.last_api_entry_)));
4358
4359 // Fields written on every MacroAssembler::CallCFunction call.
4360 static_assert(RoundDown<kCacheLineSize>(
4361 OFFSET_OF(IsolateData, fast_c_call_caller_fp_)) ==
4363 OFFSET_OF(IsolateData, fast_c_call_caller_pc_)));
4364
4365 // LinearAllocationArea objects must not cross cache line boundary.
4366 static_assert(
4367 RoundDown<kCacheLineSize>(OFFSET_OF(IsolateData, new_allocation_info_)) ==
4368 RoundDown<kCacheLineSize>(OFFSET_OF(IsolateData, new_allocation_info_) +
4369 sizeof(LinearAllocationArea) - 1));
4370 static_assert(
4371 RoundDown<kCacheLineSize>(OFFSET_OF(IsolateData, old_allocation_info_)) ==
4372 RoundDown<kCacheLineSize>(OFFSET_OF(IsolateData, old_allocation_info_) +
4373 sizeof(LinearAllocationArea) - 1));
4374}
4375
4377 delete external_reference_map_;
4378 external_reference_map_ = nullptr;
4379}
4380
4381// When profiling status changes, call this function to update the single bool
4382// cache.
4391
4393 TRACE_ISOLATE(deinit);
4394
4395#if defined(V8_USE_PERFETTO)
4397#endif // defined(V8_USE_PERFETTO)
4398
4399 // All client isolates should already be detached when the shared heap isolate
4400 // tears down.
4403 }
4404
4406 IgnoreLocalGCRequests ignore_gc_requests(heap());
4409 });
4410 }
4411
4412 // We start with the heap tear down so that releasing managed objects does
4413 // not cause a GC.
4415
4417 IgnoreLocalGCRequests ignore_gc_requests(heap());
4418
4419#if V8_ENABLE_WEBASSEMBLY && V8_ENABLE_DRUMBRAKE
4420 if (v8_flags.wasm_jitless) {
4422 } else if (v8_flags.wasm_enable_exec_time_histograms &&
4423 v8_flags.slow_histograms) {
4424 wasm_execution_timer_->Terminate();
4425 }
4426#endif // V8_ENABLE_WEBASSEMBLY && V8_ENABLE_DRUMBRAKE
4427
4428 tracing_cpu_profiler_.reset();
4429 if (v8_flags.stress_sampling_allocation_profiler > 0) {
4431 }
4432
4433 metrics_recorder_->NotifyIsolateDisposal();
4435
4437 if (v8_flags.harmony_struct) {
4439 } else {
4441 }
4442
4443 debug()->Unload();
4444
4445#if V8_ENABLE_WEBASSEMBLY
4447
4448 BackingStore::RemoveSharedWasmMemoryObjects(this);
4449#endif // V8_ENABLE_WEBASSEMBLY
4450
4453 }
4454
4455 if (v8_flags.print_deopt_stress) {
4456 PrintF(stdout, "=== Stress deopt counter: %" PRIu64 "\n",
4458 }
4459
4460 // We must stop the logger before we tear down other components.
4462 if (sampler && sampler->IsActive()) sampler->Stop();
4464
4466
4467 // Stop concurrent tasks before destroying resources since they might still
4468 // use those.
4470
4471 // Delete any remaining RegExpResultVector instances.
4472 for (int32_t* v : active_dynamic_regexp_result_vectors_) {
4473 delete[] v;
4474 }
4476
4477 // Cancel all compiler tasks.
4478#ifdef V8_ENABLE_SPARKPLUG
4479 delete baseline_batch_compiler_;
4480 baseline_batch_compiler_ = nullptr;
4481#endif // V8_ENABLE_SPARKPLUG
4482
4483#ifdef V8_ENABLE_MAGLEV
4484 delete maglev_concurrent_dispatcher_;
4485 maglev_concurrent_dispatcher_ = nullptr;
4486#endif // V8_ENABLE_MAGLEV
4487
4489 lazy_compile_dispatcher_->AbortAll();
4491 }
4492
4497 }
4498
4499 // At this point there are no more background threads left in this isolate.
4501
4502 // Tear down data that requires the shared heap before detaching.
4505
4506 // Detach from the shared heap isolate and then unlock the mutex.
4510 global_safepoint->RemoveClient(this);
4512 }
4513
4514 shared_space_isolate_.reset();
4515
4516 // Since there are no other threads left, we can lock this mutex without any
4517 // ceremony. This signals to the tear down code that we are in a safepoint.
4519
4521
4524
4525 if (tiering_manager_ != nullptr) {
4526 delete tiering_manager_;
4527 tiering_manager_ = nullptr;
4528 }
4529
4530#if USE_SIMULATOR
4531 delete simulator_data_;
4532 simulator_data_ = nullptr;
4533#endif
4534
4535 // After all concurrent tasks are stopped, we know for sure that stats aren't
4536 // updated anymore.
4538
4539 heap_.TearDown();
4541
4544
4546
4547 FILE* logfile = v8_file_logger_->TearDownAndGetLogFile();
4548 if (logfile != nullptr) base::Fclose(logfile);
4549
4550#if defined(V8_ENABLE_ETW_STACK_WALKING)
4551 if (v8_flags.enable_etw_stack_walking ||
4552 v8_flags.enable_etw_by_custom_filter_only) {
4554 }
4555#endif // defined(V8_ENABLE_ETW_STACK_WALKING)
4556
4557#if V8_ENABLE_WEBASSEMBLY
4559
4560 delete wasm_code_look_up_cache_;
4561 wasm_code_look_up_cache_ = nullptr;
4562#endif // V8_ENABLE_WEBASSEMBLY
4563
4565
4566 delete interpreter_;
4567 interpreter_ = nullptr;
4568
4569 delete ast_string_constants_;
4570 ast_string_constants_ = nullptr;
4571
4572 delete logger_;
4573 logger_ = nullptr;
4574
4575 delete root_index_map_;
4576 root_index_map_ = nullptr;
4577
4578 delete compiler_zone_;
4579 compiler_zone_ = nullptr;
4580 compiler_cache_ = nullptr;
4581
4582 SetCodePages(nullptr);
4583
4585
4586 if (OwnsStringTables()) {
4588 } else {
4591 }
4592
4593 if (!is_shared_space_isolate()) {
4595 }
4596
4597#ifdef V8_COMPRESS_POINTERS
4598 external_pointer_table().TearDownSpace(
4599 heap()->young_external_pointer_space());
4600 external_pointer_table().TearDownSpace(heap()->old_external_pointer_space());
4601 external_pointer_table().DetachSpaceFromReadOnlySegment(
4602 heap()->read_only_external_pointer_space());
4603 external_pointer_table().TearDownSpace(
4604 heap()->read_only_external_pointer_space());
4605 external_pointer_table().TearDown();
4606 if (owns_shareable_data()) {
4607 shared_external_pointer_table().TearDownSpace(
4608 shared_external_pointer_space());
4609 shared_external_pointer_table().TearDown();
4610 delete isolate_data_.shared_external_pointer_table_;
4611 isolate_data_.shared_external_pointer_table_ = nullptr;
4612 delete shared_external_pointer_space_;
4613 shared_external_pointer_space_ = nullptr;
4614 }
4615 cpp_heap_pointer_table().TearDownSpace(heap()->cpp_heap_pointer_space());
4616 cpp_heap_pointer_table().TearDown();
4617#endif // V8_COMPRESS_POINTERS
4618
4619#ifdef V8_ENABLE_SANDBOX
4620 trusted_pointer_table().TearDownSpace(heap()->trusted_pointer_space());
4621 trusted_pointer_table().TearDown();
4622 if (owns_shareable_data()) {
4623 shared_trusted_pointer_table().TearDownSpace(
4624 shared_trusted_pointer_space());
4625 shared_trusted_pointer_table().TearDown();
4626 delete isolate_data_.shared_trusted_pointer_table_;
4627 isolate_data_.shared_trusted_pointer_table_ = nullptr;
4628 delete shared_trusted_pointer_space_;
4629 shared_trusted_pointer_space_ = nullptr;
4630 }
4631
4632 IsolateGroup::current()->code_pointer_table()->TearDownSpace(
4633 heap()->code_pointer_space());
4634#endif // V8_ENABLE_SANDBOX
4635#ifdef V8_ENABLE_LEAPTIERING
4636 IsolateGroup::current()->js_dispatch_table()->TearDownSpace(
4637 heap()->js_dispatch_table_space());
4638#endif // V8_ENABLE_LEAPTIERING
4639
4640 {
4643 }
4644}
4645
4647 PerIsolateThreadData* data) {
4648 Isolate::SetCurrent(isolate);
4649 g_current_per_isolate_thread_data_ = data;
4650
4651#ifdef V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
4652 V8HeapCompressionScheme::InitBase(isolate ? isolate->cage_base()
4653 : kNullAddress);
4654 IsolateGroup::set_current(isolate ? isolate->isolate_group() : nullptr);
4655#ifdef V8_EXTERNAL_CODE_SPACE
4656 ExternalCodeCompressionScheme::InitBase(isolate ? isolate->code_cage_base()
4657 : kNullAddress);
4658#endif
4659#ifdef V8_ENABLE_SANDBOX
4660 Sandbox::set_current(isolate ? isolate->isolate_group()->sandbox() : nullptr);
4661#endif
4662#endif // V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
4663
4664 if (isolate && isolate->main_thread_local_isolate()) {
4666 isolate->main_thread_local_heap()->marking_barrier());
4667 } else {
4669 }
4670}
4671
4673 TRACE_ISOLATE(destructor);
4675
4676 // The entry stack must be empty when we get here.
4677 DCHECK(entry_stack_ == nullptr ||
4678 entry_stack_.load()->previous_item == nullptr);
4679
4680 delete entry_stack_;
4681 entry_stack_ = nullptr;
4682
4683 delete date_cache_;
4684 date_cache_ = nullptr;
4685
4686 delete regexp_stack_;
4687 regexp_stack_ = nullptr;
4688
4690 descriptor_lookup_cache_ = nullptr;
4691
4692 delete load_stub_cache_;
4693 load_stub_cache_ = nullptr;
4694 delete store_stub_cache_;
4695 store_stub_cache_ = nullptr;
4697 define_own_stub_cache_ = nullptr;
4698
4701
4702 delete v8_file_logger_;
4703 v8_file_logger_ = nullptr;
4704
4706 handle_scope_implementer_ = nullptr;
4707
4708 delete code_tracer();
4709 set_code_tracer(nullptr);
4710
4711 delete compilation_cache_;
4712 compilation_cache_ = nullptr;
4713 delete bootstrapper_;
4714 bootstrapper_ = nullptr;
4715
4716 delete thread_manager_;
4717 thread_manager_ = nullptr;
4718
4720
4721 delete global_handles_;
4722 global_handles_ = nullptr;
4723 delete eternal_handles_;
4724 eternal_handles_ = nullptr;
4725
4726#if V8_ENABLE_WEBASSEMBLY
4728#endif
4729
4730 delete string_stream_debug_object_cache_;
4731 string_stream_debug_object_cache_ = nullptr;
4732
4734 random_number_generator_ = nullptr;
4735
4736 delete fuzzer_rng_;
4737 fuzzer_rng_ = nullptr;
4738
4739 delete debug_;
4740 debug_ = nullptr;
4741
4743 cancelable_task_manager_ = nullptr;
4744
4745 delete allocator_;
4746 allocator_ = nullptr;
4747
4748 // Assert that |default_microtask_queue_| is the last MicrotaskQueue instance.
4749 DCHECK_IMPLIES(default_microtask_queue_,
4750 default_microtask_queue_ == default_microtask_queue_->next());
4751 delete default_microtask_queue_;
4752 default_microtask_queue_ = nullptr;
4753
4754 // isolate_group_ released in caller, to ensure that all member destructors
4755 // run before potentially unmapping the isolate's VirtualMemoryArea.
4756}
4757
4760 // This method might be called on a thread that's not bound to any Isolate
4761 // and thus pointer compression schemes might have cage base value unset.
4762 // So, allow heap access here to let the checks work.
4763 i::PtrComprCageAccessScope ptr_compr_cage_access_scope(this);
4766}
4767
4769 DCHECK_IMPLIES(v8_flags.strict_termination_checks,
4771 if (try_catch_handler() == nullptr) return;
4773 try_catch_handler()->exception_ = reinterpret_cast<void*>(
4774 ReadOnlyRoots(heap()).termination_exception().ptr());
4775}
4776
4778 ExceptionHandlerType top_handler) {
4779 Tagged<Object> exception = this->exception();
4780
4781 if (top_handler == ExceptionHandlerType::kJavaScriptHandler) return false;
4782 if (top_handler == ExceptionHandlerType::kNone) return true;
4783
4785 if (!is_catchable_by_javascript(exception)) {
4787 } else {
4788 v8::TryCatch* handler = try_catch_handler();
4789 DCHECK(IsJSMessageObject(pending_message()) ||
4790 IsTheHole(pending_message(), this));
4791 handler->can_continue_ = true;
4792 handler->exception_ = reinterpret_cast<void*>(exception.ptr());
4793 // Propagate to the external try-catch only if we got an actual message.
4794 if (!has_pending_message()) return true;
4795 handler->message_obj_ = reinterpret_cast<void*>(pending_message().ptr());
4796 }
4797 return true;
4798}
4799
4800namespace {
4801
4802inline Tagged<FunctionTemplateInfo> GetTargetFunctionTemplateInfo(
4805 if (IsFunctionTemplateInfo(target)) {
4806 return Cast<FunctionTemplateInfo>(target);
4807 }
4808 CHECK(Is<JSFunction>(target));
4809 Tagged<SharedFunctionInfo> shared_info = Cast<JSFunction>(target)->shared();
4810 return shared_info->api_func_data();
4811}
4812
4813} // namespace
4814
4816 DCHECK_NOT_NULL(exception_propagation_callback_);
4817
4818 // Try to figure out whether the exception was thrown directly from an
4819 // Api callback and if it's the case then call the
4820 // |exception_propagation_callback_| with relevant data.
4821
4822 ExternalCallbackScope* ext_callback_scope = external_callback_scope();
4823 StackFrameIterator it(this);
4824
4825 if (it.done() && !ext_callback_scope) {
4826 // The exception was thrown directly by embedder code without crossing
4827 // "C++ -> JS" or "C++ -> Api callback" boundary.
4828 return;
4829 }
4830 if (it.done() ||
4831 (ext_callback_scope &&
4832 ext_callback_scope->JSStackComparableAddress() < it.frame()->fp())) {
4833 // There were no crossings of "C++ -> JS" boundary at all or they happened
4834 // earlier than the last crossing of the "C++ -> Api callback" boundary.
4835 // In this case all the data about Api callback is available in the
4836 // |ext_callback_scope| object.
4837 DCHECK_NOT_NULL(ext_callback_scope);
4838 v8::ExceptionContext kind = ext_callback_scope->exception_context();
4839 switch (kind) {
4842 DCHECK_NOT_NULL(ext_callback_scope->callback_info());
4843 auto callback_info =
4844 reinterpret_cast<const v8::FunctionCallbackInfo<v8::Value>*>(
4845 ext_callback_scope->callback_info());
4846
4848 Utils::OpenDirectHandle(*callback_info->This());
4849 DirectHandle<FunctionTemplateInfo> function_template_info(
4850 GetTargetFunctionTemplateInfo(*callback_info), this);
4851 ReportExceptionFunctionCallback(receiver, function_template_info, kind);
4852 return;
4853 }
4869 DCHECK_NOT_NULL(ext_callback_scope->callback_info());
4870 auto callback_info =
4871 reinterpret_cast<const v8::PropertyCallbackInfo<v8::Value>*>(
4872 ext_callback_scope->callback_info());
4873
4874 // Allow usages of v8::PropertyCallbackInfo<T>::Holder() for now.
4875 // TODO(https://crbug.com/333672197): remove.
4877
4878 DirectHandle<Object> holder =
4879 Utils::OpenDirectHandle(*callback_info->Holder());
4880 Handle<Object> maybe_name =
4882 DirectHandle<Name> name =
4883 IsSmi(*maybe_name)
4884 ? factory()->SizeToString(
4886 *callback_info))
4887 : Cast<Name>(maybe_name);
4888 DCHECK(IsJSReceiver(*holder));
4889
4890 // Allow usages of v8::PropertyCallbackInfo<T>::Holder() for now.
4891 // TODO(https://crbug.com/333672197): remove.
4893
4894 // Currently we call only ApiGetters from JS code.
4896 return;
4897 }
4898
4901 "ExternalCallbackScope should not use "
4902 "v8::ExceptionContext::kUnknown exception context");
4903 return;
4904 }
4905 UNREACHABLE();
4906 }
4907
4908 // There were no crossings of "C++ -> Api callback" boundary or they
4909 // happened before crossing the "C++ -> JS" boundary.
4910 // In this case all the data about Api callback is available in the
4911 // topmost "JS -> Api callback" frame (ApiCallbackExitFrame or
4912 // ApiAccessorExitFrame).
4913 DCHECK(!it.done());
4914 StackFrame::Type frame_type = it.frame()->type();
4915 switch (frame_type) {
4916 case StackFrame::API_CALLBACK_EXIT: {
4919 this);
4920 DirectHandle<FunctionTemplateInfo> function_template_info =
4921 frame->GetFunctionTemplateInfo();
4922
4923 v8::ExceptionContext callback_kind =
4926 ReportExceptionFunctionCallback(receiver, function_template_info,
4927 callback_kind);
4928 return;
4929 }
4930 case StackFrame::API_ACCESSOR_EXIT: {
4932
4933 DirectHandle<Object> holder(frame->holder(), this);
4934 DirectHandle<Name> name(frame->property_name(), this);
4935 DCHECK(IsJSReceiver(*holder));
4936
4937 // Currently we call only ApiGetters from JS code.
4940 return;
4941 }
4942 case StackFrame::TURBOFAN_JS:
4943 // This must be a fast Api call.
4944 CHECK(it.frame()->InFastCCall());
4945 // TODO(ishell): support fast Api calls.
4946 return;
4947 case StackFrame::EXIT:
4948 case StackFrame::BUILTIN_EXIT:
4949 // This is a regular runtime function or C++ builtin.
4950 return;
4951#if V8_ENABLE_WEBASSEMBLY
4952 case StackFrame::WASM:
4953 case StackFrame::WASM_SEGMENT_START:
4954 // No more info.
4955 return;
4956#endif // V8_ENABLE_WEBASSEMBLY
4957 default:
4958 // Other types are not expected, so just hard-crash.
4959 CHECK_NE(frame_type, frame_type);
4960 }
4961}
4962
// Notifies the embedder's exception-propagation callback that an exception
// escaped a function/constructor Api callback, then rethrows the original
// exception together with its pending message.
// NOTE(review): this is a doxygen-scrape; the fused leading numbers are the
// page's line numbers. The signature lines (~4963-4965) and line 4973 were
// lost in extraction — judging from the call site above, this is
// Isolate::ReportExceptionFunctionCallback. Code kept byte-for-byte.
4966    v8::ExceptionContext exception_context) {
  // Only constructor/operation contexts reach this path directly; other
  // contexts are derived from the FunctionTemplateInfo below.
4967  DCHECK(exception_context == v8::ExceptionContext::kConstructor ||
4968         exception_context == v8::ExceptionContext::kOperation);
4969  DCHECK_NOT_NULL(exception_propagation_callback_);
4970
4971  // Ignore exceptions that we can't extend.
4972  if (!IsJSReceiver(this->exception())) return;
  // NOTE(review): line 4973 was lost in extraction; `exception` used below is
  // presumably a handle to this->exception() captured there — TODO confirm.
4974
4975  DirectHandle<Object> maybe_message(pending_message(), this);
4976
  // Empty string when the template has no class/interface name set.
4977  DirectHandle<String> property_name =
4978      IsUndefined(function->class_name(), this)
4979          ? factory()->empty_string()
4980          : Handle<String>(Cast<String>(function->class_name()), this);
4981  DirectHandle<String> interface_name =
4982      IsUndefined(function->interface_name(), this)
4983          ? factory()->empty_string()
4984          : Handle<String>(Cast<String>(function->interface_name()), this);
4985  if (exception_context != ExceptionContext::kConstructor) {
4986    exception_context =
4987        static_cast<ExceptionContext>(function->exception_context());
4988  }
4989
4990  {
4991    v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(this);
4992    // Ignore any exceptions thrown inside the callback and rethrow the
4993    // original exception/message.
4994    TryCatch try_catch(v8_isolate);
4995
4996    exception_propagation_callback_(v8::ExceptionPropagationMessage(
4997        v8_isolate, v8::Utils::ToLocal(exception),
4998        v8::Utils::ToLocal(interface_name), v8::Utils::ToLocal(property_name),
4999        exception_context));
5000
5001    try_catch.Reset();
5002  }
5003  ReThrow(*exception, *maybe_message);
5004}
5005
// Notifies the embedder's exception-propagation callback that an exception
// escaped a property accessor/interceptor Api callback, then rethrows the
// original exception and message.
// NOTE(review): signature lines (~5006-5007) and line 5012 were lost in
// extraction — presumably Isolate::ReportExceptionPropertyCallback taking
// `holder`, `name` and the context. Code kept byte-for-byte.
5008    v8::ExceptionContext exception_context) {
5009  DCHECK_NOT_NULL(exception_propagation_callback_);
5010
  // Ignore exceptions that we can't extend.
5011  if (!IsJSReceiver(this->exception())) return;
  // NOTE(review): line 5012 lost; presumably captures this->exception() into
  // the `exception` handle used below — TODO confirm.
5013
5014  DirectHandle<Object> maybe_message(pending_message(), this);
5015
  // Best-effort conversion of the property name; failures leave it empty.
5016  DirectHandle<String> property_name;
5017  std::ignore = Name::ToFunctionName(this, name).ToHandle(&property_name);
5018  DirectHandle<String> interface_name =
5019      JSReceiver::GetConstructorName(this, holder);
5020
5021  {
5022    v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(this);
5023    // Ignore any exceptions thrown inside the callback and rethrow the
5024    // original exception/message.
5025    TryCatch try_catch(v8_isolate);
5026
5027    exception_propagation_callback_(v8::ExceptionPropagationMessage(
5028        v8_isolate, v8::Utils::ToLocal(exception),
5029        v8::Utils::ToLocal(interface_name), v8::Utils::ToLocal(property_name),
5030        exception_context));
5031
5032    try_catch.Reset();
5033  }
5034  ReThrow(*exception, *maybe_message);
5035}
5036
// Registers the embedder callback that is invoked (see the Report* functions
// above) when an exception crosses an Api callback boundary.
// NOTE(review): the signature line (5038) was lost in extraction.
5039  exception_propagation_callback_ = callback;
5040}
5041
// Lazily creates this isolate's Counters. Returns false if they already
// existed, true if this call created them.
// NOTE(review): the signature line (5042) was lost in extraction.
5043  if (async_counters_) return false;
5044  async_counters_ = std::make_shared<Counters>(this);
5045  return true;
5046}
5047
// Lazily creates the V8FileLogger (and, per the lost line 5052, presumably
// also initializes the counters — TODO confirm against upstream).
// NOTE(review): the signature line (5048) was lost in extraction.
5049  if (v8_file_logger_ == nullptr) {
5050    v8_file_logger_ = new V8FileLogger(this);
5051  }
5053}
5054
5055namespace {

// Replaces every builtin's Code object with a new Code object whose
// instruction_start points into the (possibly remapped) embedded blob, making
// the old on-heap code unreachable.
5057void FinalizeBuiltinCodeObjects(Isolate* isolate) {
  // The embedded blob must already be installed on the isolate.
5058  DCHECK_NOT_NULL(isolate->embedded_blob_code());
5059  DCHECK_NE(0, isolate->embedded_blob_code_size());
5060  DCHECK_NOT_NULL(isolate->embedded_blob_data());
5061  DCHECK_NE(0, isolate->embedded_blob_data_size());
5062
  // NOTE(review): lines 5063/5065 were lost in extraction; `d` used below is
  // presumably an EmbeddedData created from the isolate's blob — TODO confirm.
5064  HandleScope scope(isolate);
5066  for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
5067       ++builtin) {
5068    DirectHandle<Code> old_code = isolate->builtins()->code_handle(builtin);
5069    // Note that `old_code.instruction_start` might point to `old_code`'s
5070    // InstructionStream which might be GCed once we replace the old code
5071    // with the new code.
5072    Address instruction_start = d.InstructionStartOf(builtin);
5073    DirectHandle<Code> new_code =
5074        isolate->factory()->NewCodeObjectForEmbeddedBuiltin(old_code,
5075                                                            instruction_start);
5076
5077    // From this point onwards, the old builtin code object is unreachable and
5078    // will be collected by the next GC.
5079    isolate->builtins()->set_code(builtin, *new_code);
5080  }
5081}
5082
5083#ifdef DEBUG
5084bool IsolateIsCompatibleWithEmbeddedBlob(Isolate* isolate) {
5085 EmbeddedData d = EmbeddedData::FromBlob(isolate);
5086 return (d.IsolateHash() == isolate->HashIsolateForEmbeddedBlob());
5087}
5088#endif // DEBUG
5089
5090} // namespace
5091
// Installs the embedded blob on this isolate: the blob compiled into the
// binary by default, or the process-wide "sticky" blob if one exists (in
// which case its refcount is bumped).
// NOTE(review): the signature line (5092) was lost in extraction.
5093  const uint8_t* code = DefaultEmbeddedBlobCode();
5094  uint32_t code_size = DefaultEmbeddedBlobCodeSize();
5095  const uint8_t* data = DefaultEmbeddedBlobData();
5096  uint32_t data_size = DefaultEmbeddedBlobDataSize();
5097
  // Unsynchronized fast check, re-checked below under the refcount mutex.
5098  if (StickyEmbeddedBlobCode() != nullptr) {
5099    base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
5100    // Check again now that we hold the lock.
5101    if (StickyEmbeddedBlobCode() != nullptr) {
5102      code = StickyEmbeddedBlobCode();
5103      code_size = StickyEmbeddedBlobCodeSize();
5104      data = StickyEmbeddedBlobData();
5105      data_size = StickyEmbeddedBlobDataSize();
5106      current_embedded_blob_refs_++;
5107    }
5108  }
5109
  // Either both sections are absent (no embedded blob) or both are set.
5110  if (code_size == 0) {
5111    CHECK_EQ(0, data_size);
5112  } else {
5113    SetEmbeddedBlob(code, code_size, data, data_size);
5114  }
5115}
5116
// Creates a fresh embedded blob for this isolate (or reuses the process-wide
// sticky blob), installs it, then rewrites builtin Code objects to point into
// it. NOTE(review): the signature line (5117) and lines 5120/5134/5146 were
// lost in extraction.
5118  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
5119
5121
5122  // If a sticky blob has been set, we reuse it.
5123  if (StickyEmbeddedBlobCode() != nullptr) {
5124    CHECK_EQ(embedded_blob_code(), StickyEmbeddedBlobCode());
5125    CHECK_EQ(embedded_blob_data(), StickyEmbeddedBlobData());
5126    CHECK_EQ(CurrentEmbeddedBlobCode(), StickyEmbeddedBlobCode());
5127    CHECK_EQ(CurrentEmbeddedBlobData(), StickyEmbeddedBlobData());
5128  } else {
5129    // Create and set a new embedded blob.
5130    uint8_t* code;
5131    uint32_t code_size;
5132    uint8_t* data;
5133    uint32_t data_size;
    // NOTE(review): line 5134 (the call filling the out-params above,
    // presumably the blob creator) was lost in extraction.
5135        this, &code, &code_size, &data, &data_size);
5136
5137    CHECK_EQ(0, current_embedded_blob_refs_);
5138    const uint8_t* const_code = const_cast<const uint8_t*>(code);
5139    const uint8_t* const_data = const_cast<const uint8_t*>(data);
5140    SetEmbeddedBlob(const_code, code_size, const_data, data_size);
5141    current_embedded_blob_refs_++;
5142
    // Make the freshly created blob available to later isolates.
5143    SetStickyEmbeddedBlob(code, code_size, data, data_size);
5144  }
5145
5147  FinalizeBuiltinCodeObjects(this);
5148}
5149
// Decides whether short (pc-relative) builtin calls can be enabled for this
// isolate, based on flags, available physical memory and whether a shared
// CodeRange with re-embedded builtins already exists.
// NOTE(review): signature line (5150) and several assignment/condition lines
// (5155, 5159-5160, 5164, 5167, 5170, 5174-5175) were lost in extraction.
5151  if (V8_SHORT_BUILTIN_CALLS_BOOL && v8_flags.short_builtin_calls) {
5152#if defined(V8_OS_ANDROID)
5153    // On Android, the check is not operative to detect memory, and re-embedded
5154    // builtins don't have a memory cost.
5156#else
5157    // Check if the system has more than 4GB of physical memory by comparing the
5158    // old space size with respective threshold value.
5161#endif  // defined(V8_OS_ANDROID)
5162    // Additionally, enable if there is already a process-wide CodeRange that
5163    // has re-embedded builtins.
5165      CodeRange* code_range = isolate_group()->GetCodeRange();
5166      if (code_range && code_range->embedded_blob_code_copy() != nullptr) {
5168      }
5169    }
5171    // The short builtin calls could still be enabled if allocated code range
5172    // is close enough to embedded builtins so that the latter could be
5173    // reached using pc-relative (short) calls/jumps.
5176  }
5177  }
5178}
5179
// Remaps the embedded builtins' code section into the isolate's code range so
// builtins stay within pc-relative reach; no-ops when remapping is disabled
// or unnecessary. NOTE(review): the signature and several condition/argument
// lines (5180-5181, 5184-5185, 5191-5192, 5194, 5196-5197) were lost in
// extraction.
5182    return;
5183  }
5186    // The embedded builtins are within the pc-relative reach from the code
5187    // range, so there's no need to remap embedded builtins.
5188    return;
5189  }
5190
5193
5195  embedded_blob_code_ = heap_.code_range_->RemapEmbeddedBuiltins(
5198  // The un-embedded code blob is already a part of the registered code range
5199  // so it's not necessary to register it again.
5200}
5201
// Drops this isolate's reference on the sticky embedded blob and frees the
// blob when this was the last holder and refcounting is enabled.
// NOTE(review): the signature line (5202) and lines 5206/5217/5219/5221-5222
// (the guard condition and the free call with size arguments) were lost in
// extraction.
5203  // Nothing to do in case the blob is embedded into the binary or unset.
5204  if (StickyEmbeddedBlobCode() == nullptr) return;
5205
5207    CHECK_EQ(embedded_blob_code(), StickyEmbeddedBlobCode());
5208    CHECK_EQ(embedded_blob_data(), StickyEmbeddedBlobData());
5209  }
5210  CHECK_EQ(CurrentEmbeddedBlobCode(), StickyEmbeddedBlobCode());
5211  CHECK_EQ(CurrentEmbeddedBlobData(), StickyEmbeddedBlobData());
5212
5213  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
5214  current_embedded_blob_refs_--;
5215  if (current_embedded_blob_refs_ == 0 && enable_embedded_blob_refcounting_) {
5216    // We own the embedded blob and are the last holder. Free it.
5218        const_cast<uint8_t*>(CurrentEmbeddedBlobCode()),
5220        const_cast<uint8_t*>(CurrentEmbeddedBlobData()),
5223  }
5224}
5225
// Initializes the isolate from scratch: no snapshot blobs, so Init() creates
// all heap objects programmatically (no rehashing needed).
// NOTE(review): the signature line (5226) was lost in extraction.
5227  return Init(nullptr, nullptr, nullptr, false);
5228}
5229
5230bool Isolate::InitWithSnapshot(SnapshotData* startup_snapshot_data,
5231 SnapshotData* read_only_snapshot_data,
5232 SnapshotData* shared_heap_snapshot_data,
5233 bool can_rehash) {
5234 DCHECK_NOT_NULL(startup_snapshot_data);
5235 DCHECK_NOT_NULL(read_only_snapshot_data);
5236 DCHECK_NOT_NULL(shared_heap_snapshot_data);
5237 return Init(startup_snapshot_data, read_only_snapshot_data,
5238 shared_heap_snapshot_data, can_rehash);
5239}
5240
namespace {
// Renders |address| as a "0x"-prefixed lowercase hex string (e.g. 255 ->
// "0xff"); used to populate human-readable crash keys below.
//
// Fix: dropped the redundant `static` — the anonymous namespace already gives
// the function internal linkage (clang-tidy:
// readability-static-definition-in-anonymous-namespace).
std::string ToHexString(uintptr_t address) {
  std::stringstream stream_address;
  stream_address << "0x" << std::hex << address;
  return stream_address.str();
}
}  // namespace
5248
// Registers crash keys (isolate address, first pages of RO/old/code spaces,
// code-range base, snapshot checksums) so crash reports can locate this
// isolate's memory. NOTE(review): the signature and every add_crash_key_
// callback invocation line (5250, 5253, 5257-5258, 5263, 5268, 5275, 5278,
// 5281, 5284-5285) were lost in extraction; only the value computations and
// the ToHexString arguments survive. Code kept byte-for-byte.
5251
5252  const uintptr_t isolate_address = reinterpret_cast<uintptr_t>(this);
5254                 ToHexString(isolate_address));
5255
5256  const uintptr_t ro_space_firstpage_address =
5259                 ToHexString(ro_space_firstpage_address));
5260
5261  const uintptr_t old_space_firstpage_address =
5262      heap()->old_space()->FirstPageAddress();
5264                 ToHexString(old_space_firstpage_address));
5265
  // Code-range keys are only meaningful when a code range exists.
5266  if (heap()->code_range_base()) {
5267    const uintptr_t code_range_base_address = heap()->code_range_base();
5269                   ToHexString(code_range_base_address));
5270  }
5271
5272  if (heap()->code_space()->first_page()) {
5273    const uintptr_t code_space_firstpage_address =
5274        heap()->code_space()->FirstPageAddress();
5276                   ToHexString(code_space_firstpage_address));
5277  }
  // The calculated checksum is intentionally 0 for now (too expensive to
  // recompute here); only the expected checksum is real.
5279  // TODO(cbruni): Implement strategy to infrequently collect this.
5280  const uint32_t v8_snapshot_checksum_calculated = 0;
5282                 ToHexString(v8_snapshot_checksum_calculated));
5283  const uint32_t v8_snapshot_checksum_expected =
5286                 ToHexString(v8_snapshot_checksum_expected));
5287}
5288
// Seeds the code-pages buffer with the embedded blob's code section so stack
// unwinders can attribute pcs inside embedded builtins.
// NOTE(review): the signature (5289-5290), the size member of the brace init
// (5293) and line 5295 were lost in extraction.
5291  MemoryRange embedded_range{
5292      reinterpret_cast<const void*>(embedded_blob_code()),
5294  code_pages_buffer1_.push_back(embedded_range);
5296}
5297
5298namespace {
5299
5300// This global counter contains number of stack loads/stores per optimized/wasm
5301// function.
5302using MapOfLoadsAndStoresPerFunction =
5303 std::map<std::string /* function_name */,
5304 std::pair<uint64_t /* loads */, uint64_t /* stores */>>;
5305MapOfLoadsAndStoresPerFunction* stack_access_count_map = nullptr;
5306
5307class BigIntPlatform : public bigint::Platform {
5308 public:
5309 explicit BigIntPlatform(Isolate* isolate) : isolate_(isolate) {}
5310 ~BigIntPlatform() override = default;
5311
5312 bool InterruptRequested() override {
5313 StackLimitCheck interrupt_check(isolate_);
5314 return (interrupt_check.InterruptRequested() &&
5315 isolate_->stack_guard()->HasTerminationRequest());
5316 }
5317
5318 private:
5319 Isolate* isolate_;
5320};
5321} // namespace
5322
5323#ifdef V8_COMPRESS_POINTERS
// Test-only accessor for the virtual-memory cage used by code pointer
// compression. NOTE(review): line 5325 (the first arm of the conditional
// return, presumably selecting heap_.code_range() when present) was lost in
// extraction; the surviving arm falls back to the group's pointer-compression
// cage.
5324VirtualMemoryCage* Isolate::GetPtrComprCodeCageForTesting() {
5326                    : isolate_group_->GetPtrComprCage();
5327}
5328#endif  // V8_COMPRESS_POINTERS
5329
// Verifies (only in V8_STATIC_ROOTS_BOOL builds) that the read-only heap
// layout still matches the statically compiled root table and that the
// map-address-range based instance-type checkers agree with the
// instance-type based ones. NOTE(review): the signature line (5330) and
// numerous CHECK lines (5337, 5339, 5343, 5348, 5387, 5389-5394, 5400, 5402,
// 5405, 5407, 5409, 5411, 5413, 5415, 5417) were lost in extraction. Code
// kept byte-for-byte.
5331#if V8_STATIC_ROOTS_BOOL
5332#define STATIC_ROOTS_FAILED_MSG                                            \
5333  "Read-only heap layout changed. Run `tools/dev/gen-static-roots.py` to " \
5334  "update static-roots.h."
5335  static_assert(static_cast<int>(RootIndex::kReadOnlyRootsCount) ==
5336                    StaticReadOnlyRootsPointerTable.size(),
  // Walk the static pointer table and compare each entry against the live
  // root (decompression line 5343 lost in extraction).
5338  auto& roots = roots_table();
5340  for (Tagged_t cmp_ptr : StaticReadOnlyRootsPointerTable) {
5341    Address the_root = roots[idx];
5342    Address ptr =
5344    CHECK_WITH_MSG(the_root == ptr, STATIC_ROOTS_FAILED_MSG);
5345    ++idx;
5346  }
5347
5349#define CHECK_NAME(_1, _2, CamelName)                                 \
5350  CHECK_WITH_MSG(StaticReadOnlyRoot::k##CamelName ==                  \
5351                     V8HeapCompressionScheme::CompressObject(roots[idx]), \
5352                 STATIC_ROOTS_FAILED_MSG);                            \
5353  ++idx;
5354  STRONG_READ_ONLY_ROOT_LIST(CHECK_NAME)
5355#undef CHECK_NAME
5356
5357  // Check if instance types to map range mappings are still valid.
5358  //
5359  // Is##type(map) may be computed by checking if the map pointer lies in a
5360  // statically known range of addresses, whereas Is##type(instance_type) is the
5361  // definitive source of truth. If they disagree it means that a particular
5362  // entry in InstanceTypeChecker::kUniqueMapRangeOfInstanceTypeRangeList is out
5363  // of date. This can also happen if an instance type is starting to be used by
5364  // more maps.
5365  //
5366  // If this check fails either re-arrange allocations in the read-only heap
5367  // such that the static map range is restored (consult static-roots.h for a
5368  // sorted list of addresses) or remove the offending entry from the list.
5369  for (idx = RootIndex::kFirstRoot; idx <= RootIndex::kLastRoot; ++idx) {
5370    Tagged<Object> obj = roots_table().slot(idx).load(this);
5371    if (obj.ptr() == kNullAddress || !IsMap(obj)) continue;
5372    Tagged<Map> map = Cast<Map>(obj);
5373
5374#define INSTANCE_TYPE_CHECKER_SINGLE(type, _)  \
5375  CHECK_EQ(InstanceTypeChecker::Is##type(map), \
5376           InstanceTypeChecker::Is##type(map->instance_type()));
5377    INSTANCE_TYPE_CHECKERS_SINGLE(INSTANCE_TYPE_CHECKER_SINGLE)
5378#undef INSTANCE_TYPE_CHECKER_SINGLE
5379
5380#define INSTANCE_TYPE_CHECKER_RANGE(type, _1, _2) \
5381  CHECK_EQ(InstanceTypeChecker::Is##type(map),    \
5382           InstanceTypeChecker::Is##type(map->instance_type()));
5383    INSTANCE_TYPE_CHECKERS_RANGE(INSTANCE_TYPE_CHECKER_RANGE)
5384#undef INSTANCE_TYPE_CHECKER_RANGE
5385
5386    // This limit is used in various places as a fast IsJSReceiver check.
5388        InstanceTypeChecker::IsPrimitiveHeapObject(map->instance_type()),
5391    CHECK_IMPLIES(InstanceTypeChecker::IsJSReceiver(map->instance_type()),
5395             read_only_heap()->read_only_space()->Size());
5396
    // String maps get the full battery of string-subtype consistency checks
    // (many of the CHECK_EQ first halves were lost in extraction).
5397    if (InstanceTypeChecker::IsString(map->instance_type())) {
5398      CHECK_EQ(InstanceTypeChecker::IsString(map),
5399               InstanceTypeChecker::IsString(map->instance_type()));
5401               InstanceTypeChecker::IsSeqString(map->instance_type()));
5403               InstanceTypeChecker::IsExternalString(map->instance_type()));
5404      CHECK_EQ(
5406          InstanceTypeChecker::IsUncachedExternalString(map->instance_type()));
5408               InstanceTypeChecker::IsInternalizedString(map->instance_type()));
5410               InstanceTypeChecker::IsConsString(map->instance_type()));
5412               InstanceTypeChecker::IsSlicedString(map->instance_type()));
5414               InstanceTypeChecker::IsThinString(map->instance_type()));
5416               InstanceTypeChecker::IsOneByteString(map->instance_type()));
5418               InstanceTypeChecker::IsTwoByteString(map->instance_type()));
5419    }
5420  }
5421
5422  // Sanity check the API
5423  CHECK_EQ(
5424      v8::internal::Internals::GetRoot(reinterpret_cast<v8::Isolate*>(this),
5425                                       static_cast<int>(RootIndex::kNullValue)),
5426      ReadOnlyRoots(this).null_value().ptr());
5427#undef STATIC_ROOTS_FAILED_MSG
5428#endif  // V8_STATIC_ROOTS_BOOL
5429}
5430
// Core isolate initialization. Either creates all heap objects from scratch
// (all three snapshot-data pointers null) or deserializes them from the given
// snapshots. Sets up subsystem objects, pointer/trusted tables, the read-only
// heap, builtins, and the bootstrapper; returns true on success.
// NOTE(review): this is a doxygen-scrape with many hyperlinked lines lost in
// extraction (visible as gaps in the fused line numbers); the surviving code
// is kept byte-for-byte and only comments were added.
5431bool Isolate::Init(SnapshotData* startup_snapshot_data,
5432                   SnapshotData* read_only_snapshot_data,
5433                   SnapshotData* shared_heap_snapshot_data, bool can_rehash) {
5434  TRACE_ISOLATE(init);
5435
5436#ifdef V8_COMPRESS_POINTERS_IN_SHARED_CAGE
5438#endif  // V8_COMPRESS_POINTERS_IN_SHARED_CAGE
5439
  // A null shared-heap snapshot means "create from scratch"; the other two
  // snapshot pointers must agree.
5440  const bool create_heap_objects = (shared_heap_snapshot_data == nullptr);
5441  // We either have both or none.
5442  DCHECK_EQ(create_heap_objects, startup_snapshot_data == nullptr);
5443  DCHECK_EQ(create_heap_objects, read_only_snapshot_data == nullptr);
5444
5445  EnableRoAllocationForSnapshotScope enable_ro_allocation(this);
5446
5447  base::ElapsedTimer timer;
5448  if (create_heap_objects && v8_flags.profile_deserialization) timer.Start();
5449
5451
5453      reinterpret_cast<v8::Isolate*>(this));
5454
5455  isolate_group()->AddIsolate(this);
5456  Isolate* const use_shared_space_isolate =
5458
5460
5461  stress_deopt_count_ = v8_flags.deopt_every_n_times;
5462  force_slow_path_ = v8_flags.force_slow_path;
5463
5465
5466  has_fatal_error_ = false;
5467
5468  // The initialization process does not handle memory exhaustion.
5469  AlwaysAllocateScope always_allocate(heap());
5470
5471#define ASSIGN_ELEMENT(CamelName, hacker_name)                  \
5472  isolate_addresses_[IsolateAddressId::k##CamelName##Address] = \
5473      reinterpret_cast<Address>(hacker_name##_address());
5475#undef ASSIGN_ELEMENT
5476
5477  // We need to initialize code_pages_ before any on-heap code is allocated to
5478  // make sure we record all code allocations.
5480
  // Allocate the long-lived subsystem objects owned by the isolate.
5483  global_handles_ = new GlobalHandles(this);
5485  bootstrapper_ = new Bootstrapper(this);
5487  load_stub_cache_ = new StubCache(this);
5488  store_stub_cache_ = new StubCache(this);
5491  regexp_stack_ = new RegExpStack();
5492  isolate_data()->set_regexp_static_result_offsets_vector(
5493      jsregexp_static_offsets_vector());
5494  date_cache_ = new DateCache();
5496  bigint_processor_ = bigint::Processor::New(new BigIntPlatform(this));
5497
5499    global_safepoint_ = std::make_unique<GlobalSafepoint>(this);
5500  }
5501
5502  if (v8_flags.lazy_compile_dispatcher) {
5503    lazy_compile_dispatcher_ = std::make_unique<LazyCompileDispatcher>(
5504        this, V8::GetCurrentPlatform(), v8_flags.stack_size);
5505  }
5506#ifdef V8_ENABLE_SPARKPLUG
5507  baseline_batch_compiler_ = new baseline::BaselineBatchCompiler(this);
5508#endif  // V8_ENABLE_SPARKPLUG
5509#ifdef V8_ENABLE_MAGLEV
5510  maglev_concurrent_dispatcher_ = new maglev::MaglevConcurrentDispatcher(this);
5511#endif  // V8_ENABLE_MAGLEV
5512
5513#if USE_SIMULATOR
5514  simulator_data_ = new SimulatorData;
5515#endif
5516
5517  // Enable logging before setting up the heap
5518  v8_file_logger_->SetUp(this);
5519
5520  metrics_recorder_ = std::make_shared<metrics::Recorder>();
5521
5522  {
5523    // Ensure that the thread has a valid stack guard. The v8::Locker object
5524    // will ensure this too, but we don't have to use lockers if we are only
5525    // using one thread.
5526    ExecutionAccess lock(this);
5527    stack_guard()->InitThread(lock);
5528  }
5529
5530  // Create LocalIsolate/LocalHeap for the main thread and set state to Running.
5532
5533  {
5534    IgnoreLocalGCRequests ignore_gc_requests(heap());
5536  }
5537
5538  // Requires a LocalHeap to be set up to register a GC epilogue callback.
5540
5541#if V8_ENABLE_WEBASSEMBLY
5542  wasm_code_look_up_cache_ = new wasm::WasmCodeLookupCache;
5543#endif  // V8_ENABLE_WEBASSEMBLY
5544
5545  // Lock clients_mutex_ in order to prevent shared GCs from other clients
5546  // during deserialization.
5547  std::optional<base::RecursiveMutexGuard> clients_guard;
5548
5549  if (use_shared_space_isolate && !is_shared_space_isolate()) {
5550    clients_guard.emplace(
5551        &use_shared_space_isolate->global_safepoint()->clients_mutex_);
5552    use_shared_space_isolate->global_safepoint()->AppendClient(this);
5553  }
5554
5555  shared_space_isolate_ = use_shared_space_isolate;
5556
5559
  // If the shared heap is currently marking, this client must start with its
  // marking flag set so write barriers behave correctly.
5560  if (use_shared_space_isolate && !is_shared_space_isolate() &&
5561      use_shared_space_isolate->heap()
5563          ->IsMajorMarking()) {
5564    heap_.SetIsMarkingFlag(true);
5565  }
5566
5567  // Set up the object heap.
5571  if (!create_heap_objects) {
5572    // Must be done before deserializing RO space, since RO space may contain
5573    // builtin Code objects which point into the (potentially remapped)
5574    // embedded blob.
5576  }
5577  {
5578    // Must be done before deserializing RO space since the deserialization
5579    // process refers to these data structures.
5581        isolate_group()->external_ref_table());
5582#ifdef V8_COMPRESS_POINTERS
5583    external_pointer_table().Initialize();
5584    external_pointer_table().InitializeSpace(
5585        heap()->read_only_external_pointer_space());
5586    external_pointer_table().AttachSpaceToReadOnlySegment(
5587        heap()->read_only_external_pointer_space());
5588    external_pointer_table().InitializeSpace(
5589        heap()->young_external_pointer_space());
5590    external_pointer_table().InitializeSpace(
5591        heap()->old_external_pointer_space());
5592    cpp_heap_pointer_table().Initialize();
5593    cpp_heap_pointer_table().InitializeSpace(heap()->cpp_heap_pointer_space());
5594#endif  // V8_COMPRESS_POINTERS
5595
5596#ifdef V8_ENABLE_SANDBOX
5597    trusted_pointer_table().Initialize();
5598    trusted_pointer_table().InitializeSpace(heap()->trusted_pointer_space());
5599#endif  // V8_ENABLE_SANDBOX
5600  }
5601  isolate_group()->SetupReadOnlyHeap(this, read_only_snapshot_data, can_rehash);
5604
5605  DCHECK_EQ(this, Isolate::Current());
5606  PerIsolateThreadData* const current_data = CurrentPerIsolateThreadData();
5607  DCHECK_EQ(current_data->isolate(), this);
5608  SetIsolateThreadLocals(this, current_data);
5609
  // String tables are either owned by this isolate or shared with the
  // shared-space isolate (assignment lines lost in extraction).
5610  if (OwnsStringTables()) {
5611    string_table_ = std::make_unique<StringTable>(this);
5612    string_forwarding_table_ = std::make_unique<StringForwardingTable>(this);
5613  } else {
5614    // Only refer to shared string table after attaching to the shared isolate.
5619  }
5620
5621#ifdef V8_EXTERNAL_CODE_SPACE
5622  {
5623    VirtualMemoryCage* code_cage;
5624    if (heap_.code_range()) {
5625      code_cage = heap_.code_range();
5626    } else {
5627      CHECK(jitless_);
5628      // In jitless mode the code space pages will be allocated in the main
5629      // pointer compression cage.
5630      code_cage = isolate_group_->GetPtrComprCage();
5631    }
5632    code_cage_base_ = ExternalCodeCompressionScheme::PrepareCageBaseAddress(
5633        code_cage->base());
5635      // .. now that it's available, initialize the thread-local base.
5636      ExternalCodeCompressionScheme::InitBase(code_cage_base_);
5637    }
5638    CHECK_EQ(ExternalCodeCompressionScheme::base(), code_cage_base_);
5639
5640    // Ensure that ExternalCodeCompressionScheme is applicable to all objects
5641    // stored in the code cage.
5642    using ComprScheme = ExternalCodeCompressionScheme;
5643    Address base = code_cage->base() + kHeapObjectTag;
5644    Address last = base + code_cage->size() - kTaggedSize;
5645    Address upper_bound = base + kPtrComprCageReservationSize - kTaggedSize;
5646    PtrComprCageBase code_cage_base{code_cage_base_};
5647    CHECK_EQ(base, ComprScheme::DecompressTagged(
5648                       code_cage_base, ComprScheme::CompressAny(base)));
5649    CHECK_EQ(last, ComprScheme::DecompressTagged(
5650                       code_cage_base, ComprScheme::CompressAny(last)));
5651    CHECK_EQ(upper_bound,
5652             ComprScheme::DecompressTagged(
5653                 code_cage_base, ComprScheme::CompressAny(upper_bound)));
5654  }
5655#endif  // V8_EXTERNAL_CODE_SPACE
5656
5658
5659#ifdef V8_COMPRESS_POINTERS
  // The shared external-pointer table is created by the owner and borrowed by
  // every other isolate in the group.
5660  if (owns_shareable_data()) {
5661    isolate_data_.shared_external_pointer_table_ = new ExternalPointerTable();
5662    shared_external_pointer_space_ = new ExternalPointerTable::Space();
5663    shared_external_pointer_table().Initialize();
5664    shared_external_pointer_table().InitializeSpace(
5665        shared_external_pointer_space());
5666  } else {
5668    isolate_data_.shared_external_pointer_table_ =
5669        shared_space_isolate()->isolate_data_.shared_external_pointer_table_;
5670    shared_external_pointer_space_ =
5671        shared_space_isolate()->shared_external_pointer_space_;
5672  }
5673#endif  // V8_COMPRESS_POINTERS
5674
5675#ifdef V8_ENABLE_SANDBOX
5676  IsolateGroup::current()->code_pointer_table()->InitializeSpace(
5677      heap()->code_pointer_space());
5678  if (owns_shareable_data()) {
5679    isolate_data_.shared_trusted_pointer_table_ = new TrustedPointerTable();
5680    shared_trusted_pointer_space_ = new TrustedPointerTable::Space();
5681    shared_trusted_pointer_table().Initialize();
5682    shared_trusted_pointer_table().InitializeSpace(
5683        shared_trusted_pointer_space());
5684  } else {
5686    isolate_data_.shared_trusted_pointer_table_ =
5687        shared_space_isolate()->isolate_data_.shared_trusted_pointer_table_;
5688    shared_trusted_pointer_space_ =
5689        shared_space_isolate()->shared_trusted_pointer_space_;
5690  }
5691
5692#endif  // V8_ENABLE_SANDBOX
5693#ifdef V8_ENABLE_LEAPTIERING
5694  IsolateGroup::current()->js_dispatch_table()->InitializeSpace(
5695      heap()->js_dispatch_table_space());
5696#endif  // V8_ENABLE_LEAPTIERING
5697
5698#if V8_ENABLE_WEBASSEMBLY
5700#endif  // V8_ENABLE_WEBASSEMBLY
5701
5702#if defined(V8_ENABLE_ETW_STACK_WALKING)
5703  if (v8_flags.enable_etw_stack_walking ||
5704      v8_flags.enable_etw_by_custom_filter_only) {
5706  }
5707#endif  // defined(V8_ENABLE_ETW_STACK_WALKING)
5708
  // Fall back to the default setup delegate when the embedder supplied none
  // (assignment line lost in extraction).
5709  if (setup_delegate_ == nullptr) {
5711  }
5712
5713  if (!v8_flags.inline_new) heap_.DisableInlineAllocation();
5714
5715  if (!setup_delegate_->SetupHeap(this, create_heap_objects)) {
5716    V8::FatalProcessOutOfMemory(this, "heap object creation");
5717  }
5718
5719  if (create_heap_objects) {
5720    // Terminate the startup and shared heap object caches so we can iterate.
5721    startup_object_cache_.push_back(ReadOnlyRoots(this).undefined_value());
5722    shared_heap_object_cache_.push_back(ReadOnlyRoots(this).undefined_value());
5723  }
5724
5726
5727  // Profiler has to be created after ThreadLocal is initialized
5728  // because it makes use of interrupts.
5730
5731  bootstrapper_->Initialize(create_heap_objects);
5732
  // Builtins are either generated now (from-scratch init, optionally on a
  // concurrent thread) or deserialized from the snapshot.
5733  if (create_heap_objects) {
5735
5736    if (v8_flags.concurrent_builtin_generation) {
5740    }
5741
5742    setup_delegate_->SetupBuiltins(this, true);
5743
5744    if (v8_flags.concurrent_builtin_generation) {
5747    }
5748
5752
5754  } else {
5755    setup_delegate_->SetupBuiltins(this, false);
5756  }
5757
5758  // Initialize custom memcopy and memmove functions (must happen after
5759  // embedded blob setup).
5761
5762  if ((v8_flags.trace_turbo || v8_flags.trace_turbo_graph ||
5763       v8_flags.turbo_profiling) &&
5764      !v8_flags.concurrent_turbo_tracing) {
5765    PrintF("Concurrent recompilation has been disabled for tracing.\n");
5769  }
5770
5771  // Initialize before deserialization since collections may occur,
5772  // clearing/updating ICs (and thus affecting tiering decisions).
5773  tiering_manager_ = new TieringManager(this);
5774
5775  if (!create_heap_objects) {
5776    // If we are deserializing, read the state into the now-empty heap.
5777    SharedHeapDeserializer shared_heap_deserializer(
5778        this, shared_heap_snapshot_data, can_rehash);
5779    shared_heap_deserializer.DeserializeIntoIsolate();
5780
5781    StartupDeserializer startup_deserializer(this, startup_snapshot_data,
5782                                             can_rehash);
5783    startup_deserializer.DeserializeIntoIsolate();
5784  }
5792
5793  delete setup_delegate_;
5794  setup_delegate_ = nullptr;
5795
5797
5798  // Extra steps in the logger after the heap has been set up.
5800
5801#ifdef DEBUG
5802  // Verify that the current heap state (usually deserialized from the snapshot)
5803  // is compatible with the embedded blob. If this DCHECK fails, we've likely
5804  // loaded a snapshot generated by a different V8 version or build-time
5805  // configuration.
5806  if (!IsolateIsCompatibleWithEmbeddedBlob(this)) {
5807    FATAL(
5808        "The Isolate is incompatible with the embedded blob. This is usually "
5809        "caused by incorrect usage of mksnapshot. When generating custom "
5810        "snapshots, embedders must ensure they pass the same flags as during "
5811        "the V8 build process (e.g.: --turbo-instruction-scheduling).");
5812  }
5813#endif  // DEBUG
5814
5815  if (v8_flags.print_builtin_code) builtins()->PrintBuiltinCode();
5816  if (v8_flags.print_builtin_size) builtins()->PrintBuiltinSize();
5817
5818  // Finish initialization of ThreadLocal after deserialization is done.
5821
5822  // Quiet the heap NaN if needed on target platform.
5823  if (!create_heap_objects)
5824    Assembler::QuietNaN(ReadOnlyRoots(this).nan_value());
5825
5826  if (v8_flags.trace_turbo) {
5827    // Create an empty file.
5828    std::ofstream(GetTurboCfgFileName(this).c_str(), std::ios_base::trunc);
5829  }
5830
5832      *factory()->undefined_value();
5833
5834  {
5835    HandleScope scope(this);
5837  }
5838
5839  initialized_from_snapshot_ = !create_heap_objects;
5840
5841  if (v8_flags.stress_sampling_allocation_profiler > 0) {
5842    uint64_t sample_interval = v8_flags.stress_sampling_allocation_profiler;
5843    int stack_depth = 128;
5844    v8::HeapProfiler::SamplingFlags sampling_flags =
5847        sample_interval, stack_depth, sampling_flags);
5848  }
5849
5850  if (create_heap_objects && v8_flags.profile_deserialization) {
5851    double ms = timer.Elapsed().InMillisecondsF();
5852    PrintF("[Initializing isolate from scratch took %0.3f ms]\n", ms);
5853  }
5854
5856    SLOW_DCHECK(SharedFunctionInfo::UniqueIdsAreUnique(this));
5857  }
5858
5859  if (v8_flags.harmony_struct) {
5860    // Initialize or get the struct type registry shared by all isolates.
5863          std::make_unique<SharedStructTypeRegistry>();
5864    } else {
5866    }
5867  }
5868
5869#ifdef V8_ENABLE_WEBASSEMBLY
5870#if V8_STATIC_ROOTS_BOOL
5871  // Protect the payload of wasm null.
5872  if (!page_allocator()->DecommitPages(
5873          reinterpret_cast<void*>(factory()->wasm_null()->payload()),
5875    V8::FatalProcessOutOfMemory(this, "decommitting WasmNull payload");
5876  }
5877#endif  // V8_STATIC_ROOTS_BOOL
5878#endif  // V8_ENABLE_WEBASSEMBLY
5879
5880  // Isolate initialization allocates long living objects that should be
5881  // pretenured to old space.
5882  DCHECK_IMPLIES(heap()->new_space(), heap()->new_space()->Size() == 0);
5883  DCHECK_IMPLIES(heap()->new_lo_space(), heap()->new_lo_space()->Size() == 0);
5884  DCHECK_EQ(heap()->gc_count(), 0);
5885
5886#if defined(V8_ENABLE_ETW_STACK_WALKING)
5887  if (v8_flags.enable_etw_stack_walking ||
5888      v8_flags.enable_etw_by_custom_filter_only) {
5890  }
5891#endif  // defined(V8_ENABLE_ETW_STACK_WALKING)
5892
5893#if defined(V8_USE_PERFETTO)
5895#endif  // defined(V8_USE_PERFETTO)
5896
5897  initialized_ = true;
5898
5899  return true;
5900}
5901
// Makes this isolate the current one for the calling thread, maintaining the
// entry stack so nested/re-entrant Enter/Exit pairs restore the previous
// isolate. NOTE(review): the signature line (5902) and lines 5904, 5908,
// 5930, 5937 were lost in extraction — presumably Isolate::Enter().
5903  Isolate* current_isolate = nullptr;
5905
5906#ifdef V8_ENABLE_CHECKS
5907  // No different thread must have entered the isolate. Allow re-entering.
5909  if (current_thread_id_.IsValid()) {
5910    CHECK_EQ(current_thread_id_, thread_id);
5911  } else {
5912    CHECK_EQ(0, current_thread_counter_);
5913    current_thread_id_ = thread_id;
5914  }
5915  current_thread_counter_++;
5916#endif
5917
5918  // Set the stack start for the main thread that enters the isolate.
5919  heap()->SetStackStart();
5920
5921  if (current_data != nullptr) {
5922    current_isolate = current_data->isolate_;
5923    DCHECK_NOT_NULL(current_isolate);
5924    if (current_isolate == this) {
5925      DCHECK(Current() == this);
5926      auto entry_stack = entry_stack_.load();
5927      DCHECK_NOT_NULL(entry_stack);
5928      DCHECK(entry_stack->previous_thread_data == nullptr ||
5929             entry_stack->previous_thread_data->thread_id() ==
5931      // Same thread re-enters the isolate, no need to re-init anything.
5932      entry_stack->entry_count++;
5933      return;
5934    }
5935  }
5936
  // NOTE(review): line 5937 (presumably obtaining/creating this thread's
  // PerIsolateThreadData into `data`) was lost in extraction.
5938  DCHECK_NOT_NULL(data);
5939  DCHECK(data->isolate_ == this);
5940
  // Push a new entry so Exit() can restore the previous isolate/thread data.
5941  EntryStackItem* item =
5942      new EntryStackItem(current_data, current_isolate, entry_stack_);
5943  entry_stack_ = item;
5944
5945  SetIsolateThreadLocals(this, data);
5946
5947  // In case it's the first time some thread enters the isolate.
5948  set_thread_id(data->thread_id());
5949}
5950
// Undoes a matching Enter(): decrements the entry count and, when it reaches
// zero, pops the entry stack and restores the previously current isolate and
// per-thread data. NOTE(review): the signature line (5951) and lines 5956,
// 5966-5967 were lost in extraction — presumably Isolate::Exit().
5952  auto current_entry_stack = entry_stack_.load();
5953  DCHECK_NOT_NULL(current_entry_stack);
5954  DCHECK(current_entry_stack->previous_thread_data == nullptr ||
5955         current_entry_stack->previous_thread_data->thread_id() ==
5957
5958#ifdef V8_ENABLE_CHECKS
5959  // The current thread must have entered the isolate.
5960  CHECK_EQ(current_thread_id_, ThreadId::Current());
5961  if (--current_thread_counter_ == 0) current_thread_id_ = ThreadId::Invalid();
5962#endif
5963
  // Still nested inside an outer Enter() on this thread: nothing to restore.
5964  if (--current_entry_stack->entry_count > 0) return;
5965
5968
5969  // Pop the stack.
5970  entry_stack_ = current_entry_stack->previous_item;
5971
5972  PerIsolateThreadData* previous_thread_data =
5973      current_entry_stack->previous_thread_data;
5974  Isolate* previous_isolate = current_entry_stack->previous_isolate;
5975
5976  delete current_entry_stack;
5977
5978  // Reinit the current thread for the isolate it was running before this one.
5979  SetIsolateThreadLocals(previous_isolate, previous_thread_data);
5980}
5981
5986
5987std::unique_ptr<PersistentHandles> Isolate::NewPersistentHandles() {
5988 return std::make_unique<PersistentHandles>(this);
5989}
5990
5992 if (v8_flags.trace_turbo_stack_accesses) {
5993 StdoutStream os;
5994 uint64_t total_loads = 0;
5995 uint64_t total_stores = 0;
5996 os << "=== Stack access counters === " << std::endl;
5997 if (!stack_access_count_map) {
5998 os << "No stack accesses in optimized/wasm functions found.";
5999 } else {
6000 DCHECK_NOT_NULL(stack_access_count_map);
6001 os << "Number of optimized/wasm stack-access functions: "
6002 << stack_access_count_map->size() << std::endl;
6003 for (auto it = stack_access_count_map->cbegin();
6004 it != stack_access_count_map->cend(); it++) {
6005 std::string function_name((*it).first);
6006 std::pair<uint64_t, uint64_t> per_func_count = (*it).second;
6007 os << "Name: " << function_name << ", Loads: " << per_func_count.first
6008 << ", Stores: " << per_func_count.second << std::endl;
6009 total_loads += per_func_count.first;
6010 total_stores += per_func_count.second;
6011 }
6012 os << "Total Loads: " << total_loads << ", Total Stores: " << total_stores
6013 << std::endl;
6014 stack_access_count_map = nullptr;
6015 }
6016 }
6017 if (turbo_statistics_ != nullptr) {
6018 DCHECK(v8_flags.turbo_stats || v8_flags.turbo_stats_nvp);
6019 StdoutStream os;
6020 if (v8_flags.turbo_stats) {
6021 AsPrintableStatistics ps = {"Turbofan", *turbo_statistics_, false};
6022 os << ps << std::endl;
6023 }
6024 if (v8_flags.turbo_stats_nvp) {
6025 AsPrintableStatistics ps = {"Turbofan", *turbo_statistics_, true};
6026 os << ps << std::endl;
6027 }
6028 turbo_statistics_.reset();
6029 }
6030
6031#ifdef V8_ENABLE_MAGLEV
6032 if (maglev_statistics_ != nullptr) {
6033 DCHECK(v8_flags.maglev_stats || v8_flags.maglev_stats_nvp);
6034 StdoutStream os;
6035 if (v8_flags.maglev_stats) {
6036 AsPrintableStatistics ps = {"Maglev", *maglev_statistics_, false};
6037 os << ps << std::endl;
6038 }
6039 if (v8_flags.maglev_stats_nvp) {
6040 AsPrintableStatistics ps = {"Maglev", *maglev_statistics_, true};
6041 os << ps << std::endl;
6042 }
6043 maglev_statistics_.reset();
6044 }
6045#endif // V8_ENABLE_MAGLEV
6046
6047#if V8_ENABLE_WEBASSEMBLY
6048 // TODO(7424): There is no public API for the {WasmEngine} yet. So for now we
6049 // just dump and reset the engines statistics together with the Isolate.
6050 if (v8_flags.turbo_stats_wasm) {
6052 }
6053#endif // V8_ENABLE_WEBASSEMBLY
6054#if V8_RUNTIME_CALL_STATS
6055 if (V8_UNLIKELY(TracingFlags::runtime_stats.load(std::memory_order_relaxed) ==
6057 counters()->worker_thread_runtime_call_stats()->AddToMainTable(
6058 counters()->runtime_call_stats());
6059 counters()->runtime_call_stats()->Print();
6060 counters()->runtime_call_stats()->Reset();
6061 }
6062#endif // V8_RUNTIME_CALL_STATS
6063}
6064
6066 if (BasicBlockProfiler::Get()->HasData(this)) {
6067 if (v8_flags.turbo_profiling_output) {
6068 FILE* f = std::fopen(v8_flags.turbo_profiling_output, "w");
6069 if (f == nullptr) {
6070 FATAL("Unable to open file \"%s\" for writing.\n",
6071 v8_flags.turbo_profiling_output.value());
6072 }
6073 OFStream pgo_stream(f);
6074 BasicBlockProfiler::Get()->Log(this, pgo_stream);
6075 } else {
6076 StdoutStream out;
6077 BasicBlockProfiler::Get()->Print(this, out);
6078 }
6080 } else {
6081 // Only log builtins PGO data if v8 was built with
6082 // v8_enable_builtins_profiling=true
6083 CHECK_NULL(v8_flags.turbo_profiling_output);
6084 }
6085}
6086
6092
6095 DisallowGarbageCollection no_recursive_gc;
6097 }
6098#ifdef V8_ENABLE_MAGLEV
6099 if (maglev_concurrent_dispatcher()->is_enabled()) {
6100 DisallowGarbageCollection no_recursive_gc;
6101 maglev_concurrent_dispatcher()->Flush(behavior);
6102 }
6103#endif
6104}
6105
6106std::shared_ptr<CompilationStatistics> Isolate::GetTurboStatistics() {
6107 if (turbo_statistics_ == nullptr) {
6109 }
6110 return turbo_statistics_;
6111}
6112
6113#ifdef V8_ENABLE_MAGLEV
6114
6115std::shared_ptr<CompilationStatistics> Isolate::GetMaglevStatistics() {
6116 if (maglev_statistics_ == nullptr) {
6117 maglev_statistics_.reset(new CompilationStatistics());
6118 }
6119 return maglev_statistics_;
6120}
6121
6122#endif // V8_ENABLE_MAGLEV
6123
6125 if (code_tracer() == nullptr) set_code_tracer(new CodeTracer(id()));
6126 return code_tracer();
6127}
6128
6130 // TODO(v8:7700): Update this predicate for a world with multiple tiers.
6131 return (v8_flags.turbofan || v8_flags.maglev) && !serializer_enabled_ &&
6133}
6134
6137 DCHECK(IsCode(*code, cage_base) || IsTrustedByteArray(*code, cage_base));
6139}
6140
6144
6147 v8_flags.log_function_events ||
6149}
6150
6152 return v8_flags.compact_code_space && logger()->allows_code_compaction();
6153}
6154
6156 return
6157 // Static conditions.
6158 v8_flags.trace_deopt || v8_flags.trace_turbo ||
6159 v8_flags.trace_turbo_graph || v8_flags.turbo_profiling ||
6160 v8_flags.print_maglev_code || v8_flags.perf_prof || v8_flags.log_maps ||
6161 v8_flags.log_ic || v8_flags.log_function_events ||
6162 v8_flags.heap_snapshot_on_oom ||
6163 // Dynamic conditions; changing any of these conditions triggers source
6164 // position collection for the entire heap
6165 // (CollectSourcePositionsForAllBytecodeArrays).
6167}
6168
6170 DCHECK(IsUndefined(value, this) || IsArrayList(value));
6171 heap()->set_feedback_vectors_for_profiling_tools(value);
6172}
6173
6175 if (!IsUndefined(heap()->feedback_vectors_for_profiling_tools(), this)) {
6176 // Already initialized, return early.
6177 DCHECK(IsArrayList(heap()->feedback_vectors_for_profiling_tools()));
6178 return;
6179 }
6180
6181 // Collect existing feedback vectors.
6183
6184 {
6185 HeapObjectIterator heap_iterator(heap());
6186 for (Tagged<HeapObject> current_obj = heap_iterator.Next();
6187 !current_obj.is_null(); current_obj = heap_iterator.Next()) {
6188 if (!IsFeedbackVector(current_obj)) continue;
6189
6190 Tagged<FeedbackVector> vector = Cast<FeedbackVector>(current_obj);
6191 Tagged<SharedFunctionInfo> shared = vector->shared_function_info();
6192
6193 // No need to preserve the feedback vector for non-user-visible functions.
6194 if (!shared->IsSubjectToDebugging()) continue;
6195
6196 vectors.emplace_back(vector, this);
6197 }
6198 }
6199
6200 // Add collected feedback vectors to the root list lest we lose them to GC.
6202 ArrayList::New(this, static_cast<int>(vectors.size()));
6203 for (const auto& vector : vectors) list = ArrayList::Add(this, list, vector);
6205}
6206
6208 if (date_cache != date_cache_) {
6209 delete date_cache_;
6210 }
6212}
6213
6215 Tagged<JSObject> object) {
6216 Tagged<Map> metamap = object->map(this)->map(this);
6217 Tagged<NativeContext> native_context = metamap->native_context();
6218 if (native_context->initial_object_prototype() == object) {
6220 } else if (native_context->initial_array_prototype() == object) {
6222 } else if (native_context->initial_string_prototype() == object) {
6224 }
6225 return KnownPrototype::kNone;
6226}
6227
6230 Tagged<Map> metamap = object->map(this)->map(this);
6231 // Filter out native-context independent objects.
6232 if (metamap == ReadOnlyRoots(this).meta_map()) return false;
6233 Tagged<NativeContext> native_context = metamap->native_context();
6234 return native_context->get(index) == object;
6235}
6236
6238 DirectHandle<JSObject> object) {
6240 if (!object->map()->is_prototype_map()) return;
6241 if (!Protectors::IsNoElementsIntact(this)) return;
6243 if (obj_type == KnownPrototype::kNone) return;
6246 } else if (obj_type == KnownPrototype::kArray) {
6248 }
6249 Protectors::InvalidateNoElements(this);
6250}
6251
6260
6262 DirectHandle<JSObject> object) {
6263 if ((IsJSTypedArrayPrototype(*object) || IsJSTypedArray(*object)) &&
6264 Protectors::IsTypedArrayLengthLookupChainIntact(this)) {
6265 Protectors::InvalidateTypedArrayLengthLookupChain(this);
6266 }
6267}
6268
6270 DirectHandle<JSObject> object) {
6271 // Setting the __proto__ of TypedArray constructor could change TypedArray's
6272 // @@species. So we need to invalidate the @@species protector.
6273 if (IsTypedArrayConstructor(*object) &&
6274 Protectors::IsTypedArraySpeciesLookupChainIntact(this)) {
6275 Protectors::InvalidateTypedArraySpeciesLookupChain(this);
6276 }
6277}
6278
6280 DirectHandle<JSObject> object) {
6281 if (!Protectors::IsNumberStringNotRegexpLikeIntact(this)) {
6282 return;
6283 }
6284 // We need to protect the prototype chain of `Number.prototype` and
6285 // `String.prototype`.
6286 // Since `Object.prototype.__proto__` is not writable, we can assume it
6287 // doesn't occur here. We detect `Number.prototype` and `String.prototype` by
6288 // checking for a prototype that is a JSPrimitiveWrapper. This is a safe
6289 // approximation. Using JSPrimitiveWrapper as prototype should be
6290 // sufficiently rare.
6291 DCHECK(!IsJSObjectPrototype(*object));
6292 if (object->map()->is_prototype_map() && (IsJSPrimitiveWrapper(*object))) {
6293 Protectors::InvalidateNumberStringNotRegexpLike(this);
6294 }
6295}
6296
6298 DirectHandle<JSObject> object, DirectHandle<Object> new_prototype) {
6299 if (!Protectors::IsStringWrapperToPrimitiveIntact(this)) {
6300 return;
6301 }
6302
6303 // We can have a custom @@toPrimitive on a string wrapper also if we subclass
6304 // String and the subclass (or one of its subclasses) defines its own
6305 // @@toPrimitive. Thus we invalidate the protector whenever we detect
6306 // subclassing String - it should be reasonably rare.
6307 if (IsStringWrapper(*object) || IsStringWrapper(*new_prototype)) {
6308 Protectors::InvalidateStringWrapperToPrimitive(this);
6309 }
6310}
6311
6313 base::RandomNumberGenerator** rng, int seed) {
6314 if (*rng == nullptr) {
6315 if (seed != 0) {
6316 *rng = new base::RandomNumberGenerator(seed);
6317 } else {
6318 *rng = new base::RandomNumberGenerator();
6319 }
6320 }
6321 return *rng;
6322}
6323
6325 // TODO(bmeurer) Initialized lazily because it depends on flags; can
6326 // be fixed once the default isolate cleanup is done.
6328}
6329
6331 if (fuzzer_rng_ == nullptr) {
6332 int64_t seed = v8_flags.fuzzer_random_seed;
6333 if (seed == 0) {
6335 }
6336
6338 }
6339
6340 return fuzzer_rng_;
6341}
6342
6344 int hash;
6345 int attempts = 0;
6346 do {
6347 hash = random_number_generator()->NextInt() & mask;
6348 } while (hash == 0 && attempts++ < 30);
6349 return hash != 0 ? hash : 1;
6350}
6351
#ifdef DEBUG
// Debug-only: defines one `Isolate::<name>_debug_offset_` constant per
// isolate field listed in ISOLATE_INIT_LIST / ISOLATE_INIT_ARRAY_LIST,
// holding the field's byte offset within Isolate (via OFFSET_OF).
#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
  const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif
6359
6361 Handle<String> name,
6362 bool private_symbol) {
6365 Cast<RegisteredSymbolTable>(root_handle(dictionary_index));
6366 InternalIndex entry = dictionary->FindEntry(this, key);
6367 DirectHandle<Symbol> symbol;
6368 if (entry.is_not_found()) {
6369 symbol =
6370 private_symbol ? factory()->NewPrivateSymbol() : factory()->NewSymbol();
6371 symbol->set_description(*key);
6372 dictionary = RegisteredSymbolTable::Add(this, dictionary, key, symbol);
6373
6374 switch (dictionary_index) {
6375 case RootIndex::kPublicSymbolTable:
6376 symbol->set_is_in_public_symbol_table(true);
6377 heap()->set_public_symbol_table(*dictionary);
6378 break;
6379 case RootIndex::kApiSymbolTable:
6380 heap()->set_api_symbol_table(*dictionary);
6381 break;
6382 case RootIndex::kApiPrivateSymbolTable:
6383 heap()->set_api_private_symbol_table(*dictionary);
6384 break;
6385 default:
6386 UNREACHABLE();
6387 }
6388 } else {
6389 symbol =
6390 DirectHandle<Symbol>(Cast<Symbol>(dictionary->ValueAt(entry)), this);
6391 }
6392 return symbol;
6393}
6394
6401
6409
6416
6423
6426 DCHECK(thread_local_top()->CallDepthIsZero());
6427
6428 bool perform_checkpoint =
6430 microtask_queue->microtasks_policy() == v8::MicrotasksPolicy::kAuto &&
6432
6433 v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this);
6434 if (perform_checkpoint) microtask_queue->PerformCheckpoint(isolate);
6435
6436 if (call_completed_callbacks_.empty()) return;
6437 // Fire callbacks. Increase call depth to prevent recursive callbacks.
6439 std::vector<CallCompletedCallback> callbacks(call_completed_callbacks_);
6440 for (auto& callback : callbacks) {
6441 callback(reinterpret_cast<v8::Isolate*>(this));
6442 }
6443}
6444
6445#ifdef V8_ENABLE_WEBASSEMBLY
6446void Isolate::WasmInitJSPIFeature() {
6447 if (v8_flags.wasm_jitless) return;
6448
6449 if (isolate_data_.active_stack() == nullptr) {
6451 stack->jmpbuf()->state = wasm::JumpBuffer::Active;
6452 this->wasm_stacks().emplace_back(stack);
6453 stack->set_index(0);
6454 if (v8_flags.trace_wasm_stack_switching) {
6455 PrintF("Set up native stack object (limit: %p, base: %p)\n",
6456 stack->jslimit(), reinterpret_cast<void*>(stack->base()));
6457 }
6458 HandleScope scope(this);
6460 }
6461}
6462#endif
6463
6465 if (Protectors::IsPromiseHookIntact(this)) {
6466 HandleScope scope(this);
6467 Protectors::InvalidatePromiseHook(this);
6468 }
6469}
6470
6482
6483namespace {
6484
6485MaybeDirectHandle<JSPromise> NewRejectedPromise(
6486 Isolate* isolate, v8::Local<v8::Context> api_context,
6487 DirectHandle<Object> exception) {
6489 API_ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, resolver,
6490 v8::Promise::Resolver::New(api_context),
6492
6494 isolate, resolver->Reject(api_context, v8::Utils::ToLocal(exception)),
6496
6497 v8::Local<v8::Promise> promise = resolver->GetPromise();
6498 return v8::Utils::OpenDirectHandle(*promise);
6499}
6500
6501} // namespace
6502
6504 MaybeDirectHandle<Script> maybe_referrer, Handle<Object> specifier,
6505 ModuleImportPhase phase,
6506 MaybeDirectHandle<Object> maybe_import_options_argument) {
6508 v8::Local<v8::Context> api_context = v8::Utils::ToLocal(native_context());
6510 DirectHandle<Object> exception =
6511 factory()->NewError(error_function(), MessageTemplate::kUnsupported);
6512 return NewRejectedPromise(this, api_context, exception);
6513 }
6514
6515 DirectHandle<String> specifier_str;
6516 MaybeDirectHandle<String> maybe_specifier = Object::ToString(this, specifier);
6517 if (!maybe_specifier.ToHandle(&specifier_str)) {
6520 }
6523 return NewRejectedPromise(this, api_context, exception);
6524 }
6526
6527 v8::Local<v8::Promise> promise;
6528 DirectHandle<FixedArray> import_attributes_array;
6529 if (!GetImportAttributesFromArgument(maybe_import_options_argument)
6530 .ToHandle(&import_attributes_array)) {
6533 }
6536 return NewRejectedPromise(this, api_context, exception);
6537 }
6538 DirectHandle<FixedArray> host_defined_options;
6539 DirectHandle<Object> resource_name;
6540 if (maybe_referrer.is_null()) {
6541 host_defined_options = factory()->empty_fixed_array();
6542 resource_name = factory()->null_value();
6543 } else {
6544 DirectHandle<Script> referrer = maybe_referrer.ToHandleChecked();
6545 host_defined_options =
6546 direct_handle(referrer->host_defined_options(), this);
6547 resource_name = direct_handle(referrer->name(), this);
6548 }
6549
6550 switch (phase) {
6552 // TODO(42204365): Deprecate HostImportModuleDynamicallyCallback once
6553 // HostImportModuleWithPhaseDynamicallyCallback is stable.
6555 this, promise,
6557 api_context, v8::Utils::ToLocal(host_defined_options),
6558 v8::Utils::ToLocal(resource_name),
6559 v8::Utils::ToLocal(specifier_str),
6560 ToApiHandle<v8::FixedArray>(import_attributes_array)),
6562 break;
6564 CHECK(v8_flags.js_source_phase_imports);
6567 this, promise,
6569 api_context, v8::Utils::ToLocal(host_defined_options),
6570 v8::Utils::ToLocal(resource_name),
6571 v8::Utils::ToLocal(specifier_str), phase,
6572 ToApiHandle<v8::FixedArray>(import_attributes_array)),
6574 break;
6575 default:
6576 UNREACHABLE();
6577 }
6578
6579 return v8::Utils::OpenDirectHandle(*promise);
6580}
6581
6583 MaybeDirectHandle<Object> maybe_import_options_argument) {
6584 DirectHandle<FixedArray> import_attributes_array =
6585 factory()->empty_fixed_array();
6586 DirectHandle<Object> import_options_argument;
6587 if (!maybe_import_options_argument.ToHandle(&import_options_argument) ||
6588 IsUndefined(*import_options_argument)) {
6589 return import_attributes_array;
6590 }
6591
6592 // The parser shouldn't have allowed the second argument to import() if
6593 // the flag wasn't enabled.
6594 DCHECK(v8_flags.harmony_import_attributes);
6595
6596 if (!IsJSReceiver(*import_options_argument)) {
6597 this->Throw(
6598 *factory()->NewTypeError(MessageTemplate::kNonObjectImportArgument));
6600 }
6601
6602 DirectHandle<JSReceiver> import_options_argument_receiver =
6603 Cast<JSReceiver>(import_options_argument);
6604
6605 DirectHandle<Object> import_attributes_object;
6606
6607 if (v8_flags.harmony_import_attributes) {
6608 DirectHandle<Name> with_key = factory()->with_string();
6609 if (!JSReceiver::GetProperty(this, import_options_argument_receiver,
6610 with_key)
6611 .ToHandle(&import_attributes_object)) {
6612 // This can happen if the property has a getter function that throws
6613 // an error.
6615 }
6616 }
6617
6618 // If there is no 'with' option in the options bag, it's not an error. Just do
6619 // the import() as if no attributes were provided.
6620 if (IsUndefined(*import_attributes_object)) return import_attributes_array;
6621
6622 if (!IsJSReceiver(*import_attributes_object)) {
6623 this->Throw(
6624 *factory()->NewTypeError(MessageTemplate::kNonObjectAttributesOption));
6626 }
6627
6628 DirectHandle<JSReceiver> import_attributes_object_receiver =
6629 Cast<JSReceiver>(import_attributes_object);
6630
6631 DirectHandle<FixedArray> attribute_keys;
6632 if (!KeyAccumulator::GetKeys(this, import_attributes_object_receiver,
6635 .ToHandle(&attribute_keys)) {
6636 // This happens if the attributes object is a Proxy whose ownKeys() or
6637 // getOwnPropertyDescriptor() trap throws.
6639 }
6640
6641 bool has_non_string_attribute = false;
6642
6643 // The attributes will be passed to the host in the form: [key1,
6644 // value1, key2, value2, ...].
6645 constexpr size_t kAttributeEntrySizeForDynamicImport = 2;
6646 import_attributes_array = factory()->NewFixedArray(static_cast<int>(
6647 attribute_keys->length() * kAttributeEntrySizeForDynamicImport));
6648 for (int i = 0; i < attribute_keys->length(); i++) {
6649 DirectHandle<String> attribute_key(Cast<String>(attribute_keys->get(i)),
6650 this);
6651 DirectHandle<Object> attribute_value;
6652 if (!Object::GetPropertyOrElement(this, import_attributes_object_receiver,
6653 attribute_key)
6654 .ToHandle(&attribute_value)) {
6655 // This can happen if the property has a getter function that throws
6656 // an error.
6658 }
6659
6660 if (!IsString(*attribute_value)) {
6661 has_non_string_attribute = true;
6662 }
6663
6664 import_attributes_array->set((i * kAttributeEntrySizeForDynamicImport),
6665 *attribute_key);
6666 import_attributes_array->set((i * kAttributeEntrySizeForDynamicImport) + 1,
6667 *attribute_value);
6668 }
6669
6670 if (has_non_string_attribute) {
6671 this->Throw(*factory()->NewTypeError(
6672 MessageTemplate::kNonStringImportAttributeValue));
6674 }
6675
6676 return import_attributes_array;
6677}
6678
6680
6685
6690
6693 CHECK(IsTheHole(module->import_meta(kAcquireLoad), this));
6696 v8::Local<v8::Context> api_context = v8::Utils::ToLocal(native_context());
6698 api_context, Utils::ToLocal(Cast<Module>(module)),
6699 v8::Local<v8::Object>::Cast(v8::Utils::ToLocal(import_meta)));
6700 if (has_exception()) return {};
6701 }
6702 return import_meta;
6703}
6704
6709
6714
6718 DirectHandle<Object> exception =
6719 factory()->NewError(error_function(), MessageTemplate::kUnsupported);
6720 Throw(*exception);
6721 return kNullMaybeHandle;
6722 }
6723
6724 v8::Local<v8::Context> api_context = v8::Utils::ToLocal(native_context());
6725 v8::Local<v8::Context> shadow_realm_context;
6727 this, shadow_realm_context,
6730 DirectHandle<Context> shadow_realm_context_handle =
6731 v8::Utils::OpenDirectHandle(*shadow_realm_context);
6732 DCHECK(IsNativeContext(*shadow_realm_context_handle));
6733 shadow_realm_context_handle->set_scope_info(
6734 ReadOnlyRoots(this).shadow_realm_scope_info());
6735 return Cast<NativeContext>(shadow_realm_context_handle);
6736}
6737
6740 DirectHandle<JSArray> sites) {
6741 v8::Local<v8::Context> api_context = Utils::ToLocal(context);
6742
6745 this, stack,
6746 prepare_stack_trace_callback_(api_context, Utils::ToLocal(error),
6747 Utils::ToLocal(sites)),
6749 return Utils::OpenDirectHandle(*stack);
6750}
6751
6753 return is_js_api_wrapper_native_error_callback_ != nullptr &&
6754 is_js_api_wrapper_native_error_callback_(
6755 reinterpret_cast<v8::Isolate*>(this), Utils::ToLocal(obj));
6756}
6757
6765
6766const char* Isolate::GetExternallyCompiledFilename(int index) const {
6767 if (embedded_file_writer_ != nullptr) {
6769 }
6770 return "";
6771}
6772
6774 if (embedded_file_writer_ != nullptr) {
6776 }
6777 return 0;
6778}
6779
6786
6787#if defined(V8_OS_WIN64)
6788void Isolate::SetBuiltinUnwindData(
6789 Builtin builtin,
6790 const win64_unwindinfo::BuiltinUnwindInfo& unwinding_info) {
6791 if (embedded_file_writer_ != nullptr) {
6792 embedded_file_writer_->SetBuiltinUnwindData(builtin, unwinding_info);
6793 }
6794}
6795#endif // V8_OS_WIN64
6796
6800
6804
6805#if defined(V8_ENABLE_ETW_STACK_WALKING)
// Installs the embedder callback that decides, per URL, whether an ETW
// session should be enabled. If the URL2 variant below is also installed,
// that one takes precedence (see RunFilterETWSessionByURLCallback).
void Isolate::SetFilterETWSessionByURLCallback(
    FilterETWSessionByURLCallback callback) {
  filter_etw_session_by_url_callback_ = callback;
}
// Installs the newer ETW filtering callback, which returns a full
// FilterETWSessionByURLResult. It is preferred over the single-result
// variant when both are installed (see RunFilterETWSessionByURLCallback).
void Isolate::SetFilterETWSessionByURL2Callback(
    FilterETWSessionByURL2Callback callback) {
  filter_etw_session_by_url2_callback_ = callback;
}
6814
6815FilterETWSessionByURLResult Isolate::RunFilterETWSessionByURLCallback(
6816 const std::string& etw_filter_payload) {
6817 if (context().is_null()) {
6818 // No context to retrieve the current URL.
6819 return {false, false};
6820 }
6821 v8::Local<v8::Context> context = Utils::ToLocal(native_context());
6822
6823 if (filter_etw_session_by_url2_callback_) {
6824 return filter_etw_session_by_url2_callback_(context, etw_filter_payload);
6825 } else if (filter_etw_session_by_url_callback_) {
6826 bool enable_etw_tracing =
6827 filter_etw_session_by_url_callback_(context, etw_filter_payload);
6828 return {enable_etw_tracing, v8_flags.interpreted_frames_native_stack};
6829 }
6830
6831 // If no callback is installed, by default enable etw tracing but disable
6832 // tracing of interpreter stack frames.
6833 return {true, v8_flags.interpreted_frames_native_stack};
6834}
6835
6836#endif // V8_ENABLE_ETW_STACK_WALKING
6837
6844
6849
6850void Isolate::RunReleaseCppHeapCallback(std::unique_ptr<v8::CppHeap> cpp_heap) {
6852 release_cpp_heap_callback_(std::move(cpp_heap));
6853 }
6854}
6855
6860
6863 DirectHandle<Object> parent) {
6864#ifdef V8_ENABLE_JAVASCRIPT_PROMISE_HOOKS
6865 if (HasContextPromiseHooks()) {
6866 native_context()->RunPromiseHook(type, promise, parent);
6867 }
6868#endif
6870 RunPromiseHook(type, promise, parent);
6871 }
6872}
6873
6876 DirectHandle<Object> parent) {
6877 if (!HasIsolatePromiseHooks()) return;
6878 DCHECK(promise_hook_ != nullptr);
6879 promise_hook_(type, v8::Utils::PromiseToLocal(promise),
6880 v8::Utils::ToLocal(parent));
6881}
6882
6884 DirectHandle<JSPromise> parent) {
6885 DCHECK(!promise->has_async_task_id());
6887 if (HasAsyncEventDelegate()) {
6891 promise->set_async_task_id(current_async_task_id_);
6893 promise->async_task_id(), false);
6894 }
6895}
6896
6898 if (!HasAsyncEventDelegate()) return;
6901 for (JavaScriptStackFrameIterator frame_it(this); !frame_it.done();
6902 frame_it.Advance()) {
6903 std::vector<Handle<SharedFunctionInfo>> infos;
6904 frame_it.frame()->GetFunctions(&infos);
6905 for (auto it = infos.rbegin(); it != infos.rend(); ++it) {
6907 if (info->HasBuiltinId()) {
6908 // We should not report PromiseThen and PromiseCatch which is called
6909 // indirectly, e.g. Promise.all calls Promise.then internally.
6910 switch (info->builtin_id()) {
6911 case Builtin::kPromisePrototypeCatch:
6912 action_type = Just(debug::kDebugPromiseCatch);
6913 continue;
6914 case Builtin::kPromisePrototypeFinally:
6915 action_type = Just(debug::kDebugPromiseFinally);
6916 continue;
6917 case Builtin::kPromisePrototypeThen:
6918 action_type = Just(debug::kDebugPromiseThen);
6919 continue;
6920 default:
6921 return;
6922 }
6923 }
6924 if (info->IsUserJavaScript() && action_type.IsJust()) {
6925 DCHECK(!promise->has_async_task_id());
6928 promise->set_async_task_id(current_async_task_id_);
6930 promise->async_task_id(),
6931 debug()->IsBlackboxed(info));
6932 }
6933 return;
6934 }
6935 }
6936}
6937
6940 factory()->undefined_value());
6941 if (HasAsyncEventDelegate()) {
6942 if (promise->has_async_task_id()) {
6944 debug::kDebugWillHandle, promise->async_task_id(), false);
6945 }
6946 }
6947}
6948
6951 factory()->undefined_value());
6952 if (HasAsyncEventDelegate()) {
6953 if (promise->has_async_task_id()) {
6955 debug::kDebugDidHandle, promise->async_task_id(), false);
6956 }
6957 }
6958}
6959
6966
6969 // This performs cleanup for when RunMicrotasks (in
6970 // builtins-microtask-queue-gen.cc) is aborted via a termination exception.
6971 // This has to be kept in sync with the code in said file. Currently this
6972 // includes:
6973 //
6974 // (1) Resetting the |current_microtask| slot on the Isolate to avoid leaking
6975 // memory (and also to keep |current_microtask| not being undefined as an
6976 // indicator that we're currently pumping the microtask queue).
6977 // (2) Empty the promise stack to avoid leaking memory.
6978 // (3) If the |current_microtask| is a promise reaction or resolve thenable
6979 // job task, then signal the async event delegate and debugger that the
6980 // microtask finished running.
6981 //
6982
6983 // Reset the |current_microtask| global slot.
6984 DirectHandle<Microtask> current_microtask(
6985 Cast<Microtask>(heap()->current_microtask()), this);
6986 heap()->set_current_microtask(ReadOnlyRoots(this).undefined_value());
6987
6988 if (IsPromiseReactionJobTask(*current_microtask)) {
6989 auto promise_reaction_job_task =
6990 Cast<PromiseReactionJobTask>(current_microtask);
6991 DirectHandle<HeapObject> promise_or_capability(
6992 promise_reaction_job_task->promise_or_capability(), this);
6993 if (IsPromiseCapability(*promise_or_capability)) {
6994 promise_or_capability = direct_handle(
6995 Cast<PromiseCapability>(promise_or_capability)->promise(), this);
6996 }
6997 if (IsJSPromise(*promise_or_capability)) {
6998 OnPromiseAfter(Cast<JSPromise>(promise_or_capability));
6999 }
7000 } else if (IsPromiseResolveThenableJobTask(*current_microtask)) {
7001 auto promise_resolve_thenable_job_task =
7002 Cast<PromiseResolveThenableJobTask>(current_microtask);
7003 DirectHandle<JSPromise> promise_to_resolve(
7004 promise_resolve_thenable_job_task->promise_to_resolve(), this);
7005 OnPromiseAfter(promise_to_resolve);
7006 }
7007
7009}
7010
7012 promise_reject_callback_ = callback;
7013}
7014
7017 v8::PromiseRejectEvent event) {
7018 if (promise_reject_callback_ == nullptr) return;
7019 promise_reject_callback_(v8::PromiseRejectMessage(
7020 v8::Utils::PromiseToLocal(promise), event, v8::Utils::ToLocal(value)));
7021}
7022
7027
7031
7034 // The counter callback
7035 // - may cause the embedder to call into V8, which is not generally possible
7036 // during GC.
7037 // - requires a current native context, which may not always exist.
7038 // TODO(jgruber): Consider either removing the native context requirement in
7039 // blink, or passing it to the callback explicitly.
7040 if (heap_.gc_state() == Heap::NOT_IN_GC && !context().is_null()) {
7041 DCHECK(IsContext(context()));
7042 DCHECK(IsNativeContext(context()->native_context()));
7044 HandleScope handle_scope(this);
7045 for (auto feature : features) {
7046 use_counter_callback_(reinterpret_cast<v8::Isolate*>(this), feature);
7047 }
7048 }
7049 } else {
7051 }
7052}
7053
7055
7056// static
7058 if (const char* filename = v8_flags.trace_turbo_cfg_file) return filename;
7059 std::ostringstream os;
7060 os << "turbo-" << base::OS::GetCurrentProcessId() << "-";
7061 if (isolate != nullptr) {
7062 os << isolate->id();
7063 } else {
7064 os << "any";
7065 }
7066 os << ".cfg";
7067 return os.str();
7068}
7069
7070// Heap::detached_contexts tracks detached contexts as pairs
7071// (the context, number of GC since the context was detached).
7073 HandleScope scope(this);
7074 Handle<WeakArrayList> detached_contexts = factory()->detached_contexts();
7075 detached_contexts = WeakArrayList::AddToEnd(
7076 this, detached_contexts, MaybeObjectDirectHandle::Weak(context),
7077 Smi::zero());
7078 heap()->set_detached_contexts(*detached_contexts);
7079}
7080
7082 HandleScope scope(this);
7083 DirectHandle<WeakArrayList> detached_contexts =
7084 factory()->detached_contexts();
7085 int length = detached_contexts->length();
7086 if (length == 0) return;
7087 int new_length = 0;
7088 for (int i = 0; i < length; i += 2) {
7089 Tagged<MaybeObject> context = detached_contexts->Get(i);
7090 DCHECK(context.IsWeakOrCleared());
7091 if (!context.IsCleared()) {
7092 int mark_sweeps = detached_contexts->Get(i + 1).ToSmi().value();
7093 detached_contexts->Set(new_length, context);
7094 detached_contexts->Set(new_length + 1, Smi::FromInt(mark_sweeps + 1));
7095 new_length += 2;
7096 }
7097 }
7098 detached_contexts->set_length(new_length);
7099 while (new_length < length) {
7100 detached_contexts->Set(new_length, Smi::zero());
7101 ++new_length;
7102 }
7103
7104 if (v8_flags.trace_detached_contexts) {
7105 PrintF("%d detached contexts are collected out of %d\n",
7106 length - new_length, length);
7107 for (int i = 0; i < new_length; i += 2) {
7108 Tagged<MaybeObject> context = detached_contexts->Get(i);
7109 int mark_sweeps = detached_contexts->Get(i + 1).ToSmi().value();
7110 DCHECK(context.IsWeakOrCleared());
7111 if (mark_sweeps > 3) {
7112 PrintF("detached context %p\n survived %d GCs (leak?)\n",
7113 reinterpret_cast<void*>(context.ptr()), mark_sweeps);
7114 }
7115 }
7116 }
7117}
7118
7120 counters()->errors_thrown_per_context()->AddSample(
7121 env->native_context()->GetErrorsThrown());
7122
7123 ReadOnlyRoots roots(this);
7124 DirectHandle<JSGlobalProxy> global_proxy(env->global_proxy(), this);
7125 // NOTE: Turbofan's JSNativeContextSpecialization and Maglev depend on
7126 // DetachGlobal causing a map change.
7127 JSObject::ForceSetPrototype(this, global_proxy, factory()->null_value());
7128 // Detach the global object from the native context by making its map
7129 // contextless (use the global metamap instead of the contextful one).
7130 global_proxy->map()->set_map(this, roots.meta_map());
7131 global_proxy->map()->set_constructor_or_back_pointer(roots.null_value(),
7133 if (v8_flags.track_detached_contexts) AddDetachedContext(env);
7134 DCHECK(global_proxy->IsDetached());
7135
7136 env->native_context()->set_microtask_queue(this, nullptr);
7137}
7138
7139void Isolate::SetIsLoading(bool is_loading) {
7140 if (is_loading) {
7142 } else {
7144 }
7145 if (v8_flags.trace_rail) {
7146 // TODO(crbug.com/373688984): Switch to a trace flag for loading state.
7147 PrintIsolate(this, "RAIL mode: %s\n", is_loading ? "LOAD" : "ANIMATION");
7148 }
7149}
7150
7158
namespace {
// Serializes output from concurrent Isolate::PrintWithTimestamp calls so
// lines from different threads do not interleave.
base::LazyMutex print_with_timestamp_mutex_ = LAZY_MUTEX_INITIALIZER;
}  // namespace
7162
7163void Isolate::PrintWithTimestamp(const char* format, ...) {
7164 base::MutexGuard guard(print_with_timestamp_mutex_.Pointer());
7165 base::OS::Print("[%d:%p:%d] %8.0f ms: ", base::OS::GetCurrentProcessId(),
7166 static_cast<void*>(this), id(), time_millis_since_init());
7167 va_list arguments;
7168 va_start(arguments, format);
7169 base::OS::VPrint(format, arguments);
7170 va_end(arguments);
7171}
7172
7173void Isolate::SetIdle(bool is_idle) {
7174 StateTag state = current_vm_state();
7175 if (js_entry_sp() != kNullAddress) return;
7176 DCHECK(state == EXTERNAL || state == IDLE);
7177 if (is_idle) {
7178 set_current_vm_state(IDLE);
7179 } else if (state == IDLE) {
7180 set_current_vm_state(EXTERNAL);
7181 }
7182}
7183
  // Nothing to collect before the isolate is fully initialized.
  if (!initialized_) return;

  HandleScope scope(this);
  // First gather candidate SharedFunctionInfos into handles; collection
  // happens afterwards, outside the live HeapObjectIterator.
  std::vector<Handle<SharedFunctionInfo>> sfis;
  {
    HeapObjectIterator iterator(heap());
    for (Tagged<HeapObject> obj = iterator.Next(); !obj.is_null();
         obj = iterator.Next()) {
      if (!IsSharedFunctionInfo(obj)) continue;
      // NOTE(review): the line casting |obj| to |sfi| is missing from this
      // view of the file — confirm against upstream.
      // If the script is a Smi, then the SharedFunctionInfo is in
      // the process of being deserialized.
      Tagged<Object> script = sfi->raw_script(kAcquireLoad);
      if (IsSmi(script)) {
        continue;
      }
      if (!sfi->CanCollectSourcePosition(this)) continue;
      sfis.push_back(Handle<SharedFunctionInfo>(sfi, this));
    }
  }
  for (auto sfi : sfis) {
    // NOTE(review): the per-SFI source-position collection call is missing
    // from this view of the file — confirm against upstream.
  }
}
7210
7211#ifdef V8_INTL_SUPPORT
7212
7213namespace {
7214
7215std::string GetStringFromLocales(Isolate* isolate,
7216 DirectHandle<Object> locales) {
7217 if (IsUndefined(*locales, isolate)) return "";
7218 return std::string(Cast<String>(*locales)->ToCString().get());
7219}
7220
7221bool StringEqualsLocales(Isolate* isolate, const std::string& str,
7222 DirectHandle<Object> locales) {
7223 if (IsUndefined(*locales, isolate)) return str.empty();
7224 return Cast<String>(locales)->IsEqualTo(
7225 base::VectorOf(str.c_str(), str.length()));
7226}
7227
7228} // namespace
7229
7230const std::string& Isolate::DefaultLocale() {
7231 if (default_locale_.empty()) {
7232 icu::Locale default_locale;
7233 // Translate ICU's fallback locale to a well-known locale.
7234 if (strcmp(default_locale.getName(), "en_US_POSIX") == 0 ||
7235 strcmp(default_locale.getName(), "c") == 0) {
7236 set_default_locale("en-US");
7237 } else {
7238 // Set the locale
7239 set_default_locale(default_locale.isBogus()
7240 ? "und"
7241 : Intl::ToLanguageTag(default_locale).FromJust());
7242 }
7243 DCHECK(!default_locale_.empty());
7244 }
7245 return default_locale_;
7246}
7247
// Forgets the lazily-computed default locale (so DefaultLocale() recomputes
// it) and drops all locale-dependent cached ICU objects.
void Isolate::ResetDefaultLocale() {
  default_locale_.clear();
  clear_cached_icu_objects();
  // We inline fast paths assuming certain locales. Since this path is rarely
  // taken, we deoptimize everything to keep things simple.
  // NOTE(review): the deoptimization call itself is missing from this view
  // of the file — confirm against upstream.
}
7255
7256icu::UMemory* Isolate::get_cached_icu_object(ICUObjectCacheType cache_type,
7257 DirectHandle<Object> locales) {
7258 const ICUObjectCacheEntry& entry =
7259 icu_object_cache_[static_cast<int>(cache_type)];
7260 return StringEqualsLocales(this, entry.locales, locales) ? entry.obj.get()
7261 : nullptr;
7262}
7263
7264void Isolate::set_icu_object_in_cache(ICUObjectCacheType cache_type,
7265 DirectHandle<Object> locales,
7266 std::shared_ptr<icu::UMemory> obj) {
7267 icu_object_cache_[static_cast<int>(cache_type)] = {
7268 GetStringFromLocales(this, locales), std::move(obj)};
7269}
7270
7271void Isolate::clear_cached_icu_object(ICUObjectCacheType cache_type) {
7272 icu_object_cache_[static_cast<int>(cache_type)] = ICUObjectCacheEntry{};
7273}
7274
7275void Isolate::clear_cached_icu_objects() {
7276 for (int i = 0; i < kICUObjectCacheTypeCount; i++) {
7277 clear_cached_icu_object(static_cast<ICUObjectCacheType>(i));
7278 }
7279}
7280
7281#endif // V8_INTL_SUPPORT
7282
7285 if (V8_UNLIKELY(HasOverflowed())) {
7287 return true;
7288 }
7291 return true;
7292 }
7293 return false;
7294}
7295
7296bool StackLimitCheck::JsHasOverflowed(uintptr_t gap) const {
7297 StackGuard* stack_guard = isolate_->stack_guard();
7298#ifdef USE_SIMULATOR
7299 // The simulator uses a separate JS stack.
7300 Address jssp_address = Simulator::current(isolate_)->get_sp();
7301 uintptr_t jssp = static_cast<uintptr_t>(jssp_address);
7302 if (jssp - gap < stack_guard->real_jslimit()) return true;
7303#endif // USE_SIMULATOR
7304 return GetCurrentStackPosition() - gap < stack_guard->real_climit();
7305}
7306
// Stack-overflow check for WebAssembly, which may run on a secondary stack.
// NOTE(review): the lines computing |sp| and the initial |limit| are missing
// from this view of the file — confirm against upstream.
bool StackLimitCheck::WasmHasOverflowed(uintptr_t gap) const {
  StackGuard* stack_guard = isolate_->stack_guard();
  // sp == 0 indicates execution on the central stack.
  if (sp == 0) {
#ifdef USE_SIMULATOR
    // The simulator uses a separate JS stack.
    // Use it if code is executed on the central stack.
    Address jssp_address = Simulator::current(isolate_)->get_sp();
    uintptr_t jssp = static_cast<uintptr_t>(jssp_address);
    if (jssp - gap < stack_guard->real_jslimit()) return true;
#endif  // USE_SIMULATOR
    // On the central stack, fall back to the regular C stack limit.
    limit = stack_guard->real_climit();
  }
  return sp - gap < limit;
}
7324
7326 if (!isolate->context().is_null()) {
7327 context_ = Handle<Context>(isolate->context(), isolate);
7328 }
7329 if (!isolate->topmost_script_having_context().is_null()) {
7330 topmost_script_having_context_ =
7331 Handle<Context>(isolate->topmost_script_having_context(), isolate);
7332 }
7333}
7334
7342
7344 Tagged<Context> new_context)
7345 : SaveContext(isolate) {
7346 isolate->set_context(new_context);
7347}
7348
7349#ifdef DEBUG
7351 : isolate_(isolate),
7352 context_(isolate->context(), isolate),
7353 topmost_script_having_context_(isolate->topmost_script_having_context(),
7354 isolate) {}
7355
7356namespace {
7357
7358bool Overlapping(const MemoryRange& a, const MemoryRange& b) {
7359 uintptr_t a1 = reinterpret_cast<uintptr_t>(a.start);
7360 uintptr_t a2 = a1 + a.length_in_bytes;
7361 uintptr_t b1 = reinterpret_cast<uintptr_t>(b.start);
7362 uintptr_t b2 = b1 + b.length_in_bytes;
7363 // Either b1 or b2 are in the [a1, a2) range.
7364 return (a1 <= b1 && b1 < a2) || (a1 <= b2 && b2 < a2);
7365}
7366
7367} // anonymous namespace
7368
7369#endif // DEBUG
7370
  // Inserts |range| into the sorted code-pages list using a double-buffer
  // scheme: build the updated list in the currently unused vector, then
  // publish it atomically so readers always see a consistent snapshot.
  // NOTE(review): the function signature line is missing from this view.
  std::vector<MemoryRange>* old_code_pages = GetCodePages();
  DCHECK_NOT_NULL(old_code_pages);
#ifdef DEBUG
  // The new range must not overlap any range already registered.
  auto overlapping = [range](const MemoryRange& a) {
    return Overlapping(range, a);
  };
  DCHECK_EQ(old_code_pages->end(),
            std::find_if(old_code_pages->begin(), old_code_pages->end(),
                         overlapping));
#endif

  // Build into whichever of the two buffers is not currently published.
  std::vector<MemoryRange>* new_code_pages;
  if (old_code_pages == &code_pages_buffer1_) {
    new_code_pages = &code_pages_buffer2_;
  } else {
    new_code_pages = &code_pages_buffer1_;
  }

  // Copy all existing data from the old vector to the new vector and insert the
  // new page. std::merge keeps the list sorted by start address.
  new_code_pages->clear();
  new_code_pages->reserve(old_code_pages->size() + 1);
  std::merge(old_code_pages->begin(), old_code_pages->end(), &range, &range + 1,
             std::back_inserter(*new_code_pages),
             [](const MemoryRange& a, const MemoryRange& b) {
               return a.start < b.start;
             });

  // Atomically switch out the pointer
  SetCodePages(new_code_pages);
}
7404
// |chunk| is either a Page or an executable LargePage.
// Registers the chunk's code area in the code-pages list (arm32 only).
// NOTE(review): the function signature line is missing from this view.
  // We only keep track of individual code pages/allocations if we are on arm32,
  // because on x64 and arm64 we have a code range which makes this unnecessary.
#if defined(V8_TARGET_ARCH_ARM)
  void* new_page_start = reinterpret_cast<void*>(chunk->area_start());
  size_t new_page_size = chunk->area_size();

  MemoryRange new_range{new_page_start, new_page_size};

  AddCodeMemoryRange(new_range);
#endif  // defined(V8_TARGET_ARCH_ARM)
}
7418
// Registers a raw [begin, begin + length_in_bytes) code range.
void Isolate::AddCodeRange(Address begin, size_t length_in_bytes) {
  // NOTE(review): the line opening this call (presumably
  // AddCodeMemoryRange(...) is missing from this view — confirm upstream.
      MemoryRange{reinterpret_cast<void*>(begin), length_in_bytes});
}
7423
7427
7431 i::Tagged<i::Object> id = context->recorder_context_id();
7432 if (IsNullOrUndefined(id)) {
7433 CHECK_LT(last_recorder_context_id_, i::Smi::kMaxValue);
7434 context->set_recorder_context_id(
7435 i::Smi::FromIntptr(++last_recorder_context_id_));
7436 v8::HandleScope handle_scope(reinterpret_cast<v8::Isolate*>(this));
7437 auto result = recorder_context_id_map_.emplace(
7438 std::piecewise_construct,
7439 std::forward_as_tuple(last_recorder_context_id_),
7440 std::forward_as_tuple(reinterpret_cast<v8::Isolate*>(this),
7441 ToApiHandle<v8::Context>(context)));
7442 result.first->second.SetWeak(
7443 reinterpret_cast<void*>(last_recorder_context_id_),
7446 } else {
7447 DCHECK(IsSmi(id));
7449 static_cast<uintptr_t>(i::Smi::ToInt(id)));
7450 }
7451}
7452
7455 auto result = recorder_context_id_map_.find(id.id_);
7456 if (result == recorder_context_id_map_.end() || result->second.IsEmpty())
7457 return MaybeLocal<v8::Context>();
7458 return result->second.Get(reinterpret_cast<v8::Isolate*>(this));
7459}
7460
7467
7472
7474 Isolate* isolate = reinterpret_cast<Isolate*>(data.GetIsolate());
7475 uintptr_t context_id = reinterpret_cast<uintptr_t>(data.GetParameter());
7476 isolate->recorder_context_id_map_.erase(context_id);
7477}
7478
7482
7484 LocalHeap* local_heap = LocalHeap::Current();
7485 if (local_heap) return local_heap;
7487 return main_thread_local_heap();
7488}
7489
// |chunk| is either a Page or an executable LargePage.
// Unregisters the chunk's code area from the code-pages list (arm32 only).
// NOTE(review): the function signature line is missing from this view.
  // We only keep track of individual code pages/allocations if we are on arm32,
  // because on x64 and arm64 we have a code range which makes this unnecessary.
#if defined(V8_TARGET_ARCH_ARM)
  void* removed_page_start = reinterpret_cast<void*>(chunk->area_start());
  std::vector<MemoryRange>* old_code_pages = GetCodePages();
  DCHECK_NOT_NULL(old_code_pages);

  // Build into whichever of the two buffers is not currently published.
  std::vector<MemoryRange>* new_code_pages;
  if (old_code_pages == &code_pages_buffer1_) {
    new_code_pages = &code_pages_buffer2_;
  } else {
    new_code_pages = &code_pages_buffer1_;
  }

  // Copy all existing data from the old vector to the new vector except the
  // removed page.
  new_code_pages->clear();
  new_code_pages->reserve(old_code_pages->size() - 1);
  std::remove_copy_if(old_code_pages->begin(), old_code_pages->end(),
                      std::back_inserter(*new_code_pages),
                      [removed_page_start](const MemoryRange& range) {
                        return range.start == removed_page_start;
                      });
  // Exactly one page must have been removed.
  DCHECK_EQ(old_code_pages->size(), new_code_pages->size() + 1);
  // Atomically switch out the pointer
  SetCodePages(new_code_pages);
#endif  // defined(V8_TARGET_ARCH_ARM)
}
7520
7521#if V8_ENABLE_DRUMBRAKE
// Creates the wasm execution timer. Only valid when wasm execution-time
// histograms (slow histograms) are enabled and jitless mode is off.
void Isolate::initialize_wasm_execution_timer() {
  DCHECK(v8_flags.wasm_enable_exec_time_histograms &&
         v8_flags.slow_histograms && !v8_flags.wasm_jitless);
  wasm_execution_timer_ =
      std::make_unique<wasm::WasmExecutionTimer>(this, false);
}
7528#endif // V8_ENABLE_DRUMBRAKE
7529
7530#undef TRACE_ISOLATE
7531
// static
// Returns a stable address of the per-function load counter, creating the
// process-wide map on first use (allocated once, never freed here).
// NOTE(review): the function signature line is missing from this view.
  DCHECK_NOT_NULL(function_name);
  if (!stack_access_count_map) {
    stack_access_count_map = new MapOfLoadsAndStoresPerFunction{};
  }
  auto& map = *stack_access_count_map;
  std::string name(function_name);
  // It is safe to return the address of std::map values.
  // Only iterators and references to the erased elements are invalidated.
  return reinterpret_cast<Address>(&map[name].first);
}
7544
// static
// Returns a stable address of the per-function store counter, creating the
// process-wide map on first use (allocated once, never freed here).
// NOTE(review): the function signature line is missing from this view.
  DCHECK_NOT_NULL(function_name);
  if (!stack_access_count_map) {
    stack_access_count_map = new MapOfLoadsAndStoresPerFunction{};
  }
  auto& map = *stack_access_count_map;
  std::string name(function_name);
  // It is safe to return the address of std::map values.
  // Only iterators and references to the erased elements are invalidated.
  return reinterpret_cast<Address>(&map[name].second);
}
7557
7559 if (IsEphemeronHashTable(heap()->locals_block_list_cache())) {
7561 Cast<EphemeronHashTable>(heap()->locals_block_list_cache());
7562 cache->Rehash(this);
7563 }
7564}
    DirectHandle<ScopeInfo> scope_info,
    DirectHandle<ScopeInfo> outer_scope_info,
    DirectHandle<StringSet> locals_blocklist) {
  // Caches the locals blocklist (plus optional outer scope info) for
  // |scope_info| in an ephemeron hash table hanging off the heap.
  // NOTE(review): the first signature line, the declarations of |cache| and
  // |value|, and one NewTuple2 argument line are missing from this view of
  // the file — confirm against upstream.
  if (IsEphemeronHashTable(heap()->locals_block_list_cache())) {
    cache = handle(Cast<EphemeronHashTable>(heap()->locals_block_list_cache()),
                   this);
  } else {
    // First use: create the cache table.
    CHECK(IsUndefined(heap()->locals_block_list_cache()));
    constexpr int kInitialCapacity = 8;
    cache = EphemeronHashTable::New(this, kInitialCapacity);
  }
  DCHECK(IsEphemeronHashTable(*cache));

  // With an outer scope, store an (outer_scope_info, blocklist) pair;
  // otherwise store the blocklist directly.
  if (!outer_scope_info.is_null()) {
    value = factory()->NewTuple2(outer_scope_info, locals_blocklist,
  } else {
    value = locals_blocklist;
  }

  CHECK(!value.is_null());
  cache = EphemeronHashTable::Put(cache, scope_info, value);
  heap()->set_locals_block_list_cache(*cache);
}
7592
    DirectHandle<ScopeInfo> scope_info) {
  // Returns the cached locals blocklist for |scope_info|, or the hole value
  // when nothing is cached.
  // NOTE(review): the first signature line and one interior line are missing
  // from this view of the file — confirm against upstream.

  if (!IsEphemeronHashTable(heap()->locals_block_list_cache())) {
    return ReadOnlyRoots(this).the_hole_value();
  }

  Tagged<Object> maybe_value =
      Cast<EphemeronHashTable>(heap()->locals_block_list_cache())
          ->Lookup(scope_info);
  // Entries are either a Tuple2 (outer_scope_info, blocklist) or a bare
  // StringSet blocklist; unwrap the tuple case.
  if (IsTuple2(maybe_value)) return Cast<Tuple2>(maybe_value)->value2();

  CHECK(IsStringSet(maybe_value) || IsTheHole(maybe_value));
  return maybe_value;
}
7609
7610std::list<std::unique_ptr<detail::WaiterQueueNode>>&
7614
7616 v8::Isolate* isolate, v8::Local<v8::Context> context,
7618 WasmAsyncSuccess success) {
7619 MicrotasksScope microtasks_scope(context,
7621
7623 ? resolver->Resolve(context, result)
7624 : resolver->Reject(context, result);
7625 // It's guaranteed that no exceptions will be thrown by these
7626 // operations, but execution might be terminating.
7627 CHECK(ret.IsJust() ? ret.FromJust() : isolate->IsExecutionTerminating());
7628}
7629
7630// Mutex used to ensure that the dispatch table entries for builtins are only
7631// initialized once.
7633
// NOTE(review): the function signature line and the guard taking the
// builtin-dispatch mutex mentioned above are missing from this view of the
// file — confirm against upstream.
#ifdef V8_ENABLE_LEAPTIERING
  // Ideally these entries would be created when the read only heap is
  // initialized. However, since builtins are deserialized later, we need to
  // patch it up here. Also, we need a mutex so the shared read only heaps space
  // is not initialized multiple times. This must be blocking as no isolate
  // should be allowed to proceed until the table is initialized.
  JSDispatchTable* jdt = IsolateGroup::current()->js_dispatch_table();

  // With static dispatch handles, the pre-allocated entries may already have
  // been initialized (e.g. by another isolate); skip the work in that case.
  bool needs_initialization =
      !V8_STATIC_DISPATCH_HANDLES_BOOL ||
      jdt->PreAllocatedEntryNeedsInitialization(
          read_only_heap_->js_dispatch_table_space(),
          builtin_dispatch_handle(JSBuiltinDispatchHandleRoot::Idx::kFirst));

  if (needs_initialization) {
    // Static handles live in a read-only segment; unseal it while writing.
    std::optional<JSDispatchTable::UnsealReadOnlySegmentScope> unseal_scope;
    if (V8_STATIC_DISPATCH_HANDLES_BOOL) {
      unseal_scope.emplace(jdt);
    }
    // Walk every builtin that gets a JS dispatch handle.
    for (JSBuiltinDispatchHandleRoot::Idx idx =
             JSBuiltinDispatchHandleRoot::kFirst;
         idx < JSBuiltinDispatchHandleRoot::kCount;
         idx = static_cast<JSBuiltinDispatchHandleRoot::Idx>(
             static_cast<int>(idx) + 1)) {
      Builtin builtin = JSBuiltinDispatchHandleRoot::to_builtin(idx);
      Tagged<Code> code = builtins_.code(builtin);
      DCHECK(code->entrypoint_tag() == CodeEntrypointTag::kJSEntrypointTag);
      // TODO(olivf, 40931165): It might be more robust to get the static
      // parameter count of this builtin.
      int parameter_count = code->parameter_count();
#if V8_STATIC_DISPATCH_HANDLES_BOOL
      JSDispatchHandle handle = builtin_dispatch_handle(builtin);
      // NOTE(review): the trailing argument(s) of this call are missing from
      // this view of the file — confirm against upstream.
      jdt->InitializePreAllocatedEntry(
          read_only_heap_->js_dispatch_table_space(), handle, code,
#else
      CHECK_LT(idx, JSBuiltinDispatchHandleRoot::kTableSize);
      JSDispatchHandle handle = jdt->AllocateAndInitializeEntry(
          read_only_heap_->js_dispatch_table_space(), parameter_count, code);
      isolate_data_.builtin_dispatch_table()[idx] = handle;
#endif  // V8_STATIC_DISPATCH_HANDLES_BOOL
    }
  }
#endif
}
7682
7683} // namespace internal
7684} // namespace v8
Isolate * isolate_
constexpr int kMinimumOSPageSize
#define BUILTIN_PROMISE_REJECTION_PREDICTION_LIST(V)
int16_t parameter_count
Definition builtins.cc:67
union v8::internal::@341::BuiltinMetadata::KindSpecificData data
Builtins::Kind kind
Definition builtins.cc:40
#define BUILTIN_CODE(isolate, name)
Definition builtins.h:45
#define SBXCHECK_EQ(lhs, rhs)
Definition check.h:62
#define SLOW_DCHECK(condition)
Definition checks.h:21
SourcePosition pos
int GetStackTraceLimit()
Definition api.cc:10164
static Isolate * Allocate()
Definition api.cc:9964
void(*)(std::unique_ptr< CppHeap >) ReleaseCppHeapCallback
Isolate()=delete
static Isolate * TryGetCurrent()
Definition api.cc:9954
bool(*)(Isolate *) AbortOnUncaughtExceptionCallback
Definition v8-isolate.h:756
@ kObjectPrototypeHasElements
Definition v8-isolate.h:553
@ kArrayPrototypeHasElements
Definition v8-isolate.h:552
@ kErrorStackTraceLimit
Definition v8-isolate.h:514
void(*)(Isolate *isolate, UseCounterFeature feature) UseCounterCallback
Definition v8-isolate.h:669
V8_INLINE bool IsJust() const
Definition v8-maybe.h:36
V8_INLINE T FromJust() const &
Definition v8-maybe.h:64
virtual void DumpWithoutCrashing()
std::shared_ptr< v8::TaskRunner > GetForegroundTaskRunner(Isolate *isolate)
static V8_WARN_UNUSED_RESULT MaybeLocal< Resolver > New(Local< Context > context)
Definition api.cc:8640
@ kExposeFramesAcrossSecurityOrigins
Definition v8-debug.h:134
static V8_INLINE Local< String > Empty(Isolate *isolate)
void Reset()
Definition api.cc:2839
void * exception_
bool capture_message_
void ResetInternal()
Definition api.cc:2850
TryCatch * next_
static v8::internal::DirectHandle< To > OpenDirectHandle(v8::Local< From > handle)
Definition api.h:279
bool contains(Address address) const
static constexpr U encode(T value)
Definition bit-field.h:55
static constexpr U kMask
Definition bit-field.h:41
static V8_BASE_EXPORT bool GetFlushDenormals()
Definition fpu.cc:96
static void Abort()
static int GetCurrentProcessId()
static void AdjustSchedulingParams()
V8_INLINE int NextInt() V8_WARN_UNUSED_RESULT
static Processor * New(Platform *platform)
virtual void AsyncEventOccurred(debug::DebugAsyncActionType type, int id, bool is_blackboxed)=0
static Tagged< AccessCheckInfo > Get(Isolate *isolate, DirectHandle< JSObject > receiver)
Definition objects.cc:6594
static ApiAccessorExitFrame * cast(StackFrame *frame)
Definition frames.h:1036
Tagged< Object > holder() const
Definition frames-inl.h:237
Tagged< Name > property_name() const
Definition frames-inl.h:229
DirectHandle< FunctionTemplateInfo > GetFunctionTemplateInfo() const
Definition frames.cc:1304
static ApiCallbackExitFrame * cast(StackFrame *frame)
Definition frames.h:997
Tagged< Object > receiver() const
Definition frames-inl.h:176
static V8_EXPORT_PRIVATE DirectHandle< ArrayList > Add(Isolate *isolate, DirectHandle< ArrayList > array, Tagged< Smi > obj, AllocationType allocation=AllocationType::kYoung)
static DirectHandle< ArrayList > New(IsolateT *isolate, int capacity, AllocationType allocation=AllocationType::kYoung)
static void QuietNaN(Tagged< HeapObject > nan)
Definition assembler.h:381
AssertNoContextChange(Isolate *isolate)
Definition isolate.h:3007
void PatchContext(Tagged< Context > value)
Definition frames.cc:3364
intptr_t GetPCForBytecodeOffset(int lookup_offset) const
Definition frames.cc:3358
static BaselineFrame * cast(StackFrame *frame)
Definition frames.h:1179
static V8_EXPORT_PRIVATE BasicBlockProfiler * Get()
V8_EXPORT_PRIVATE void Log(Isolate *isolate, std::ostream &os)
V8_EXPORT_PRIVATE void ResetCounts(Isolate *isolate)
V8_EXPORT_PRIVATE void Print(Isolate *isolate, std::ostream &os)
void Initialize(bool create_heap_objects)
static BuiltinFrame * cast(StackFrame *frame)
Definition frames.h:1247
V8_EXPORT_PRIVATE Tagged< Code > code(Builtin builtin)
Definition builtins.cc:149
bool is_initialized() const
Definition builtins.h:279
static constexpr int kBuiltinCount
Definition builtins.h:105
static constexpr Builtin kFirst
Definition builtins.h:112
static constexpr bool IsIsolateIndependent(Builtin builtin)
Definition builtins.h:266
static void InitializeIsolateDataTables(Isolate *isolate)
Definition builtins.cc:379
static constexpr bool AllBuiltinsAreIsolateIndependent()
Definition builtins.h:263
static constexpr int kBuiltinTier0Count
Definition builtins.h:108
static constexpr bool kAllBuiltinsAreIsolateIndependent
Definition builtins.h:262
static constexpr Builtin kLast
Definition builtins.h:113
static DirectHandle< String > GetFunctionDebugName(DirectHandle< CallSiteInfo > info)
std::optional< Tagged< Script > > GetScript() const
static bool ComputeLocation(DirectHandle< CallSiteInfo > info, MessageLocation *location)
static int GetSourcePosition(DirectHandle< CallSiteInfo > info)
uint8_t * embedded_blob_code_copy() const
Definition code-range.h:83
static constexpr int kFixedFrameSizeAboveFp
virtual int position() const
Definition frames.cc:1474
virtual FrameSummaries Summarize() const
Definition frames.cc:1508
static CommonFrame * cast(StackFrame *frame)
Definition frames.h:662
WorkerThreadRuntimeCallStats * worker_thread_runtime_call_stats()
Definition counters.h:635
RuntimeCallStats * runtime_call_stats()
Definition counters.h:633
std::optional< Tagged< Object > > OnThrow(DirectHandle< Object > exception) V8_WARN_UNUSED_RESULT
Definition debug.cc:2448
void clear_restart_frame()
Definition debug.h:477
bool is_active() const
Definition debug.h:432
static V8_EXPORT_PRIVATE void DeoptimizeAll(Isolate *isolate)
V8_INLINE bool is_null() const
Definition handles.h:693
Address InstructionStartOf(Builtin builtin) const
static EmbeddedData FromBlob()
virtual int LookupOrAddExternallyCompiledFilename(const char *filename)=0
virtual int GetExternallyCompiledFilenameCount() const =0
virtual void PrepareBuiltinSourcePositionMap(Builtins *builtins)=0
virtual const char * GetExternallyCompiledFilename(int index) const =0
static StackPropertyLookupResult GetErrorStackProperty(Isolate *isolate, DirectHandle< JSReceiver > maybe_error_object)
Definition messages.cc:1132
static MaybeDirectHandle< JSObject > Construct(Isolate *isolate, DirectHandle< JSFunction > target, DirectHandle< Object > new_target, DirectHandle< Object > message, DirectHandle< Object > options)
Definition messages.cc:528
v8::ExceptionContext exception_context() const
Definition vm-state.h:54
void InitIsolateIndependent(MemorySpan< Address > shared_external_references)
Handle< String > NewStringFromAsciiChecked(const char *str, AllocationType allocation=AllocationType::kYoung)
Handle< FixedArray > NewFixedArray(int length, AllocationType allocation=AllocationType::kYoung)
Isolate * isolate() const
Definition factory.h:1281
Handle< Symbol > NewSymbol(AllocationType allocation=AllocationType::kOld)
Definition factory.cc:1234
Handle< Symbol > NewPrivateSymbol(AllocationType allocation=AllocationType::kOld)
Definition factory.cc:1238
Handle< JSObject > NewJSObjectWithNullProto()
Definition factory.cc:3004
DirectHandle< Tuple2 > NewTuple2(DirectHandle< Object > value1, DirectHandle< Object > value2, AllocationType allocation)
Definition factory.cc:396
Handle< StackTraceInfo > NewStackTraceInfo(DirectHandle< FixedArray > frames)
Definition factory.cc:4048
DirectHandle< ErrorStackData > NewErrorStackData(DirectHandle< UnionOf< JSAny, FixedArray > > call_site_infos_or_formatted_stack, DirectHandle< StackTraceInfo > stack_trace)
Definition factory.cc:1549
Handle< String > InternalizeString(base::Vector< const char > str, bool convert_encoding=false)
Definition factory.h:216
Handle< JSObject > NewError(DirectHandle< JSFunction > constructor, DirectHandle< String > message, DirectHandle< Object > options={})
Definition factory.cc:2799
Handle< String > SizeToString(size_t value, bool check_cache=true)
Definition factory.cc:3935
static constexpr int kMaxLength
static HandleType< FixedArray > RightTrimOrEmpty(Isolate *isolate, HandleType< FixedArray > array, int new_length)
static V8_EXPORT_PRIVATE HandleType< FixedArray > SetAndGrow(Isolate *isolate, HandleType< FixedArray > array, int index, DirectHandle< Object > value)
Handle< Object > script() const
bool AreSourcePositionsAvailable() const
Definition frames.cc:2717
Tagged< Object > load() const
Definition slots-inl.h:48
static Tagged< Object > GetTarget(const FunctionCallbackInfo< T > &info)
static void IsolateDeinit(Isolate *isolate)
void UpdateCurrentEventPriority(Priority priority)
void RemoveClient(Isolate *client)
Definition safepoint.cc:344
base::RecursiveMutex clients_mutex_
Definition safepoint.h:218
void AppendClient(Isolate *client)
Definition safepoint.cc:327
Tagged< HeapObject > Next()
Definition heap.cc:6658
static constexpr int kHeaderSize
bool StartSamplingHeapProfiler(uint64_t sample_interval, int stack_depth, v8::HeapProfiler::SamplingFlags)
bool is_tracking_object_moves() const
void SetUpFromReadOnlyHeap(ReadOnlyHeap *ro_heap)
Definition heap.cc:5653
Address code_range_base()
Definition heap-inl.h:181
void StartTearDown()
Definition heap.cc:6067
size_t MaxOldGenerationSize()
Definition heap.h:1235
HeapState gc_state() const
Definition heap.h:521
void SetUp(LocalHeap *main_thread_local_heap)
Definition heap.cc:5545
void NotifyLoadingStarted()
Definition heap.cc:7376
bool has_heap_object_allocation_tracker() const
Definition heap.h:1503
IncrementalMarking * incremental_marking() const
Definition heap.h:1062
CodeRange * code_range()
Definition heap.h:831
void ActivateMemoryReducerIfNeeded()
Definition heap.cc:3778
void NotifyLoadingEnded()
Definition heap.cc:7383
std::unique_ptr< CodeRange > code_range_
Definition heap.h:2295
OldSpace * old_space() const
Definition heap.h:730
V8_EXPORT_PRIVATE void CollectAllGarbage(GCFlags gc_flags, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition heap.cc:1258
HeapProfiler * heap_profiler() const
Definition heap.h:366
V8_EXPORT_PRIVATE double MonotonicallyIncreasingTimeInMs() const
Definition heap.cc:4098
void IncrementDeferredCounts(base::Vector< const v8::Isolate::UseCounterFeature > features)
Definition heap.cc:939
bool HasBeenSetUp() const
Definition heap.cc:451
V8_EXPORT_PRIVATE void DisableInlineAllocation()
Definition heap.cc:5540
IsolateSafepoint * safepoint()
Definition heap.h:579
const base::AddressRegion & code_region()
Definition heap-inl.h:176
void TearDownWithSharedHeap()
Definition heap.cc:6103
ReadOnlySpace * read_only_space() const
Definition heap.h:738
CodeSpace * code_space() const
Definition heap.h:732
void SetUpSpaces(LinearAllocationArea &new_allocation_info, LinearAllocationArea &old_allocation_info)
Definition heap.cc:5769
void ClearKeptObjects()
Definition heap.cc:6948
GCTracer * tracer()
Definition heap.h:800
V8_EXPORT_PRIVATE void SetStackStart()
Definition heap.cc:6050
void SetIsMarkingFlag(bool value)
Definition heap.cc:7190
void NotifyDeserializationComplete()
Definition heap.cc:5961
MaybeDirectHandle< String > Finish()
V8_INLINE void AppendCharacter(uint8_t c)
V8_INLINE void AppendString(std::string_view str)
static V8_INLINE constexpr bool HasHeapObjectTag(Address value)
static const int kIsolateCageBaseOffset
static const int kIsolateApiCallbackThunkArgumentOffset
static const int kNewAllocationInfoOffset
static const int kIsolateFastCCallCallerPcOffset
static const int kIsolateThreadLocalTopOffset
static V8_INLINE Address GetRoot(v8::Isolate *isolate, int index)
static const int kIsolateRootsOffset
static const int kExternalPointerTableSize
static const int kIsolateStackGuardOffset
static const int kIsolateFastCCallCallerFpOffset
static const int kBuiltinTier0TableOffset
static const int kIsolateLongTaskStatsCounterOffset
static const int kIsolateHandleScopeDataOffset
static const int kBuiltinTier0EntryTableOffset
static const int kStackGuardSize
static const int kErrorMessageParamOffset
static const int kExternalPointerTableBasePointerOffset
static const int kBuiltinTier0TableSize
static const int kContinuationPreservedEmbedderDataOffset
static const int kTrustedPointerTableSize
static const int kBuiltinTier0EntryTableSize
static const int kOldAllocationInfoOffset
static const int kIsolateEmbedderDataOffset
static const int kTrustedPointerTableBasePointerOffset
static const int kVariousBooleanFlagsOffset
static InterpretedFrame * cast(StackFrame *frame)
Definition frames.h:1152
void PatchBytecodeOffset(int new_offset)
Definition frames.cc:3335
static Maybe< std::string > ToLanguageTag(const icu::Locale &locale)
Address builtin_tier0_table_[Builtins::kBuiltinTier0Count]
Tagged< Object > continuation_preserved_embedder_data_
HandleScopeData handle_scope_data_
wasm::StackMemory * active_stack()
Address builtin_tier0_entry_table_[Builtins::kBuiltinTier0Count]
void * embedder_data_[Internals::kNumIsolateDataSlots]
ThreadLocalTop thread_local_top_
LinearAllocationArea new_allocation_info_
LinearAllocationArea old_allocation_info_
ExternalReferenceTable * external_reference_table()
void set_active_stack(wasm::StackMemory *stack)
static IsolateGroup * current()
void AddIsolate(Isolate *isolate)
OptimizingCompileTaskExecutor * optimizing_compile_task_executor()
void RemoveIsolate(Isolate *isolate)
Isolate * shared_space_isolate() const
v8::PageAllocator * page_allocator() const
static IsolateGroup * AcquireDefault()
CodeRange * GetCodeRange() const
void SetupReadOnlyHeap(Isolate *isolate, SnapshotData *read_only_snapshot_data, bool can_rehash)
V8_EXPORT_PRIVATE void AssertMainThreadIsOnlyThread()
Definition safepoint.cc:304
base::RecursiveMutex local_heaps_mutex_
Definition safepoint.h:158
void Remove(PerIsolateThreadData *data)
Definition isolate.cc:3942
PerIsolateThreadData * Lookup(ThreadId thread_id)
Definition isolate.cc:3930
void Insert(PerIsolateThreadData *data)
Definition isolate.cc:3937
PerIsolateThreadData * FindPerThreadDataForThread(ThreadId thread_id)
Definition isolate.cc:573
void set_topmost_script_having_context(Tagged< Context > context)
Definition isolate-inl.h:58
Address continuation_preserved_embedder_data_address()
Definition isolate.h:2279
Isolate(const Isolate &)=delete
double time_millis_at_init_
Definition isolate.h:2679
int stack_trace_for_uncaught_exceptions_frame_limit_
Definition isolate.h:2572
std::vector< CallCompletedCallback > call_completed_callbacks_
Definition isolate.h:2765
V8_INLINE Address * builtin_table()
Definition isolate.h:1305
V8_NOINLINE void PushParamsAndDie(void *ptr1=nullptr, void *ptr2=nullptr, void *ptr3=nullptr, void *ptr4=nullptr, void *ptr5=nullptr, void *ptr6=nullptr)
Definition isolate.cc:699
static void InitializeOncePerProcess()
Definition isolate.cc:583
int GetExternallyCompiledFilenameCount() const
Definition isolate.cc:6773
std::vector< Tagged< Object > > shared_heap_object_cache_
Definition isolate.h:2787
bool initialized_from_snapshot_
Definition isolate.h:2656
v8::TryCatch * try_catch_handler()
Definition isolate.h:850
void UpdateLogObjectRelocation()
Definition isolate.cc:4383
std::shared_ptr< CompilationStatistics > turbo_statistics_
Definition isolate.h:2769
base::Mutex managed_ptr_destructors_mutex_
Definition isolate.h:2833
std::shared_ptr< v8::TaskRunner > task_runner_
Definition isolate.h:2813
ReadOnlyHeap * read_only_heap() const
Definition isolate.h:1201
bool IsWasmImportedStringsEnabled(DirectHandle< NativeContext > context)
Definition isolate.cc:3698
std::queue< InterruptEntry > api_interrupts_queue_
Definition isolate.h:2710
bool HasIsolatePromiseHooks() const
Definition isolate.h:2513
MaybeDirectHandle< Script > CurrentReferrerScript()
Definition isolate.cc:1695
const uint8_t * embedded_blob_data() const
Definition isolate.cc:387
void SetHostCreateShadowRealmContextCallback(HostCreateShadowRealmContextCallback callback)
Definition isolate.cc:6710
uint32_t embedded_blob_data_size() const
Definition isolate.cc:390
void SetTerminationOnExternalTryCatch()
Definition isolate.cc:4768
void OnTerminationDuringRunMicrotasks()
Definition isolate.cc:6967
void CollectSourcePositionsForAllBytecodeArrays()
Definition isolate.cc:7184
AccountingAllocator * allocator()
Definition isolate.h:1979
void InitializeIsShortBuiltinCallsEnabled()
Definition isolate.cc:5150
base::RandomNumberGenerator * random_number_generator_
Definition isolate.h:2603
std::list< std::unique_ptr< detail::WaiterQueueNode > > async_waiter_queue_nodes_
Definition isolate.h:2882
uint32_t embedded_blob_data_size_
Definition isolate.h:2807
void OnPromiseAfter(DirectHandle< JSPromise > promise)
Definition isolate.cc:6949
void CreateAndSetEmbeddedBlob()
Definition isolate.cc:5117
void SetHostImportModuleDynamicallyCallback(HostImportModuleDynamicallyCallback callback)
Definition isolate.cc:6681
void PrintStack(StringStream *accumulator, PrintStackMode mode=kPrintStackVerbose)
Definition isolate.cc:1757
static void SetCurrent(Isolate *isolate)
Definition isolate.cc:528
bool IsLoggingCodeCreation() const
Definition isolate.cc:6145
MaybeDirectHandle< NativeContext > RunHostCreateShadowRealmContextCallback()
Definition isolate.cc:6716
Tagged< Object > ThrowIllegalOperation()
Definition isolate.cc:2855
void SetEmbeddedBlob(const uint8_t *code, uint32_t code_size, const uint8_t *data, uint32_t data_size)
Definition isolate.cc:324
bigint::Processor * bigint_processor_
Definition isolate.h:2584
void SetPromiseHook(PromiseHook hook)
Definition isolate.cc:6856
bool OwnsStringTables() const
Definition isolate.h:2321
static const uint8_t * CurrentEmbeddedBlobCode()
Definition isolate.cc:395
V8FileLogger * v8_file_logger_
Definition isolate.h:2564
std::atomic< uint32_t > next_unique_sfi_id_
Definition isolate.h:2757
void InitializeNextUniqueSfiId(uint32_t id)
Definition isolate.h:2751
std::atomic< v8::Isolate::Priority > priority_
Definition isolate.h:2663
void RunPromiseHook(PromiseHookType type, DirectHandle< JSPromise > promise, DirectHandle< Object > parent)
Definition isolate.cc:6874
MaybeDirectHandle< JSPromise > RunHostImportModuleDynamicallyCallback(MaybeDirectHandle< Script > maybe_referrer, Handle< Object > specifier, ModuleImportPhase phase, MaybeDirectHandle< Object > maybe_import_options_argument)
Definition isolate.cc:6503
static void IterateRegistersAndStackOfSimulator(::heap::base::StackVisitor *visitor)
Definition isolate.cc:3815
bool ComputeLocation(MessageLocation *target)
Definition isolate.cc:2899
std::unique_ptr< LazyCompileDispatcher > lazy_compile_dispatcher_
Definition isolate.h:2701
bool is_shared_space_isolate() const
Definition isolate.h:2292
bool IsBuiltinTableHandleLocation(Address *handle_location)
Definition isolate.cc:3781
static V8_INLINE Isolate * Current()
Definition isolate-inl.h:35
RegExpStack * regexp_stack_
Definition isolate.h:2597
static Address c_entry_fp(ThreadLocalTop *thread)
Definition isolate.h:889
std::unique_ptr< PersistentHandlesList > persistent_handles_list_
Definition isolate.h:2736
Address code_cage_base() const
Definition isolate.h:1222
Counters * counters()
Definition isolate.h:1180
void RequestInterrupt(InterruptCallback callback, void *data)
Definition isolate.cc:1960
void UpdateTypedArraySpeciesLookupChainProtectorOnSetPrototype(DirectHandle< JSObject > object)
Definition isolate.cc:6269
static uint32_t CurrentEmbeddedBlobCodeSize()
Definition isolate.cc:400
DateCache * date_cache() const
Definition isolate.h:1619
uint32_t embedded_blob_code_size() const
Definition isolate.cc:384
CancelableTaskManager * cancelable_task_manager_
Definition isolate.h:2817
void NotifyExceptionPropagationCallback()
Definition isolate.cc:4815
void set_thread_id(ThreadId id)
Definition isolate.h:818
StubCache * define_own_stub_cache_
Definition isolate.h:2567
const IsolateData * isolate_data() const
Definition isolate.h:1207
void SetPromiseRejectCallback(PromiseRejectCallback callback)
Definition isolate.cc:7011
void SetHostImportModuleWithPhaseDynamicallyCallback(HostImportModuleWithPhaseDynamicallyCallback callback)
Definition isolate.cc:6686
bool ComputeLocationFromDetailedStackTrace(MessageLocation *target, DirectHandle< Object > exception)
Definition isolate.cc:2972
void OnStackTraceCaptured(DirectHandle< StackTraceInfo > stack_trace)
Definition isolate.cc:6960
void CountUsage(v8::Isolate::UseCounterFeature feature)
Definition isolate.cc:7028
int GenerateIdentityHash(uint32_t mask)
Definition isolate.cc:6343
const v8::Context::BackupIncumbentScope * top_backup_incumbent_scope() const
Definition isolate.h:2150
Tagged< Object > ReThrow(Tagged< Object > exception)
Definition isolate.cc:2200
DateCache * date_cache_
Definition isolate.h:2602
bool HasPrepareStackTraceCallback() const
Definition isolate.cc:6801
SetupIsolateDelegate * setup_delegate_
Definition isolate.h:2587
TieringManager * tiering_manager_
Definition isolate.h:2554
HandleScopeImplementer * handle_scope_implementer_
Definition isolate.h:2576
std::vector< MemoryRange > * GetCodePages() const
Definition isolate.cc:3073
void UpdateProtectorsOnSetPrototype(DirectHandle< JSObject > object, DirectHandle< Object > new_prototype)
Definition isolate.cc:6252
void LocalsBlockListCacheRehash()
Definition isolate.cc:7558
size_t last_long_task_stats_counter_
Definition isolate.h:2778
void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback)
Definition isolate.cc:1776
Logger * logger() const
Definition isolate.h:1508
void ReportExceptionPropertyCallback(DirectHandle< JSReceiver > holder, DirectHandle< Name > name, v8::ExceptionContext callback_kind)
Definition isolate.cc:5006
void DiscardPerThreadDataForThisThread()
Definition isolate.cc:554
const uint8_t * embedded_blob_data_
Definition isolate.h:2806
LocalIsolate * main_thread_local_isolate()
Definition isolate.h:2183
Bootstrapper * bootstrapper()
Definition isolate.h:1178
StubCache * load_stub_cache_
Definition isolate.h:2565
HostCreateShadowRealmContextCallback host_create_shadow_realm_context_callback_
Definition isolate.h:2630
MaybeLocal< v8::Context > GetContextFromRecorderContextId(v8::metrics::Recorder::ContextId id)
Definition isolate.cc:7453
compiler::PerIsolateCompilerCache * compiler_cache_
Definition isolate.h:2696
std::optional< Isolate * > shared_space_isolate_
Definition isolate.h:2861
bool IsInCreationContext(Tagged< JSObject > object, uint32_t index)
Definition isolate.cc:6228
DirectHandle< JSMessageObject > CreateMessageOrAbort(DirectHandle< Object > exception, MessageLocation *location)
Definition isolate.cc:2056
std::shared_ptr< CompilationStatistics > GetTurboStatistics()
Definition isolate.cc:6106
V8_NOINLINE void PushStackTraceAndContinue(void *ptr1=nullptr, void *ptr2=nullptr, void *ptr3=nullptr, void *ptr4=nullptr, void *ptr5=nullptr, void *ptr6=nullptr)
Definition isolate.cc:708
bool Init(SnapshotData *startup_snapshot_data, SnapshotData *read_only_snapshot_data, SnapshotData *shared_heap_snapshot_data, bool can_rehash)
Definition isolate.cc:5431
void set_pending_message(Tagged< Object > message_obj)
Definition isolate-inl.h:87
void RequestInvalidateNoProfilingProtector()
Definition isolate.cc:1983
void InstallConditionalFeatures(DirectHandle< NativeContext > context)
Definition isolate.cc:3628
LocalHeap * CurrentLocalHeap()
Definition isolate.cc:7483
GlobalSafepoint * global_safepoint() const
Definition isolate.h:2305
uint32_t embedded_blob_code_size_
Definition isolate.h:2805
Handle< JSMessageObject > CreateMessage(DirectHandle< Object > exception, MessageLocation *location)
Definition isolate.cc:2988
std::list< std::unique_ptr< detail::WaiterQueueNode > > & async_waiter_queue_nodes()
Definition isolate.cc:7611
Tagged< Context > context() const
Definition isolate.h:800
Handle< StackTraceInfo > GetDetailedStackTrace(DirectHandle< JSReceiver > error_object)
Definition isolate.cc:1513
v8::metrics::LongTaskStats * GetCurrentLongTaskStats()
Definition isolate.cc:7468
base::Mutex thread_data_table_mutex_
Definition isolate.h:2857
OptimizingCompileDispatcher * optimizing_compile_dispatcher()
Definition isolate.h:1715
PrepareStackTraceCallback prepare_stack_trace_callback_
Definition isolate.h:2844
bool is_short_builtin_calls_enabled_
Definition isolate.h:2659
void set_deoptimizer_lazy_throw(bool value)
Definition isolate.h:1327
int LookupOrAddExternallyCompiledFilename(const char *filename)
Definition isolate.cc:6758
bool PropagateExceptionToExternalTryCatch(ExceptionHandlerType top_handler)
Definition isolate.cc:4777
void UnregisterTryCatchHandler(v8::TryCatch *that)
Definition isolate.cc:659
void RemoveCallCompletedCallback(CallCompletedCallback callback)
Definition isolate.cc:6417
v8::metrics::Recorder::ContextId GetOrRegisterRecorderContextId(DirectHandle< NativeContext > context)
Definition isolate.cc:7428
void SetReleaseCppHeapCallback(v8::Isolate::ReleaseCppHeapCallback callback)
Definition isolate.cc:6845
void set_context(Tagged< Context > context)
Definition isolate-inl.h:43
MaybeHandle< JSObject > RunHostInitializeImportMetaObjectCallback(DirectHandle< SourceTextModule > module)
Definition isolate.cc:6691
Tagged< Object > Throw(Tagged< Object > exception, MessageLocation *location=nullptr)
Definition isolate.cc:2091
void set_date_cache(DateCache *date_cache)
Definition isolate.cc:6207
uint32_t current_async_task_id_
Definition isolate.h:2823
CompilationCache * compilation_cache_
Definition isolate.h:2555
void CancelTerminateExecution()
Definition isolate.cc:1954
uint64_t stress_deopt_count_
Definition isolate.h:2739
bool InitWithSnapshot(SnapshotData *startup_snapshot_data, SnapshotData *read_only_snapshot_data, SnapshotData *shared_heap_snapshot_data, bool can_rehash)
Definition isolate.cc:5230
InnerPointerToCodeCache * inner_pointer_to_code_cache_
Definition isolate.h:2579
Address get_address_from_id(IsolateAddressId id)
Definition isolate.cc:585
bool NeedsDetailedOptimizedCodeLineInfo() const
Definition isolate.cc:6141
DirectHandle< String > CurrentScriptNameOrSourceURL()
Definition isolate.cc:1688
std::unordered_set< int32_t * > active_dynamic_regexp_result_vectors_
Definition isolate.h:2601
std::vector< MemoryRange > code_pages_buffer1_
Definition isolate.h:2894
static Address load_from_stack_count_address(const char *function_name)
Definition isolate.cc:7533
V8_WARN_UNUSED_RESULT MaybeDirectHandle< Object > ReportFailedAccessCheck(DirectHandle< JSObject > receiver)
Definition isolate.cc:1781
bool IsSharedArrayBufferConstructorEnabled(DirectHandle< NativeContext > context)
Definition isolate.cc:3644
void MaybeRemapEmbeddedBuiltinsIntoCodeRange()
Definition isolate.cc:5180
static Isolate * New()
Definition isolate.cc:4093
TracedHandles traced_handles_
Definition isolate.h:2581
SharedStructTypeRegistry * shared_struct_type_registry() const
Definition isolate.h:791
void DetachGlobal(DirectHandle< Context > env)
Definition isolate.cc:7119
Tagged< NativeContext > raw_native_context()
Definition isolate-inl.h:53
PerIsolateThreadData * FindPerThreadDataForThisThread()
Definition isolate.cc:568
ManagedPtrDestructor * managed_ptr_destructors_head_
Definition isolate.h:2834
void UpdateNoElementsProtectorOnSetPrototype(DirectHandle< JSObject > object)
Definition isolate.h:1665
std::pair< InterruptCallback, void * > InterruptEntry
Definition isolate.h:2709
void SetFeedbackVectorsForProfilingTools(Tagged< Object > value)
Definition isolate.cc:6169
bool ComputeLocationFromException(MessageLocation *target, DirectHandle< Object > exception)
Definition isolate.cc:2929
void IncreaseConcurrentOptimizationPriority(CodeKind kind, Tagged< SharedFunctionInfo > function)
Definition isolate.cc:6087
static const uint8_t * CurrentEmbeddedBlobData()
Definition isolate.cc:405
bool is_profiling() const
Definition isolate.h:1476
IsolateData isolate_data_
Definition isolate.h:2534
void InvokeApiInterruptCallbacks()
Definition isolate.cc:1966
void PrintCurrentStackTrace(std::ostream &out, PrintCurrentStackTraceFilterCallback should_include_frame_callback=nullptr)
Definition isolate.cc:2860
void AddDetachedContext(DirectHandle< Context > context)
Definition isolate.cc:7072
void ReportExceptionFunctionCallback(DirectHandle< JSReceiver > receiver, DirectHandle< FunctionTemplateInfo > function, v8::ExceptionContext callback_kind)
Definition isolate.cc:4963
Tagged< Object > exception()
base::RandomNumberGenerator * fuzzer_rng_
Definition isolate.h:2604
bool GetStackTraceLimit(Isolate *isolate, int *result)
Definition isolate.cc:1706
void SetUseCounterCallback(v8::Isolate::UseCounterCallback callback)
Definition isolate.cc:7023
void AddCodeMemoryChunk(MutablePageMetadata *chunk)
Definition isolate.cc:7406
std::unique_ptr< StringTable > string_table_
Definition isolate.h:2544
void SetCaptureStackTraceForUncaughtExceptions(bool capture, int frame_limit, StackTrace::StackTraceOptions options)
Definition isolate.cc:3612
const uint8_t * embedded_blob_code() const
Definition isolate.cc:381
void UpdateTypedArrayLengthLookupChainProtectorOnSetPrototype(DirectHandle< JSObject > object)
Definition isolate.cc:6261
bool WalkCallStackAndPromiseTree(MaybeDirectHandle< JSPromise > rejected_promise, const std::function< void(PromiseHandler)> &callback)
Definition isolate.cc:3487
Handle< JSMessageObject > CreateMessageFromException(DirectHandle< Object > exception)
Definition isolate.cc:3020
Handle< FixedArray > GetSimpleStackTrace(DirectHandle< JSReceiver > error_object)
Definition isolate.cc:1521
ThreadManager * thread_manager_
Definition isolate.h:2583
void AbortConcurrentOptimization(BlockingBehavior blocking_behavior)
Definition isolate.cc:6093
void IterateThread(ThreadVisitor *v, char *t)
Definition isolate.cc:600
void UpdatePromiseHookProtector()
Definition isolate.cc:6464
std::unique_ptr< LocalIsolate > main_thread_local_isolate_
Definition isolate.h:2825
static Address store_to_stack_count_address(const char *function_name)
Definition isolate.cc:7546
bool ComputeLocationFromSimpleStackTrace(MessageLocation *target, DirectHandle< Object > exception)
Definition isolate.cc:2955
void UnregisterManagedPtrDestructor(ManagedPtrDestructor *finalizer)
Definition isolate.cc:3801
bool is_catchable_by_javascript(Tagged< Object > exception)
void AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback)
Definition isolate.cc:6395
bool HasContextPromiseHooks() const
Definition isolate.h:1835
static void Delete(Isolate *isolate)
Definition isolate.cc:4115
V8_INLINE HandleScopeData * handle_scope_data()
Definition isolate.h:1393
DirectHandle< String > StackTraceString()
Definition isolate.cc:665
Address js_entry_sp()
Definition isolate.h:931
Address isolate_addresses_[kIsolateAddressCount+1]
Definition isolate.h:2552
char * RestoreThread(char *from)
Definition isolate.cc:3758
void FreeThreadResources()
Definition isolate.h:948
void InitializeLoggingAndCounters()
Definition isolate.cc:5048
Tagged< Object > UnwindAndFindHandler()
Definition isolate.cc:2237
ThreadId thread_id() const
Definition isolate.h:821
bool has_shared_space() const
Definition isolate.h:2303
ExceptionHandlerType TopExceptionHandlerType(Tagged< Object > exception)
Definition isolate.cc:3039
static bool CurrentEmbeddedBlobIsBinaryEmbedded()
Definition isolate.cc:312
char * ArchiveThread(char *to)
Definition isolate.cc:3752
V8_NOINLINE void PushStackTraceAndDie(void *ptr1=nullptr, void *ptr2=nullptr, void *ptr3=nullptr, void *ptr4=nullptr, void *ptr5=nullptr, void *ptr6=nullptr)
Definition isolate.cc:690
std::unordered_map< uintptr_t, v8::Global< v8::Context > > recorder_context_id_map_
Definition isolate.h:2776
void MaybeInitializeVectorListFromHeap()
Definition isolate.cc:6174
Deoptimizer * current_deoptimizer_
Definition isolate.h:2568
BuiltinsConstantsTableBuilder * builtins_constants_table_builder_
Definition isolate.h:2791
GlobalHandles * global_handles_
Definition isolate.h:2580
void DumpAndResetBuiltinsProfileData()
Definition isolate.cc:6065
void RunAllPromiseHooks(PromiseHookType type, DirectHandle< JSPromise > promise, DirectHandle< Object > parent)
Definition isolate.cc:6861
void SetIdle(bool is_idle)
Definition isolate.cc:7173
bool IsWasmJSPIEnabled(DirectHandle< NativeContext > context)
Definition isolate.cc:3689
unsigned next_module_async_evaluation_ordinal_
Definition isolate.h:2759
OptimizingCompileDispatcher * optimizing_compile_dispatcher_
Definition isolate.h:2734
void RegisterManagedPtrDestructor(ManagedPtrDestructor *finalizer)
Definition isolate.cc:3790
static std::string GetTurboCfgFileName(Isolate *isolate)
Definition isolate.cc:7057
bool AllowsCodeCompaction() const
Definition isolate.cc:6151
HostImportModuleWithPhaseDynamicallyCallback host_import_module_with_phase_dynamically_callback_
Definition isolate.h:2610
StringTable * string_table() const
Definition isolate.h:781
Handle< NativeContext > native_context()
Definition isolate-inl.h:48
PerIsolateThreadData * FindOrAllocatePerThreadDataForThisThread()
Definition isolate.cc:536
bool HasAsyncEventDelegate() const
Definition isolate.h:2518
static base::AddressRegion GetShortBuiltinsCallRegion()
Definition isolate.cc:415
HostInitializeImportMetaObjectCallback host_initialize_import_meta_object_callback_
Definition isolate.h:2628
void RemoveCodeMemoryChunk(MutablePageMetadata *chunk)
Definition isolate.cc:7491
void RunReleaseCppHeapCallback(std::unique_ptr< v8::CppHeap > cpp_heap)
Definition isolate.cc:6850
void FireCallCompletedCallbackInternal(MicrotaskQueue *microtask_queue)
Definition isolate.cc:6424
std::unique_ptr< SharedStructTypeRegistry > shared_struct_type_registry_
Definition isolate.h:2866
size_t HashIsolateForEmbeddedBlob()
Definition isolate.cc:447
bool detailed_source_positions_for_profiling() const
Definition isolate.h:1160
static void SetIsolateThreadLocals(Isolate *isolate, PerIsolateThreadData *data)
Definition isolate.cc:4646
debug::AsyncEventDelegate * async_event_delegate_
Definition isolate.h:2821
Address GetAbstractPC(int *line, int *column)
Definition isolate.cc:1539
ThreadDataTable thread_data_table_
Definition isolate.h:2858
DirectHandle< StackTraceInfo > CaptureDetailedStackTrace(int limit, StackTrace::StackTraceOptions options)
Definition isolate.cc:1613
Tagged< Object > ThrowAt(DirectHandle< JSObject > exception, MessageLocation *location)
Definition isolate.cc:1925
std::unique_ptr< StringForwardingTable > string_forwarding_table_
Definition isolate.h:2545
StackGuard * stack_guard()
Definition isolate.h:1198
HostImportModuleDynamicallyCallback host_import_module_dynamically_callback_
Definition isolate.h:2607
Tagged< Object > TerminateExecution()
Definition isolate.cc:1950
bool is_catchable_by_wasm(Tagged< Object > exception)
void OnAsyncFunctionSuspended(DirectHandle< JSPromise > promise, DirectHandle< JSPromise > parent)
Definition isolate.cc:6883
DirectHandle< NativeContext > GetIncumbentContextSlow()
Definition isolate.cc:3713
v8::Isolate::AbortOnUncaughtExceptionCallback abort_on_uncaught_exception_callback_
Definition isolate.h:2828
IsolateGroup * isolate_group() const
Definition isolate.h:1230
IsolateGroup * isolate_group_
Definition isolate.h:2539
bool IsWasmStringRefEnabled(DirectHandle< NativeContext > context)
Definition isolate.cc:3655
CancelableTaskManager * cancelable_task_manager()
Definition isolate.h:1960
void CheckDetachedContextsAfterGC()
Definition isolate.cc:7081
MaybeDirectHandle< JSObject > CaptureAndSetErrorStack(DirectHandle< JSObject > error_object, FrameSkipMode mode, Handle< Object > caller)
Definition isolate.cc:1440
bool IsWasmJSPIRequested(DirectHandle< NativeContext > context)
Definition isolate.cc:3672
CodeTracer * GetCodeTracer()
Definition isolate.cc:6124
void IncreaseTotalRegexpCodeGenerated(DirectHandle< HeapObject > code)
Definition isolate.cc:6135
base::RandomNumberGenerator * random_number_generator()
Definition isolate.cc:6324
StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_
Definition isolate.h:2573
StubCache * store_stub_cache_
Definition isolate.h:2566
const uint8_t * embedded_blob_code_
Definition isolate.h:2804
void AddCallCompletedCallback(CallCompletedCallback callback)
Definition isolate.cc:6410
KnownPrototype IsArrayOrObjectOrStringPrototype(Tagged< JSObject > object)
Definition isolate.cc:6214
StringStream * incomplete_message_
Definition isolate.h:2551
void LocalsBlockListCacheSet(DirectHandle< ScopeInfo > scope_info, DirectHandle< ScopeInfo > outer_scope_info, DirectHandle< StringSet > locals_blocklist)
Definition isolate.cc:7565
std::unique_ptr< TracingCpuProfilerImpl > tracing_cpu_profiler_
Definition isolate.h:2840
Builtins * builtins()
Definition isolate.h:1443
interpreter::Interpreter * interpreter_
Definition isolate.h:2694
V8FileLogger * v8_file_logger() const
Definition isolate.h:1192
void UpdateNoElementsProtectorOnSetElement(DirectHandle< JSObject > object)
Definition isolate.cc:6237
Tagged< Object > pending_message()
Definition isolate-inl.h:92
v8::internal::Factory * factory()
Definition isolate.h:1527
void set_exception(Tagged< Object > exception_obj)
void PromiseHookStateUpdated()
Definition isolate.cc:6471
OptimizingCompileDispatcher * SetOptimizingCompileDispatcherForTesting(OptimizingCompileDispatcher *dispatcher)
Definition isolate.cc:5982
DescriptorLookupCache * descriptor_lookup_cache_
Definition isolate.h:2575
void InitializeDefaultEmbeddedBlob()
Definition isolate.cc:5092
void RemoveBeforeCallEnteredCallback(BeforeCallEnteredCallback callback)
Definition isolate.cc:6402
void Iterate(RootVisitor *v)
Definition isolate.cc:650
bool is_precise_count_code_coverage() const
Definition isolate.h:1578
v8::Isolate::ReleaseCppHeapCallback release_cpp_heap_callback_
Definition isolate.h:2605
Tagged< Context > topmost_script_having_context() const
Definition isolate.h:808
void SetUpFromReadOnlyArtifacts(ReadOnlyArtifacts *artifacts)
Definition isolate.cc:4146
double time_millis_since_init() const
Definition isolate.h:1615
void OnPromiseThen(DirectHandle< JSPromise > promise)
Definition isolate.cc:6897
Handle< JSGlobalProxy > global_proxy()
void AddCodeRange(Address begin, size_t length_in_bytes)
Definition isolate.cc:7419
bool detailed_source_positions_for_profiling_
Definition isolate.h:2731
static Address handler(ThreadLocalTop *thread)
Definition isolate.h:892
bool get_capture_stack_trace_for_uncaught_exceptions() const
Definition isolate.cc:3619
std::unique_ptr< GlobalSafepoint > global_safepoint_
Definition isolate.h:2886
static Isolate * Allocate(IsolateGroup *isolate_group)
Definition isolate.cc:4099
PromiseHook promise_hook_
Definition isolate.h:2606
static uint32_t CurrentEmbeddedBlobDataSize()
Definition isolate.cc:410
static V8_INLINE PerIsolateThreadData * CurrentPerIsolateThreadData()
Definition isolate-inl.h:30
bool is_short_builtin_calls_enabled() const
Definition isolate.h:1926
void SetExceptionPropagationCallback(ExceptionPropagationCallback callback)
Definition isolate.cc:5037
Debug * debug() const
Definition isolate.h:1474
void InitializeBuiltinJSDispatchTable()
Definition isolate.cc:7634
Address cage_base() const
Definition isolate.h:1213
Isolate * shared_space_isolate() const
Definition isolate.h:2295
EmbeddedFileWriterInterface * embedded_file_writer_
Definition isolate.h:2842
void SetIsLoading(bool is_loading)
Definition isolate.cc:7139
std::vector< BeforeCallEnteredCallback > before_call_entered_callbacks_
Definition isolate.h:2762
bool concurrent_recompilation_enabled()
Definition isolate.h:1705
void ReportPendingMessages(bool report=true)
Definition isolate.cc:3081
LocalHeap * main_thread_local_heap()
Definition isolate.cc:7479
MaterializedObjectStore * materialized_object_store_
Definition isolate.h:2570
void SetPrepareStackTraceCallback(PrepareStackTraceCallback callback)
Definition isolate.cc:6797
void clear_topmost_script_having_context()
Definition isolate-inl.h:63
Handle< Object > root_handle(RootIndex index)
Definition isolate.h:1269
MaybeDirectHandle< Object > RunPrepareStackTraceCallback(DirectHandle< NativeContext >, DirectHandle< JSObject > Error, DirectHandle< JSArray > sites)
Definition isolate.cc:6738
void SetCodePages(std::vector< MemoryRange > *new_code_pages)
Definition isolate.cc:3077
v8::PageAllocator * page_allocator() const
Definition isolate.cc:4156
void UpdateNumberStringNotRegexpLikeProtectorOnSetPrototype(DirectHandle< JSObject > object)
Definition isolate.cc:6279
std::vector< Tagged< Object > > startup_object_cache_
Definition isolate.h:2781
ReadOnlyHeap * read_only_heap_
Definition isolate.h:2541
V8_NOINLINE void PushParamsAndContinue(void *ptr1=nullptr, void *ptr2=nullptr, void *ptr3=nullptr, void *ptr4=nullptr, void *ptr5=nullptr, void *ptr6=nullptr)
Definition isolate.cc:717
uint32_t promise_hook_flags_
Definition isolate.h:2822
void SetPriority(v8::Isolate::Priority priority)
Definition isolate.cc:7151
std::shared_ptr< Counters > async_counters_
Definition isolate.h:2556
DirectHandle< Symbol > SymbolFor(RootIndex dictionary_index, Handle< String > name, bool private_symbol)
Definition isolate.cc:6360
const char * GetExternallyCompiledFilename(int index) const
Definition isolate.cc:6766
void PrepareBuiltinSourcePositionMap()
Definition isolate.cc:6780
CatchType PredictExceptionCatcher()
Definition isolate.cc:2839
Tagged< Object > LocalsBlockListCacheGet(DirectHandle< ScopeInfo > scope_info)
Definition isolate.cc:7593
v8::metrics::LongTaskStats long_task_stats_
Definition isolate.h:2779
void SetAddCrashKeyCallback(AddCrashKeyCallback callback)
Definition isolate.cc:6838
AccountingAllocator * allocator_
Definition isolate.h:2578
Tagged< Object > StackOverflow()
Definition isolate.cc:1862
RootsTable & roots_table()
Definition isolate.h:1250
ThreadLocalTop * thread_local_top()
Definition isolate.h:1331
AddCrashKeyCallback add_crash_key_callback_
Definition isolate.h:2915
base::Mutex code_pages_mutex_
Definition isolate.h:2897
void AddCrashKeysForIsolateAndHeapPointers()
Definition isolate.cc:5249
bool capture_stack_trace_for_uncaught_exceptions_
Definition isolate.h:2571
uintptr_t last_recorder_context_id_
Definition isolate.h:2774
static void RemoveContextIdCallback(const v8::WeakCallbackInfo< void > &data)
Definition isolate.cc:7473
void AddCodeMemoryRange(MemoryRange range)
Definition isolate.cc:7371
void UpdateStringWrapperToPrimitiveProtectorOnSetPrototype(DirectHandle< JSObject > object, DirectHandle< Object > new_prototype)
Definition isolate.cc:6297
void ReportPromiseReject(DirectHandle< JSPromise > promise, DirectHandle< Object > value, v8::PromiseRejectEvent event)
Definition isolate.cc:7015
v8::Isolate::Priority priority()
Definition isolate.h:2082
bool RequiresCodeRange() const
Definition isolate.cc:7424
void RegisterTryCatchHandler(v8::TryCatch *that)
Definition isolate.cc:655
size_t total_regexp_code_generated_
Definition isolate.h:2836
std::shared_ptr< metrics::Recorder > metrics_recorder_
Definition isolate.h:2773
Bootstrapper * bootstrapper_
Definition isolate.h:2553
std::atomic< EntryStackItem * > entry_stack_
Definition isolate.h:2548
void SetAbortOnUncaughtExceptionCallback(v8::Isolate::AbortOnUncaughtExceptionCallback callback)
Definition isolate.cc:3623
bool IsJSApiWrapperNativeError(DirectHandle< JSReceiver > obj)
Definition isolate.cc:6752
v8::Isolate::UseCounterCallback use_counter_callback_
Definition isolate.h:2767
StringForwardingTable * string_forwarding_table() const
Definition isolate.h:785
base::RandomNumberGenerator * fuzzer_rng()
Definition isolate.cc:6330
std::vector< MemoryRange > code_pages_buffer2_
Definition isolate.h:2895
EternalHandles * eternal_handles_
Definition isolate.h:2582
bool NeedsSourcePositions() const
Definition isolate.cc:6155
const AstStringConstants * ast_string_constants_
Definition isolate.h:2692
void OnPromiseBefore(DirectHandle< JSPromise > promise)
Definition isolate.cc:6938
std::atomic< std::vector< MemoryRange > * > code_pages_
Definition isolate.h:2893
void SetHostInitializeImportMetaObjectCallback(HostInitializeImportMetaObjectCallback callback)
Definition isolate.cc:6705
std::unique_ptr< PersistentHandles > NewPersistentHandles()
Definition isolate.cc:5987
bool MayAccess(DirectHandle< NativeContext > accessing_context, DirectHandle< JSObject > receiver)
Definition isolate.cc:1815
MaybeDirectHandle< FixedArray > GetImportAttributesFromArgument(MaybeDirectHandle< Object > maybe_import_options_argument)
Definition isolate.cc:6582
static void EnsureSourcePositionsAvailable(Isolate *isolate, DirectHandle< JSMessageObject > message)
static V8_EXPORT_PRIVATE void AddProperty(Isolate *isolate, DirectHandle< JSObject > object, DirectHandle< Name > name, DirectHandle< Object > value, PropertyAttributes attributes)
static void ForceSetPrototype(Isolate *isolate, DirectHandle< JSObject > object, DirectHandle< JSPrototype > proto)
static V8_WARN_UNUSED_RESULT Maybe< bool > HasRealNamedProperty(Isolate *isolate, DirectHandle< JSObject > object, DirectHandle< Name > name)
static uint32_t GetNextAsyncTaskId(uint32_t current_async_task_id)
static Handle< Object > GetDataProperty(Isolate *isolate, DirectHandle< JSReceiver > object, DirectHandle< Name > name)
static V8_WARN_UNUSED_RESULT MaybeHandle< Object > GetProperty(Isolate *isolate, DirectHandle< JSReceiver > receiver, const char *key)
static DirectHandle< String > GetConstructorName(Isolate *isolate, DirectHandle< JSReceiver > receiver)
static JavaScriptBuiltinContinuationWithCatchFrame * cast(StackFrame *frame)
Definition frames.h:1607
Tagged< JSFunction > function() const override
Definition frames.cc:2492
bool is_unoptimized() const
Definition frames.h:801
static MaybeHandle< FixedArray > GetKeys(Isolate *isolate, DirectHandle< JSReceiver > object, KeyCollectionMode mode, PropertyFilter filter, GetKeysConversion keys_conversion=GetKeysConversion::kKeepNumbers, bool is_for_in=false, bool skip_indices=false)
Definition keys.cc:97
V8_INLINE void ExecuteMainThreadWhileParked(Callback callback)
bool is_listening_to_code_events()
V8_INLINE DirectHandle< T > ToHandleChecked() const
V8_WARN_UNUSED_RESULT V8_INLINE bool ToHandle(DirectHandle< S > *out) const
V8_INLINE bool is_null() const
static MaybeObjectDirectHandle Weak(Tagged< Object > object, Isolate *isolate)
static V8_EXPORT_PRIVATE const char * TemplateString(MessageTemplate index)
Definition messages.cc:425
static V8_EXPORT_PRIVATE void ReportMessage(Isolate *isolate, const MessageLocation *loc, DirectHandle< JSMessageObject > message)
Definition messages.cc:98
static std::unique_ptr< char[]> GetLocalizedMessage(Isolate *isolate, DirectHandle< Object > data)
Definition messages.cc:194
static V8_EXPORT_PRIVATE Handle< JSMessageObject > MakeMessageObject(Isolate *isolate, MessageTemplate type, const MessageLocation *location, DirectHandle< Object > argument, DirectHandle< StackTraceInfo > stack_trace=DirectHandle< StackTraceInfo >::null())
Definition messages.cc:77
Handle< Script > script() const
Definition messages.h:50
static void SetUpDefaultMicrotaskQueue(Isolate *isolate)
static V8_WARN_UNUSED_RESULT MaybeDirectHandle< String > ToFunctionName(Isolate *isolate, DirectHandle< Name > name)
Definition objects.cc:4049
static V8_WARN_UNUSED_RESULT HandleType< String >::MaybeType ToString(Isolate *isolate, HandleType< T > input)
V8_EXPORT_PRIVATE static V8_WARN_UNUSED_RESULT Maybe< bool > SetProperty(LookupIterator *it, DirectHandle< Object > value, StoreOrigin store_origin, Maybe< ShouldThrow > should_throw=Nothing< ShouldThrow >())
Definition objects.cc:2439
static V8_WARN_UNUSED_RESULT MaybeHandle< Object > GetPropertyOrElement(Isolate *isolate, DirectHandle< JSAny > object, DirectHandle< Name > name)
static double NumberValue(Tagged< Number > obj)
static void FreeOffHeapOffHeapInstructionStream(uint8_t *code, uint32_t code_size, uint8_t *data, uint32_t data_size)
static void CreateOffHeapOffHeapInstructionStream(Isolate *isolate, uint8_t **code, uint32_t *code_size, uint8_t **data, uint32_t *data_size)
int LookupExceptionHandlerInTable(int *data, HandlerTable::CatchPrediction *prediction) override
Definition frames.cc:3156
void Flush(BlockingBehavior blocking_behavior)
void Prioritize(Tagged< SharedFunctionInfo > function)
static void UnregisterIsolate(Isolate *isolate)
static void RegisterIsolate(Isolate *isolate)
static uint32_t GetPropertyIndex(const PropertyCallbackInfo< T > &info)
static Handle< Object > GetPropertyKeyHandle(const PropertyCallbackInfo< T > &info)
ReadOnlyHeap * read_only_heap() const
uint32_t initial_next_unique_sfi_id() const
static Handle< RegisteredSymbolTable > Add(Isolate *isolate, Handle< RegisteredSymbolTable > table, DirectHandle< String > key, DirectHandle< Symbol >)
Definition objects.cc:5635
virtual void VisitRootPointer(Root root, const char *description, FullObjectSlot p)
Definition visitors.h:75
FullObjectSlot slot(RootIndex root_index)
Definition roots.h:594
SaveAndSwitchContext(Isolate *isolate, Tagged< Context > new_context)
Definition isolate.cc:7343
SaveContext(Isolate *isolate)
Definition isolate.cc:7325
Handle< Context > context_
Definition isolate.h:2969
Handle< Context > topmost_script_having_context_
Definition isolate.h:2970
Isolate *const isolate_
Definition isolate.h:2968
static bool GetPositionInfo(DirectHandle< Script > script, int position, PositionInfo *info, OffsetFlag offset_flag=OffsetFlag::kWithOffset)
Definition objects.cc:4367
size_t total_size() const
virtual bool SetupHeap(Isolate *isolate, bool create_heap_objects)
virtual void SetupBuiltins(Isolate *isolate, bool compile_builtins)
static void EnsureSourcePositionsAvailable(Isolate *isolate, DirectHandle< SharedFunctionInfo > shared_info)
static void UnregisterJSStackComparableAddress(v8::internal::Isolate *isolate)
Definition simulator.h:139
static void IterateRegistersAndStack(Isolate *isolate, ::heap::base::StackVisitor *visitor)
Definition simulator.h:124
static constexpr int ToInt(const Tagged< Object > object)
Definition smi.h:33
static constexpr Tagged< Smi > uninitialized_deserialization_value()
Definition smi.h:114
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
static constexpr Tagged< Smi > zero()
Definition smi.h:99
static const v8::StartupData * DefaultSnapshotBlob()
static V8_EXPORT_PRIVATE uint32_t GetExpectedChecksum(const v8::StartupData *data)
Definition snapshot.cc:640
static NEVER_READ_ONLY_SPACE int GetSourcePosition(DirectHandle< StackFrameInfo > info)
V8_EXPORT_PRIVATE void Advance()
Definition frames.cc:161
StackFrame * frame() const
Definition frames.h:1726
bool is_baseline() const
Definition frames.h:242
V8_EXPORT_PRIVATE Tagged< Code > LookupCode() const
Definition frames.cc:757
Address sp() const
Definition frames.h:293
virtual Type type() const =0
bool is_javascript() const
Definition frames.h:290
bool is_builtin() const
Definition frames.h:266
StackHandler * top_handler() const
Definition frames-inl.h:74
StackFrameId id() const
Definition frames.h:334
Address pc() const
Definition frames-inl.h:78
Address fp() const
Definition frames.h:297
bool is_optimized_js() const
Definition frames.h:233
void InitThread(const ExecutionAccess &lock)
void SetStackLimitForStackSwitching(uintptr_t limit)
bool JsHasOverflowed(uintptr_t gap=0) const
Definition isolate.cc:7296
V8_INLINE bool InterruptRequested()
Definition isolate.h:3051
V8_EXPORT_PRIVATE bool HandleStackOverflowAndTerminationRequest()
Definition isolate.cc:7283
bool WasmHasOverflowed(uintptr_t gap=0) const
Definition isolate.cc:7307
StackTraceFailureMessage(Isolate *isolate, StackTraceMode mode, const Address *ptrs, size_t ptrs_count)
Definition isolate.cc:739
V8_NOINLINE void Print() volatile
Definition isolate.cc:726
char js_stack_trace_[kStacktraceBufferSize]
Definition isolate.h:3105
static V8_EXPORT_PRIVATE void ClearMentionedObjectCache(Isolate *isolate)
DirectHandle< String > ToString(Isolate *isolate)
void PrintMentionedObjectCache(Isolate *isolate)
void Add(const char *format)
void Log(Isolate *isolate)
int LookupExceptionHandlerInTable()
Definition frames.cc:2371
V8_INLINE constexpr StorageType ptr() const
V8_INLINE constexpr bool is_null() const
Definition tagged.h:502
bool IsValid() const
Definition thread-id.h:23
static ThreadId Current()
Definition thread-id.h:32
static ThreadId TryGetCurrent()
Definition thread-id.cc:21
static constexpr ThreadId Invalid()
Definition thread-id.h:35
Tagged< Context > pending_handler_context_
v8::FailedAccessCheckCallback failed_access_check_callback_
std::atomic< ThreadId > mutex_owner_
Definition v8threads.h:97
virtual void VisitThread(Isolate *isolate, ThreadLocalTop *top)=0
void TraceAllocateSegmentImpl(v8::internal::Segment *segment) override
Definition isolate.cc:3960
void TraceZoneDestructionImpl(const Zone *zone) override
Definition isolate.cc:3971
void UpdateMemoryTrafficAndReportMemoryUsage(size_t memory_traffic_delta)
Definition isolate.cc:3990
void TraceZoneCreationImpl(const Zone *zone) override
Definition isolate.cc:3965
void Dump(std::ostringstream &out, bool dump_details)
Definition isolate.cc:4030
std::unordered_set< const Zone * > active_zones_
Definition isolate.cc:4079
static int RegisterStackSlotCount(int register_count)
static UnoptimizedJSFrame * cast(StackFrame *frame)
Definition frames.h:1123
Tagged< BytecodeArray > GetBytecodeArray() const
Definition frames.cc:3299
Tagged< Object > ReadInterpreterRegister(int register_index) const
Definition frames.cc:3307
int LookupExceptionHandlerInTable(int *data, HandlerTable::CatchPrediction *prediction) override
Definition frames.cc:3286
virtual int GetBytecodeOffset() const =0
V8_EXPORT_PRIVATE bool is_logging()
Definition log.cc:1336
V8_EXPORT_PRIVATE void StopProfilerThread()
Definition log.cc:2442
V8_EXPORT_PRIVATE FILE * TearDownAndGetLogFile()
Definition log.cc:2449
bool is_listening_to_code_events() override
Definition log.h:272
void LateSetup(Isolate *isolate)
Definition log.cc:2355
sampler::Sampler * sampler()
Definition log.cc:2437
bool SetUp(Isolate *isolate)
Definition log.cc:2298
static V8_INLINE Tagged_t CompressObject(Address tagged)
static V8_INLINE void InitBase(Address base)
static V8_INLINE Address DecompressTagged(TOnHeapAddress on_heap_addr, Tagged_t raw_value)
V8_CONST static V8_INLINE Address base()
static V8_EXPORT_PRIVATE v8::Platform * GetCurrentPlatform()
Definition v8.cc:282
static V8_EXPORT_PRIVATE void FatalProcessOutOfMemory(Isolate *isolate, const char *location, const OOMDetails &details=kNoOOMDetails)
static constexpr int kSize
static V8_WARN_UNUSED_RESULT DirectHandle< WeakArrayList > Append(Isolate *isolate, DirectHandle< WeakArrayList > array, MaybeObjectDirectHandle value, AllocationType allocation=AllocationType::kYoung)
NEVER_READ_ONLY_SPACE static V8_EXPORT_PRIVATE Handle< WeakArrayList > AddToEnd(Isolate *isolate, Handle< WeakArrayList > array, MaybeObjectDirectHandle value)
static MarkingBarrier * SetForThread(MarkingBarrier *marking_barrier)
size_t segment_bytes_allocated() const
Definition zone.h:183
static StackMemory * GetCentralStackView(Isolate *isolate)
Definition stacks.cc:14
WasmCode * LookupCode(Isolate *isolate, Address pc) const
static void FreeAllOrphanedGlobalHandles(WasmOrphanedGlobalHandle *start)
static WasmOrphanedGlobalHandle * NewOrphanedGlobalHandle(WasmOrphanedGlobalHandle **pointer)
void DeleteCompileJobsOnIsolate(Isolate *isolate)
void RemoveIsolate(Isolate *isolate)
void AddIsolate(Isolate *isolate)
static void NotifyIsolateDisposal(Isolate *isolate)
static const ContextId Empty()
Definition v8-metrics.h:195
bool IsActive() const
Definition sampler.h:55
RecordWriteMode const mode_
Register const index_
base::Mutex & mutex_
Handle< Code > code
base::OwnedVector< uint8_t > buffer_
Definition assembler.cc:111
#define V8_ENABLE_LEAPTIERING_BOOL
Definition globals.h:151
#define V8_EXTERNAL_CODE_SPACE_BOOL
Definition globals.h:255
#define COMPRESS_POINTERS_BOOL
Definition globals.h:99
#define COMPRESS_POINTERS_IN_MULTIPLE_CAGES_BOOL
Definition globals.h:117
#define V8_ENABLE_NEAR_CODE_RANGE_BOOL
Definition globals.h:231
#define DEBUG_BOOL
Definition globals.h:87
#define V8_CAN_CREATE_SHARED_HEAP_BOOL
Definition globals.h:121
#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)
Definition globals.h:2626
Handle< SharedFunctionInfo > info
Handle< Context > context_
LineAndColumn current
#define RETURN_ON_EXCEPTION(isolate, call)
Definition isolate.h:395
#define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call)
Definition isolate.h:284
#define ISOLATE_INIT_ARRAY_LIST(V)
Definition isolate.h:503
#define THROW_NEW_ERROR(isolate, call)
Definition isolate.h:307
#define RETURN_VALUE_IF_EXCEPTION(isolate, value)
Definition isolate.h:224
#define MAYBE_RETURN_ON_EXCEPTION_VALUE(isolate, call, value)
Definition isolate.h:238
#define ISOLATE_INIT_LIST(V)
Definition isolate.h:513
#define API_ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value)
Definition isolate.h:415
MicrotaskQueue * microtask_queue
Definition execution.cc:77
#define V8_SHORT_BUILTIN_CALLS_BOOL
std::string filename
Isolate * isolate
#define STATIC_ROOTS_FAILED_MSG
Definition heap-inl.h:83
#define INSTANCE_TYPE_CHECKERS_SINGLE(V)
#define INSTANCE_TYPE_CHECKERS_RANGE(V)
int32_t offset
uint32_t v8_Default_embedded_blob_code_size_
Definition isolate.cc:177
FrameSummaries summaries_
Definition isolate.cc:2718
Handle< FixedArray > frames_
Definition isolate.cc:1606
MaybeHandle< Script > current_script_
Definition isolate.cc:1683
#define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length)
bool encountered_strict_function_
Definition isolate.cc:1118
const int limit_
Definition isolate.cc:1114
Handle< String > name_or_url_
Definition isolate.cc:1661
bool skip_next_frame_
Definition isolate.cc:1116
const uint8_t v8_Default_embedded_blob_data_[]
Definition isolate.cc:178
#define TRACE_ISOLATE(tag)
Definition isolate.cc:193
bool skipped_prev_frame_
Definition isolate.cc:1117
const uint8_t v8_Default_embedded_blob_code_[]
Definition isolate.cc:176
const Handle< Object > caller_
Definition isolate.cc:1115
Handle< FixedArray > elements_
Definition isolate.cc:1119
uint32_t v8_Default_embedded_blob_data_size_
Definition isolate.cc:179
StackFrameIterator stack_iterator_
Definition isolate.cc:2717
#define ASSIGN_ELEMENT(CamelName, hacker_name)
#define ISOLATE_INIT_EXECUTE(type, name, initial_value)
TNode< Context > context
std::optional< TNode< JSArray > > a
TNode< Object > receiver
SharedFunctionInfoRef shared
TNode< Object > callback
bool defined
DirectHandle< JSReceiver > options
ZoneVector< RpoNumber > & result
ZoneStack< RpoNumber > & stack
Builtin builtin
ValueType obj_type
LiftoffAssembler::CacheState state
int pc_offset
Point from
int x
int position
Definition liveedit.cc:290
uint32_t const mask
base::SmallVector< int32_t, 1 > stack_slots
size_t priority
int n
Definition mul-fft.cc:296
#define LAZY_MUTEX_INITIALIZER
Definition mutex.h:105
STL namespace.
unsigned short uint16_t
Definition unicode.cc:39
V8_INLINE size_t hash_combine(size_t seed, size_t hash)
Definition hashing.h:77
int Fclose(FILE *stream)
Definition wrappers.h:22
constexpr Vector< const char > StaticCharVector(const char(&array)[N])
Definition vector.h:326
constexpr Vector< T > VectorOf(T *start, size_t size)
Definition vector.h:360
void AlignedFree(void *ptr)
Definition memory.h:102
void * AlignedAlloc(size_t size, size_t alignment)
Definition memory.h:84
@ kDebugStackTraceCaptured
void MaybeSetHandlerNow(Isolate *isolate)
void AddIsolate(Isolate *isolate)
void RemoveIsolate(Isolate *isolate)
V8_INLINE constexpr bool IsSeqString(InstanceType instance_type)
V8_INLINE constexpr bool IsThinString(InstanceType instance_type)
V8_INLINE constexpr bool IsConsString(InstanceType instance_type)
V8_INLINE constexpr bool IsExternalString(InstanceType instance_type)
V8_INLINE constexpr bool IsOneByteString(InstanceType instance_type)
V8_INLINE constexpr bool IsInternalizedString(InstanceType instance_type)
V8_INLINE constexpr bool IsSlicedString(InstanceType instance_type)
V8_INLINE constexpr bool IsUncachedExternalString(InstanceType instance_type)
V8_INLINE constexpr bool IsTwoByteString(InstanceType instance_type)
TH_DISABLE_ASAN bool IsThreadInWasm()
WasmCodeManager * GetWasmCodeManager()
WasmEngine * GetWasmEngine()
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
Definition handles-inl.h:72
uint32_t DefaultEmbeddedBlobDataSize()
Definition isolate.cc:205
constexpr NullMaybeHandleType kNullMaybeHandle
constexpr int kTaggedSize
Definition globals.h:542
const uint8_t * DefaultEmbeddedBlobCode()
Definition isolate.cc:196
bool NoExtension(const v8::FunctionCallbackInfo< v8::Value > &)
Definition isolate.cc:763
void FreeCurrentEmbeddedBlob()
Definition isolate.cc:286
PerThreadAssertScopeDebugOnly< false, SAFEPOINTS_ASSERT, HEAP_ALLOCATION_ASSERT > DisallowGarbageCollection
base::LazyMutex read_only_dispatch_entries_mutex_
Definition isolate.cc:7632
constexpr size_t kIsolateDataAlignment
Definition globals.h:456
bool Is(IndirectHandle< U > value)
Definition handles-inl.h:51
@ SKIP_UNTIL_SEEN
Definition messages.h:70
bool IsNumber(Tagged< Object > obj)
void PrintF(const char *format,...)
Definition utils.cc:39
constexpr int kJSDispatchHandleSize
Definition globals.h:647
const size_t kShortBuiltinCallsOldSpaceSizeThreshold
Definition globals.h:236
Tagged(T object) -> Tagged< T >
thread_local Isolate::PerIsolateThreadData *g_current_per_isolate_thread_data_ V8_CONSTINIT
Definition isolate.cc:522
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:665
static void PrintFrames(Isolate *isolate, StringStream *accumulator, StackFrame::PrintMode mode)
Definition isolate.cc:1749
kInterpreterTrampolineOffset Tagged< HeapObject >
BUILTIN_FP_CALL BUILTIN_FP_CALL BUILTIN_FP_CALL BUILTIN_FP_CALL BUILTIN_FP_CALL BUILTIN_FP_CALL BUILTIN_FP_CALL BUILTIN_FP_CALL BUILTIN_FP_CALL BUILTIN_FP_CALL int character
bool IsAsyncFunction(FunctionKind kind)
Address Tagged_t
Definition globals.h:547
void SerializeCallSiteInfo(Isolate *isolate, DirectHandle< CallSiteInfo > frame, IncrementalStringBuilder *builder)
V8_INLINE DirectHandle< T > direct_handle(Tagged< T > object, Isolate *isolate)
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in name
Definition flags.cc:2086
void Print(Tagged< Object > obj)
Definition objects.h:774
bool IsNullOrUndefined(Tagged< Object > obj, Isolate *isolate)
constexpr int kSystemPointerSize
Definition globals.h:410
uint32_t DefaultEmbeddedBlobCodeSize()
Definition isolate.cc:199
uintptr_t GetCurrentStackPosition()
Definition utils.cc:222
typename detail::FlattenUnionHelper< Union<>, Ts... >::type UnionOf
Definition union.h:123
constexpr size_t kMaxPCRelativeCodeRangeInMB
void DefaultWasmAsyncResolvePromiseCallback(v8::Isolate *isolate, v8::Local< v8::Context > context, v8::Local< v8::Promise::Resolver > resolver, v8::Local< v8::Value > result, WasmAsyncSuccess success)
Definition isolate.cc:7615
const uint8_t * DefaultEmbeddedBlobData()
Definition isolate.cc:202
bool is_strict(LanguageMode language_mode)
Definition globals.h:777
const int kHeapObjectTag
Definition v8-internal.h:72
constexpr bool kPlatformRequiresCodeRange
Definition globals.h:507
V8_EXPORT_PRIVATE FlagValues v8_flags
void DisableEmbeddedBlobRefcounting()
Definition isolate.cc:281
return value
Definition map-inl.h:893
static base::RandomNumberGenerator * ensure_rng_exists(base::RandomNumberGenerator **rng, int seed)
Definition isolate.cc:6312
constexpr bool kAllCodeObjectsLiveInTrustedSpace
uint64_t HashSeed(Isolate *isolate)
static constexpr Address kNullAddress
Definition v8-internal.h:53
void PrintIsolate(void *isolate, const char *format,...)
Definition utils.cc:61
void MemCopy(void *dest, const void *src, size_t size)
Definition memcopy.h:124
int FastD2IChecked(double x)
Definition conversions.h:91
void init_memcopy_functions()
Definition memcopy.cc:36
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in allocation gc speed threshold for starting incremental marking via a task in percent of available threshold for starting incremental marking immediately in percent of available Use a single schedule for determining a marking schedule between JS and C objects schedules the minor GC task with kUserVisible priority max worker number of concurrent for NumberOfWorkerThreads start background threads that allocate memory 
concurrent_array_buffer_sweeping use parallel threads to clear weak refs in the atomic pause trace progress of the incremental marking trace object counts and memory usage * MB
Definition flags.cc:2197
constexpr bool CodeKindCanDeoptimize(CodeKind kind)
Definition code-kind.h:83
kInterpreterTrampolineOffset script
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
bool(*)(Local< Context > context) WasmJSPIEnabledCallback
void(*)(CrashKeyId id, const std::string &value) AddCrashKeyCallback
@ kSnapshotChecksumCalculated
@ kCodeSpaceFirstPageAddress
@ kReadonlySpaceFirstPageAddress
Local< T > Handle
PromiseRejectEvent
Definition v8-promise.h:147
void(*)(ExceptionPropagationMessage message) ExceptionPropagationCallback
Maybe< T > Nothing()
Definition v8-maybe.h:112
bool(*)(Local< Context > accessing_context, Local< Object > accessed_object, Local< Value > data) AccessCheckCallback
void(*)(PromiseRejectMessage message) PromiseRejectCallback
Definition v8-promise.h:170
void(*)(Isolate *) CallCompletedCallback
static constexpr RelaxedStoreTag kRelaxedStore
Definition globals.h:2911
void(*)(Local< Object > target, AccessType type, Local< Value > data) FailedAccessCheckCallback
ExceptionContext
void(*)(Local< Context > context, Local< Module > module, Local< Object > meta) HostInitializeImportMetaObjectCallback
void(*)(Isolate *isolate, void *data) InterruptCallback
bool(*)(Local< Context > context) WasmImportedStringsEnabledCallback
WasmAsyncSuccess
void(*)(PromiseHookType type, Local< Promise > promise, Local< Value > parent) PromiseHook
Definition v8-promise.h:143
v8::Local< T > ToApiHandle(v8::internal::DirectHandle< v8::internal::Object > obj)
Definition api.h:297
T ToCData(i::Isolate *isolate, v8::internal::Tagged< v8::internal::Object > obj)
Definition api-inl.h:23
@ ACCESS_HAS
bool(*)(Isolate *isolate, Local< String > script_name) PrintCurrentStackTraceFilterCallback
void(*)(Isolate *) BeforeCallEnteredCallback
static constexpr AcquireLoadTag kAcquireLoad
Definition globals.h:2908
Maybe< T > Just(const T &t)
Definition v8-maybe.h:117
ModuleImportPhase
StateTag
Definition v8-unwinder.h:36
@ EXTERNAL
Definition v8-unwinder.h:43
@ IDLE
Definition v8-unwinder.h:45
PromiseHookType
Definition v8-promise.h:141
SourcePositionTable *const table_
Definition pipeline.cc:227
#define STRONG_READ_ONLY_ROOT_LIST(V)
Definition roots.h:54
#define RCS_SCOPE(...)
#define FATAL(...)
Definition logging.h:47
#define CHECK_GE(lhs, rhs)
#define DCHECK_NULL(val)
Definition logging.h:491
#define CHECK_IMPLIES(lhs, rhs)
#define CHECK(condition)
Definition logging.h:124
#define CHECK_LT(lhs, rhs)
#define DCHECK_WITH_MSG(condition, msg)
Definition logging.h:182
#define CHECK_WITH_MSG(condition, message)
Definition logging.h:118
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define CHECK_NULL(val)
#define CHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define CHECK_NE(lhs, rhs)
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define DCHECK_GT(v1, v2)
Definition logging.h:487
#define USE(...)
Definition macros.h:293
constexpr T RoundDown(T x, intptr_t m)
Definition macros.h:371
#define OFFSET_OF(type, field)
Definition macros.h:57
constexpr bool IsAligned(T value, U alignment)
Definition macros.h:403
#define arraysize(array)
Definition macros.h:67
bool enabled_
Definition string.cc:1013
size_t length_in_bytes
Definition v8-unwinder.h:62
const void * start
Definition v8-unwinder.h:61
ManagedPtrDestructor * prev_
Definition managed.h:67
ManagedPtrDestructor * next_
Definition managed.h:68
static V8_EXPORT_PRIVATE std::atomic_uint runtime_stats
#define V8_TLS_DEFINE_GETTER(Name, Type, Member)
Symbol method
#define TRACE_EVENT0(category_group, name)
#define TRACE_EVENT_END1(category_group, name, arg1_name, arg1_val)
#define TRACE_EVENT_INSTANT1(category_group, name, scope, arg1_name, arg1_val)
#define TRACE_EVENT_SCOPE_THREAD
#define TRACE_DISABLED_BY_DEFAULT(name)
#define TRACE_EVENT_BEGIN1(category_group, name, arg1_name, arg1_val)
#define TRACE_STR_COPY(str)
Definition trace-event.h:50
#define V8_STATIC_ROOTS_BOOL
Definition v8config.h:1001
#define END_ALLOW_USE_DEPRECATED()
Definition v8config.h:634
#define V8_UNLIKELY(condition)
Definition v8config.h:660
#define START_ALLOW_USE_DEPRECATED()
Definition v8config.h:633