v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
shared-function-info-inl.h
Go to the documentation of this file.
1// Copyright 2017 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_OBJECTS_SHARED_FUNCTION_INFO_INL_H_
6#define V8_OBJECTS_SHARED_FUNCTION_INFO_INL_H_
7
9// Include the non-inl header before the rest of the headers.
10
11#include <optional>
12
13#include "src/base/macros.h"
17#include "src/common/globals.h"
28#include "src/objects/string.h"
30
31#if V8_ENABLE_WEBASSEMBLY
34#endif // V8_ENABLE_WEBASSEMBLY
35
36// Has to be the last include (doesn't have include guards):
38
39namespace v8::internal {
40
41#include "torque-generated/src/objects/shared-function-info-tq-inl.inc"
42
44
45int PreparseData::inner_start_offset() const {
46 return InnerOffset(data_length());
47}
48
52
54 int data_end_offset = kDataStartOffset + data_length();
55 int padding_size = inner_start_offset() - data_end_offset;
56 DCHECK_LE(0, padding_size);
57 if (padding_size == 0) return;
58 memset(reinterpret_cast<void*>(address() + data_end_offset), 0, padding_size);
59}
60
61uint8_t PreparseData::get(int index) const {
62 DCHECK_LE(0, index);
63 DCHECK_LT(index, data_length());
64 int offset = kDataStartOffset + index * kByteSize;
65 return ReadField<uint8_t>(offset);
66}
67
68void PreparseData::set(int index, uint8_t value) {
69 DCHECK_LE(0, index);
70 DCHECK_LT(index, data_length());
71 int offset = kDataStartOffset + index * kByteSize;
72 WriteField<uint8_t>(offset, value);
73}
74
// Bulk-copies |length| bytes from |buffer| into the byte-data region,
// starting at byte |index|. The single DCHECK below also guards against
// signed overflow of index + length (length <= kMaxInt - index) before the
// range check against data_length().
void PreparseData::copy_in(int index, const uint8_t* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
         index + length <= this->data_length());
  Address dst_addr = field_address(kDataStartOffset + index * kByteSize);
  memcpy(reinterpret_cast<void*>(dst_addr), buffer, length);
}
81
85
87 DCHECK_LE(0, index);
88 DCHECK_LT(index, this->children_length());
89 int offset = inner_start_offset() + index * kTaggedSize;
90 return RELAXED_READ_FIELD(*this, offset);
91}
92
94 WriteBarrierMode mode) {
95 DCHECK_LE(0, index);
96 DCHECK_LT(index, this->children_length());
97 int offset = inner_start_offset() + index * kTaggedSize;
98 RELAXED_WRITE_FIELD(*this, offset, value);
99 CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);
100}
101
107
110 kBytecodeArrayOffset)
112 kInterpreterTrampolineOffset)
113
115
117 Tagged<NameOrScopeInfoT>, kNameOrScopeInfoOffset)
119 kScriptOffset)
121 kScriptOffset)
122
123void SharedFunctionInfo::SetTrustedData(Tagged<ExposedTrustedObject> value,
124 WriteBarrierMode mode) {
125 WriteTrustedPointerField<kUnknownIndirectPointerTag>(
126 kTrustedFunctionDataOffset, value);
127
128 // Only one of trusted_function_data and untrusted_function_data can be in
129 // use, so clear the untrusted data field. Using -1 here as cleared data value
130 // allows HasBuiltinId to become quite simple, as it can just check if the
131 // untrusted data is a Smi containing a valid builtin ID.
132 constexpr int kClearedUntrustedFunctionDataValue = -1;
133 static_assert(!Builtins::IsBuiltinId(kClearedUntrustedFunctionDataValue));
135 *this, Smi::FromInt(kClearedUntrustedFunctionDataValue));
136
137 CONDITIONAL_TRUSTED_POINTER_WRITE_BARRIER(*this, kTrustedFunctionDataOffset,
139 mode);
140}
141
143 WriteBarrierMode mode) {
145 value);
146
147 // Only one of trusted_function_data and untrusted_function_data can be in
148 // use, so clear the trusted data field.
149 ClearTrustedPointerField(kTrustedFunctionDataOffset, kReleaseStore);
150
151 CONDITIONAL_WRITE_BARRIER(*this, kUntrustedFunctionDataOffset, value, mode);
152}
153
155 return !IsTrustedPointerFieldEmpty(kTrustedFunctionDataOffset);
156}
157
159
161 IsolateForSandbox isolate) const {
162 return ReadMaybeEmptyTrustedPointerField<kUnknownIndirectPointerTag>(
163 kTrustedFunctionDataOffset, isolate, kAcquireLoad);
164}
165
166template <typename T, IndirectPointerTag tag>
168 static_assert(tag != kUnknownIndirectPointerTag);
169 return Cast<T>(ReadMaybeEmptyTrustedPointerField<tag>(
170 kTrustedFunctionDataOffset, isolate, kAcquireLoad));
171}
172
174#ifdef V8_ENABLE_SANDBOX
175 auto trusted_data_slot = RawIndirectPointerField(kTrustedFunctionDataOffset,
177 // This routine is sometimes used for SFI's in read-only space (which never
178 // have trusted data). In that case, GetIsolateForSandbox cannot be used, so
179 // we need to return early in that case, before trying to obtain an Isolate.
180 IndirectPointerHandle handle = trusted_data_slot.Acquire_LoadHandle();
182 return trusted_data_slot.ResolveHandle(handle, GetIsolateForSandbox(*this));
183#else
185#endif
186}
187
191
193 return script(cage_base, kAcquireLoad);
194}
196 return IsScript(script(tag));
197}
198
201 outer_scope_info_or_feedback_metadata,
206 Tagged<HeapObject> value =
208 Acquire_Load(cage_base, *this);
209 return value;
210}
211
213 const {
214 const uint16_t param_count = TorqueGeneratedClass::formal_parameter_count();
215 return param_count;
216}
217
219 const {
220 const uint16_t param_count = TorqueGeneratedClass::formal_parameter_count();
221 if (param_count == kDontAdaptArgumentsSentinel) return param_count;
222 return param_count - kJSArgcReceiverSlots;
223}
224
226 DCHECK_EQ(value, static_cast<uint16_t>(value));
228 TorqueGeneratedClass::set_formal_parameter_count(value);
229}
230
232 function_token_offset, uint16_t)
233
235int32_t SharedFunctionInfo::relaxed_flags() const {
236 return flags(kRelaxedLoad);
237}
238void SharedFunctionInfo::set_relaxed_flags(int32_t flags) {
239 return set_flags(flags, kRelaxedStore);
240}
241
242UINT8_ACCESSORS(SharedFunctionInfo, flags2, kFlags2Offset)
243
244bool SharedFunctionInfo::HasSharedName() const {
245 Tagged<Object> value = name_or_scope_info(kAcquireLoad);
246 if (IsScopeInfo(value)) {
247 return Cast<ScopeInfo>(value)->HasSharedFunctionName();
248 }
249 return value != kNoSharedNameSentinel;
250}
251
253 if (!HasSharedName()) return GetReadOnlyRoots().empty_string();
254 Tagged<Object> value = name_or_scope_info(kAcquireLoad);
255 if (IsScopeInfo(value)) {
256 if (Cast<ScopeInfo>(value)->HasFunctionName()) {
257 return Cast<String>(Cast<ScopeInfo>(value)->FunctionName());
258 }
259 return GetReadOnlyRoots().empty_string();
260 }
261 return Cast<String>(value);
262}
263
265 Tagged<Object> maybe_scope_info = name_or_scope_info(kAcquireLoad);
266 if (IsScopeInfo(maybe_scope_info)) {
267 Cast<ScopeInfo>(maybe_scope_info)->SetFunctionName(name);
268 } else {
269 DCHECK(IsString(maybe_scope_info) ||
270 maybe_scope_info == kNoSharedNameSentinel);
271 set_name_or_scope_info(name, kReleaseStore);
272 }
274}
275
277 return scope_info(kAcquireLoad)->is_script_scope() &&
278 Cast<Script>(script())->compilation_type() ==
280}
281
283 return is_script() && scope_info(kAcquireLoad)->ContextLocalCount() > 0;
284}
285
287 // TODO(v8:11429): Decide if this return bytecode or baseline code, when the
288 // latter is present.
289 if (HasBytecodeArray(isolate)) {
290 return Cast<AbstractCode>(GetBytecodeArray(isolate));
291 } else {
292 return Cast<AbstractCode>(GetCode(isolate));
293 }
294}
295
297 int offset = raw_function_token_offset();
299 return kNoSourcePosition;
300 } else {
301 return StartPosition() - offset;
302 }
303}
304
305template <typename IsolateT>
307 if (v8_flags.enable_lazy_source_positions) {
308 return !HasBytecodeArray() ||
309 GetBytecodeArray(isolate)->HasSourcePositionTable();
310 }
311 return true;
312}
313
314template <typename IsolateT>
316 IsolateT* isolate) const {
317 if (!IsScript(script())) return kHasNoScript;
318
319 if (isolate->is_precise_binary_code_coverage() &&
321 // We may miss invocations if this function is inlined.
323 }
324
325 // Built-in functions are handled by the JSCallReducer.
326 if (HasBuiltinId()) return kIsBuiltin;
327
328 if (!IsUserJavaScript()) return kIsNotUserCode;
329
330 // If there is no bytecode array, it is either not compiled or it is compiled
331 // with WebAssembly for the asm.js pipeline. In either case we don't want to
332 // inline.
333 if (!HasBytecodeArray()) return kHasNoBytecode;
334
335 if (GetBytecodeArray(isolate)->length() >
336 v8_flags.max_inlined_bytecode_size) {
338 }
339
340 {
342 isolate->shared_function_info_access(), isolate);
343 if (HasBreakInfo(isolate->GetMainThreadIsolateUnsafe())) {
345 }
346 }
347
349
350 return kIsInlineable;
351}
352
353BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags2, class_scope_has_private_brand,
354 SharedFunctionInfo::ClassScopeHasPrivateBrandBit)
355
358 SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit)
359
361 SharedFunctionInfo::IsSparkplugCompilingBit)
362
364 SharedFunctionInfo::MaglevCompilationFailedBit)
365
367 function_context_independent_compiled,
368 SharedFunctionInfo::FunctionContextIndependentCompiledBit)
369
371 SharedFunctionInfo::FunctionSyntaxKindBits)
372
374 SharedFunctionInfo::AllowLazyCompilationBit)
376 SharedFunctionInfo::HasDuplicateParametersBit)
377
379 SharedFunctionInfo::IsNativeBit)
380#if V8_ENABLE_WEBASSEMBLY
382 SharedFunctionInfo::IsAsmWasmBrokenBit)
383#endif // V8_ENABLE_WEBASSEMBLY
386 SharedFunctionInfo::RequiresInstanceMembersInitializerBit)
387
389 name_should_print_as_anonymous,
390 SharedFunctionInfo::NameShouldPrintAsAnonymousBit)
393 SharedFunctionInfo::HasReportedBinaryCoverageBit)
394
396 SharedFunctionInfo::IsTopLevelBit)
398 SharedFunctionInfo::PropertiesAreFinalBit)
400 private_name_lookup_skips_outer_class,
401 SharedFunctionInfo::PrivateNameLookupSkipsOuterClassBit)
403 SharedFunctionInfo::LiveEditedBit)
404
405bool SharedFunctionInfo::optimization_disabled() const {
406 return disabled_optimization_reason() != BailoutReason::kNoReason;
407}
408
410 return DisabledOptimizationReasonBits::decode(flags(kRelaxedLoad));
411}
412
414 static_assert(LanguageModeSize == 2);
415 return construct_language_mode(IsStrictBit::decode(flags(kRelaxedLoad)));
416}
417
419 static_assert(LanguageModeSize == 2);
420 // We only allow language mode transitions that set the same language mode
421 // again or go up in the chain:
422 DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
423 int hints = flags(kRelaxedLoad);
424 hints = IsStrictBit::update(hints, is_strict(language_mode));
425 set_flags(hints, kRelaxedStore);
427}
428
430 static_assert(FunctionKindBits::kSize == kFunctionKindBitSize);
431 return FunctionKindBits::decode(flags(kRelaxedLoad));
432}
433
435 int hints = flags(kRelaxedLoad);
436 hints = FunctionKindBits::update(hints, kind);
437 hints = IsClassConstructorBit::update(hints, IsClassConstructor(kind));
438 set_flags(hints, kRelaxedStore);
440}
441
445
447 return ConstructAsBuiltinBit::decode(flags(kRelaxedLoad));
448}
449
451 bool uses_builtins_construct_stub = false;
452 if (HasBuiltinId()) {
453 Builtin id = builtin_id();
454 if (id != Builtin::kCompileLazy && id != Builtin::kEmptyFunction) {
455 uses_builtins_construct_stub = true;
456 }
457 } else if (IsApiFunction()) {
458 uses_builtins_construct_stub = true;
459 }
460
461 int f = flags(kRelaxedLoad);
462 f = ConstructAsBuiltinBit::update(f, uses_builtins_construct_stub);
463 set_flags(f, kRelaxedStore);
464}
465
// Relaxed-atomic read of the age counter; concurrent writers are tolerated
// but no ordering is implied.
uint16_t SharedFunctionInfo::age() const {
  return RELAXED_READ_UINT16_FIELD(*this, kAgeOffset);
}
469
// Relaxed-atomic store of the age counter; pairs with the relaxed read in
// age().
void SharedFunctionInfo::set_age(uint16_t value) {
  RELAXED_WRITE_UINT16_FIELD(*this, kAgeOffset, value);
}
473
474uint16_t SharedFunctionInfo::CompareExchangeAge(uint16_t expected_age,
475 uint16_t new_age) {
476 Address age_addr = address() + kAgeOffset;
478 reinterpret_cast<base::Atomic16*>(age_addr), expected_age, new_age);
479}
480
481int SharedFunctionInfo::function_map_index() const {
482 // Note: Must be kept in sync with the FastNewClosure builtin.
484 FunctionMapIndexBits::decode(flags(kRelaxedLoad));
486 return index;
487}
488
489void SharedFunctionInfo::set_function_map_index(int index) {
490 static_assert(Context::LAST_FUNCTION_MAP_INDEX <=
491 Context::FIRST_FUNCTION_MAP_INDEX + FunctionMapIndexBits::kMax);
495 set_flags(FunctionMapIndexBits::update(flags(kRelaxedLoad), index),
497}
498
499void SharedFunctionInfo::clear_padding() { set_padding(0); }
500
502 int map_index =
504 set_function_map_index(map_index);
505}
506
508#if V8_ENABLE_WEBASSEMBLY
509 // TODO(leszeks): Revise this DCHECK now that the code field is gone.
510 DCHECK(!HasWasmExportedFunctionData());
511#endif // V8_ENABLE_WEBASSEMBLY
512 if (HasBuiltinId()) {
513 Builtin builtin = builtin_id();
514 if (Builtins::KindOf(builtin) == Builtins::TFJ) {
515 const int formal_parameter_count =
517 // If we have `kDontAdaptArgumentsSentinel` or no arguments, then we are
518 // good. Otherwise this is a mismatch.
519 if (formal_parameter_count != kDontAdaptArgumentsSentinel &&
520 formal_parameter_count != JSParameterCount(0)) {
521 FATAL(
522 "Conflicting argument adaptation configuration (SFI vs call "
523 "descriptor) for builtin: %s (%d)",
524 Builtins::name(builtin), static_cast<int>(builtin));
525 }
526 }
527 }
528 TorqueGeneratedClass::set_formal_parameter_count(kDontAdaptArgumentsSentinel);
529}
530
532 return TorqueGeneratedClass::formal_parameter_count() ==
534}
535
537 Tagged<Object> maybe_scope_info = name_or_scope_info(cage_base, kAcquireLoad);
538 if (IsScopeInfo(maybe_scope_info, cage_base)) {
539 return Cast<ScopeInfo>(maybe_scope_info);
540 }
541 return GetReadOnlyRoots().empty_scope_info();
542}
543
545 return scope_info(cage_base, kAcquireLoad);
546}
547
549 // Keep in sync with the scope_info getter above.
550 PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
551 Tagged<Object> maybe_scope_info = name_or_scope_info(cage_base, tag);
552 if (IsScopeInfo(maybe_scope_info, cage_base)) {
553 return Cast<ScopeInfo>(maybe_scope_info);
554 }
555 return EarlyGetReadOnlyRoots().empty_scope_info();
556}
557
559 WriteBarrierMode mode) {
560 // Move the existing name onto the ScopeInfo.
561 Tagged<NameOrScopeInfoT> name_or_scope_info =
562 this->name_or_scope_info(kAcquireLoad);
564 if (IsScopeInfo(name_or_scope_info)) {
565 name = Cast<ScopeInfo>(name_or_scope_info)->FunctionName();
566 } else {
567 name = Cast<UnionOf<Smi, String>>(name_or_scope_info);
568 }
569 DCHECK(IsString(name) || name == kNoSharedNameSentinel);
570 // ScopeInfo can get promoted to read-only space. Now that we reuse them after
571 // flushing bytecode, we'll actually reinstall read-only scopeinfos on
572 // SharedFunctionInfos if they required a context. The read-only scopeinfos
573 // should already be fully initialized though, and hence will already have the
574 // right FunctionName (and InferredName if relevant).
575 if (scope_info->FunctionName() != name) {
576 scope_info->SetFunctionName(name);
577 }
578 if (HasInferredName() && inferred_name()->length() != 0 &&
579 scope_info->InferredFunctionName() != inferred_name()) {
580 scope_info->SetInferredFunctionName(inferred_name());
581 }
582 set_name_or_scope_info(scope_info, kReleaseStore, mode);
583}
584
586 WriteBarrierMode mode) {
587 WRITE_FIELD(*this, kNameOrScopeInfoOffset, scope_info);
588 CONDITIONAL_WRITE_BARRIER(*this, kNameOrScopeInfoOffset, scope_info, mode);
589}
590
592 DCHECK(!is_compiled());
593 DCHECK(!HasFeedbackMetadata());
595}
596
598 Tagged<ScopeInfo> outer_info;
599 Tagged<ScopeInfo> info = scope_info(kAcquireLoad);
600 if (info->IsEmpty()) {
601 if (is_compiled()) return false;
602 if (!IsScopeInfo(outer_scope_info())) return false;
603 outer_info = Cast<ScopeInfo>(outer_scope_info());
604 } else {
605 if (!info->HasOuterScopeInfo()) return false;
606 outer_info = info->OuterScopeInfo();
607 }
608 return !outer_info->IsEmpty();
609}
610
613 Tagged<ScopeInfo> info = scope_info(kAcquireLoad);
614 if (info->IsEmpty()) return Cast<ScopeInfo>(outer_scope_info());
615 return info->OuterScopeInfo();
616}
617
618void SharedFunctionInfo::set_outer_scope_info(Tagged<HeapObject> value,
619 WriteBarrierMode mode) {
622 DCHECK(IsScopeInfo(value) || IsTheHole(value));
623 DCHECK(scope_info()->IsEmpty());
624 set_raw_outer_scope_info_or_feedback_metadata(value, mode);
625}
626
628 return IsFeedbackMetadata(raw_outer_scope_info_or_feedback_metadata());
629}
630
634
636 DCHECK(HasFeedbackMetadata());
639}
640
643 kOuterScopeInfoOrFeedbackMetadataOffset,
644 HasFeedbackMetadata(kAcquireLoad),
645 !HasFeedbackMetadata(kAcquireLoad) &&
646 IsFeedbackMetadata(value))
647
649 return GetUntrustedData() != Smi::FromEnum(Builtin::kCompileLazy) &&
651}
652
// Creates an IsCompiledScope for this function; the scope's constructor
// snapshots the compiled state and retains a handle to the compiled
// artifact (baseline code or bytecode) for the scope's lifetime.
template <typename IsolateT>
IsCompiledScope SharedFunctionInfo::is_compiled_scope(IsolateT* isolate) const {
  return IsCompiledScope(*this, isolate);
}
657
659 Isolate* isolate)
660 : is_compiled_(shared->is_compiled()) {
661 if (shared->HasBaselineCode()) {
662 retain_code_ = handle(shared->baseline_code(kAcquireLoad), isolate);
663 } else if (shared->HasBytecodeArray()) {
664 retain_code_ = handle(shared->GetBytecodeArray(isolate), isolate);
665 } else {
666 retain_code_ = MaybeHandle<HeapObject>();
667 }
668
669 DCHECK_IMPLIES(!retain_code_.is_null(), is_compiled());
670}
671
672IsCompiledScope::IsCompiledScope(const Tagged<SharedFunctionInfo> shared,
673 LocalIsolate* isolate)
674 : is_compiled_(shared->is_compiled()) {
675 if (shared->HasBaselineCode()) {
676 retain_code_ = isolate->heap()->NewPersistentHandle(
677 shared->baseline_code(kAcquireLoad));
678 } else if (shared->HasBytecodeArray()) {
679 retain_code_ =
680 isolate->heap()->NewPersistentHandle(shared->GetBytecodeArray(isolate));
681 } else {
682 retain_code_ = MaybeHandle<HeapObject>();
683 }
684
686}
687
689 return scope_info(kAcquireLoad)->HasSimpleParameters();
690}
691
693 return v8_flags.enable_lazy_source_positions && HasBytecodeArray() &&
694 !GetBytecodeArray(isolate)->HasSourcePositionTable();
695}
696
698 return IsFunctionTemplateInfo(GetUntrustedData());
699}
700
702 DCHECK(IsApiFunction());
703 return Cast<FunctionTemplateInfo>(GetUntrustedData());
704}
705
706DEF_GETTER(SharedFunctionInfo, HasBytecodeArray, bool) {
707 Tagged<Object> data = GetTrustedData();
708 // If the SFI has no trusted data, GetTrustedData() will return Smi::zero().
709 if (IsSmi(data)) return false;
710 InstanceType instance_type =
711 Cast<HeapObject>(data)->map(cage_base)->instance_type();
712 return InstanceTypeChecker::IsBytecodeArray(instance_type) ||
713 InstanceTypeChecker::IsInterpreterData(instance_type) ||
714 InstanceTypeChecker::IsCode(instance_type);
715}
716
717template <typename IsolateT>
719 IsolateT* isolate) const {
721 isolate->shared_function_info_access(), isolate);
722
723 DCHECK(HasBytecodeArray());
724
725 Isolate* main_isolate = isolate->GetMainThreadIsolateUnsafe();
726 std::optional<Tagged<DebugInfo>> debug_info = TryGetDebugInfo(main_isolate);
727 if (debug_info.has_value() &&
728 debug_info.value()->HasInstrumentedBytecodeArray()) {
729 return debug_info.value()->OriginalBytecodeArray(main_isolate);
730 }
731
732 return GetActiveBytecodeArray(main_isolate);
733}
734
736 IsolateForSandbox isolate) const {
737 Tagged<Object> data = GetTrustedData(isolate);
738 if (IsCode(data)) {
739 Tagged<Code> baseline_code = Cast<Code>(data);
740 data = baseline_code->bytecode_or_interpreter_data();
741 }
742 if (IsBytecodeArray(data)) {
743 return Cast<BytecodeArray>(data);
744 } else {
745 // We need an explicit check here since we use the
746 // kUnknownIndirectPointerTag above and so don't have any type guarantees.
747 SBXCHECK(IsInterpreterData(data));
748 return Cast<InterpreterData>(data)->bytecode_array();
749 }
750}
751
753 IsolateForSandbox isolate) {
754 // We don't allow setting the active bytecode array on baseline-optimized
755 // functions. They should have been flushed earlier.
756 DCHECK(!HasBaselineCode());
757
758 if (HasInterpreterData(isolate)) {
759 interpreter_data(isolate)->set_bytecode_array(bytecode);
760 } else {
761 DCHECK(HasBytecodeArray());
762 overwrite_bytecode_array(bytecode);
763 }
764}
765
767 DCHECK(GetUntrustedData() == Smi::FromEnum(Builtin::kCompileLazy) ||
769 SetTrustedData(bytecode);
770}
771
773 Tagged<BytecodeArray> bytecode) {
774 DCHECK(HasBytecodeArray());
775 SetTrustedData(bytecode);
776}
777
779 IsolateForSandbox isolate) const {
780 DCHECK(HasInterpreterData(isolate));
781 return interpreter_data(isolate)->interpreter_trampoline();
782}
783
785 Tagged<Object> data = GetTrustedData(isolate);
786 if (IsCode(data)) {
787 Tagged<Code> baseline_code = Cast<Code>(data);
788 DCHECK_EQ(baseline_code->kind(), CodeKind::BASELINE);
789 data = baseline_code->bytecode_or_interpreter_data();
790 }
791 return IsInterpreterData(data);
792}
793
795 IsolateForSandbox isolate) const {
796 DCHECK(HasInterpreterData(isolate));
797 Tagged<Object> data = GetTrustedData(isolate);
798 if (IsCode(data)) {
799 Tagged<Code> baseline_code = Cast<Code>(data);
800 DCHECK_EQ(baseline_code->kind(), CodeKind::BASELINE);
801 data = baseline_code->bytecode_or_interpreter_data();
802 }
803 SBXCHECK(IsInterpreterData(data));
804 return Cast<InterpreterData>(data);
805}
806
808 Isolate* isolate, Tagged<InterpreterData> interpreter_data,
809 WriteBarrierMode mode) {
810 DCHECK(isolate->interpreted_frames_native_stack());
811 DCHECK(!HasBaselineCode());
813}
814
815DEF_GETTER(SharedFunctionInfo, HasBaselineCode, bool) {
816 Tagged<Object> data = GetTrustedData();
817 if (IsCode(data, cage_base)) {
818 DCHECK_EQ(Cast<Code>(data)->kind(), CodeKind::BASELINE);
819 return true;
820 }
821 return false;
822}
823
825 DCHECK(HasBaselineCode(cage_base));
827 return GetTrustedData<Code, kCodeIndirectPointerTag>(isolate);
828}
829
// Installs baseline (Sparkplug) code into the trusted function-data slot.
// |tag| documents the release-store contract at the call site; the actual
// store is delegated to SetTrustedData.
void SharedFunctionInfo::set_baseline_code(Tagged<Code> baseline_code,
                                           ReleaseStoreTag tag,
                                           WriteBarrierMode mode) {
  DCHECK_EQ(baseline_code->kind(), CodeKind::BASELINE);
  SetTrustedData(baseline_code, mode);
}
836
838 DCHECK(HasBaselineCode());
839 Tagged<TrustedObject> new_data =
840 baseline_code(kAcquireLoad)->bytecode_or_interpreter_data();
841 DCHECK(IsBytecodeArray(new_data) || IsInterpreterData(new_data));
843}
844
845#if V8_ENABLE_WEBASSEMBLY
846bool SharedFunctionInfo::HasAsmWasmData() const {
847 return IsAsmWasmData(GetUntrustedData());
848}
849
850bool SharedFunctionInfo::HasWasmFunctionData() const {
851 return IsWasmFunctionData(GetTrustedData());
852}
853
854bool SharedFunctionInfo::HasWasmExportedFunctionData() const {
855 return IsWasmExportedFunctionData(GetTrustedData());
856}
857
858bool SharedFunctionInfo::HasWasmJSFunctionData() const {
859 return IsWasmJSFunctionData(GetTrustedData());
860}
861
862bool SharedFunctionInfo::HasWasmCapiFunctionData() const {
863 return IsWasmCapiFunctionData(GetTrustedData());
864}
865
866bool SharedFunctionInfo::HasWasmResumeData() const {
867 return IsWasmResumeData(GetUntrustedData());
868}
869
870DEF_GETTER(SharedFunctionInfo, asm_wasm_data, Tagged<AsmWasmData>) {
871 DCHECK(HasAsmWasmData());
872 return Cast<AsmWasmData>(GetUntrustedData());
873}
874
// Installs asm.js translation data into the untrusted function-data slot.
// Only legal while the function is lazily-compiled, uncompiled, or already
// holds asm/wasm data, as asserted below.
void SharedFunctionInfo::set_asm_wasm_data(Tagged<AsmWasmData> data,
                                           WriteBarrierMode mode) {
  DCHECK(GetUntrustedData() == Smi::FromEnum(Builtin::kCompileLazy) ||
         HasUncompiledData() || HasAsmWasmData());
  SetUntrustedData(data, mode);
}
881
// Returns the WasmFunctionData from the trusted function-data slot. Callers
// must have checked HasWasmFunctionData() (asserted below).
DEF_GETTER(SharedFunctionInfo, wasm_function_data, Tagged<WasmFunctionData>) {
  DCHECK(HasWasmFunctionData());
  // TODO(saelo): It would be nicer if the caller provided an IsolateForSandbox.
  return GetTrustedData<WasmFunctionData, kWasmFunctionDataIndirectPointerTag>(
      GetIsolateForSandbox(*this));
}
888
889DEF_GETTER(SharedFunctionInfo, wasm_exported_function_data,
891 DCHECK(HasWasmExportedFunctionData());
892 Tagged<WasmFunctionData> data = wasm_function_data();
893 // TODO(saelo): the SBXCHECKs here and below are only needed because our type
894 // tags don't currently support type hierarchies.
895 SBXCHECK(IsWasmExportedFunctionData(data));
897}
898
899DEF_GETTER(SharedFunctionInfo, wasm_js_function_data,
901 DCHECK(HasWasmJSFunctionData());
902 Tagged<WasmFunctionData> data = wasm_function_data();
903 SBXCHECK(IsWasmJSFunctionData(data));
904 return Cast<WasmJSFunctionData>(data);
905}
906
907DEF_GETTER(SharedFunctionInfo, wasm_capi_function_data,
909 DCHECK(HasWasmCapiFunctionData());
910 Tagged<WasmFunctionData> data = wasm_function_data();
911 SBXCHECK(IsWasmCapiFunctionData(data));
912 return Cast<WasmCapiFunctionData>(data);
913}
914
915DEF_GETTER(SharedFunctionInfo, wasm_resume_data, Tagged<WasmResumeData>) {
916 DCHECK(HasWasmResumeData());
917 return Cast<WasmResumeData>(GetUntrustedData());
918}
919
920#endif // V8_ENABLE_WEBASSEMBLY
921
924 return IsSmi(data) && Builtins::IsBuiltinId(Smi::ToInt(data));
925}
926
927Builtin SharedFunctionInfo::builtin_id() const {
929 int id = Smi::ToInt(GetUntrustedData());
930 // The builtin id is read from the heap and so must be assumed to be
931 // untrusted in the sandbox attacker model. As it is considered trusted by
932 // e.g. `GetCode` (when fetching the code for this SFI), we validate it here.
934 return Builtins::FromInt(id);
935}
936
937void SharedFunctionInfo::set_builtin_id(Builtin builtin) {
939 SetUntrustedData(Smi::FromInt(static_cast<int>(builtin)), SKIP_WRITE_BARRIER);
940}
941
943 return IsUncompiledData(GetTrustedData());
944}
945
952
958
960 return IsUncompiledDataWithPreparseData(GetTrustedData());
961}
962
965 IsolateForSandbox isolate) const {
968 // TODO(saelo): this SBXCHECK is needed because our type tags don't currently
969 // support type hierarchies.
970 SBXCHECK(IsUncompiledDataWithPreparseData(data));
972}
973
981
983 return IsUncompiledDataWithoutPreparseData(GetTrustedData());
984}
985
987 IsolateForSandbox isolate) {
989 if (IsUncompiledDataWithPreparseDataAndJob(uncompiled_data)) {
991 ->set_job(kNullAddress);
992 } else if (IsUncompiledDataWithoutPreparseDataWithJob(uncompiled_data)) {
994 ->set_job(kNullAddress);
995 }
996}
997
1002
1003 // Trim off the pre-parsed scope data from the uncompiled data by swapping the
1004 // map, leaving only an uncompiled data without pre-parsed scope.
1007
1008 // We are basically trimming that object to its supertype, so recorded slots
1009 // within the object don't need to be invalidated.
1010 heap->NotifyObjectLayoutChange(data, no_gc, InvalidateRecordedSlots::kNo,
1012 static_assert(UncompiledDataWithoutPreparseData::kSize <
1013 UncompiledDataWithPreparseData::kSize);
1014 static_assert(UncompiledDataWithoutPreparseData::kSize ==
1015 UncompiledData::kHeaderSize);
1016
1017 // Fill the remaining space with filler and clear slots in the trimmed area.
1018 int old_size = data->Size();
1019 DCHECK_LE(UncompiledDataWithPreparseData::kSize, old_size);
1020 heap->NotifyObjectSizeChange(data, old_size,
1021 UncompiledDataWithoutPreparseData::kSize,
1023
1024 // Swap the map.
1025 data->set_map(heap->isolate(),
1026 GetReadOnlyRoots().uncompiled_data_without_preparse_data_map(),
1028
1029 // Ensure that the clear was successful.
1031}
1032
1034 Isolate* isolate, Tagged<String> inferred_name, int start_position,
1035 int end_position,
1036 std::function<void(Tagged<HeapObject> object, ObjectSlot slot,
1037 Tagged<HeapObject> target)>
1038 gc_notify_updated_slot) {
1039#ifdef V8_ENABLE_SANDBOX
1040 init_self_indirect_pointer(isolate);
1041#endif
1042 set_inferred_name(inferred_name);
1043 gc_notify_updated_slot(*this, RawField(UncompiledData::kInferredNameOffset),
1044 inferred_name);
1045 set_start_position(start_position);
1046 set_end_position(end_position);
1047}
1048
1050 return IsScript(script()) && Cast<Script>(script())->is_repl_mode();
1051}
1052
1054 Tagged<Object> scope_info = name_or_scope_info(kAcquireLoad);
1055 if (IsScopeInfo(scope_info)) {
1056 return Cast<ScopeInfo>(scope_info)->HasInferredFunctionName();
1057 }
1058 return HasUncompiledData();
1059}
1060
1062 Tagged<Object> maybe_scope_info = name_or_scope_info(kAcquireLoad);
1063 if (IsScopeInfo(maybe_scope_info)) {
1064 Tagged<ScopeInfo> scope_info = Cast<ScopeInfo>(maybe_scope_info);
1065 if (scope_info->HasInferredFunctionName()) {
1066 Tagged<Object> name = scope_info->InferredFunctionName();
1067 if (IsString(name)) return Cast<String>(name);
1068 }
1069 } else if (HasUncompiledData()) {
1070 return uncompiled_data(GetIsolateForSandbox(*this))
1071 ->inferred_name(cage_base);
1072 }
1073 return GetReadOnlyRoots().empty_string();
1074}
1075
1077 Tagged<Object> script_obj = script();
1078 if (IsUndefined(script_obj)) return false;
1079 Tagged<Script> script = Cast<Script>(script_obj);
1080 return script->IsUserJavaScript();
1081}
1082
1084#if V8_ENABLE_WEBASSEMBLY
1085 if (HasAsmWasmData()) return false;
1086 if (HasWasmExportedFunctionData()) return false;
1087#endif // V8_ENABLE_WEBASSEMBLY
1088 return IsUserJavaScript();
1089}
1090
1092#if V8_ENABLE_WEBASSEMBLY
1093 if (HasAsmWasmData()) return true;
1094#endif // V8_ENABLE_WEBASSEMBLY
1095 return HasBytecodeArray() || HasUncompiledDataWithPreparseData() ||
1096 HasBaselineCode();
1097}
1098
1100 return IsClassConstructorBit::decode(flags(kRelaxedLoad));
1101}
1102
1103void SharedFunctionInfo::set_are_properties_final(bool value) {
1104 if (is_class_constructor()) {
1105 set_properties_are_final(value);
1106 }
1107}
1108
1109bool SharedFunctionInfo::are_properties_final() const {
1110 bool bit = properties_are_final();
1111 return bit && is_class_constructor();
1112}
1113
1114OBJECT_CONSTRUCTORS_IMPL(SharedFunctionInfoWrapper, TrustedObject)
1115
1116ACCESSORS(SharedFunctionInfoWrapper, shared_info, Tagged<SharedFunctionInfo>,
1117 kSharedInfoOffset)
1118
1119} // namespace v8::internal
1120
1122
1123#endif // V8_OBJECTS_SHARED_FUNCTION_INFO_INL_H_
Builtins::Kind kind
Definition builtins.cc:40
#define SBXCHECK(condition)
Definition check.h:61
static T Relaxed_CompareAndSwap(T *addr, typename std::remove_reference< T >::type old_value, typename std::remove_reference< T >::type new_value)
static V8_EXPORT_PRIVATE Kind KindOf(Builtin builtin)
Definition builtins.cc:471
static V8_EXPORT_PRIVATE int GetStackParameterCount(Builtin builtin)
Definition builtins.cc:160
static constexpr bool IsBuiltinId(Builtin builtin)
Definition builtins.h:128
static constexpr Builtin FromInt(int id)
Definition builtins.h:140
static V8_EXPORT_PRIVATE const char * name(Builtin builtin)
Definition builtins.cc:226
static const int FIRST_FUNCTION_MAP_INDEX
Definition contexts.h:570
static const int LAST_FUNCTION_MAP_INDEX
Definition contexts.h:571
static int FunctionMapIndex(LanguageMode language_mode, FunctionKind kind, bool has_shared_name)
MaybeHandle< HeapObject > retain_code_
Isolate * GetMainThreadIsolateUnsafe()
Definition isolate.h:2189
void copy_in(int index, const uint8_t *buffer, int length)
void set_child(int index, Tagged< PreparseData > value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Tagged< Object > get_child_raw(int index) const
Tagged< PreparseData > get_child(int index) const
void set(int index, uint8_t value)
uint16_t internal_formal_parameter_count_with_receiver() const
void set_bytecode_array(Tagged< BytecodeArray > bytecode)
Tagged< BytecodeArray > GetActiveBytecodeArray(IsolateForSandbox isolate) const
Tagged< UncompiledDataWithPreparseData > uncompiled_data_with_preparse_data(IsolateForSandbox isolate) const
Tagged< BytecodeArray > GetBytecodeArray(IsolateT *isolate) const
static const uint16_t kFunctionTokenOutOfRange
void set_uncompiled_data(Tagged< UncompiledData > data, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
void set_raw_scope_info(Tagged< ScopeInfo > scope_info, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Tagged< ScopeInfo > EarlyScopeInfo(AcquireLoadTag tag)
Tagged< AbstractCode > abstract_code(Isolate *isolate)
Tagged< Code > InterpreterTrampoline(IsolateForSandbox isolate) const
static V8_EXPORT_PRIVATE constexpr Tagged< Smi > const kNoSharedNameSentinel
void SetTrustedData(Tagged< ExposedTrustedObject > value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
void ClearPreparseData(IsolateForSandbox isolate)
Tagged< HeapObject > script() const
void ClearUncompiledDataJobPointer(IsolateForSandbox isolate)
uint16_t CompareExchangeAge(uint16_t expected_age, uint16_t new_age)
Tagged< ScopeInfo > GetOuterScopeInfo() const
V8_EXPORT_PRIVATE std::optional< Tagged< DebugInfo > > TryGetDebugInfo(Isolate *isolate) const
void SetUntrustedData(Tagged< Object > value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
V8_EXPORT_PRIVATE int StartPosition() const
Tagged< InterpreterData > interpreter_data(IsolateForSandbox isolate) const
Tagged< UncompiledData > uncompiled_data(IsolateForSandbox isolate) const
uint16_t internal_formal_parameter_count_without_receiver() const
void SetActiveBytecodeArray(Tagged< BytecodeArray > bytecode, IsolateForSandbox isolate)
void set_interpreter_data(Isolate *isolate, Tagged< InterpreterData > interpreter_data, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
bool has_script(AcquireLoadTag tag) const
void SetScopeInfo(Tagged< ScopeInfo > scope_info, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
V8_EXPORT_PRIVATE Tagged< Code > GetCode(Isolate *isolate) const
IsCompiledScope is_compiled_scope(IsolateT *isolate) const
Inlineability GetInlineability(IsolateT *isolate) const
bool AreSourcePositionsAvailable(IsolateT *isolate) const
void overwrite_bytecode_array(Tagged< BytecodeArray > bytecode)
void set_relaxed_flags(int32_t flags)
V8_EXPORT_PRIVATE bool HasBreakInfo(Isolate *isolate) const
void set_language_mode(LanguageMode language_mode)
bool HasInterpreterData(IsolateForSandbox isolate) const
void set_uncompiled_data_with_preparse_data(Tagged< UncompiledDataWithPreparseData > data, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static constexpr Tagged< Smi > FromEnum(E value)
Definition smi.h:58
static constexpr int ToInt(const Tagged< Object > object)
Definition smi.h:33
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
static constexpr Tagged< Smi > zero()
Definition smi.h:99
static void Release_Store(Tagged< HeapObject > host, PtrType value)
static PtrType Acquire_Load(Tagged< HeapObject > host, int offset=0)
void InitAfterBytecodeFlush(Isolate *isolate, Tagged< String > inferred_name, int start_position, int end_position, std::function< void(Tagged< HeapObject > object, ObjectSlot slot, Tagged< HeapObject > target)> gc_notify_updated_slot)
int32_t offset
int16_t Atomic16
Definition atomicops.h:58
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
Definition handles-inl.h:72
constexpr int kByteSize
Definition globals.h:395
constexpr int kTaggedSize
Definition globals.h:542
SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit SharedFunctionInfo::MaglevCompilationFailedBit SharedFunctionInfo::FunctionSyntaxKindBits SharedFunctionInfo::HasDuplicateParametersBit SharedFunctionInfo::RequiresInstanceMembersInitializerBit has_reported_binary_coverage
@ SKIP_WRITE_BARRIER
Definition objects.h:52
SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit SharedFunctionInfo::MaglevCompilationFailedBit SharedFunctionInfo::FunctionSyntaxKindBits SharedFunctionInfo::HasDuplicateParametersBit SharedFunctionInfo::RequiresInstanceMembersInitializerBit SharedFunctionInfo::HasReportedBinaryCoverageBit SharedFunctionInfo::PropertiesAreFinalBit live_edited
bool is_sloppy(LanguageMode language_mode)
Definition globals.h:773
constexpr int kNoSourcePosition
Definition globals.h:850
bool IsClassConstructor(FunctionKind kind)
ReadOnlyRoots GetReadOnlyRoots()
Definition roots-inl.h:86
SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit SharedFunctionInfo::MaglevCompilationFailedBit syntax_kind
SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit maglev_compilation_failed
Tagged(T object) -> Tagged< T >
SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit SharedFunctionInfo::MaglevCompilationFailedBit SharedFunctionInfo::FunctionSyntaxKindBits SharedFunctionInfo::HasDuplicateParametersBit SharedFunctionInfo::RequiresInstanceMembersInitializerBit SharedFunctionInfo::HasReportedBinaryCoverageBit properties_are_final
V8_INLINE constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
Definition objects.h:665
constexpr uint16_t kDontAdaptArgumentsSentinel
Definition globals.h:2779
constexpr int kFunctionKindBitSize
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in name
Definition flags.cc:2086
Flag flags[]
Definition flags.cc:3797
constexpr int kJSArgcReceiverSlots
Definition globals.h:2778
V8_INLINE IsolateForSandbox GetIsolateForSandbox(Tagged< HeapObject >)
Definition isolate.h:75
uint32_t IndirectPointerHandle
V8_INLINE PtrComprCageBase GetPtrComprCageBase()
SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit SharedFunctionInfo::MaglevCompilationFailedBit SharedFunctionInfo::FunctionSyntaxKindBits has_duplicate_parameters
bool is_strict(LanguageMode language_mode)
Definition globals.h:777
UnionOf< Smi, String, ScopeInfo > NameOrScopeInfoT
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr int JSParameterCount(int param_count_without_receiver)
Definition globals.h:2782
V8_INLINE Heap * GetHeapFromWritableObject(Tagged< HeapObject > object)
constexpr IndirectPointerHandle kNullIndirectPointerHandle
SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit SharedFunctionInfo::MaglevCompilationFailedBit relaxed_flags
return value
Definition map-inl.h:893
SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit SharedFunctionInfo::MaglevCompilationFailedBit SharedFunctionInfo::FunctionSyntaxKindBits SharedFunctionInfo::HasDuplicateParametersBit requires_instance_members_initializer
LanguageMode construct_language_mode(bool strict_bit)
Definition globals.h:786
static constexpr Address kNullAddress
Definition v8-internal.h:53
constexpr int kMaxInt
Definition globals.h:374
static const size_t LanguageModeSize
Definition globals.h:753
kInterpreterTrampolineOffset script
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
static constexpr ReleaseStoreTag kReleaseStore
Definition globals.h:2910
static constexpr RelaxedLoadTag kRelaxedLoad
Definition globals.h:2909
static constexpr RelaxedStoreTag kRelaxedStore
Definition globals.h:2911
static constexpr AcquireLoadTag kAcquireLoad
Definition globals.h:2908
#define PROTECTED_POINTER_ACCESSORS(holder, name, type, offset)
#define RENAME_TORQUE_ACCESSORS(holder, name, torque_name, type)
#define UINT8_ACCESSORS(holder, name, offset)
#define OBJECT_CONSTRUCTORS_IMPL(Type, Super)
#define DEF_ACQUIRE_GETTER(holder, name,...)
#define ACCESSORS(holder, name, type, offset)
#define RELAXED_READ_FIELD(p, offset)
#define RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, set_condition)
#define WRITE_FIELD(p, offset, value)
#define CONDITIONAL_TRUSTED_POINTER_WRITE_BARRIER(object, offset, tag, value, mode)
#define TQ_OBJECT_CONSTRUCTORS_IMPL(Type)
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode)
#define BIT_FIELD_ACCESSORS(holder, field, name, BitField)
#define RELAXED_INT32_ACCESSORS(holder, name, offset)
#define RENAME_PRIMITIVE_TORQUE_ACCESSORS(holder, name, torque_name, type)
#define RELAXED_WRITE_FIELD(p, offset, value)
#define RELAXED_WRITE_UINT16_FIELD(p, offset, value)
#define RELAXED_READ_UINT16_FIELD(p, offset)
#define RELEASE_ACQUIRE_ACCESSORS(holder, name, type, offset)
#define DEF_GETTER(Camel, Lower, Bit)
#define FATAL(...)
Definition logging.h:47
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485