v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
code-inl.h
Go to the documentation of this file.
1// Copyright 2017 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_OBJECTS_CODE_INL_H_
6#define V8_OBJECTS_CODE_INL_H_
7
8#include "src/objects/code.h"
9// Include the non-inl header before the rest of the headers.
10
21
22// Has to be the last include (doesn't have include guards):
24
25namespace v8 {
26namespace internal {
27
28OBJECT_CONSTRUCTORS_IMPL(Code, ExposedTrustedObject)
29OBJECT_CONSTRUCTORS_IMPL(GcSafeCode, HeapObject)
30
// Reinterprets this GcSafeCode as a regular Code object without any
// validation (uses UncheckedCast).
Tagged<Code> GcSafeCode::UnsafeCastToCode() const {
  return UncheckedCast<Code>(*this);
}
34
// Defines a GcSafeCode accessor that forwards to the identically-named
// accessor on the underlying Code object.
#define GCSAFE_CODE_FWD_ACCESSOR(ReturnType, Name) \
  ReturnType GcSafeCode::Name() const { return UnsafeCastToCode()->Name(); }
37GCSAFE_CODE_FWD_ACCESSOR(Address, instruction_start)
38GCSAFE_CODE_FWD_ACCESSOR(Address, instruction_end)
39GCSAFE_CODE_FWD_ACCESSOR(bool, is_builtin)
42GCSAFE_CODE_FWD_ACCESSOR(bool, is_interpreter_trampoline_builtin)
43GCSAFE_CODE_FWD_ACCESSOR(bool, is_baseline_trampoline_builtin)
44GCSAFE_CODE_FWD_ACCESSOR(bool, is_baseline_leave_frame_builtin)
45GCSAFE_CODE_FWD_ACCESSOR(bool, has_instruction_stream)
46GCSAFE_CODE_FWD_ACCESSOR(bool, is_maglevved)
47GCSAFE_CODE_FWD_ACCESSOR(bool, is_turbofanned)
48GCSAFE_CODE_FWD_ACCESSOR(bool, has_tagged_outgoing_params)
49GCSAFE_CODE_FWD_ACCESSOR(bool, marked_for_deoptimization)
50GCSAFE_CODE_FWD_ACCESSOR(Tagged<Object>, raw_instruction_stream)
52GCSAFE_CODE_FWD_ACCESSOR(uint16_t, wasm_js_tagged_parameter_count)
53GCSAFE_CODE_FWD_ACCESSOR(uint16_t, wasm_js_first_tagged_parameter)
55GCSAFE_CODE_FWD_ACCESSOR(Address, safepoint_table_address)
56#undef GCSAFE_CODE_FWD_ACCESSOR
57
59 Address pc) const {
60 return UnsafeCastToCode()->GetOffsetFromInstructionStart(isolate, pc);
61}
62
64 return UnsafeCastToCode()->InstructionStart(isolate, pc);
65}
66
68 return UnsafeCastToCode()->InstructionEnd(isolate, pc);
69}
70
72 if (!UnsafeCastToCode()->uses_deoptimization_data()) return false;
74 UnsafeCastToCode()->unchecked_deoptimization_data());
75 Address code_start_address = instruction_start();
76 for (int i = 0; i < deopt_data->DeoptCount(); i++) {
77 if (deopt_data->Pc(i).value() == -1) continue;
78 Address address = code_start_address + deopt_data->Pc(i).value();
79 if (address == pc && deopt_data->GetBytecodeOffsetOrBuiltinContinuationId(
80 i) != BytecodeOffset::None()) {
81 return true;
82 }
83 }
84 return false;
85}
86
88 PtrComprCageBase code_cage_base) const {
89 return UnsafeCastToCode()->raw_instruction_stream(code_cage_base);
90}
91
92INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
93INT_ACCESSORS(Code, metadata_size, kMetadataSizeOffset)
94INT_ACCESSORS(Code, handler_table_offset, kHandlerTableOffsetOffset)
95INT_ACCESSORS(Code, code_comments_offset, kCodeCommentsOffsetOffset)
96INT32_ACCESSORS(Code, unwinding_info_offset, kUnwindingInfoOffsetOffset)
97UINT16_ACCESSORS(Code, parameter_count, kParameterCountOffset)
98inline uint16_t Code::parameter_count_without_receiver() const {
99 return parameter_count() - 1;
100}
101
102inline Tagged<ProtectedFixedArray> Code::deoptimization_data() const {
105 ReadProtectedPointerField(kDeoptimizationDataOrInterpreterDataOffset));
106}
107
108inline void Code::set_deoptimization_data(Tagged<ProtectedFixedArray> value,
109 WriteBarrierMode mode) {
112
113 WriteProtectedPointerField(kDeoptimizationDataOrInterpreterDataOffset, value);
115 *this, kDeoptimizationDataOrInterpreterDataOffset, value, mode);
116}
117
121
123 ClearProtectedPointerField(kDeoptimizationDataOrInterpreterDataOffset);
124}
125
128 kDeoptimizationDataOrInterpreterDataOffset);
129}
130
132 DCHECK_EQ(kind(), CodeKind::BASELINE);
133 return ReadProtectedPointerField(kDeoptimizationDataOrInterpreterDataOffset);
134}
136 WriteBarrierMode mode) {
137 DCHECK(kind() == CodeKind::BASELINE);
138 DCHECK(IsBytecodeArray(value) || IsInterpreterData(value));
139
140 WriteProtectedPointerField(kDeoptimizationDataOrInterpreterDataOffset, value);
142 *this, kDeoptimizationDataOrInterpreterDataOffset, value, mode);
143}
144
145inline Tagged<TrustedByteArray> Code::source_position_table() const {
148 ReadProtectedPointerField(kPositionTableOffset));
149}
150
151inline void Code::set_source_position_table(Tagged<TrustedByteArray> value,
152 WriteBarrierMode mode) {
154
155 WriteProtectedPointerField(kPositionTableOffset, value);
156 CONDITIONAL_PROTECTED_POINTER_WRITE_BARRIER(*this, kPositionTableOffset,
157 value, mode);
158}
159
160inline Tagged<TrustedByteArray> Code::bytecode_offset_table() const {
163 ReadProtectedPointerField(kPositionTableOffset));
164}
165
166inline void Code::set_bytecode_offset_table(Tagged<TrustedByteArray> value,
167 WriteBarrierMode mode) {
169
170 WriteProtectedPointerField(kPositionTableOffset, value);
171 CONDITIONAL_PROTECTED_POINTER_WRITE_BARRIER(*this, kPositionTableOffset,
172 value, mode);
173}
174
178
185
190
194
195ACCESSORS(Code, wrapper, Tagged<CodeWrapper>, kWrapperOffset)
196
198 Isolate* isolate, Tagged<SharedFunctionInfo> sfi) const {
200
201 if (kind() == CodeKind::BASELINE) {
202 return sfi->GetBytecodeArray(isolate)->SourcePositionTable(isolate);
203 }
204
205 if (!has_source_position_table()) {
206 return *isolate->factory()->empty_trusted_byte_array();
207 }
208
209 return source_position_table();
210}
211
212Address Code::body_start() const { return instruction_start(); }
213
215
216int Code::body_size() const { return instruction_size() + metadata_size(); }
217
219 return instruction_start() + instruction_size();
220}
221
225 return instruction_start() + instruction_size();
226 }
227 // An embedded builtin. Remapping is irrelevant wrt the metadata section so
228 // we can simply use the global blob.
229 // TODO(jgruber): Consider adding this as a physical Code field to avoid the
230 // lookup. Alternatively, rename this (and callers) to camel-case to clarify
231 // it's more than a simple accessor.
234}
235
237 if (V8_LIKELY(has_instruction_stream())) return instruction_start();
238 // Note we intentionally don't bounds-check that `pc` is within the returned
239 // instruction area.
240 return EmbeddedData::FromBlobForPc(isolate, pc)
242}
243
245 return InstructionStart(isolate, pc) + instruction_size();
246}
247
249 const Address offset = pc - InstructionStart(isolate, pc);
250 DCHECK_LE(offset, instruction_size());
251 return static_cast<int>(offset);
252}
253
255 return metadata_start() + metadata_size();
256}
257
261
263 return handler_table_offset() - safepoint_table_offset();
264}
265
267
269 return metadata_start() + handler_table_offset();
270}
271
273 return constant_pool_offset() - handler_table_offset();
274}
275
276bool Code::has_handler_table() const { return handler_table_size() > 0; }
277
279 const int size = code_comments_offset() - constant_pool_offset();
281 DCHECK_EQ(size, 0);
282 return 0;
283 }
284 DCHECK_GE(size, 0);
285 return size;
286}
287
288bool Code::has_constant_pool() const { return constant_pool_size() > 0; }
289
294
295uint8_t* Code::relocation_start() const {
297 ? instruction_stream()->relocation_start()
298 : nullptr;
299}
300
301uint8_t* Code::relocation_end() const {
303 ? instruction_stream()->relocation_end()
304 : nullptr;
305}
306
309 ? instruction_stream()->relocation_size()
310 : 0;
311}
312
313bool Code::contains(Isolate* isolate, Address inner_pointer) const {
314 const Address start = InstructionStart(isolate, inner_pointer);
315 if (inner_pointer < start) return false;
316 return inner_pointer < start + instruction_size();
317}
318
322
324 int size = InstructionStreamObjectSize();
325 size += relocation_size();
327 size += deoptimization_data()->Size();
328 }
329 return size;
330}
331
332CodeKind Code::kind() const { return KindField::decode(flags(kRelaxedLoad)); }
333
335 Tagged<BytecodeArray> bytecodes) {
339 CHECK_EQ(kind(), CodeKind::BASELINE);
340 baseline::BytecodeOffsetIterator offset_iterator(
341 Cast<TrustedByteArray>(bytecode_offset_table()), bytecodes);
342 Address pc = baseline_pc - instruction_start();
343 offset_iterator.AdvanceToPCOffset(pc);
344 return offset_iterator.current_bytecode_offset();
345}
346
348 int bytecode_offset, BytecodeToPCPosition position,
349 Tagged<BytecodeArray> bytecodes) {
351 CHECK_EQ(kind(), CodeKind::BASELINE);
352 // The following check ties together the bytecode being executed in
353 // Generate_BaselineOrInterpreterEntry with the bytecode that was used to
354 // compile this baseline code. Together, this ensures that we don't OSR into a
355 // wrong code object.
356 auto maybe_bytecodes = bytecode_or_interpreter_data();
357 if (IsBytecodeArray(maybe_bytecodes)) {
358 SBXCHECK_EQ(maybe_bytecodes, bytecodes);
359 } else {
360 CHECK(IsInterpreterData(maybe_bytecodes));
361 SBXCHECK_EQ(Cast<InterpreterData>(maybe_bytecodes)->bytecode_array(),
362 bytecodes);
363 }
364 baseline::BytecodeOffsetIterator offset_iterator(
365 Cast<TrustedByteArray>(bytecode_offset_table()), bytecodes);
366 offset_iterator.AdvanceToBytecodeOffset(bytecode_offset);
367 uintptr_t pc = 0;
369 pc = offset_iterator.current_pc_start_offset();
370 } else {
372 pc = offset_iterator.current_pc_end_offset();
373 }
374 return pc;
375}
376
378 int bytecode_offset, Tagged<BytecodeArray> bytecodes) {
380 bytecodes);
381}
382
384 int bytecode_offset, Tagged<BytecodeArray> bytecodes) {
386 bytecodes);
387}
388
390 int bytecode_offset, Tagged<BytecodeArray> bytecodes) {
392 CHECK_EQ(kind(), CodeKind::BASELINE);
393 baseline::BytecodeOffsetIterator offset_iterator(
394 Cast<TrustedByteArray>(bytecode_offset_table()), bytecodes);
395 Handle<BytecodeArray> bytecodes_handle(
396 reinterpret_cast<Address*>(&bytecodes));
397 interpreter::BytecodeArrayIterator bytecode_iterator(bytecodes_handle,
398 bytecode_offset);
399 interpreter::Bytecode bytecode = bytecode_iterator.current_bytecode();
400 if (bytecode == interpreter::Bytecode::kJumpLoop) {
402 bytecode_iterator.GetJumpTargetOffset(), bytecodes);
403 } else {
407 return GetBaselineEndPCForBytecodeOffset(bytecode_offset, bytecodes);
408 }
409}
410
411inline bool Code::checks_tiering_state() const {
412 bool checks_state = (builtin_id() == Builtin::kCompileLazy ||
413 builtin_id() == Builtin::kInterpreterEntryTrampoline ||
415 return checks_state ||
416 (CodeKindCanDeoptimize(kind()) && marked_for_deoptimization());
417}
418
420 return kind != CodeKind::JS_TO_WASM_FUNCTION &&
421 kind != CodeKind::C_WASM_ENTRY && kind != CodeKind::WASM_FUNCTION;
422}
423
425#if V8_ENABLE_WEBASSEMBLY
427 builtin_id() != Builtin::kWasmCompileLazy;
428#else
430#endif
431}
432
433inline bool Code::is_context_specialized() const {
434 return IsContextSpecializedField::decode(flags(kRelaxedLoad));
435}
436
437inline bool Code::is_turbofanned() const {
438 return IsTurbofannedField::decode(flags(kRelaxedLoad));
439}
440
441inline bool Code::is_maglevved() const { return kind() == CodeKind::MAGLEV; }
442
443unsigned Code::inlined_bytecode_size() const {
444 unsigned size = RELAXED_READ_UINT_FIELD(*this, kInlinedBytecodeSizeOffset);
446 return size;
447}
448
449void Code::set_inlined_bytecode_size(unsigned size) {
451 RELAXED_WRITE_UINT_FIELD(*this, kInlinedBytecodeSizeOffset, size);
452}
453
454// For optimized on-heap wasm-js wrappers, we repurpose the (otherwise unused)
// 32-bit InlinedBytecodeSize field to encode two 16-bit values needed for scanning
456// the frame: the count and starting offset of incoming tagged parameters.
457// TODO(wasm): Eventually the wrappers should be managed off-heap by the wasm
458// engine. Remove these accessors when that is the case.
// Stores the incoming tagged parameter count into the low 16 bits of the
// (repurposed) InlinedBytecodeSize field. Only valid for wasm-to-js wrappers.
void Code::set_wasm_js_tagged_parameter_count(uint16_t count) {
  DCHECK_EQ(kind(), CodeKind::WASM_TO_JS_FUNCTION);
  RELAXED_WRITE_UINT16_FIELD(*this, kInlinedBytecodeSizeOffset, count);
}
463
// Reads the incoming tagged parameter count from the low 16 bits of the
// (repurposed) InlinedBytecodeSize field. Only valid for wasm-to-js wrappers.
uint16_t Code::wasm_js_tagged_parameter_count() const {
  DCHECK_EQ(kind(), CodeKind::WASM_TO_JS_FUNCTION);
  return RELAXED_READ_UINT16_FIELD(*this, kInlinedBytecodeSizeOffset);
}
468
// Stores the first tagged parameter offset into the high 16 bits (offset + 2)
// of the repurposed InlinedBytecodeSize field. Only for wasm-to-js wrappers.
void Code::set_wasm_js_first_tagged_parameter(uint16_t count) {
  DCHECK_EQ(kind(), CodeKind::WASM_TO_JS_FUNCTION);
  RELAXED_WRITE_UINT16_FIELD(*this, kInlinedBytecodeSizeOffset + 2, count);
}
473
// Reads the first tagged parameter offset from the high 16 bits (offset + 2)
// of the repurposed InlinedBytecodeSize field. Only for wasm-to-js wrappers.
uint16_t Code::wasm_js_first_tagged_parameter() const {
  DCHECK_EQ(kind(), CodeKind::WASM_TO_JS_FUNCTION);
  return RELAXED_READ_UINT16_FIELD(*this, kInlinedBytecodeSizeOffset + 2);
}
478
// Reads the osr_offset field with relaxed memory ordering.
BytecodeOffset Code::osr_offset() const {
  return BytecodeOffset(RELAXED_READ_INT32_FIELD(*this, kOsrOffsetOffset));
}
482
// Writes the osr_offset field with relaxed memory ordering.
void Code::set_osr_offset(BytecodeOffset offset) {
  RELAXED_WRITE_INT32_FIELD(*this, kOsrOffsetOffset, offset.ToInt());
}
486
488 return is_turbofanned() || is_maglevved() || is_wasm_code();
489}
490
500
501bool Code::marked_for_deoptimization() const {
502 return MarkedForDeoptimizationField::decode(flags(kRelaxedLoad));
503}
504
505void Code::set_marked_for_deoptimization(bool flag) {
506 DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(
508 int32_t previous = flags(kRelaxedLoad);
509 int32_t updated = MarkedForDeoptimizationField::update(previous, flag);
510 set_flags(updated, kRelaxedStore);
511}
512
514 LazyDeoptimizeReason reason) {
515 set_marked_for_deoptimization(true);
516 // Eager deopts are already logged by the deoptimizer.
517 if (reason != LazyDeoptimizeReason::kEagerDeopt &&
518 V8_UNLIKELY(v8_flags.trace_deopt || v8_flags.log_deopt)) {
519 TraceMarkForDeoptimization(isolate, reason);
520 }
521#ifdef V8_ENABLE_LEAPTIERING
522 JSDispatchHandle handle = js_dispatch_handle();
524 JSDispatchTable* jdt = IsolateGroup::current()->js_dispatch_table();
525 Tagged<Code> cur = jdt->GetCode(handle);
526 if (SafeEquals(cur)) {
527 if (v8_flags.reopt_after_lazy_deopts &&
528 isolate->concurrent_recompilation_enabled()) {
529 jdt->SetCodeNoWriteBarrier(
530 handle, *BUILTIN_CODE(isolate, InterpreterEntryTrampoline));
531 // Somewhat arbitrary list of lazy deopt reasons which we expect to be
532 // stable enough to warrant either immediate re-optimization, or
533 // re-optimization after one invocation (to detect potential follow-up
534 // IC changes).
535 // TODO(olivf): We should also work on reducing the number of
536 // dependencies we create in the compilers to require less of these
537 // quick re-compilations.
538 switch (reason) {
539 case LazyDeoptimizeReason::kAllocationSiteTenuringChange:
540 case LazyDeoptimizeReason::kAllocationSiteTransitionChange:
541 case LazyDeoptimizeReason::kEmptyContextExtensionChange:
542 case LazyDeoptimizeReason::kFrameValueMaterialized:
543 case LazyDeoptimizeReason::kPropertyCellChange:
544 case LazyDeoptimizeReason::kScriptContextSlotPropertyChange:
545 case LazyDeoptimizeReason::kPrototypeChange:
546 case LazyDeoptimizeReason::kExceptionCaught:
547 case LazyDeoptimizeReason::kFieldTypeConstChange:
548 case LazyDeoptimizeReason::kFieldRepresentationChange:
549 case LazyDeoptimizeReason::kFieldTypeChange:
550 case LazyDeoptimizeReason::kInitialMapChange:
551 case LazyDeoptimizeReason::kMapDeprecated:
552 jdt->SetTieringRequest(
553 handle, TieringBuiltin::kMarkReoptimizeLazyDeoptimized,
554 isolate);
555 break;
556 default:
557 // TODO(olivf): This trampoline is just used to reset the budget. If
558 // we knew the feedback cell and the bytecode size here, we could
559 // directly reset the budget.
560 jdt->SetTieringRequest(handle, TieringBuiltin::kMarkLazyDeoptimized,
561 isolate);
562 break;
563 }
564 } else {
565 jdt->SetCodeNoWriteBarrier(handle, *BUILTIN_CODE(isolate, CompileLazy));
566 }
567 }
568 // Ensure we don't try to patch the entry multiple times.
569 set_js_dispatch_handle(kNullJSDispatchHandle);
570 }
571#endif
572}
573
575 return Code::EmbeddedObjectsClearedField::decode(flags(kRelaxedLoad));
576}
577
579 DCHECK_IMPLIES(flag, marked_for_deoptimization());
580 int32_t previous = flags(kRelaxedLoad);
581 int32_t updated = Code::EmbeddedObjectsClearedField::update(previous, flag);
582 set_flags(updated, kRelaxedStore);
583}
584
585inline bool Code::can_have_weak_objects() const {
586 return CanHaveWeakObjectsField::decode(flags(kRelaxedLoad));
587}
588
// Updates the CanHaveWeakObjects bit via a relaxed load-modify-store of the
// flags word. Note the load and store are separate operations, not one
// atomic read-modify-write.
inline void Code::set_can_have_weak_objects(bool value) {
  int32_t previous = flags(kRelaxedLoad);
  int32_t updated = CanHaveWeakObjectsField::update(previous, value);
  set_flags(updated, kRelaxedStore);
}
594
595bool Code::is_wasm_code() const { return kind() == CodeKind::WASM_FUNCTION; }
596
597int Code::constant_pool_offset() const {
599 // Redirection needed since the field doesn't exist in this case.
600 return code_comments_offset();
601 }
602 return ReadField<int>(kConstantPoolOffsetOffset);
603}
604
605void Code::set_constant_pool_offset(int value) {
607 // Redirection needed since the field doesn't exist in this case.
608 return;
609 }
610 DCHECK_LE(value, metadata_size());
611 WriteField<int>(kConstantPoolOffsetOffset, value);
612}
613
615 if (!has_constant_pool()) return kNullAddress;
616 return metadata_start() + constant_pool_offset();
617}
618
620 return metadata_start() + code_comments_offset();
621}
622
624 return builtin_jump_table_info_offset() - code_comments_offset();
625}
626
627bool Code::has_code_comments() const { return code_comments_size() > 0; }
628
629int32_t Code::builtin_jump_table_info_offset() const {
631 // Redirection needed since the field doesn't exist in this case.
632 return unwinding_info_offset();
633 }
634 return ReadField<int32_t>(kBuiltinJumpTableInfoOffsetOffset);
635}
636
637void Code::set_builtin_jump_table_info_offset(int32_t value) {
639 // Redirection needed since the field doesn't exist in this case.
640 return;
641 }
642 DCHECK_LE(value, metadata_size());
643 WriteField<int32_t>(kBuiltinJumpTableInfoOffsetOffset, value);
644}
645
647 return metadata_start() + builtin_jump_table_info_offset();
648}
649
651 return unwinding_info_offset() - builtin_jump_table_info_offset();
652}
653
657
659 return metadata_start() + unwinding_info_offset();
660}
661
663
665 return static_cast<int>(unwinding_info_end() - unwinding_info_start());
666}
667
668bool Code::has_unwinding_info() const { return unwinding_info_size() > 0; }
669
670// static
674
676 return is_optimized_code() && can_have_weak_objects();
677}
678
682
684 Tagged<Map> map_object = object->map(kAcquireLoad);
685 if (InstanceTypeChecker::IsMap(map_object)) {
686 return Cast<Map>(object)->CanTransition();
687 }
688 return InstanceTypeChecker::IsPropertyCell(map_object) ||
689 InstanceTypeChecker::IsJSReceiver(map_object) ||
690 InstanceTypeChecker::IsContext(map_object);
691}
692
694 // Maps must be strong because they can be used as part of the description for
695 // how to materialize an object upon deoptimization, in which case it is
696 // possible to reach the code that requires the Map without anything else
697 // holding a strong pointer to that Map.
698 return IsHeapObject(object) && !IsMap(object) &&
700}
701
704 DCHECK(kind() == CodeKind::BASELINE ||
706 return;
707 }
708
709 auto deopt_data = Cast<DeoptimizationData>(deoptimization_data());
710 if (deopt_data->length() == 0) return;
711
712 Tagged<DeoptimizationLiteralArray> literals = deopt_data->LiteralArray();
713 const int literals_length = literals->length();
714 for (int i = 0; i < literals_length; ++i) {
715 Tagged<MaybeObject> maybe_literal = literals->get_raw(i);
716 Tagged<HeapObject> heap_literal;
717 if (maybe_literal.GetHeapObject(&heap_literal)) {
718 v->VisitRootPointer(Root::kStackRoots, "deoptimization literal",
719 FullObjectSlot(&heap_literal));
720 }
721 }
722}
723
724Tagged<Object> Code::raw_instruction_stream() const {
725 PtrComprCageBase cage_base = code_cage_base();
726 return Code::raw_instruction_stream(cage_base);
727}
728
// Loads the raw instruction stream slot (as a Tagged<Object>) relative to the
// given cage base.
Tagged<Object> Code::raw_instruction_stream(PtrComprCageBase cage_base) const {
  return ExternalCodeField<Object>::load(cage_base, *this);
}
732
// Stores the raw instruction stream reference with release semantics,
// followed by a conditional write barrier for the slot.
void Code::set_raw_instruction_stream(Tagged<Object> value,
                                      WriteBarrierMode mode) {
  ExternalCodeField<Object>::Release_Store(*this, value);
  CONDITIONAL_WRITE_BARRIER(*this, kInstructionStreamOffset, value, mode);
}
738
740#if defined(V8_COMPRESS_POINTERS) || !defined(V8_HOST_ARCH_64_BIT)
741 const uint32_t value = ReadField<uint32_t>(kInstructionStreamOffset);
742#else
743 const uint64_t value = ReadField<uint64_t>(kInstructionStreamOffset);
744#endif
745 SLOW_DCHECK(value == 0 || !HeapLayout::InReadOnlySpace(*this));
746 return value != 0;
747}
748
750#if defined(V8_COMPRESS_POINTERS) || !defined(V8_HOST_ARCH_64_BIT)
751 const uint32_t value =
752 RELAXED_READ_INT32_FIELD(*this, kInstructionStreamOffset);
753#else
754 const uint64_t value =
755 RELAXED_READ_INT64_FIELD(*this, kInstructionStreamOffset);
756#endif
757 SLOW_DCHECK(value == 0 || !HeapLayout::InReadOnlySpace(*this));
758 return value != 0;
759}
760
762#ifdef V8_EXTERNAL_CODE_SPACE
763 return PtrComprCageBase(ExternalCodeCompressionScheme::base());
764#else // V8_EXTERNAL_CODE_SPACE
765 // Without external code space: `code_cage_base == main_cage_base`. We can
766 // get the main cage base from any heap object, including objects in RO
767 // space.
768 return GetPtrComprCageBase(*this);
769#endif // V8_EXTERNAL_CODE_SPACE
770}
771
772Tagged<InstructionStream> Code::instruction_stream() const {
773 PtrComprCageBase cage_base = code_cage_base();
774 return Code::instruction_stream(cage_base);
775}
776
780
781Tagged<InstructionStream> Code::instruction_stream(
782 PtrComprCageBase cage_base) const {
784 return ExternalCodeField<InstructionStream>::load(cage_base, *this);
785}
786
787Tagged<InstructionStream> Code::instruction_stream(RelaxedLoadTag tag) const {
788 PtrComprCageBase cage_base = code_cage_base();
789 return Code::instruction_stream(cage_base, tag);
790}
791
792Tagged<InstructionStream> Code::instruction_stream(PtrComprCageBase cage_base,
793 RelaxedLoadTag tag) const {
795 return ExternalCodeField<InstructionStream>::Relaxed_Load(cage_base, *this);
796}
797
798Tagged<Object> Code::raw_instruction_stream(RelaxedLoadTag tag) const {
799 PtrComprCageBase cage_base = code_cage_base();
800 return Code::raw_instruction_stream(cage_base, tag);
801}
802
// Relaxed-load of the raw instruction stream slot relative to the given cage
// base.
Tagged<Object> Code::raw_instruction_stream(PtrComprCageBase cage_base,
                                            RelaxedLoadTag tag) const {
  return ExternalCodeField<Object>::Relaxed_Load(cage_base, *this);
}
807
// Returns the address of the first instruction. With the sandbox enabled, the
// entrypoint is read via this object's code pointer table entry (validated
// against entrypoint_tag()); otherwise it is a plain field read.
DEF_GETTER(Code, instruction_start, Address) {
#ifdef V8_ENABLE_SANDBOX
  return ReadCodeEntrypointViaCodePointerField(kSelfIndirectPointerOffset,
                                               entrypoint_tag());
#else
  return ReadField<Address>(kInstructionStartOffset);
#endif
}
816
818#ifdef V8_ENABLE_SANDBOX
819 WriteCodeEntrypointViaCodePointerField(kSelfIndirectPointerOffset, value,
821#else
822 WriteField<Address>(kInstructionStartOffset, value);
823#endif
824}
825
827 switch (kind()) {
828 case CodeKind::BYTECODE_HANDLER:
830 case CodeKind::BUILTIN:
832 case CodeKind::REGEXP:
834 case CodeKind::WASM_FUNCTION:
835 case CodeKind::WASM_TO_CAPI_FUNCTION:
836 case CodeKind::WASM_TO_JS_FUNCTION:
837 return kWasmEntrypointTag;
838 case CodeKind::JS_TO_WASM_FUNCTION:
839 return kJSEntrypointTag;
840 default:
841 // TODO(saelo): eventually we'll want this to be UNREACHABLE().
843 }
844}
845
848 WriteBarrierMode mode) {
849 set_raw_instruction_stream(code, mode);
850 set_instruction_start(isolate, code->instruction_start());
851}
852
858
860#ifdef V8_ENABLE_SANDBOX
861 // The instruction start is stored in this object's code pointer table.
862 WriteField<CodePointerHandle>(kSelfIndirectPointerOffset,
864#else
866#endif // V8_ENABLE_SANDBOX
867}
868
871 DCHECK_EQ(raw_instruction_stream(), istream);
872 set_instruction_start(isolate, istream->instruction_start());
873}
874
876 memset(reinterpret_cast<void*>(address() + kUnalignedSize), 0,
877 kSize - kUnalignedSize);
878}
879
880RELAXED_UINT32_ACCESSORS(Code, flags, kFlagsOffset)
881
882void Code::initialize_flags(CodeKind kind, bool is_context_specialized,
883 bool is_turbofanned) {
885 uint32_t value = KindField::encode(kind) |
886 IsContextSpecializedField::encode(is_context_specialized) |
887 IsTurbofannedField::encode(is_turbofanned);
888 static_assert(FIELD_SIZE(kFlagsOffset) == kInt32Size);
889 set_flags(value, kRelaxedStore);
890}
891
892// Ensure builtin_id field fits into int16_t, so that we can rely on sign
893// extension to convert int16_t{-1} to kNoBuiltinId.
894// If the asserts fail, update the code that use kBuiltinIdOffset below.
895static_assert(static_cast<int>(Builtin::kNoBuiltinId) == -1);
896static_assert(Builtins::kBuiltinCount < std::numeric_limits<int16_t>::max());
897
899 static_assert(FIELD_SIZE(kBuiltinIdOffset) == kInt16Size);
900 Relaxed_WriteField<int16_t>(kBuiltinIdOffset,
901 static_cast<int16_t>(builtin_id));
902}
903
905 // Rely on sign-extension when converting int16_t to int to preserve
906 // kNoBuiltinId value.
907 static_assert(FIELD_SIZE(kBuiltinIdOffset) == kInt16Size);
908 static_assert(static_cast<int>(static_cast<int16_t>(Builtin::kNoBuiltinId)) ==
909 static_cast<int>(Builtin::kNoBuiltinId));
910 int value = ReadField<int16_t>(kBuiltinIdOffset);
911 return static_cast<Builtin>(value);
912}
913
915
919
923
927
929 return builtin_id() == Builtin::kBaselineLeaveFrame;
930}
931
932#ifdef V8_ENABLE_LEAPTIERING
// Reads the JS dispatch handle stored in this Code object (leaptiering only).
inline JSDispatchHandle Code::js_dispatch_handle() const {
  return JSDispatchHandle(
      ReadField<JSDispatchHandle::underlying_type>(kDispatchHandleOffset));
}
937
938inline void Code::set_js_dispatch_handle(JSDispatchHandle handle) {
940 handle.value());
941}
942#endif // V8_ENABLE_LEAPTIERING
943
944OBJECT_CONSTRUCTORS_IMPL(CodeWrapper, Struct)
945CODE_POINTER_ACCESSORS(CodeWrapper, code, kCodeOffset)
946
947} // namespace internal
948} // namespace v8
949
951
952#endif // V8_OBJECTS_CODE_INL_H_
int16_t parameter_count
Definition builtins.cc:67
Builtins::Kind kind
Definition builtins.cc:40
#define BUILTIN_CODE(isolate, name)
Definition builtins.h:45
#define SBXCHECK_EQ(lhs, rhs)
Definition check.h:62
#define SLOW_DCHECK(condition)
Definition checks.h:21
static V8_EXPORT_PRIVATE CodeEntrypointTag EntrypointTagFor(Builtin builtin)
Definition builtins.cc:501
static constexpr BytecodeOffset None()
Definition utils.h:675
void set_instruction_start(IsolateForSandbox isolate, Address value)
Definition code-inl.h:817
Address builtin_jump_table_info() const
Definition code-inl.h:646
bool checks_tiering_state() const
Definition code-inl.h:411
bool has_source_position_table() const
Definition code-inl.h:179
Tagged< InstructionStream > unchecked_instruction_stream() const
Definition code-inl.h:777
int InstructionStreamObjectSize() const
Definition code-inl.h:319
void TraceMarkForDeoptimization(Isolate *isolate, LazyDeoptimizeReason reason)
Definition code.cc:337
bool is_baseline_trampoline_builtin() const
Definition code-inl.h:924
uintptr_t GetBaselinePCForBytecodeOffset(int bytecode_offset, BytecodeToPCPosition position, Tagged< BytecodeArray > bytecodes)
Definition code-inl.h:347
CodeEntrypointTag entrypoint_tag() const
Definition code-inl.h:826
bool CanContainWeakObjects()
Definition code-inl.h:675
int handler_table_size() const
Definition code-inl.h:272
uint32_t stack_slots() const
Definition code-inl.h:491
Address InstructionStart(Isolate *isolate, Address pc) const
Definition code-inl.h:236
bool uses_safepoint_table() const
Definition code-inl.h:487
Address body_start() const
Definition code-inl.h:212
void set_embedded_objects_cleared(bool flag)
Definition code-inl.h:578
Address unwinding_info_end() const
Definition code-inl.h:662
void SetInstructionStreamAndInstructionStart(IsolateForSandbox isolate, Tagged< InstructionStream > code, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition code-inl.h:846
static Tagged< Code > FromTargetAddress(Address address)
Definition code-inl.h:671
Address metadata_end() const
Definition code-inl.h:254
Address instruction_end() const
Definition code-inl.h:218
bool is_turbofanned() const
Definition code-inl.h:437
void clear_source_position_table_and_bytecode_offset_table()
Definition code-inl.h:191
CodeKind kind() const
Definition code-inl.h:332
int unwinding_info_size() const
Definition code-inl.h:664
bool has_deoptimization_data_or_interpreter_data() const
Definition code-inl.h:126
void SetMarkedForDeoptimization(Isolate *isolate, LazyDeoptimizeReason reason)
Definition code-inl.h:513
bool has_tagged_outgoing_params() const
Definition code-inl.h:424
int code_comments_size() const
Definition code-inl.h:623
int safepoint_table_offset() const
Definition code.h:263
int constant_pool_size() const
Definition code-inl.h:278
Address constant_pool() const
Definition code-inl.h:614
bool is_wasm_code() const
Definition code-inl.h:595
void set_bytecode_or_interpreter_data(Tagged< TrustedObject > value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
Definition code-inl.h:135
uintptr_t GetBaselineStartPCForBytecodeOffset(int bytecode_offset, Tagged< BytecodeArray > bytecodes)
Definition code-inl.h:377
int SizeIncludingMetadata() const
Definition code-inl.h:323
bool contains(Isolate *isolate, Address pc) const
Definition code-inl.h:313
bool IsWeakObject(Tagged< HeapObject > object)
Definition code-inl.h:679
bool is_builtin() const
Definition code-inl.h:914
Address handler_table_address() const
Definition code-inl.h:268
uintptr_t GetBaselineEndPCForBytecodeOffset(int bytecode_offset, Tagged< BytecodeArray > bytecodes)
Definition code-inl.h:383
uintptr_t GetBaselinePCForNextExecutedBytecode(int bytecode_offset, Tagged< BytecodeArray > bytecodes)
Definition code-inl.h:389
int body_size() const
Definition code-inl.h:216
Tagged< TrustedObject > bytecode_or_interpreter_data() const
Definition code-inl.h:131
uint8_t * relocation_end() const
Definition code-inl.h:301
bool is_optimized_code() const
Definition code-inl.h:916
bool has_unwinding_info() const
Definition code-inl.h:668
bool is_interpreter_trampoline_builtin() const
Definition code-inl.h:920
Address InstructionEnd(Isolate *isolate, Address pc) const
Definition code-inl.h:244
int safepoint_table_size() const
Definition code-inl.h:262
Address body_end() const
Definition code-inl.h:214
int relocation_size() const
Definition code-inl.h:307
void SetInstructionStartForOffHeapBuiltin(IsolateForSandbox isolate, Address entry)
Definition code-inl.h:853
bool has_constant_pool() const
Definition code-inl.h:288
Address safepoint_table_address() const
Definition code-inl.h:258
Address unwinding_info_start() const
Definition code-inl.h:658
bool is_context_specialized() const
Definition code-inl.h:433
void set_builtin_id(Builtin builtin_id)
Definition code-inl.h:898
bool has_handler_table() const
Definition code-inl.h:276
int GetOffsetFromInstructionStart(Isolate *isolate, Address pc) const
Definition code-inl.h:248
bool has_safepoint_table() const
Definition code-inl.h:266
void ClearInstructionStartForSerialization(IsolateForSandbox isolate)
Definition code-inl.h:859
bool is_baseline_leave_frame_builtin() const
Definition code-inl.h:928
Tagged< ProtectedFixedArray > unchecked_deoptimization_data() const
Definition code-inl.h:290
static bool IsWeakObjectInOptimizedCode(Tagged< HeapObject > object)
Definition code-inl.h:683
int builtin_jump_table_info_size() const
Definition code-inl.h:650
void IterateDeoptimizationLiterals(RootVisitor *v)
Definition code-inl.h:702
Address code_comments() const
Definition code-inl.h:619
bool uses_deoptimization_data() const
Definition code-inl.h:118
int GetBytecodeOffsetForBaselinePC(Address baseline_pc, Tagged< BytecodeArray > bytecodes)
Definition code-inl.h:334
static bool IsWeakObjectInDeoptimizationLiteralArray(Tagged< Object > object)
Definition code-inl.h:693
uint8_t * relocation_start() const
Definition code-inl.h:295
bool has_code_comments() const
Definition code-inl.h:627
bool embedded_objects_cleared() const
Definition code-inl.h:574
bool is_maglevved() const
Definition code-inl.h:441
void UpdateInstructionStart(IsolateForSandbox isolate, Tagged< InstructionStream > istream)
Definition code-inl.h:869
PtrComprCageBase code_cage_base() const
Definition code-inl.h:761
Address metadata_start() const
Definition code-inl.h:222
Builtin builtin_id() const
Definition code-inl.h:904
void clear_deoptimization_data_and_interpreter_data()
Definition code-inl.h:122
bool has_builtin_jump_table_info() const
Definition code-inl.h:654
bool has_bytecode_offset_table() const
Definition code-inl.h:186
bool has_instruction_stream() const
Definition code-inl.h:739
bool has_source_position_table_or_bytecode_offset_table() const
Definition code-inl.h:175
Address InstructionStartOf(Builtin builtin) const
Address MetadataStartOf(Builtin builtin) const
static EmbeddedData FromBlob()
static EmbeddedData FromBlobForPc(Isolate *isolate, Address maybe_builtin_pc)
Address InstructionEnd(Isolate *isolate, Address pc) const
Definition code-inl.h:67
bool CanDeoptAt(Isolate *isolate, Address pc) const
Definition code-inl.h:71
Tagged< Object > raw_instruction_stream() const
Address InstructionStart(Isolate *isolate, Address pc) const
Definition code-inl.h:63
Tagged< Code > UnsafeCastToCode() const
Definition code-inl.h:31
int GetOffsetFromInstructionStart(Isolate *isolate, Address pc) const
Definition code-inl.h:58
Address instruction_start() const
static V8_INLINE bool InYoungGeneration(Tagged< Object > object)
static V8_INLINE bool InReadOnlySpace(Tagged< HeapObject > object)
void Relaxed_WriteField(size_t offset, T value)
T ReadField(size_t offset) const
Address address() const
void WriteField(size_t offset, T value) const
static NEVER_READ_ONLY_SPACE constexpr bool kOnHeapBodyIsContiguous
static constexpr bool kOffHeapBodyIsContiguous
static Tagged< InstructionStream > FromTargetAddress(Address address)
static constexpr int SizeFor(int body_size)
static IsolateGroup * current()
virtual void VisitRootPointer(Root root, const char *description, FullObjectSlot p)
Definition visitors.h:75
static constexpr Tagged< Smi > zero()
Definition smi.h:99
static PtrType load(Tagged< HeapObject > host, int offset=0)
static void store(Tagged< HeapObject > host, PtrType value)
constexpr bool SafeEquals(TaggedImpl< kOtherRefType, Address > other) const
Definition tagged-impl.h:93
bool GetHeapObject(Tagged< HeapObject > *result) const
void ClearProtectedPointerField(int offset)
void WriteProtectedPointerField(int offset, Tagged< TrustedObject > value)
bool IsProtectedPointerFieldEmpty(int offset) const
Tagged< TrustedObject > ReadProtectedPointerField(int offset) const
static constexpr bool Returns(Bytecode bytecode)
Definition bytecodes.h:872
static constexpr bool IsSwitch(Bytecode bytecode)
Definition bytecodes.h:819
static constexpr bool IsJump(Bytecode bytecode)
Definition bytecodes.h:798
#define GCSAFE_CODE_FWD_ACCESSOR(ReturnType, Name)
Definition code-inl.h:35
#define V8_EMBEDDED_CONSTANT_POOL_BOOL
Definition globals.h:81
#define V8_BUILTIN_JUMP_TABLE_INFO_BOOL
Definition globals.h:262
int start
LineAndColumn previous
int32_t offset
int position
Definition liveedit.cc:290
base::SmallVector< int32_t, 1 > stack_slots
unsigned short uint16_t
Definition unicode.cc:39
T & Memory(Address addr)
Definition memory.h:18
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
Definition handles-inl.h:72
V8_INLINE Address ReadCodeEntrypointViaCodePointerField(Address field_address, CodeEntrypointTag tag)
constexpr bool CodeKindIsOptimizedJSFunction(CodeKind kind)
Definition code-kind.h:66
uint32_t SafepointTableStackSlotsField_t
V8_INLINE constexpr bool IsInterpreterTrampolineBuiltin(Builtin builtin_id)
Definition builtins.h:438
constexpr int kInt16Size
Definition globals.h:398
constexpr int kSafepointTableStackSlotsOffset
constexpr bool CodeKindCanTierUp(CodeKind kind)
Definition code-kind.h:95
Tagged(T object) -> Tagged< T >
V8_INLINE constexpr bool IsBaselineTrampolineBuiltin(Builtin builtin_id)
Definition builtins.h:447
V8_INLINE Isolate * GetIsolateFromWritableObject(Tagged< HeapObject > object)
base::StrongAlias< JSDispatchHandleAliasTag, uint32_t > JSDispatchHandle
Definition globals.h:557
Flag flags[]
Definition flags.cc:3797
constexpr int kFunctionExitBytecodeOffset
Definition globals.h:858
constexpr bool CodeKindIsInterpretedJSFunction(CodeKind kind)
Definition code-kind.h:51
Handle< To > UncheckedCast(Handle< From > value)
Definition handles-inl.h:55
V8_INLINE PtrComprCageBase GetPtrComprCageBase()
constexpr JSDispatchHandle kNullJSDispatchHandle(0)
constexpr bool CodeKindMayLackSourcePositionTable(CodeKind kind)
Definition code-kind.h:117
constexpr int kInt32Size
Definition globals.h:401
constexpr bool CodeKindHasTaggedOutgoingParams(CodeKind kind)
Definition code-inl.h:419
V8_EXPORT_PRIVATE FlagValues v8_flags
return value
Definition map-inl.h:893
constexpr CodePointerHandle kNullCodePointerHandle
static constexpr Address kNullAddress
Definition v8-internal.h:53
constexpr bool CodeKindUsesDeoptimizationData(CodeKind kind)
Definition code-kind.h:109
constexpr bool CodeKindUsesBytecodeOffsetTable(CodeKind kind)
Definition code-kind.h:113
V8_INLINE void WriteCodeEntrypointViaCodePointerField(Address field_address, Address value, CodeEntrypointTag tag)
constexpr bool CodeKindCanDeoptimize(CodeKind kind)
Definition code-kind.h:83
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
static constexpr RelaxedLoadTag kRelaxedLoad
Definition globals.h:2909
static constexpr RelaxedStoreTag kRelaxedStore
Definition globals.h:2911
static constexpr AcquireLoadTag kAcquireLoad
Definition globals.h:2908
#define RELAXED_WRITE_INT32_FIELD(p, offset, value)
#define RELAXED_WRITE_UINT_FIELD(p, offset, value)
#define CODE_POINTER_ACCESSORS(holder, name, offset)
#define OBJECT_CONSTRUCTORS_IMPL(Type, Super)
#define RELAXED_READ_INT32_FIELD(p, offset)
#define CONDITIONAL_PROTECTED_POINTER_WRITE_BARRIER(object, offset, value, mode)
#define UINT16_ACCESSORS(holder, name, offset)
#define RELAXED_READ_UINT_FIELD(p, offset)
#define ACCESSORS(holder, name, type, offset)
#define INT_ACCESSORS(holder, name, offset)
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode)
#define INT32_ACCESSORS(holder, name, offset)
#define RELAXED_WRITE_UINT16_FIELD(p, offset, value)
#define RELAXED_UINT32_ACCESSORS(holder, name, offset)
#define RELAXED_READ_UINT16_FIELD(p, offset)
#define DEF_GETTER(Camel, Lower, Bit)
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define CHECK(condition)
Definition logging.h:124
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define FIELD_SIZE(Name)
Definition utils.h:259
#define V8_LIKELY(condition)
Definition v8config.h:661
#define V8_UNLIKELY(condition)
Definition v8config.h:660