v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
instruction-stream-inl.h
// Copyright 2023 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_INSTRUCTION_STREAM_INL_H_
#define V8_OBJECTS_INSTRUCTION_STREAM_INL_H_

#include "src/objects/instruction-stream.h"
// Include the non-inl header before the rest of the headers.

#include <optional>

#include "src/common/code-memory-access-inl.h"
#include "src/common/ptr-compr-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/objects/code.h"
#include "src/objects/objects-inl.h"  // For HeapObject::IsInstructionStream.

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8::internal {

OBJECT_CONSTRUCTORS_IMPL(InstructionStream, TrustedObject)
NEVER_READ_ONLY_SPACE_IMPL(InstructionStream)

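// The body is the instructions followed by the inline metadata; its size is
// stored in the object header (see the layout comment above Finalize()).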
uint32_t InstructionStream::body_size() const {
  return ReadField<uint32_t>(kBodySizeOffset);
}

// TODO(sroettger): remove unused setter functions once all code writes go
// through the WritableJitAllocation, e.g. the body_size setter above.

#if V8_EMBEDDED_CONSTANT_POOL_BOOL
Address InstructionStream::constant_pool() const {
  return address() + ReadField<int>(kConstantPoolOffsetOffset);
}
#else
Address InstructionStream::constant_pool() const { return kNullAddress; }
#endif

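// Initialize() fills in the header of a freshly allocated InstructionStream
// while the JIT allocation is writable; the body itself is copied in later by
// Finalize().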
// static
Tagged<InstructionStream> InstructionStream::Initialize(
    Tagged<HeapObject> self, Tagged<Map> map, uint32_t body_size,
    int constant_pool_offset, Tagged<TrustedByteArray> reloc_info) {
  {
    WritableJitAllocation writable_allocation =
        ThreadIsolation::RegisterInstructionStreamAllocation(
            self.address(), InstructionStream::SizeFor(body_size));
    CHECK_EQ(InstructionStream::SizeFor(body_size), writable_allocation.size());

    writable_allocation.WriteHeaderSlot<Map, kMapOffset>(map, kRelaxedStore);

    writable_allocation.WriteHeaderSlot<uint32_t, kBodySizeOffset>(body_size);

    if constexpr (V8_EMBEDDED_CONSTANT_POOL_BOOL) {
      writable_allocation.WriteHeaderSlot<int, kConstantPoolOffsetOffset>(
          kHeaderSize + constant_pool_offset);
    }

    // During the Code initialization process, InstructionStream::code is
    // briefly unset (the Code object has not been allocated yet). In this state
    // it is only visible through heap iteration.
    writable_allocation.WriteHeaderSlot<Smi, kCodeOffset>(Smi::zero(),
                                                          kReleaseStore);

    writable_allocation.WriteProtectedPointerHeaderSlot<TrustedByteArray,
                                                        kRelocationInfoOffset>(
        reloc_info, kRelaxedStore);

    // Clear header padding
    writable_allocation.ClearBytes(kUnalignedSize,
                                   kHeaderSize - kUnalignedSize);
    // Clear trailing padding.
    writable_allocation.ClearBytes(kHeaderSize + body_size,
                                   TrailingPaddingSizeFor(body_size));
  }

  Tagged<InstructionStream> istream = Cast<InstructionStream>(self);

  // We want to keep the code minimal that runs with write access to a JIT
  // allocation, so trigger the write barriers after the WritableJitAllocation
  // went out of scope.
  SLOW_DCHECK(!WriteBarrier::IsRequired(istream, map));
  CONDITIONAL_PROTECTED_POINTER_WRITE_BARRIER(*istream, kRelocationInfoOffset,
                                              reloc_info, UPDATE_WRITE_BARRIER);

  return istream;
}

// Copy from compilation artifacts stored in CodeDesc to the target on-heap
// objects.
//
// Note this is quite convoluted for historical reasons. The CodeDesc buffer
// contains instructions, a part of inline metadata, and the relocation info.
// Additionally, the unwinding_info is stored in a separate buffer
// `desc.unwinding_info`. In this method, we copy all these parts into the
// final on-heap representation.
//
// The off-heap representation:
//
// CodeDesc.buffer:
//
// +-------------------
// | instructions
// +-------------------
// | inline metadata
// | .. safepoint table
// | .. handler table
// | .. constant pool
// | .. code comments
// +-------------------
// | reloc info
// +-------------------
//
// CodeDesc.unwinding_info: .. the unwinding info.
//
// This is transformed into the on-heap representation, where
// InstructionStream contains all instructions and inline metadata, and a
// pointer to the relocation info byte array.
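//
// On-heap (a sketch; the field order is illustrative, the actual offsets are
// defined in instruction-stream.h):
//
// InstructionStream:
//
// +-------------------  <- address()
// | header
// | .. map
// | .. body_size
// | .. constant_pool_offset (only with V8_EMBEDDED_CONSTANT_POOL_BOOL)
// | .. code (protected pointer, Smi::zero() until Finalize)
// | .. relocation_info (protected pointer)
// | .. header padding
// +-------------------  <- instruction_start() == address() + kHeaderSize
// | instructions
// | inline metadata
// +-------------------  <- instruction_start() + body_size()
// | trailing padding
// +-------------------  <- address() + SizeFor(body_size())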
void InstructionStream::Finalize(Tagged<Code> code,
                                 Tagged<TrustedByteArray> reloc_info,
                                 CodeDesc desc, Heap* heap) {
  DisallowGarbageCollection no_gc;
  std::optional<WriteBarrierPromise> promise;

  // Copy the relocation info first before we unlock the Jit allocation.
  // TODO(sroettger): reloc info should live in protected memory.
  DCHECK_EQ(reloc_info->length(), desc.reloc_size);
  CopyBytes(reloc_info->begin(), desc.buffer + desc.reloc_offset,
            static_cast<size_t>(desc.reloc_size));

  {
    WritableJitAllocation writable_allocation =
        ThreadIsolation::LookupJitAllocation(
            address(), InstructionStream::SizeFor(body_size()),
            ThreadIsolation::JitAllocationType::kInstructionStream);

    // Copy code and inline metadata.
    static_assert(kOnHeapBodyIsContiguous);
    writable_allocation.CopyCode(kHeaderSize, desc.buffer,
                                 static_cast<size_t>(desc.instr_size));
    writable_allocation.CopyData(kHeaderSize + desc.instr_size,
                                 desc.unwinding_info,
                                 static_cast<size_t>(desc.unwinding_info_size));
    DCHECK_EQ(desc.body_size(), desc.instr_size + desc.unwinding_info_size);
    DCHECK_EQ(code->body_size(),
              code->instruction_size() + code->metadata_size());

    promise.emplace(RelocateFromDesc(writable_allocation, heap, desc,
                                     code->constant_pool(), no_gc));

    // Publish the code pointer after the istream has been fully initialized.
    writable_allocation.WriteProtectedPointerHeaderSlot<Code, kCodeOffset>(
        code, kReleaseStore);
  }

  // Trigger the write barriers after we dropped the JIT write permissions.
  RelocateFromDescWriteBarriers(heap, desc, code->constant_pool(), *promise,
                                no_gc);
  CONDITIONAL_PROTECTED_POINTER_WRITE_BARRIER(*this, kCodeOffset, code,
                                              UPDATE_WRITE_BARRIER);

  code->FlushICache();
}
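
// The body (instructions plus inline metadata) is laid out contiguously,
// directly after the fixed-size header.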
Address InstructionStream::instruction_start() const {
  return field_address(kHeaderSize);
}

Address InstructionStream::body_end() const {
  static_assert(kOnHeapBodyIsContiguous);
  return instruction_start() + body_size();
}

Tagged<Object> InstructionStream::raw_code(AcquireLoadTag tag) const {
  return RawProtectedPointerField(kCodeOffset).Acquire_Load();
}

Tagged<Code> InstructionStream::code(AcquireLoadTag tag) const {
  return Cast<Code>(raw_code(tag));
}

bool InstructionStream::TryGetCode(Tagged<Code>* code_out,
                                   AcquireLoadTag tag) const {
  Tagged<Object> maybe_code = raw_code(tag);
  if (maybe_code == Smi::zero()) return false;
  *code_out = Cast<Code>(maybe_code);
  return true;
}
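
// As TryGetCode(), but without the checked cast to Code.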
bool InstructionStream::TryGetCodeUnchecked(Tagged<Code>* code_out,
                                            AcquireLoadTag tag) const {
  Tagged<Object> maybe_code = raw_code(tag);
  if (maybe_code == Smi::zero()) return false;
  *code_out = UncheckedCast<Code>(maybe_code);
  return true;
}

Tagged<TrustedByteArray> InstructionStream::relocation_info() const {
  return Cast<TrustedByteArray>(
      ReadProtectedPointerField(kRelocationInfoOffset));
}

Tagged<TrustedByteArray> InstructionStream::unchecked_relocation_info() const {
  return UncheckedCast<TrustedByteArray>(
      RawProtectedPointerField(kRelocationInfoOffset).Acquire_Load());
}

uint8_t* InstructionStream::relocation_start() const {
  return relocation_info()->begin();
}

uint8_t* InstructionStream::relocation_end() const {
  return relocation_info()->end();
}

int InstructionStream::relocation_size() const {
  return relocation_info()->length();
}

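// Total allocation size: header, body, and trailing padding.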
int InstructionStream::Size() const { return SizeFor(body_size()); }

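// Recovers the InstructionStream from a code target address (the start of its
// body). Addresses inside the embedded builtins blob are rejected; see the
// TODO below.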
// static
Tagged<InstructionStream> InstructionStream::FromTargetAddress(
    Address address) {
  {
    // TODO(jgruber,v8:6666): Support embedded builtins here. We'd need to pass
    // in the current isolate.
    Address start =
        reinterpret_cast<Address>(Isolate::CurrentEmbeddedBlobCode());
    Address end = start + Isolate::CurrentEmbeddedBlobCodeSize();
    CHECK(address < start || address >= end);
  }

  Tagged<HeapObject> code =
      HeapObject::FromAddress(address - kHeaderSize);
  // Unchecked cast because we can't rely on the map currently not being a
  // forwarding pointer.
  return UncheckedCast<InstructionStream>(code);
}

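// As FromTargetAddress(), but the code entry address is first loaded from the
// given memory location.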
// static
Tagged<InstructionStream> InstructionStream::FromEntryAddress(
    Address location_of_address) {
  Address code_entry = base::Memory<Address>(location_of_address);
  Tagged<HeapObject> code =
      HeapObject::FromAddress(code_entry - kHeaderSize);
  // Unchecked cast because we can't rely on the map currently not being a
  // forwarding pointer.
  return UncheckedCast<InstructionStream>(code);
}

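// The main pointer compression cage base; default-constructed when
// V8_COMPRESS_POINTERS is not defined.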
// static
PtrComprCageBase InstructionStream::main_cage_base() {
#ifdef V8_COMPRESS_POINTERS
  return PtrComprCageBase{V8HeapCompressionScheme::base()};
#else
  return PtrComprCageBase{};
#endif
}

}  // namespace v8::internal

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_INSTRUCTION_STREAM_INL_H_