v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
constant-pool.h
Go to the documentation of this file.
1// Copyright 2018 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_CODEGEN_CONSTANT_POOL_H_
6#define V8_CODEGEN_CONSTANT_POOL_H_
7
8#include <map>
9
11#include "src/codegen/label.h"
13#include "src/common/globals.h"
14
15namespace v8 {
16namespace internal {
17
18class Instruction;
19
20// -----------------------------------------------------------------------------
21// Constant pool support
22
// NOTE(review): Doxygen-scraped fragment of `class ConstantPoolEntry`. The
// class head (scrape line 23), the value/Double constructors, the
// SHARING_* sentinel constants, the Type/Access enums, and the bodies of
// set_merged_index()/set_offset() were dropped by the scrape; names such as
// SHARING_PROHIBITED, INTPTR, position_, merged_index_ and rmode_ are
// declared on those missing lines — do not treat this fragment as complete.
24 public:
25 ConstantPoolEntry() = default;
38
// Source-position (pc offset) of the instruction that loads this constant.
39 int position() const { return position_; }
// Sharing is encoded in merged_index_: a SHARING_PROHIBITED sentinel means
// this entry must get its own pool slot.
40 bool sharing_ok() const { return merged_index_ != SHARING_PROHIBITED; }
// Non-negative merged_index_ means this entry was deduplicated onto another.
41 bool is_merged() const { return merged_index_ >= 0; }
42 int merged_index() const {
44 return merged_index_;
45 }
46 void set_merged_index(int index) {
50 }
// NOTE(review): offset() and merged_index() share the merged_index_ field;
// the DCHECK lines that guarded which state the field is in were lost in
// the scrape (lines 43, 47-49, 52, 57).
51 int offset() const {
53 return merged_index_;
54 }
55 void set_offset(int offset) {
56 DCHECK_GE(offset, 0);
58 }
// The constant's payload, viewed as a pointer-sized or 64-bit raw value
// (the two accessors alias the same union below).
59 intptr_t value() const { return value_; }
60 uint64_t value64() const { return value64_; }
61 RelocInfo::Mode rmode() const { return rmode_; }
62
64
// Pool-slot width for an entry of the given type: pointer-sized for
// INTPTR entries, 8 bytes for doubles.
65 static int size(Type type) {
66 return (type == INTPTR) ? kSystemPointerSize : kDoubleSize;
67 }
68
70
71 private:
// Payload storage: only one of the two views is meaningful per entry.
74 union {
75 intptr_t value_;
76 uint64_t value64_;
77 };
78 // TODO(leszeks): The way we use this, it could probably be packed into
79 // merged_index_ if size is a concern.
82};
83
84#if defined(V8_TARGET_ARCH_PPC64)
85
86// -----------------------------------------------------------------------------
87// Embedded constant pool support
88
89class ConstantPoolBuilder {
90 public:
91 ConstantPoolBuilder(int ptr_reach_bits, int double_reach_bits);
92
93#ifdef DEBUG
94 ~ConstantPoolBuilder() {
95 // Unused labels to prevent DCHECK failures.
96 emitted_label_.Unuse();
97 emitted_label_.UnuseNear();
98 }
99#endif
100
101 // Add pointer-sized constant to the embedded constant pool
102 ConstantPoolEntry::Access AddEntry(int position, intptr_t value,
103 bool sharing_ok) {
104 ConstantPoolEntry entry(position, value, sharing_ok);
105 return AddEntry(&entry, ConstantPoolEntry::INTPTR);
106 }
107
108 // Add double constant to the embedded constant pool
109 ConstantPoolEntry::Access AddEntry(int position, base::Double value) {
110 ConstantPoolEntry entry(position, value);
111 return AddEntry(&entry, ConstantPoolEntry::DOUBLE);
112 }
113
114 // Add double constant to the embedded constant pool
115 ConstantPoolEntry::Access AddEntry(int position, double value) {
116 return AddEntry(position, base::Double(value));
117 }
118
119 // Previews the access type required for the next new entry to be added.
120 ConstantPoolEntry::Access NextAccess(ConstantPoolEntry::Type type) const;
121
122 bool IsEmpty() {
123 return info_[ConstantPoolEntry::INTPTR].entries.empty() &&
124 info_[ConstantPoolEntry::INTPTR].shared_entries.empty() &&
125 info_[ConstantPoolEntry::DOUBLE].entries.empty() &&
126 info_[ConstantPoolEntry::DOUBLE].shared_entries.empty();
127 }
128
129 // Emit the constant pool. Invoke only after all entries have been
130 // added and all instructions have been emitted.
131 // Returns position of the emitted pool (zero implies no constant pool).
132 int Emit(Assembler* assm);
133
134 // Returns the label associated with the start of the constant pool.
135 // Linking to this label in the function prologue may provide an
136 // efficient means of constant pool pointer register initialization
137 // on some architectures.
138 inline Label* EmittedPosition() { return &emitted_label_; }
139
140 private:
141 ConstantPoolEntry::Access AddEntry(ConstantPoolEntry* entry,
142 ConstantPoolEntry::Type type);
143 void EmitSharedEntries(Assembler* assm, ConstantPoolEntry::Type type);
144 void EmitGroup(Assembler* assm, ConstantPoolEntry::Access access,
145 ConstantPoolEntry::Type type);
146
147 struct PerTypeEntryInfo {
148 PerTypeEntryInfo() : regular_count(0), overflow_start(-1) {}
149 bool overflow() const {
150 return (overflow_start >= 0 &&
151 overflow_start < static_cast<int>(entries.size()));
152 }
153 int regular_reach_bits;
154 int regular_count;
155 int overflow_start;
156 std::vector<ConstantPoolEntry> entries;
157 std::vector<ConstantPoolEntry> shared_entries;
158 };
159
160 Label emitted_label_; // Records pc_offset of emitted pool
161 PerTypeEntryInfo info_[ConstantPoolEntry::NUMBER_OF_TYPES];
162};
163
164#endif // defined(V8_TARGET_ARCH_PPC64)
165
166#if defined(V8_TARGET_ARCH_ARM64) || defined(V8_TARGET_ARCH_RISCV64) || \
167 defined(V8_TARGET_ARCH_RISCV32)
168
169class ConstantPoolKey {
170 public:
171 explicit ConstantPoolKey(uint64_t value,
172 RelocInfo::Mode rmode = RelocInfo::NO_INFO)
173 : is_value32_(false), value64_(value), rmode_(rmode) {}
174
175 explicit ConstantPoolKey(uint32_t value,
176 RelocInfo::Mode rmode = RelocInfo::NO_INFO)
177 : is_value32_(true), value32_(value), rmode_(rmode) {}
178
179 uint64_t value64() const {
180 CHECK(!is_value32_);
181 return value64_;
182 }
183 uint32_t value32() const {
184 CHECK(is_value32_);
185 return value32_;
186 }
187
188 bool is_value32() const { return is_value32_; }
189 RelocInfo::Mode rmode() const { return rmode_; }
190
191 bool AllowsDeduplication() const {
192 DCHECK(rmode_ != RelocInfo::CONST_POOL &&
193 rmode_ != RelocInfo::VENEER_POOL &&
194 rmode_ != RelocInfo::DEOPT_SCRIPT_OFFSET &&
195 rmode_ != RelocInfo::DEOPT_INLINING_ID &&
196 rmode_ != RelocInfo::DEOPT_REASON && rmode_ != RelocInfo::DEOPT_ID &&
197 rmode_ != RelocInfo::DEOPT_NODE_ID);
198 // CODE_TARGETs can be shared because they aren't patched anymore,
199 // and we make sure we emit only one reloc info for them (thus delta
200 // patching) will apply the delta only once. At the moment, we do not dedup
201 // code targets if they are wrapped in a heap object request (value == 0).
202 bool is_sharable_code_target =
203 rmode_ == RelocInfo::CODE_TARGET &&
204 (is_value32() ? (value32() != 0) : (value64() != 0));
205 bool is_sharable_embedded_object = RelocInfo::IsEmbeddedObjectMode(rmode_);
206 return RelocInfo::IsShareableRelocMode(rmode_) || is_sharable_code_target ||
207 is_sharable_embedded_object;
208 }
209
210 private:
211 bool is_value32_;
212 union {
213 uint64_t value64_;
214 uint32_t value32_;
215 };
216 RelocInfo::Mode rmode_;
217};
218
219// Order for pool entries. 64bit entries go first.
220inline bool operator<(const ConstantPoolKey& a, const ConstantPoolKey& b) {
221 if (a.is_value32() < b.is_value32()) return true;
222 if (a.is_value32() > b.is_value32()) return false;
223 if (a.rmode() < b.rmode()) return true;
224 if (a.rmode() > b.rmode()) return false;
225 if (a.is_value32()) return a.value32() < b.value32();
226 return a.value64() < b.value64();
227}
228
229inline bool operator==(const ConstantPoolKey& a, const ConstantPoolKey& b) {
230 if (a.rmode() != b.rmode() || a.is_value32() != b.is_value32()) {
231 return false;
232 }
233 if (a.is_value32()) return a.value32() == b.value32();
234 return a.value64() == b.value64();
235}
236
237// Constant pool generation
// Whether a branch over the pool body must be emitted before the pool
// (see EmitAndClear / ComputeSize, which take a Jump parameter).
238enum class Jump { kOmitted, kRequired };
// Whether Check() emits the pool unconditionally or only when needed.
239enum class Emission { kIfNeeded, kForced };
// Whether alignment padding must precede the pool (see ComputeSize).
240enum class Alignment { kOmitted, kRequired };
// Result of RecordEntry: whether a RelocInfo must be written for the entry,
// or omitted because the entry deduplicated onto an existing one.
241enum class RelocInfoStatus { kMustRecord, kMustOmitForDuplicate };
// Tag for the BlockScope overload that skips the pool-emission check.
242enum class PoolEmissionCheck { kSkip };
243
244// Pools are emitted in the instruction stream, preferably after unconditional
245// jumps or after returns from functions (in dead code locations).
246// If a long code sequence does not contain unconditional jumps, it is
247// necessary to emit the constant pool before the pool gets too far from the
248// location it is accessed from. In this case, we emit a jump over the emitted
249// constant pool.
250// Constants in the pool may be addresses of functions that gets relocated;
251// if so, a relocation info entry is associated to the constant pool entry.
// NOTE(review): Doxygen-scraped rendering; the scrape line numbers are
// embedded in each line, and one line inside BlockScope's private section
// (scrape line 298, plausibly a DISALLOW_IMPLICIT_CONSTRUCTORS macro per the
// page's tooltip residue) is missing — confirm against the real header.
252class ConstantPool {
253 public:
254 explicit ConstantPool(Assembler* assm);
255 ~ConstantPool();
256
257 // Returns true when we need to write RelocInfo and false when we do not.
258 RelocInfoStatus RecordEntry(uint32_t data, RelocInfo::Mode rmode);
259 RelocInfoStatus RecordEntry(uint64_t data, RelocInfo::Mode rmode);
260
// Counts of pending 32-bit / 64-bit entries recorded since the last emission.
261 size_t Entry32Count() const { return entry32_count_; }
262 size_t Entry64Count() const { return entry64_count_; }
263 bool IsEmpty() const { return entries_.empty(); }
264 // Check if pool will be out of range at {pc_offset}.
265 bool IsInImmRangeIfEmittedAt(int pc_offset);
266 // Size in bytes of the constant pool. Depending on parameters, the size will
267 // include the branch over the pool and alignment padding.
268 int ComputeSize(Jump require_jump, Alignment require_alignment) const;
269
270 // Emit the pool at the current pc with a branch over the pool if requested.
271 void EmitAndClear(Jump require);
272 bool ShouldEmitNow(Jump require_jump, size_t margin = 0) const;
273 V8_EXPORT_PRIVATE void Check(Emission force_emission, Jump require_jump,
274 size_t margin = 0);
275
276 V8_EXPORT_PRIVATE void MaybeCheck();
277 void Clear();
278
279 // Constant pool emission can be blocked temporarily.
280 bool IsBlocked() const;
281
282 // Repeated checking whether the constant pool should be emitted is expensive;
283 // only check once a number of instructions have been generated.
284 void SetNextCheckIn(size_t instructions);
285
286 // Class for scoping postponing the constant pool generation.
287 class V8_EXPORT_PRIVATE V8_NODISCARD BlockScope {
288 public:
289 // BlockScope immediately emits the pool if necessary to ensure that
290 // during the block scope at least {margin} bytes can be emitted without
291 // pool emission becoming necessary.
292 explicit BlockScope(Assembler* pool, size_t margin = 0);
293 BlockScope(Assembler* pool, PoolEmissionCheck);
294 ~BlockScope();
295
296 private:
297 ConstantPool* pool_;
299 };
300
301 // Hard limit to the const pool which must not be exceeded.
302 static const size_t kMaxDistToPool32;
303 static const size_t kMaxDistToPool64;
304 // Approximate distance where the pool should be emitted.
305 static const size_t kApproxDistToPool32;
306 V8_EXPORT_PRIVATE static const size_t kApproxDistToPool64;
307 // Approximate distance where the pool may be emitted if
308 // no jump is required (due to a recent unconditional jump).
309 static const size_t kOpportunityDistToPool32;
310 static const size_t kOpportunityDistToPool64;
311 // PC distance between constant pool checks.
312 V8_EXPORT_PRIVATE static const size_t kCheckInterval;
313 // Number of entries in the pool which trigger a check.
314 static const size_t kApproxMaxEntryCount;
315
316 private:
// Nested-blocking bookkeeping used by BlockScope (see blocked_nesting_).
317 void StartBlock();
318 void EndBlock();
319
320 void EmitEntries();
321 void EmitPrologue(Alignment require_alignment);
322 int PrologueSize(Jump require_jump) const;
323 RelocInfoStatus RecordKey(ConstantPoolKey key, int offset);
324 RelocInfoStatus GetRelocInfoStatusFor(const ConstantPoolKey& key);
325 void Emit(const ConstantPoolKey& key);
326 void SetLoadOffsetToConstPoolEntry(int load_offset, Instruction* entry_offset,
327 const ConstantPoolKey& key);
328 Alignment IsAlignmentRequiredIfEmittedAt(Jump require_jump,
329 int pc_offset) const;
330
// Assembler the pool belongs to; supplied at construction.
331 Assembler* assm_;
332 // Keep track of the first instruction requiring a constant pool entry
333 // since the previous constant pool was emitted.
334 int first_use_32_ = -1;
335 int first_use_64_ = -1;
336 // We sort not according to insertion order, but since we do not insert
337 // addresses (for heap objects we insert an index which is created in
338 // increasing order), the order is deterministic. We map each entry to the
339 // pc offset of the load. We use a multimap because we need to record the
340 // pc offset of each load of the same constant so that the immediate of the
341 // loads can be back-patched when the pool is emitted.
342 std::multimap<ConstantPoolKey, int> entries_;
// Running counts returned by Entry32Count()/Entry64Count().
343 size_t entry32_count_ = 0;
344 size_t entry64_count_ = 0;
// Next pc offset at which to re-check emission (see SetNextCheckIn);
// old_next_check_ presumably restores it around a BlockScope — confirm.
345 int next_check_ = 0;
346 int old_next_check_ = 0;
// Depth of nested StartBlock()/EndBlock() pairs; non-zero means blocked.
347 int blocked_nesting_ = 0;
348};
349
350#endif // defined(V8_TARGET_ARCH_ARM64) || defined(V8_TARGET_ARCH_RISCV64) ||
351 // defined(V8_TARGET_ARCH_RISCV32)
352
353} // namespace internal
354} // namespace v8
355
356#endif // V8_CODEGEN_CONSTANT_POOL_H_
static int size(Type type)
ConstantPoolEntry(int position, intptr_t value, bool sharing_ok, RelocInfo::Mode rmode=RelocInfo::NO_INFO)
RelocInfo::Mode rmode() const
ConstantPoolEntry(int position, base::Double value, RelocInfo::Mode rmode=RelocInfo::NO_INFO)
int32_t offset
BalanceOverflow overflow
std::vector< EntryBuilder > entries_
ZoneVector< Entry > entries
int pc_offset
int position
Definition liveedit.cc:290
constexpr int kSystemPointerSize
Definition globals.h:410
V8_INLINE constexpr bool operator<(Builtin a, Builtin b)
Definition builtins.h:75
bool operator==(ExternalReference lhs, ExternalReference rhs)
constexpr int kDoubleSize
Definition globals.h:407
OptimizedCompilationInfo * info_
Definition pipeline.cc:305
#define CHECK(condition)
Definition logging.h:124
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define DCHECK(condition)
Definition logging.h:482
#define V8_EXPORT_PRIVATE
Definition macros.h:460
#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName)
Definition macros.h:130
#define V8_NODISCARD
Definition v8config.h:693
std::unique_ptr< ValueMirror > key