v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
object-macros.h
Go to the documentation of this file.
1// Copyright 2016 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5// Note 1: Any file that includes this one should include object-macros-undef.h
6// at the bottom.
7
8// Note 2: This file is deliberately missing the include guards (the undeffing
9// approach wouldn't work otherwise).
10//
11// PRESUBMIT_INTENTIONALLY_MISSING_INCLUDE_GUARD
12
13// The accessors with RELAXED_, ACQUIRE_, and RELEASE_ prefixes should be used
14// for fields that can be written to and read from multiple threads at the same
15// time. See comments in src/base/atomicops.h for the memory ordering semantics.
16
17// First, ensure that we do not include object-macros.h twice without including
18// object-macros-undef.h in between.
19#ifdef V8_OBJECT_MACROS_DEFINED
20#error Include object-macros-undef.h before including object-macros.h again
21#endif
22#define V8_OBJECT_MACROS_DEFINED
23
24#include "src/base/memory.h"
25
26// V8 objects are defined as:
27//
28// V8_OBJECT class Foo : public Base {
29// ...
30// } V8_OBJECT_END;
31//
32// These macros are to enable packing down to 4-byte alignment (i.e. int32
33// alignment, since we have int32 fields), and to add warnings which ensure that
34// there is no unwanted within-object padding.
35#if V8_CC_GNU
36#define V8_OBJECT_PUSH \
37 _Pragma("pack(push)") _Pragma("pack(4)") _Pragma("GCC diagnostic push") \
38 _Pragma("GCC diagnostic error \"-Wpadded\"")
39#define V8_OBJECT_POP _Pragma("pack(pop)") _Pragma("GCC diagnostic pop")
40#elif V8_CC_MSVC
41#define V8_OBJECT_PUSH \
42 __pragma(pack(push)) __pragma(pack(4)) __pragma(warning(push)) \
43 __pragma(warning(default : 4820))
44#define V8_OBJECT_POP __pragma(pack(pop)) __pragma(warning(pop))
45#else
46#error Unsupported compiler
47#endif
48
49#define V8_OBJECT V8_OBJECT_PUSH
50// Compilers want the pragmas to be a new statement, but we prefer to have
51// V8_OBJECT_END look like part of the definition. Insert a semicolon before the
52// pragma to make the compilers happy, and use static_assert(true) to swallow
53// the next semicolon.
54#define V8_OBJECT_END \
55 ; \
56 V8_OBJECT_POP static_assert(true)
57
58#define V8_OBJECT_INNER_CLASS V8_OBJECT_POP
59#define V8_OBJECT_INNER_CLASS_END \
60 ; \
61 V8_OBJECT_PUSH static_assert(true)
62
63// Since this changes visibility, it should always be last in a class
64// definition.
65#define OBJECT_CONSTRUCTORS(Type, ...) \
66 public: \
67 constexpr Type() : __VA_ARGS__() {} \
68 \
69 /* For every object, add a `->` operator which returns a pointer to this \
70 object. This will allow smoother transition between T and Tagged<T>. */ \
71 Type* operator->() { return this; } \
72 const Type* operator->() const { return this; } \
73 \
74 protected: \
75 friend class Tagged<Type>; \
76 \
77 /* Special constructor for constexpr construction which allows skipping type \
78 * checks. */ \
79 explicit constexpr V8_INLINE Type(Address ptr, HeapObject::SkipTypeCheckTag) \
80 : __VA_ARGS__(ptr, HeapObject::SkipTypeCheckTag()) {} \
81 \
82 inline void CheckTypeOnCast(); \
83 explicit inline Type(Address ptr)
84
85#define OBJECT_CONSTRUCTORS_IMPL(Type, Super) \
86 inline void Type::CheckTypeOnCast() { SLOW_DCHECK(Is##Type(*this)); } \
87 inline Type::Type(Address ptr) : Super(ptr) { CheckTypeOnCast(); }
88
89#define NEVER_READ_ONLY_SPACE \
90 inline Heap* GetHeap() const; \
91 inline Isolate* GetIsolate() const;
92
93// TODO(leszeks): Add checks in the factory that we never allocate these
94// objects in RO space.
95#define NEVER_READ_ONLY_SPACE_IMPL(Type) \
96 Heap* Type::GetHeap() const { return GetHeapFromWritableObject(*this); } \
97 Isolate* Type::GetIsolate() const { \
98 return GetIsolateFromWritableObject(*this); \
99 }
100
101#define DECL_PRIMITIVE_GETTER(name, type) inline type name() const;
102
103#define DECL_PRIMITIVE_SETTER(name, type) inline void set_##name(type value);
104
105#define DECL_PRIMITIVE_ACCESSORS(name, type) \
106 DECL_PRIMITIVE_GETTER(name, type) \
107 DECL_PRIMITIVE_SETTER(name, type)
108
109#define DECL_BOOLEAN_ACCESSORS(name) DECL_PRIMITIVE_ACCESSORS(name, bool)
110
111#define DECL_INT_ACCESSORS(name) DECL_PRIMITIVE_ACCESSORS(name, int)
112
113#define DECL_INT32_ACCESSORS(name) DECL_PRIMITIVE_ACCESSORS(name, int32_t)
114
115#define DECL_SANDBOXED_POINTER_ACCESSORS(name, type) \
116 DECL_PRIMITIVE_GETTER(name, type) \
117 DECL_PRIMITIVE_SETTER(name, type)
118
119#define DECL_UINT16_ACCESSORS(name) DECL_PRIMITIVE_ACCESSORS(name, uint16_t)
120
121#define DECL_INT16_ACCESSORS(name) DECL_PRIMITIVE_ACCESSORS(name, int16_t)
122
123#define DECL_UINT8_ACCESSORS(name) DECL_PRIMITIVE_ACCESSORS(name, uint8_t)
124
125#define DECL_RELAXED_PRIMITIVE_ACCESSORS(name, type) \
126 inline type name(RelaxedLoadTag) const; \
127 inline void set_##name(type value, RelaxedStoreTag);
128
129#define DECL_RELAXED_INT32_ACCESSORS(name) \
130 DECL_RELAXED_PRIMITIVE_ACCESSORS(name, int32_t)
131
132#define DECL_RELAXED_UINT32_ACCESSORS(name) \
133 DECL_RELAXED_PRIMITIVE_ACCESSORS(name, uint32_t)
134
135#define DECL_RELAXED_UINT16_ACCESSORS(name) \
136 DECL_RELAXED_PRIMITIVE_ACCESSORS(name, uint16_t)
137
138#define DECL_RELAXED_UINT8_ACCESSORS(name) \
139 DECL_RELAXED_PRIMITIVE_ACCESSORS(name, uint8_t)
140
141#define DECL_GETTER(name, ...) \
142 inline __VA_ARGS__ name() const; \
143 inline __VA_ARGS__ name(PtrComprCageBase cage_base) const;
144
145#define DEF_GETTER(holder, name, ...) \
146 __VA_ARGS__ holder::name() const { \
147 PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
148 return holder::name(cage_base); \
149 } \
150 __VA_ARGS__ holder::name(PtrComprCageBase cage_base) const
151
152#define DEF_RELAXED_GETTER(holder, name, ...) \
153 __VA_ARGS__ holder::name(RelaxedLoadTag tag) const { \
154 PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
155 return holder::name(cage_base, tag); \
156 } \
157 __VA_ARGS__ holder::name(PtrComprCageBase cage_base, RelaxedLoadTag) const
158
159#define DEF_ACQUIRE_GETTER(holder, name, ...) \
160 __VA_ARGS__ holder::name(AcquireLoadTag tag) const { \
161 PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
162 return holder::name(cage_base, tag); \
163 } \
164 __VA_ARGS__ holder::name(PtrComprCageBase cage_base, AcquireLoadTag) const
165
166#define DEF_HEAP_OBJECT_PREDICATE(holder, name) \
167 bool name(Tagged<holder> obj) { \
168 PtrComprCageBase cage_base = GetPtrComprCageBase(obj); \
169 return name(obj, cage_base); \
170 } \
171 bool name(Tagged<holder> obj, PtrComprCageBase cage_base)
172
173#define TQ_FIELD_TYPE(name, tq_type) \
174 static constexpr const char* k##name##TqFieldType = tq_type;
175
176#define DECL_FIELD_OFFSET_TQ(name, value, tq_type) \
177 static const int k##name##Offset = value; \
178 TQ_FIELD_TYPE(name, tq_type)
179
180#define DECL_SETTER(name, ...) \
181 inline void set_##name(__VA_ARGS__ value, \
182 WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
183
184#define DECL_ACCESSORS(name, ...) \
185 DECL_GETTER(name, __VA_ARGS__) \
186 DECL_SETTER(name, __VA_ARGS__)
187
188#define DECL_ACCESSORS_LOAD_TAG(name, type, tag_type) \
189 inline UNPAREN(type) name(tag_type tag) const; \
190 inline UNPAREN(type) name(PtrComprCageBase cage_base, tag_type) const;
191
192#define DECL_ACCESSORS_STORE_TAG(name, type, tag_type) \
193 inline void set_##name(UNPAREN(type) value, tag_type, \
194 WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
195
196#define DECL_RELAXED_GETTER(name, ...) \
197 DECL_ACCESSORS_LOAD_TAG(name, (__VA_ARGS__), RelaxedLoadTag)
198
199#define DECL_RELAXED_SETTER(name, ...) \
200 DECL_ACCESSORS_STORE_TAG(name, (__VA_ARGS__), RelaxedStoreTag)
201
202#define DECL_RELAXED_ACCESSORS(name, ...) \
203 DECL_RELAXED_GETTER(name, __VA_ARGS__) \
204 DECL_RELAXED_SETTER(name, __VA_ARGS__)
205
206#define DECL_ACQUIRE_GETTER(name, ...) \
207 DECL_ACCESSORS_LOAD_TAG(name, (__VA_ARGS__), AcquireLoadTag)
208
209#define DECL_RELEASE_SETTER(name, ...) \
210 DECL_ACCESSORS_STORE_TAG(name, (__VA_ARGS__), ReleaseStoreTag)
211
212#define DECL_RELEASE_ACQUIRE_ACCESSORS(name, ...) \
213 DECL_ACQUIRE_GETTER(name, __VA_ARGS__) \
214 DECL_RELEASE_SETTER(name, __VA_ARGS__)
215
216#define DEF_PRIMITIVE_ACCESSORS(holder, name, offset, type) \
217 type holder::name() const { return ReadField<type>(offset); } \
218 void holder::set_##name(type value) { WriteField<type>(offset, value); }
219
220#define INT_ACCESSORS(holder, name, offset) \
221 DEF_PRIMITIVE_ACCESSORS(holder, name, offset, int)
222
223#define INT32_ACCESSORS(holder, name, offset) \
224 DEF_PRIMITIVE_ACCESSORS(holder, name, offset, int32_t)
225
226#define UINT16_ACCESSORS(holder, name, offset) \
227 DEF_PRIMITIVE_ACCESSORS(holder, name, offset, uint16_t)
228
229#define UINT8_ACCESSORS(holder, name, offset) \
230 DEF_PRIMITIVE_ACCESSORS(holder, name, offset, uint8_t)
231
232#define RELAXED_INT32_ACCESSORS(holder, name, offset) \
233 int32_t holder::name(RelaxedLoadTag) const { \
234 return RELAXED_READ_INT32_FIELD(*this, offset); \
235 } \
236 void holder::set_##name(int32_t value, RelaxedStoreTag) { \
237 RELAXED_WRITE_INT32_FIELD(*this, offset, value); \
238 }
239
240#define RELAXED_UINT32_ACCESSORS(holder, name, offset) \
241 uint32_t holder::name(RelaxedLoadTag) const { \
242 return RELAXED_READ_UINT32_FIELD(*this, offset); \
243 } \
244 void holder::set_##name(uint32_t value, RelaxedStoreTag) { \
245 RELAXED_WRITE_UINT32_FIELD(*this, offset, value); \
246 }
247
248#define RELAXED_UINT16_ACCESSORS(holder, name, offset) \
249 uint16_t holder::name(RelaxedLoadTag) const { \
250 return RELAXED_READ_UINT16_FIELD(*this, offset); \
251 } \
252 void holder::set_##name(uint16_t value, RelaxedStoreTag) { \
253 RELAXED_WRITE_UINT16_FIELD(*this, offset, value); \
254 }
255
256#define RELAXED_UINT8_ACCESSORS(holder, name, offset) \
257 uint8_t holder::name(RelaxedLoadTag) const { \
258 return RELAXED_READ_UINT8_FIELD(*this, offset); \
259 } \
260 void holder::set_##name(uint8_t value, RelaxedStoreTag) { \
261 RELAXED_WRITE_UINT8_FIELD(*this, offset, value); \
262 }
263
264#define ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
265 set_condition) \
266 DEF_GETTER(holder, name, UNPAREN(type)) { \
267 UNPAREN(type) \
268 value = TaggedField<UNPAREN(type), offset>::load(cage_base, *this); \
269 DCHECK(get_condition); \
270 return value; \
271 } \
272 void holder::set_##name(UNPAREN(type) value, WriteBarrierMode mode) { \
273 DCHECK(set_condition); \
274 TaggedField<UNPAREN(type), offset>::store(*this, value); \
275 CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \
276 }
277
278#define ACCESSORS_CHECKED(holder, name, type, offset, condition) \
279 ACCESSORS_CHECKED2(holder, name, type, offset, condition, condition)
280
281#define ACCESSORS(holder, name, type, offset) \
282 ACCESSORS_CHECKED(holder, name, type, offset, true)
283
284// TODO(jgruber): Eventually, all accessors should be ported to the NOCAGE
285// variant (which doesn't define a PtrComprCageBase overload). Once that's
286// done, remove the cage-ful macros (e.g. ACCESSORS) and rename the cage-less
287// macros (e.g. ACCESSORS_NOCAGE).
288#define ACCESSORS_NOCAGE(holder, name, type, offset) \
289 type holder::name() const { \
290 PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
291 return TaggedField<type, offset>::load(cage_base, *this); \
292 } \
293 void holder::set_##name(type value, WriteBarrierMode mode) { \
294 TaggedField<type, offset>::store(*this, value); \
295 CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \
296 }
297
298#define RENAME_TORQUE_ACCESSORS(holder, name, torque_name, type) \
299 inline type holder::name() const { \
300 return TorqueGeneratedClass::torque_name(); \
301 } \
302 inline type holder::name(PtrComprCageBase cage_base) const { \
303 return TorqueGeneratedClass::torque_name(cage_base); \
304 } \
305 inline void holder::set_##name(type value, WriteBarrierMode mode) { \
306 TorqueGeneratedClass::set_##torque_name(value, mode); \
307 }
308
309#define RENAME_PRIMITIVE_TORQUE_ACCESSORS(holder, name, torque_name, type) \
310 type holder::name() const { return TorqueGeneratedClass::torque_name(); } \
311 void holder::set_##name(type value) { \
312 TorqueGeneratedClass::set_##torque_name(value); \
313 }
314
315#define ACCESSORS_RELAXED_CHECKED2(holder, name, type, offset, get_condition, \
316 set_condition) \
317 type holder::name() const { \
318 PtrComprCageBase cage_base = GetPtrComprCageBase(*this); \
319 return holder::name(cage_base); \
320 } \
321 type holder::name(PtrComprCageBase cage_base) const { \
322 type value = TaggedField<type, offset>::Relaxed_Load(cage_base, *this); \
323 DCHECK(get_condition); \
324 return value; \
325 } \
326 void holder::set_##name(type value, WriteBarrierMode mode) { \
327 DCHECK(set_condition); \
328 TaggedField<type, offset>::Relaxed_Store(*this, value); \
329 CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \
330 }
331
332#define ACCESSORS_RELAXED_CHECKED(holder, name, type, offset, condition) \
333 ACCESSORS_RELAXED_CHECKED2(holder, name, type, offset, condition, condition)
334
335#define ACCESSORS_RELAXED(holder, name, type, offset) \
336 ACCESSORS_RELAXED_CHECKED(holder, name, type, offset, true)
337
338// Similar to ACCESSORS_RELAXED above but with respective relaxed tags.
339#define RELAXED_ACCESSORS_CHECKED2(holder, name, type, offset, get_condition, \
340 set_condition) \
341 DEF_RELAXED_GETTER(holder, name, UNPAREN(type)) { \
342 UNPAREN(type) \
343 value = \
344 TaggedField<UNPAREN(type), offset>::Relaxed_Load(cage_base, *this); \
345 DCHECK(get_condition); \
346 return value; \
347 } \
348 void holder::set_##name(UNPAREN(type) value, RelaxedStoreTag, \
349 WriteBarrierMode mode) { \
350 DCHECK(set_condition); \
351 TaggedField<UNPAREN(type), offset>::Relaxed_Store(*this, value); \
352 CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \
353 }
354
355#define RELAXED_ACCESSORS_CHECKED(holder, name, type, offset, condition) \
356 RELAXED_ACCESSORS_CHECKED2(holder, name, type, offset, condition, condition)
357
358#define RELAXED_ACCESSORS(holder, name, type, offset) \
359 RELAXED_ACCESSORS_CHECKED(holder, name, type, offset, true)
360
361#define RELEASE_ACQUIRE_GETTER_CHECKED(holder, name, type, offset, \
362 get_condition) \
363 DEF_ACQUIRE_GETTER(holder, name, UNPAREN(type)) { \
364 UNPAREN(type) \
365 value = \
366 TaggedField<UNPAREN(type), offset>::Acquire_Load(cage_base, *this); \
367 DCHECK(get_condition); \
368 return value; \
369 }
370
371#define RELEASE_ACQUIRE_SETTER_CHECKED(holder, name, type, offset, \
372 set_condition) \
373 void holder::set_##name(UNPAREN(type) value, ReleaseStoreTag, \
374 WriteBarrierMode mode) { \
375 DCHECK(set_condition); \
376 TaggedField<UNPAREN(type), offset>::Release_Store(*this, value); \
377 CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode); \
378 }
379
380#define RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset, \
381 get_condition, set_condition) \
382 RELEASE_ACQUIRE_GETTER_CHECKED(holder, name, type, offset, get_condition) \
383 RELEASE_ACQUIRE_SETTER_CHECKED(holder, name, type, offset, set_condition)
384
385#define RELEASE_ACQUIRE_ACCESSORS_CHECKED(holder, name, type, offset, \
386 condition) \
387 RELEASE_ACQUIRE_ACCESSORS_CHECKED2(holder, name, type, offset, condition, \
388 condition)
389
390#define RELEASE_ACQUIRE_ACCESSORS(holder, name, type, offset) \
391 RELEASE_ACQUIRE_ACCESSORS_CHECKED(holder, name, type, offset, true)
392
393// Getter that returns a Smi as an int and writes an int as a Smi.
394#define SMI_ACCESSORS_CHECKED(holder, name, offset, condition) \
395 int holder::name() const { \
396 DCHECK(condition); \
397 Tagged<Smi> value = TaggedField<Smi, offset>::load(*this); \
398 return value.value(); \
399 } \
400 void holder::set_##name(int value) { \
401 DCHECK(condition); \
402 TaggedField<Smi, offset>::store(*this, Smi::FromInt(value)); \
403 }
404
405#define SMI_ACCESSORS(holder, name, offset) \
406 SMI_ACCESSORS_CHECKED(holder, name, offset, true)
407
408#define DECL_RELEASE_ACQUIRE_INT_ACCESSORS(name) \
409 inline int name(AcquireLoadTag) const; \
410 inline void set_##name(int value, ReleaseStoreTag);
411
412#define RELEASE_ACQUIRE_SMI_ACCESSORS(holder, name, offset) \
413 int holder::name(AcquireLoadTag) const { \
414 Tagged<Smi> value = TaggedField<Smi, offset>::Acquire_Load(*this); \
415 return value.value(); \
416 } \
417 void holder::set_##name(int value, ReleaseStoreTag) { \
418 TaggedField<Smi, offset>::Release_Store(*this, Smi::FromInt(value)); \
419 }
420
421#define DECL_RELAXED_INT_ACCESSORS(name) \
422 inline int name(RelaxedLoadTag) const; \
423 inline void set_##name(int value, RelaxedStoreTag);
424
425#define RELAXED_SMI_ACCESSORS(holder, name, offset) \
426 int holder::name(RelaxedLoadTag) const { \
427 Tagged<Smi> value = TaggedField<Smi, offset>::Relaxed_Load(*this); \
428 return value.value(); \
429 } \
430 void holder::set_##name(int value, RelaxedStoreTag) { \
431 TaggedField<Smi, offset>::Relaxed_Store(*this, Smi::FromInt(value)); \
432 }
433
434#define BOOL_GETTER(holder, field, name, offset) \
435 bool holder::name() const { return BooleanBit::get(field(), offset); }
436
437#define BOOL_ACCESSORS(holder, field, name, offset) \
438 bool holder::name() const { return BooleanBit::get(field(), offset); } \
439 void holder::set_##name(bool value) { \
440 set_##field(BooleanBit::set(field(), offset, value)); \
441 }
442
443#define DECL_RELAXED_BOOL_ACCESSORS(name) \
444 inline bool name(RelaxedLoadTag) const; \
445 inline void set_##name(bool value, RelaxedStoreTag);
446
447#define RELAXED_BOOL_ACCESSORS(holder, field, name, offset) \
448 bool holder::name(RelaxedLoadTag) const { \
449 return BooleanBit::get(field(kRelaxedLoad), offset); \
450 } \
451 void holder::set_##name(bool value, RelaxedStoreTag) { \
452 set_##field(BooleanBit::set(field(kRelaxedLoad), offset, value), \
453 kRelaxedStore); \
454 }
455
456// Host objects in ReadOnlySpace can't define the isolate-less accessor.
457#define DECL_LAZY_EXTERNAL_POINTER_ACCESSORS_MAYBE_READ_ONLY_HOST(name, type) \
458 inline void init_##name(); \
459 inline bool has_##name() const; \
460 inline type name(i::IsolateForSandbox isolate) const; \
461 inline void set_##name(i::IsolateForSandbox isolate, const type value);
462
463// Host objects in ReadOnlySpace can't define the isolate-less accessor.
464#define LAZY_EXTERNAL_POINTER_ACCESSORS_MAYBE_READ_ONLY_HOST_CHECKED2( \
465 holder, name, type, offset, tag, get_condition, set_condition) \
466 void holder::init_##name() { \
467 HeapObject::SetupLazilyInitializedExternalPointerField(offset); \
468 } \
469 bool holder::has_##name() const { \
470 return HeapObject::IsLazilyInitializedExternalPointerFieldInitialized( \
471 offset); \
472 } \
473 type holder::name(i::IsolateForSandbox isolate) const { \
474 DCHECK(get_condition); \
475 /* This is a workaround for MSVC error C2440 not allowing */ \
476 /* reinterpret casts to the same type. */ \
477 struct C2440 {}; \
478 Address result = \
479 HeapObject::ReadExternalPointerField<tag>(offset, isolate); \
480 return reinterpret_cast<type>(reinterpret_cast<C2440*>(result)); \
481 } \
482 void holder::set_##name(i::IsolateForSandbox isolate, const type value) { \
483 DCHECK(set_condition); \
484 /* This is a workaround for MSVC error C2440 not allowing */ \
485 /* reinterpret casts to the same type. */ \
486 struct C2440 {}; \
487 Address the_value = \
488 reinterpret_cast<Address>(reinterpret_cast<const C2440*>(value)); \
489 HeapObject::WriteLazilyInitializedExternalPointerField<tag>( \
490 offset, isolate, the_value); \
491 }
492
493#define LAZY_EXTERNAL_POINTER_ACCESSORS_MAYBE_READ_ONLY_HOST_CHECKED( \
494 holder, name, type, offset, tag, condition) \
495 LAZY_EXTERNAL_POINTER_ACCESSORS_MAYBE_READ_ONLY_HOST_CHECKED2( \
496 holder, name, type, offset, tag, condition, condition)
497
498#define LAZY_EXTERNAL_POINTER_ACCESSORS_MAYBE_READ_ONLY_HOST( \
499 holder, name, type, offset, tag) \
500 LAZY_EXTERNAL_POINTER_ACCESSORS_MAYBE_READ_ONLY_HOST_CHECKED2( \
501 holder, name, type, offset, tag, true, true)
502
503// Host objects in ReadOnlySpace can't define the isolate-less accessor.
504#define DECL_EXTERNAL_POINTER_ACCESSORS_MAYBE_READ_ONLY_HOST(name, type) \
505 inline type name(i::IsolateForSandbox isolate) const; \
506 inline void init_##name(i::IsolateForSandbox isolate, \
507 const type initial_value); \
508 inline void set_##name(i::IsolateForSandbox isolate, const type value);
509
510// Host objects in ReadOnlySpace can't define the isolate-less accessor.
511#define EXTERNAL_POINTER_ACCESSORS_MAYBE_READ_ONLY_HOST(holder, name, type, \
512 offset, tag) \
513 type holder::name(i::IsolateForSandbox isolate) const { \
514 /* This is a workaround for MSVC error C2440 not allowing */ \
515 /* reinterpret casts to the same type. */ \
516 struct C2440 {}; \
517 Address result = \
518 HeapObject::ReadExternalPointerField<tag>(offset, isolate); \
519 return reinterpret_cast<type>(reinterpret_cast<C2440*>(result)); \
520 } \
521 void holder::init_##name(i::IsolateForSandbox isolate, \
522 const type initial_value) { \
523 /* This is a workaround for MSVC error C2440 not allowing */ \
524 /* reinterpret casts to the same type. */ \
525 struct C2440 {}; \
526 Address the_value = reinterpret_cast<Address>( \
527 reinterpret_cast<const C2440*>(initial_value)); \
528 HeapObject::InitExternalPointerField<tag>(offset, isolate, the_value); \
529 } \
530 void holder::set_##name(i::IsolateForSandbox isolate, const type value) { \
531 /* This is a workaround for MSVC error C2440 not allowing */ \
532 /* reinterpret casts to the same type. */ \
533 struct C2440 {}; \
534 Address the_value = \
535 reinterpret_cast<Address>(reinterpret_cast<const C2440*>(value)); \
536 HeapObject::WriteExternalPointerField<tag>(offset, isolate, the_value); \
537 }
538
539#define DECL_EXTERNAL_POINTER_ACCESSORS(name, type) \
540 inline type name() const; \
541 DECL_EXTERNAL_POINTER_ACCESSORS_MAYBE_READ_ONLY_HOST(name, type)
542
543#define EXTERNAL_POINTER_ACCESSORS(holder, name, type, offset, tag) \
544 type holder::name() const { \
545 i::IsolateForSandbox isolate = GetIsolateForSandbox(*this); \
546 return holder::name(isolate); \
547 } \
548 EXTERNAL_POINTER_ACCESSORS_MAYBE_READ_ONLY_HOST(holder, name, type, offset, \
549 tag)
550
551#define DECL_TRUSTED_POINTER_GETTERS(name, type) \
552 /* Trusted pointers currently always have release-acquire semantics. */ \
553 /* However, we still expose explicit release-acquire accessors so it */ \
554 /* can be made clear when they are required. */ \
555 /* If desired, we could create separate {Read|Write}TrustedPointer */ \
556 /* routines for relaxed- and release-acquire semantics in the future. */ \
557 inline Tagged<type> name(IsolateForSandbox isolate) const; \
558 inline Tagged<type> name(IsolateForSandbox isolate, AcquireLoadTag) const; \
559 inline bool has_##name() const; \
560 /* Checks if the field in question is populated but unpublished. Most */ \
561 /* code shouldn't need to care (i.e. may assume regularly published */ \
562 /* fields), but some code needs to be robust to both situations. */ \
563 inline bool has_##name##_unpublished(IsolateForSandbox isolate) const;
564
565#define DECL_TRUSTED_POINTER_SETTERS(name, type) \
566 /* Trusted pointers currently always have release-acquire semantics. */ \
567 /* However, we still expose explicit release-acquire accessors so it */ \
568 /* can be made clear when they are required. */ \
569 /* If desired, we could create separate {Read|Write}TrustedPointer */ \
570 /* routines for relaxed- and release-acquire semantics in the future. */ \
571 inline void set_##name(Tagged<type> value, \
572 WriteBarrierMode mode = UPDATE_WRITE_BARRIER); \
573 inline void set_##name(Tagged<type> value, ReleaseStoreTag, \
574 WriteBarrierMode mode = UPDATE_WRITE_BARRIER); \
575 inline void clear_##name();
576
577#define DECL_TRUSTED_POINTER_ACCESSORS(name, type) \
578 DECL_TRUSTED_POINTER_GETTERS(name, type) \
579 DECL_TRUSTED_POINTER_SETTERS(name, type)
580
581#define TRUSTED_POINTER_ACCESSORS(holder, name, type, offset, tag) \
582 Tagged<type> holder::name(IsolateForSandbox isolate) const { \
583 return name(isolate, kAcquireLoad); \
584 } \
585 Tagged<type> holder::name(IsolateForSandbox isolate, AcquireLoadTag) const { \
586 DCHECK(has_##name()); \
587 return Cast<type>(ReadTrustedPointerField<tag>(offset, isolate)); \
588 } \
589 void holder::set_##name(Tagged<type> value, WriteBarrierMode mode) { \
590 set_##name(value, kReleaseStore, mode); \
591 } \
592 void holder::set_##name(Tagged<type> value, ReleaseStoreTag, \
593 WriteBarrierMode mode) { \
594 WriteTrustedPointerField<tag>(offset, value); \
595 CONDITIONAL_TRUSTED_POINTER_WRITE_BARRIER(*this, offset, tag, value, \
596 mode); \
597 } \
598 bool holder::has_##name() const { \
599 return !IsTrustedPointerFieldEmpty(offset); \
600 } \
601 bool holder::has_##name##_unpublished(IsolateForSandbox isolate) const { \
602 return IsTrustedPointerFieldUnpublished(offset, tag, isolate); \
603 } \
604 void holder::clear_##name() { ClearTrustedPointerField(offset); }
605
606#define DECL_CODE_POINTER_ACCESSORS(name) \
607 DECL_TRUSTED_POINTER_ACCESSORS(name, Code)
608#define CODE_POINTER_ACCESSORS(holder, name, offset) \
609 TRUSTED_POINTER_ACCESSORS(holder, name, Code, offset, kCodeIndirectPointerTag)
610
611// Accessors for "protected" pointers, i.e. references from one trusted object
612// to another trusted object. For these pointers it can be assumed that neither
613// the pointer nor the pointed-to object can be manipulated by an attacker.
614#define DECL_PROTECTED_POINTER_ACCESSORS(name, type) \
615 inline Tagged<type> name() const; \
616 inline void set_##name(Tagged<type> value, \
617 WriteBarrierMode mode = UPDATE_WRITE_BARRIER); \
618 inline bool has_##name() const; \
619 inline void clear_##name();
620
621#define PROTECTED_POINTER_ACCESSORS(holder, name, type, offset) \
622 static_assert(std::is_base_of<TrustedObject, holder>::value); \
623 Tagged<type> holder::name() const { \
624 DCHECK(has_##name()); \
625 return Cast<type>(ReadProtectedPointerField(offset)); \
626 } \
627 void holder::set_##name(Tagged<type> value, WriteBarrierMode mode) { \
628 WriteProtectedPointerField(offset, value); \
629 CONDITIONAL_PROTECTED_POINTER_WRITE_BARRIER(*this, offset, value, mode); \
630 } \
631 bool holder::has_##name() const { \
632 return !IsProtectedPointerFieldEmpty(offset); \
633 } \
634 void holder::clear_##name() { return ClearProtectedPointerField(offset); }
635
636#define DECL_RELEASE_ACQUIRE_PROTECTED_POINTER_ACCESSORS(name, type) \
637 inline Tagged<type> name(AcquireLoadTag) const; \
638 inline void set_##name(Tagged<type> value, ReleaseStoreTag, \
639 WriteBarrierMode mode = UPDATE_WRITE_BARRIER); \
640 inline bool has_##name(AcquireLoadTag) const; \
641 inline void clear_##name(ReleaseStoreTag);
642
643#define RELEASE_ACQUIRE_PROTECTED_POINTER_ACCESSORS(holder, name, type, \
644 offset) \
645 static_assert(std::is_base_of<TrustedObject, holder>::value); \
646 Tagged<type> holder::name(AcquireLoadTag tag) const { \
647 DCHECK(has_##name(tag)); \
648 return Cast<type>(ReadProtectedPointerField(offset, tag)); \
649 } \
650 void holder::set_##name(Tagged<type> value, ReleaseStoreTag tag, \
651 WriteBarrierMode mode) { \
652 WriteProtectedPointerField(offset, value, tag); \
653 CONDITIONAL_PROTECTED_POINTER_WRITE_BARRIER(*this, offset, value, mode); \
654 } \
655 bool holder::has_##name(AcquireLoadTag tag) const { \
656 return !IsProtectedPointerFieldEmpty(offset, tag); \
657 } \
658 void holder::clear_##name(ReleaseStoreTag tag) { \
659 return ClearProtectedPointerField(offset, tag); \
660 }
661
662#define BIT_FIELD_ACCESSORS2(holder, get_field, set_field, name, BitField) \
663 typename BitField::FieldType holder::name() const { \
664 return BitField::decode(get_field()); \
665 } \
666 void holder::set_##name(typename BitField::FieldType value) { \
667 set_##set_field(BitField::update(set_field(), value)); \
668 }
669
670#define BIT_FIELD_ACCESSORS(holder, field, name, BitField) \
671 BIT_FIELD_ACCESSORS2(holder, field, field, name, BitField)
672
673#define RELAXED_INT16_ACCESSORS(holder, name, offset) \
674 int16_t holder::name() const { \
675 return RELAXED_READ_INT16_FIELD(*this, offset); \
676 } \
677 void holder::set_##name(int16_t value) { \
678 RELAXED_WRITE_INT16_FIELD(*this, offset, value); \
679 }
680
681#define FIELD_ADDR(p, offset) ((p).ptr() + offset - kHeapObjectTag)
682
683#define SEQ_CST_READ_FIELD(p, offset) \
684 TaggedField<Object>::SeqCst_Load(p, offset)
685
686#define ACQUIRE_READ_FIELD(p, offset) \
687 TaggedField<Object>::Acquire_Load(p, offset)
688
689#define RELAXED_READ_FIELD(p, offset) \
690 TaggedField<Object>::Relaxed_Load(p, offset)
691
692#define RELAXED_READ_WEAK_FIELD(p, offset) \
693 TaggedField<MaybeObject>::Relaxed_Load(p, offset)
694
695#define WRITE_FIELD(p, offset, value) \
696 TaggedField<Object>::store(p, offset, value)
697
698#define SEQ_CST_WRITE_FIELD(p, offset, value) \
699 TaggedField<Object>::SeqCst_Store(p, offset, value)
700
701#define RELEASE_WRITE_FIELD(p, offset, value) \
702 TaggedField<Object>::Release_Store(p, offset, value)
703
704#define RELAXED_WRITE_FIELD(p, offset, value) \
705 TaggedField<Object>::Relaxed_Store(p, offset, value)
706
707#define RELAXED_WRITE_WEAK_FIELD(p, offset, value) \
708 TaggedField<MaybeObject>::Relaxed_Store(p, offset, value)
709
710#define SEQ_CST_SWAP_FIELD(p, offset, value) \
711 TaggedField<Object>::SeqCst_Swap(p, offset, value)
712
713#define SEQ_CST_COMPARE_AND_SWAP_FIELD(p, offset, expected, value) \
714 TaggedField<Object>::SeqCst_CompareAndSwap(p, offset, expected, value)
715
716#ifdef V8_DISABLE_WRITE_BARRIERS
717#define WRITE_BARRIER(object, offset, value)
718#else
719#define WRITE_BARRIER(object, offset, value) \
720 do { \
721 DCHECK(HeapLayout::IsOwnedByAnyHeap(object)); \
722 static_assert(kTaggedCanConvertToRawObjects); \
723 /* For write barriers, it doesn't matter if the slot is strong or weak, */ \
724 /* so use the most generic slot (a maybe weak one). */ \
725 WriteBarrier::ForValue(object, Tagged(object)->RawMaybeWeakField(offset), \
726 value, UPDATE_WRITE_BARRIER); \
727 } while (false)
728#endif
729
730#ifdef V8_DISABLE_WRITE_BARRIERS
731#define EXTERNAL_POINTER_WRITE_BARRIER(object, offset, tag)
732#else
733#define EXTERNAL_POINTER_WRITE_BARRIER(object, offset, tag) \
734 do { \
735 DCHECK(HeapLayout::IsOwnedByAnyHeap(object)); \
736 WriteBarrier::ForExternalPointer( \
737 object, Tagged(object)->RawExternalPointerField(offset, tag), \
738 UPDATE_WRITE_BARRIER); \
739 } while (false)
740#endif
741
742#ifdef V8_DISABLE_WRITE_BARRIERS
743#define INDIRECT_POINTER_WRITE_BARRIER(object, offset, tag, value)
744#else
745#define INDIRECT_POINTER_WRITE_BARRIER(object, offset, tag, value) \
746 do { \
747 DCHECK(HeapLayout::IsOwnedByAnyHeap(object)); \
748 WriteBarrier::ForIndirectPointer( \
749 object, Tagged(object)->RawIndirectPointerField(offset, tag), value, \
750 UPDATE_WRITE_BARRIER); \
751 } while (false)
752#endif
753
754#ifdef V8_DISABLE_WRITE_BARRIERS
755#define JS_DISPATCH_HANDLE_WRITE_BARRIER(object, handle)
756#else
757#define JS_DISPATCH_HANDLE_WRITE_BARRIER(object, handle) \
758 do { \
759 DCHECK(HeapLayout::IsOwnedByAnyHeap(object)); \
760 WriteBarrier::ForJSDispatchHandle(object, handle, UPDATE_WRITE_BARRIER); \
761 } while (false)
762#endif
763
// Write barrier for a tagged field that honours the caller-provided barrier
// `mode`. Three build variants:
//  - V8_DISABLE_WRITE_BARRIERS: expands to nothing.
//  - V8_ENABLE_UNCONDITIONAL_WRITE_BARRIERS: ignores `mode` and always emits
//    the full WRITE_BARRIER.
//  - default: passes `mode` through to WriteBarrier::ForValue.
#ifdef V8_DISABLE_WRITE_BARRIERS
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode)
#elif V8_ENABLE_UNCONDITIONAL_WRITE_BARRIERS
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode) \
  WRITE_BARRIER(object, offset, value)
#else
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode)                \
  do {                                                                        \
    DCHECK(HeapLayout::IsOwnedByAnyHeap(object));                             \
    /* For write barriers, it doesn't matter if the slot is strong or weak, */ \
    /* so use the most generic slot (a maybe weak one). */                    \
    WriteBarrier::ForValue(object, (object)->RawMaybeWeakField(offset), value, \
                           mode);                                             \
  } while (false)
#endif
779
// Mode-aware variant of EXTERNAL_POINTER_WRITE_BARRIER: forwards the caller's
// `mode` instead of forcing UPDATE_WRITE_BARRIER.
#ifdef V8_DISABLE_WRITE_BARRIERS
#define CONDITIONAL_EXTERNAL_POINTER_WRITE_BARRIER(object, offset, tag, mode)
#else
#define CONDITIONAL_EXTERNAL_POINTER_WRITE_BARRIER(object, offset, tag, mode) \
  do {                                                                        \
    DCHECK(HeapLayout::IsOwnedByAnyHeap(object));                             \
    WriteBarrier::ForExternalPointer(                                         \
        object, Tagged(object)->RawExternalPointerField(offset, tag), mode);  \
  } while (false)
#endif
// Mode-aware variant of INDIRECT_POINTER_WRITE_BARRIER.
// NOTE(review): this variant reaches the field via `(object).RawIndirect...`
// while the unconditional variant uses `Tagged(object)->...` — presumably both
// spellings work for all call sites; confirm before unifying.
#ifdef V8_DISABLE_WRITE_BARRIERS
#define CONDITIONAL_INDIRECT_POINTER_WRITE_BARRIER(object, offset, tag, value, \
                                                   mode)
#else
#define CONDITIONAL_INDIRECT_POINTER_WRITE_BARRIER(object, offset, tag, value, \
                                                   mode)                       \
  do {                                                                         \
    DCHECK(HeapLayout::IsOwnedByAnyHeap(object));                              \
    WriteBarrier::ForIndirectPointer(                                          \
        object, (object).RawIndirectPointerField(offset, tag), value, mode);   \
  } while (false)
#endif
802
// Mode-aware write barrier for a trusted pointer field. With the sandbox
// enabled, trusted pointers are stored as indirect pointers; otherwise the
// regular conditional barrier applies.
#ifdef V8_ENABLE_SANDBOX
#define CONDITIONAL_TRUSTED_POINTER_WRITE_BARRIER(object, offset, tag, value, \
                                                  mode)                       \
  CONDITIONAL_INDIRECT_POINTER_WRITE_BARRIER(object, offset, tag, value, mode)
#else
// NOTE(review): the non-sandbox variant hard-codes `*this` and ignores the
// `object` argument, so it is only usable inside member functions of the host
// object — confirm this is intentional. It also ends with a ';' unlike the
// sandbox variant.
#define CONDITIONAL_TRUSTED_POINTER_WRITE_BARRIER(object, offset, tag, value, \
                                                  mode)                       \
  CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);
#endif  // V8_ENABLE_SANDBOX
// Code pointers are trusted pointers tagged with kCodeIndirectPointerTag.
#define CONDITIONAL_CODE_POINTER_WRITE_BARRIER(object, offset, value, mode) \
  CONDITIONAL_TRUSTED_POINTER_WRITE_BARRIER(                                \
      object, offset, kCodeIndirectPointerTag, value, mode)
815
// Mode-aware write barrier for a protected pointer field.
// NOTE(review): unlike its siblings, this macro has no
// V8_DISABLE_WRITE_BARRIERS no-op variant — confirm that is intentional.
#define CONDITIONAL_PROTECTED_POINTER_WRITE_BARRIER(object, offset, value, \
                                                    mode)                  \
  do {                                                                     \
    DCHECK(HeapLayout::IsOwnedByAnyHeap(object));                          \
    WriteBarrier::ForProtectedPointer(                                     \
        object, (object).RawProtectedPointerField(offset), value, mode);   \
  } while (false)
823
// Mode-aware variant of JS_DISPATCH_HANDLE_WRITE_BARRIER.
#ifdef V8_DISABLE_WRITE_BARRIERS
#define CONDITIONAL_JS_DISPATCH_HANDLE_WRITE_BARRIER(object, handle, mode)
#else
#define CONDITIONAL_JS_DISPATCH_HANDLE_WRITE_BARRIER(object, handle, mode) \
  do {                                                                     \
    DCHECK(HeapLayout::IsOwnedByAnyHeap(object));                          \
    WriteBarrier::ForJSDispatchHandle(object, handle, mode);               \
  } while (false)
#endif
833
// Acquire-ordered loads of raw signed integer fields at `offset` of object
// `p` (see src/base/atomicops.h for the ordering semantics).
#define ACQUIRE_READ_INT8_FIELD(p, offset) \
  static_cast<int8_t>(base::Acquire_Load( \
      reinterpret_cast<const base::Atomic8*>(FIELD_ADDR(p, offset))))

#define ACQUIRE_READ_INT32_FIELD(p, offset) \
  static_cast<int32_t>(base::Acquire_Load( \
      reinterpret_cast<const base::Atomic32*>(FIELD_ADDR(p, offset))))
841
// Relaxed (no ordering guarantees) 8-bit field accessors.
// NOTE(review): the write macros end in ';', so `MACRO(...);` at a call site
// expands to an extra empty statement — kept as-is for compatibility.
#define RELAXED_WRITE_INT8_FIELD(p, offset, value) \
  base::Relaxed_Store(reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
                      static_cast<base::Atomic8>(value));
#define RELAXED_READ_INT8_FIELD(p, offset) \
  static_cast<int8_t>(base::Relaxed_Load( \
      reinterpret_cast<const base::Atomic8*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_UINT8_FIELD(p, offset, value) \
  base::Relaxed_Store(reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
                      static_cast<base::Atomic8>(value));
#define RELAXED_READ_UINT8_FIELD(p, offset) \
  static_cast<uint8_t>(base::Relaxed_Load( \
      reinterpret_cast<const base::Atomic8*>(FIELD_ADDR(p, offset))))
855
// Relaxed 16-bit field accessors.
#define RELAXED_READ_UINT16_FIELD(p, offset) \
  static_cast<uint16_t>(base::Relaxed_Load( \
      reinterpret_cast<const base::Atomic16*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_UINT16_FIELD(p, offset, value) \
  base::Relaxed_Store( \
      reinterpret_cast<base::Atomic16*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic16>(value));

#define RELAXED_READ_INT16_FIELD(p, offset) \
  static_cast<int16_t>(base::Relaxed_Load( \
      reinterpret_cast<const base::Atomic16*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_INT16_FIELD(p, offset, value) \
  base::Relaxed_Store( \
      reinterpret_cast<base::Atomic16*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic16>(value));
873
// 32-bit unsigned field accessors: relaxed load/store plus an acquire load.
#define RELAXED_READ_UINT32_FIELD(p, offset) \
  static_cast<uint32_t>(base::Relaxed_Load( \
      reinterpret_cast<const base::Atomic32*>(FIELD_ADDR(p, offset))))

#define ACQUIRE_READ_UINT32_FIELD(p, offset) \
  static_cast<uint32_t>(base::Acquire_Load( \
      reinterpret_cast<const base::Atomic32*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_UINT32_FIELD(p, offset, value) \
  base::Relaxed_Store( \
      reinterpret_cast<base::Atomic32*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic32>(value));
886
// Release-ordered stores (pair with the ACQUIRE_READ_* loads above).
#define RELEASE_WRITE_INT8_FIELD(p, offset, value) \
  base::Release_Store(reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
                      static_cast<base::Atomic8>(value));

#define RELEASE_WRITE_UINT32_FIELD(p, offset, value) \
  base::Release_Store( \
      reinterpret_cast<base::Atomic32*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic32>(value));
895
// Relaxed signed loads. The 64-bit variant only exists on 64-bit hosts,
// where base::Atomic64 is available.
#define RELAXED_READ_INT32_FIELD(p, offset) \
  static_cast<int32_t>(base::Relaxed_Load( \
      reinterpret_cast<const base::Atomic32*>(FIELD_ADDR(p, offset))))

#if defined(V8_HOST_ARCH_64_BIT)
#define RELAXED_READ_INT64_FIELD(p, offset) \
  static_cast<int64_t>(base::Relaxed_Load( \
      reinterpret_cast<const base::Atomic64*>(FIELD_ADDR(p, offset))))
#endif
905
// 32-bit signed stores. Unlike the write macros above, these do NOT carry a
// trailing ';', so call sites must supply their own.
#define RELEASE_WRITE_INT32_FIELD(p, offset, value) \
  base::Release_Store( \
      reinterpret_cast<base::Atomic32*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic32>(value))

#define RELAXED_WRITE_INT32_FIELD(p, offset, value) \
  base::Relaxed_Store( \
      reinterpret_cast<base::Atomic32*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic32>(value))
915
// The `int`-typed aliases below rely on `int` being exactly 32 bits wide.
static_assert(sizeof(int32_t) == sizeof(int),
              "sizeof int must match sizeof int32_t");
918
// `int`-typed conveniences; safe because of the static_assert above.
#define RELAXED_READ_INT_FIELD(p, offset) RELAXED_READ_INT32_FIELD(p, offset)

#define RELAXED_WRITE_INT_FIELD(p, offset, value) \
  RELAXED_WRITE_INT32_FIELD(p, offset, value)
923
// Likewise, the `unsigned`-typed aliases rely on `unsigned` being 32 bits.
static_assert(sizeof(uint32_t) == sizeof(unsigned),
              "sizeof unsigned must match sizeof uint32_t");
926
// `unsigned`-typed conveniences; safe because of the static_assert above.
#define RELAXED_READ_UINT_FIELD(p, offset) RELAXED_READ_UINT32_FIELD(p, offset)

#define RELAXED_WRITE_UINT_FIELD(p, offset, value) \
  RELAXED_WRITE_UINT32_FIELD(p, offset, value)
931
// "Byte" accessors: uint8_t-typed, with relaxed and acquire/release flavors.
#define RELAXED_READ_BYTE_FIELD(p, offset) \
  static_cast<uint8_t>(base::Relaxed_Load( \
      reinterpret_cast<const base::Atomic8*>(FIELD_ADDR(p, offset))))

#define ACQUIRE_READ_BYTE_FIELD(p, offset) \
  static_cast<uint8_t>(base::Acquire_Load( \
      reinterpret_cast<const base::Atomic8*>(FIELD_ADDR(p, offset))))

#define RELAXED_WRITE_BYTE_FIELD(p, offset, value) \
  base::Relaxed_Store(reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
                      static_cast<base::Atomic8>(value));

#define RELEASE_WRITE_BYTE_FIELD(p, offset, value) \
  base::Release_Store(reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
                      static_cast<base::Atomic8>(value));
947
// Declares a Name##Print member; compiled out unless OBJECT_PRINT is set.
#ifdef OBJECT_PRINT
#define DECL_PRINTER(Name) void Name##Print(std::ostream& os);
#else
#define DECL_PRINTER(Name)
#endif
953
// Heap-verifier declarations; compiled out unless VERIFY_HEAP is set.
// EXPORT_ variants additionally mark the declaration V8_EXPORT_PRIVATE;
// STATIC_ variants take the object as an explicit Tagged<Name> argument
// instead of being member functions.
#ifdef VERIFY_HEAP
#define DECL_VERIFIER(Name) void Name##Verify(Isolate* isolate);
#define EXPORT_DECL_VERIFIER(Name) \
  V8_EXPORT_PRIVATE void Name##Verify(Isolate* isolate);
#define DECL_STATIC_VERIFIER(Name) \
  static void Name##Verify(Tagged<Name> obj, Isolate* isolate);
#define EXPORT_DECL_STATIC_VERIFIER(Name) \
  V8_EXPORT_PRIVATE static void Name##Verify(Tagged<Name> obj, \
                                             Isolate* isolate);
#else
#define DECL_VERIFIER(Name)
#define EXPORT_DECL_VERIFIER(Name)
#define DECL_STATIC_VERIFIER(Name)
#define EXPORT_DECL_STATIC_VERIFIER(Name)
#endif
969
// Defines the accessor pair DeoptimizationData::name() /
// DeoptimizationData::Set##name() for a singleton element stored at the
// fixed index k<name>Index of the underlying array.
// Fix: the setter's closing '}' line was missing, leaving the macro body
// with unbalanced braces and a dangling line continuation.
#define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type)          \
  auto DeoptimizationData::name() const -> Tagged<type> {   \
    return Cast<type>(get(k##name##Index));                 \
  }                                                         \
  void DeoptimizationData::Set##name(Tagged<type> value) {  \
    set(k##name##Index, value);                             \
  }
// Defines indexed accessors DeoptimizationData::name(i) /
// DeoptimizationData::Set##name(i, value) for a per-entry slot located at
// offset k<name>Offset within deopt entry `i`.
// Fix: the setter's closing '}' line was missing, leaving the macro body
// with unbalanced braces and a dangling line continuation.
#define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type)                  \
  Tagged<type> DeoptimizationData::name(int i) const {            \
    return Cast<type>(get(IndexForEntry(i) + k##name##Offset));   \
  }                                                               \
  void DeoptimizationData::Set##name(int i, Tagged<type> value) { \
    set(IndexForEntry(i) + k##name##Offset, value);               \
  }
// Constructors plus friendship for a Torque-generated class: Type derives
// from TorqueGenerated##Type<Type, Super>, which needs access to Type's
// internals.
#define TQ_OBJECT_CONSTRUCTORS(Type)                             \
  OBJECT_CONSTRUCTORS(Type, TorqueGenerated##Type<Type, Super>); \
  friend class TorqueGenerated##Type<Type, Super>;
989
// Out-of-line definition of the Address-taking constructor declared by
// TQ_OBJECT_CONSTRUCTORS.
#define TQ_OBJECT_CONSTRUCTORS_IMPL(Type) \
  inline Type::Type(Address ptr)          \
      : TorqueGenerated##Type<Type, Type::Super>(ptr) {}
993
// Explicitly instantiates the Torque-generated assertion template so its
// static checks run for this class/parent pair.
#define TQ_CPP_OBJECT_DEFINITION_ASSERTS(_class, parent) \
  template class TorqueGenerated##_class##Asserts<_class, parent>;