#if V8_ENABLE_WEBASSEMBLY
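// (wasm-specific includes elided)
#endif  // V8_ENABLE_WEBASSEMBLY

// Throughout this file, `__` is the usual shorthand for the graph assembler:
// #define __ gasm()->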
MemoryLowering::MemoryLowering(JSGraph* jsgraph, Zone* zone,
                               JSGraphAssembler* graph_assembler, bool is_wasm,
                               AllocationFolding allocation_folding,
                               WriteBarrierAssertFailedCallback callback,
                               const char* function_debug_name)
    : isolate_(jsgraph->isolate()),
      zone_(zone),
      graph_assembler_(graph_assembler),
      is_wasm_(is_wasm),
      allocation_folding_(allocation_folding),
      write_barrier_assert_failed_(callback),
      function_debug_name_(function_debug_name) {}
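// Reduce() is the entry point of the pass: it dispatches each memory-related
// node to the dedicated lowering below.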
Reduction MemoryLowering::Reduce(Node* node) {
  switch (node->opcode()) {
    case IrOpcode::kAllocate:
      UNREACHABLE();  // Purged during effect-control linearization.
    case IrOpcode::kAllocateRaw:
      return ReduceAllocateRaw(node);
    case IrOpcode::kLoadFromObject:
    case IrOpcode::kLoadImmutableFromObject:
      return ReduceLoadFromObject(node);
    case IrOpcode::kLoadElement:
      return ReduceLoadElement(node);
    case IrOpcode::kLoadField:
      return ReduceLoadField(node);
    case IrOpcode::kStoreToObject:
    case IrOpcode::kInitializeImmutableInObject:
      return ReduceStoreToObject(node);
    case IrOpcode::kStoreElement:
      return ReduceStoreElement(node);
    case IrOpcode::kStoreField:
      return ReduceStoreField(node);
    case IrOpcode::kStore:
      return ReduceStore(node);
    default:
      return NoChange();
  }
}
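// EnsureAllocateOperator() lazily builds the call descriptor for the
// out-of-line allocation stub (sketch below; surrounding setup elided).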
  auto call_descriptor = Linkage::GetStubCallDescriptor(
      graph_zone(), descriptor, descriptor.GetStackParameterCount(),
      CallDescriptor::kCanUseRoots, Operator::kNoThrow, mode);
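// GetWasmInstanceNode() finds (and caches) the wasm instance-data parameter
// among the uses of the graph's start node.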
#if V8_ENABLE_WEBASSEMBLY
    if (use->opcode() == IrOpcode::kParameter &&
        ParameterIndexOf(use->op()) == wasm::kWasmInstanceDataParameterIndex) {
      wasm_instance_node_.set(use);
      return use;
    }
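// AlignToAllocationAlignment() rounds the given value up to the object
// alignment required when pointer compression with an 8GB cage is enabled.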
  auto already_aligned = __ MakeLabel(MachineRepresentation::kWord64);
  Node* alignment_check = __ WordEqual(
      __ WordAnd(value, __ UintPtrConstant(kObjectAlignment8GbHeapMask)),
      __ UintPtrConstant(0));

  __ GotoIf(alignment_check, &already_aligned, value);
  {
    Node* aligned_value = __ WordAnd(
        __ IntAdd(value, __ IntPtrConstant(kObjectAlignment8GbHeapMask)),
        __ UintPtrConstant(~kObjectAlignment8GbHeapMask));
    __ Goto(&already_aligned, aligned_value);
  }

  __ Bind(&already_aligned);
  return already_aligned.PhiAt(0);
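// ReduceAllocateRaw() is the core of this pass: it turns an AllocateRaw node
// into an inline bump-pointer allocation with an out-of-line stub call as
// fallback, optionally folding consecutive allocations into one reservation.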
  DCHECK_EQ(IrOpcode::kAllocateRaw, node->opcode());
  DCHECK_IMPLIES(allocation_folding_ == AllocationFolding::kDoAllocationFolding,
                 state_ptr != nullptr);
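  // Select the allocation builtin to call when the inline bump-pointer
  // allocation fails.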
  Node* allocate_builtin;
  if (!is_wasm_) {
    if (allocation_type == AllocationType::kYoung) {
      allocate_builtin = __ AllocateInYoungGenerationStubConstant();
    } else {
      allocate_builtin = __ AllocateInOldGenerationStubConstant();
    }
  } else {
#if V8_ENABLE_WEBASSEMBLY
    if (isolate_ == nullptr) {
      Builtin builtin;
      if (allocation_type == AllocationType::kYoung) {
        builtin = Builtin::kWasmAllocateInYoungGeneration;
      } else {
        builtin = Builtin::kWasmAllocateInOldGeneration;
      }
      static_assert(std::is_same<Smi, BuiltinPtr>(), "BuiltinPtr must be Smi");
      allocate_builtin = __ NumberConstant(static_cast<int>(builtin));
    } else {
      if (allocation_type == AllocationType::kYoung) {
        allocate_builtin = __ WasmAllocateInYoungGenerationStubConstant();
      } else {
        allocate_builtin = __ WasmAllocateInOldGenerationStubConstant();
      }
    }
#endif
  }
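  // Determine the top/limit addresses of the allocation space: with an
  // isolate they come from external references into the heap's spaces.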
    top_address = __ ExternalConstant(
        allocation_type == AllocationType::kYoung
            ? ExternalReference::new_space_allocation_top_address(isolate())
            : ExternalReference::old_space_allocation_top_address(isolate()));
    limit_address = __ ExternalConstant(
        allocation_type == AllocationType::kYoung
            ? ExternalReference::new_space_allocation_limit_address(isolate())
            : ExternalReference::old_space_allocation_limit_address(isolate()));
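  // Without an isolate (the wasm case), the allocation top/limit addresses
  // are instead loaded from the WasmTrustedInstanceData at static offsets.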
#if V8_ENABLE_WEBASSEMBLY
    int top_address_offset =
        allocation_type == AllocationType::kYoung
            ? WasmTrustedInstanceData::kNewAllocationTopAddressOffset
            : WasmTrustedInstanceData::kOldAllocationTopAddressOffset;
    int limit_address_offset =
        allocation_type == AllocationType::kYoung
            ? WasmTrustedInstanceData::kNewAllocationLimitAddressOffset
            : WasmTrustedInstanceData::kOldAllocationLimitAddressOffset;
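  // Check if we can fold this allocation into a previous allocation
  // represented by the incoming {state}.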
    IntPtrMatcher m(size);
    intptr_t const object_size =
        ALIGN_TO_ALLOCATION_ALIGNMENT(m.ResolvedValue());
    AllocationState const* state = *state_ptr;
    if (state->size() <= kMaxRegularHeapObjectSize - object_size &&
        state->group()->allocation() == allocation_type) {
      // Fold into the current group; compute the new upper bound.
      intptr_t const state_size = state->size() + object_size;

      // Update the reservation check to the actual maximum upper bound.
      AllocationGroup* const group = state->group();
      if (machine()->Is64()) {
        NodeProperties::ChangeOp(group->size(),
                                 common()->Int64Constant(state_size));
      } else {
        NodeProperties::ChangeOp(
            group->size(),
            common()->Int32Constant(static_cast<int32_t>(state_size)));
      }
      // Compute the effective inner allocated address.
      value = __ BitcastWordToTagged(
          __ IntAdd(state->top(), __ IntPtrConstant(kHeapObjectTag)));
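      // If this allocation cannot be folded into {state}, close the current
      // group and open a new one: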
      auto call_runtime = __ MakeDeferredLabel();
      auto done = __ MakeLabel(MachineType::PointerRepresentation());

      // Mutable reservation size; patched as more allocations are folded in.
      Node* reservation_size = __ UniqueIntPtrConstant(object_size);

      Node* check = __ UintLessThan(
          __ IntAdd(top, reservation_size), limit);
      __ GotoIfNot(check, &call_runtime);
      __ Goto(&done, top);

      __ Bind(&call_runtime);
      {
        EnsureAllocateOperator();
        Node* vfalse = __ BitcastTaggedToWord(__ Call(
            allocate_operator_.get(), allocate_builtin, reservation_size));
        vfalse = __ IntSub(vfalse, __ IntPtrConstant(kHeapObjectTag));
        __ Goto(&done, vfalse);
      }

      __ Bind(&done);
      value = __ BitcastWordToTagged(
          __ IntAdd(done.PhiAt(0), __ IntPtrConstant(kHeapObjectTag)));

      // Start a new allocation group.
      AllocationGroup* group = zone()->New<AllocationGroup>(
          value, allocation_type, reservation_size, zone());
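  // Unfolded case: plain bump-pointer allocation with a stub-call fallback.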
    auto call_runtime = __ MakeDeferredLabel();
    auto done = __ MakeLabel(MachineRepresentation::kTaggedPointer);

    // Check if we can do bump pointer allocation here.
    Node* check = __ UintLessThan(new_top, limit);
    __ GotoIfNot(check, &call_runtime);
    __ Store(StoreRepresentation(MachineType::PointerRepresentation(),
                                 kNoWriteBarrier),
             top_address, __ IntPtrConstant(0), new_top);
    __ Goto(&done, __ BitcastWordToTagged(
                       __ IntAdd(top, __ IntPtrConstant(kHeapObjectTag))));

    __ Bind(&call_runtime);
    EnsureAllocateOperator();
    __ Goto(&done, __ Call(allocate_operator_.get(), allocate_builtin, size));

    __ Bind(&done);
    value = done.PhiAt(0);
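// ReduceLoadFromObject() lowers (Load|LoadImmutable)FromObject to machine
// loads, honoring the target's unaligned-access support.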
  DCHECK(node->opcode() == IrOpcode::kLoadFromObject ||
         node->opcode() == IrOpcode::kLoadImmutableFromObject);
  DCHECK_EQ(IrOpcode::kLoadElement, node->opcode());
  DCHECK(!type.IsMapWord());
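// ReduceLoadExternalPointerField() decodes a sandboxed external pointer:
// the raw field value is an index into the external pointer table.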
  DCHECK_EQ(node->opcode(), IrOpcode::kLoadField);
#ifdef V8_ENABLE_SANDBOX
  __ InitializeEffectControl(effect, control);
  Node* table_address =
      IsSharedExternalPointerType(tag_range)
          ? __ Load(MachineType::Pointer(),
                    __ ExternalConstant(
                        ExternalReference::
                            shared_external_pointer_table_address_address(
                                isolate())),
                    __ IntPtrConstant(0))
          : __ ExternalConstant(
                ExternalReference::external_pointer_table_address(isolate()));
  Node* actual_tag = __ TruncateInt64ToInt32(
      __ Word64Shr(pointer, __ Int64Constant(kExternalPointerTagShift)));
  Node* expected_tag = __ Int32Constant(tag);
  __ GotoIf(__ WordEqual(actual_tag, expected_tag), &done, pointer);
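// ReduceLoadBoundedSize() decodes a "bounded size" field, which is stored
// shifted left by kBoundedSizeShift when the sandbox is enabled.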
#ifdef V8_ENABLE_SANDBOX
  __ InitializeEffectControl(effect, control);

  Node* raw_value = __ AddNode(graph()->CloneNode(node));
  Node* shift_amount = __ IntPtrConstant(kBoundedSizeShift);
  Node* decoded_size = __ Word64Shr(raw_value, shift_amount);
  return Replace(decoded_size);
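// ReduceLoadMap() re-issues the load and unpacks the map word when map
// packing is enabled.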
  __ InitializeEffectControl(effect, control);

  node = __ AddNode(graph()->CloneNode(node));
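// ReduceLoadField() dispatches to the specialized lowerings above for map
// words, external pointers, and bounded-size fields.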
  DCHECK_EQ(IrOpcode::kLoadField, node->opcode());
  if (type.IsMapWord()) {
    DCHECK(!access.type.Is(Type::ExternalPointer()));
    return ReduceLoadMap(node);
  }
  if (access.type.Is(Type::ExternalPointer())) {
    return ReduceLoadExternalPointerField(node);
  }
  if (access.is_bounded_size_access) {
    return ReduceLoadBoundedSize(node);
  }
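// ReduceStoreToObject() lowers (StoreToObject|InitializeImmutableInObject),
// computing the write-barrier kind from the current allocation state.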
  DCHECK(node->opcode() == IrOpcode::kStoreToObject ||
         node->opcode() == IrOpcode::kInitializeImmutableInObject);
  WriteBarrierKind write_barrier_kind = ComputeWriteBarrierKind(
      node, object, value, state, access.write_barrier_kind);
  DCHECK(!access.machine_type.IsMapWord());
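// ReduceStoreElement() is the analogous lowering for element stores.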
  DCHECK_EQ(IrOpcode::kStoreElement, node->opcode());
  WriteBarrierKind write_barrier_kind = ComputeWriteBarrierKind(
      node, object, value, state, access.write_barrier_kind);
  NodeProperties::ChangeOp(
      node, machine()->Store(StoreRepresentation(
                access.machine_type.representation(), write_barrier_kind)));
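// ReduceStoreField() additionally handles map-word packing and indirect
// pointer fields.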
  DCHECK_EQ(IrOpcode::kStoreField, node->opcode());
  DCHECK(!access.type.Is(Type::SandboxedPointer()));
  DCHECK(!access.is_bounded_size_access);
  __ InitializeEffectControl(effect, control);
  WriteBarrierKind write_barrier_kind = ComputeWriteBarrierKind(
      node, object, value, state, access.write_barrier_kind);
#ifdef V8_MAP_PACKING
  if (access.machine_type.IsMapWord()) {
    Node* mapword = __ PackMapWord(TNode<Map>::UncheckedCast(value));
    node->ReplaceInput(2, mapword);
  }
#endif
  // Indirect pointer fields are lowered to a dedicated store operator.
  NodeProperties::ChangeOp(
      node, machine()->StoreIndirectPointer(write_barrier_kind));
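// ReduceStore() handles raw Store nodes, recomputing the write-barrier kind
// for tagged stores.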
  DCHECK_EQ(IrOpcode::kStore, node->opcode());
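// ComputeIndex() scales the element index by the element size and folds in
// the header offset.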
  int const element_size_shift =
      ElementSizeLog2Of(access.machine_type.representation());
  if (element_size_shift) {
    index = __ WordShl(index, __ IntPtrConstant(element_size_shift));
  }
  int const fixed_offset = access.header_size - access.tag();
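// ValueNeedsWriteBarrier() returns false for values that can never live in
// the young generation, e.g. Smis and immortal immovable roots.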
  switch (value->opcode()) {
    case IrOpcode::kBitcastWordToTaggedSigned:
      return false;
    case IrOpcode::kHeapConstant: {
      RootIndex root_index;
      if (isolate->roots_table().IsRootHandle(HeapConstantOf(value->op()),
                                              &root_index) &&
          RootsTable::IsImmortalImmovable(root_index)) {
        return false;
      }
      break;
    }
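// The one-argument ReduceAllocateRaw() overload reads the AllocationType
// from the node's AllocateParameters.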
  DCHECK_EQ(IrOpcode::kAllocateRaw, node->opcode());
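// ComputeWriteBarrierKind() removes the write barrier when the object is
// known to be freshly allocated in the young generation, when the value
// needs no barrier, or when barriers are globally disabled.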
  if (state && state->IsYoungGenerationAllocation() &&
      state->group()->Contains(object)) {
    write_barrier_kind = kNoWriteBarrier;
  }
  if (v8_flags.disable_write_barriers) {
    write_barrier_kind = kNoWriteBarrier;
  }
  return write_barrier_kind;
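// AllocationGroup tracks the set of nodes belonging to one folded
// allocation; Contains() follows bitcasts and additions, which stay within
// the same allocated object.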
  node_ids_.insert(node->id());
  while (node_ids_.find(node->id()) == node_ids_.end()) {
    switch (node->opcode()) {
      case IrOpcode::kBitcastTaggedToWord:
      case IrOpcode::kBitcastWordToTagged:
      case IrOpcode::kInt32Add:
      case IrOpcode::kInt64Add:
        node = NodeProperties::GetValueInput(node, 0);
        break;
      default:
        return false;
    }
  }
  return true;
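// AllocationState is immutable: an "open" state tracks the current group,
// reserved size, and top pointer; a "closed" state only keeps the group.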
MemoryLowering::AllocationState::AllocationState()
    : group_(nullptr),
      size_(std::numeric_limits<int>::max()),
      top_(nullptr),
      effect_(nullptr) {}

MemoryLowering::AllocationState::AllocationState(AllocationGroup* group,
                                                 Node* effect)
    : group_(group),
      size_(std::numeric_limits<int>::max()),
      top_(nullptr),
      effect_(effect) {}

MemoryLowering::AllocationState::AllocationState(AllocationGroup* group,
                                                 intptr_t size, Node* top,
                                                 Node* effect)
    : group_(group), size_(size), top_(top), effect_(effect) {}

bool MemoryLowering::AllocationState::IsYoungGenerationAllocation() const {
  return group() && group()->IsYoungGenerationAllocation();
}