v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
csa-load-elimination.cc
Go to the documentation of this file.
1// Copyright 2019 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
11
12namespace v8 {
13namespace internal {
14namespace compiler {
15
17 if (v8_flags.trace_turbo_load_elimination) {
18 if (node->op()->EffectInputCount() > 0) {
19 PrintF(" visit #%d:%s", node->id(), node->op()->mnemonic());
20 if (node->op()->ValueInputCount() > 0) {
21 PrintF("(");
22 for (int i = 0; i < node->op()->ValueInputCount(); ++i) {
23 if (i > 0) PrintF(", ");
24 Node* const value = NodeProperties::GetValueInput(node, i);
25 PrintF("#%d:%s", value->id(), value->op()->mnemonic());
26 }
27 PrintF(")");
28 }
29 PrintF("\n");
30 for (int i = 0; i < node->op()->EffectInputCount(); ++i) {
31 Node* const effect = NodeProperties::GetEffectInput(node, i);
32 if (AbstractState const* const state = node_states_.Get(effect)) {
33 PrintF(" state[%i]: #%d:%s\n", i, effect->id(),
34 effect->op()->mnemonic());
35 state->mutable_state.Print();
36 state->immutable_state.Print();
37 } else {
38 PrintF(" no state[%i]: #%d:%s\n", i, effect->id(),
39 effect->op()->mnemonic());
40 }
41 }
42 }
43 }
44 switch (node->opcode()) {
45 case IrOpcode::kLoadFromObject:
46 case IrOpcode::kLoadImmutableFromObject:
47 return ReduceLoadFromObject(node, ObjectAccessOf(node->op()));
48 case IrOpcode::kStoreToObject:
49 case IrOpcode::kInitializeImmutableInObject:
50 return ReduceStoreToObject(node, ObjectAccessOf(node->op()));
51 case IrOpcode::kDebugBreak:
52 case IrOpcode::kAbortCSADcheck:
53 // Avoid changing optimizations in the presence of debug instructions.
54 return PropagateInputState(node);
55 case IrOpcode::kCall:
56 return ReduceCall(node);
57 case IrOpcode::kEffectPhi:
58 return ReduceEffectPhi(node);
59 case IrOpcode::kDead:
60 return NoChange();
61 case IrOpcode::kStart:
62 return ReduceStart(node);
63 default:
64 return ReduceOtherNode(node);
65 }
67}
68
69namespace CsaLoadEliminationHelpers {
70
72 if (from == to) return true;
73 if (IsAnyTagged(from)) return IsAnyTagged(to);
74 if (IsIntegral(from)) {
75 return IsIntegral(to) && ElementSizeInBytes(from) >= ElementSizeInBytes(to);
76 }
77 return false;
78}
79
80bool IsConstantObject(Node* object) {
81 return object->opcode() == IrOpcode::kParameter ||
82 object->opcode() == IrOpcode::kLoadImmutable ||
84}
85
86bool IsFreshObject(Node* object) {
87 return object->opcode() == IrOpcode::kAllocate ||
88 object->opcode() == IrOpcode::kAllocateRaw;
89}
90
91} // namespace CsaLoadEliminationHelpers
92
93namespace Helpers = CsaLoadEliminationHelpers;
94
95// static
96template <typename OuterKey>
98 OuterMap<OuterKey>& to, const OuterMap<OuterKey>& from) {
99 FieldInfo empty_info;
100 for (const std::pair<OuterKey, InnerMap>& to_map : to) {
101 InnerMap to_map_copy(to_map.second);
102 OuterKey key = to_map.first;
103 InnerMap current_map = from.Get(key);
104 for (std::pair<Node*, FieldInfo> info : to_map.second) {
105 if (current_map.Get(info.first) != info.second) {
106 to_map_copy.Set(info.first, empty_info);
107 }
108 }
109 to.Set(key, to_map_copy);
110 }
111}
112
114 IntersectWith(fresh_entries_, that->fresh_entries_);
115 IntersectWith(constant_entries_, that->constant_entries_);
116 IntersectWith(arbitrary_entries_, that->arbitrary_entries_);
117 IntersectWith(fresh_unknown_entries_, that->fresh_unknown_entries_);
118 IntersectWith(constant_unknown_entries_, that->constant_unknown_entries_);
119 IntersectWith(arbitrary_unknown_entries_, that->arbitrary_unknown_entries_);
120}
121
123 Node* object, Node* offset, MachineRepresentation repr) const {
124 HalfState* result = zone_->New<HalfState>(*this);
125 UnknownOffsetInfos empty_unknown(zone_, InnerMap(zone_));
127 if (m.HasResolvedValue()) {
128 uint32_t num_offset = static_cast<uint32_t>(m.ResolvedValue());
129 if (Helpers::IsFreshObject(object)) {
130 // May alias with:
131 // - The same object/offset
132 // - Arbitrary objects with the same offset
 133 // - The same object, unknown offset
 134 // - Arbitrary objects with unknown offset
135 result->KillOffsetInFresh(object, num_offset, repr);
136 KillOffset(result->arbitrary_entries_, num_offset, repr, zone_);
137 result->fresh_unknown_entries_.Set(object, InnerMap(zone_));
138 result->arbitrary_unknown_entries_ = empty_unknown;
139 } else if (Helpers::IsConstantObject(object)) {
140 // May alias with:
141 // - Constant/arbitrary objects with the same offset
 142 // - Constant/arbitrary objects with unknown offset
143 KillOffset(result->constant_entries_, num_offset, repr, zone_);
144 KillOffset(result->arbitrary_entries_, num_offset, repr, zone_);
145 result->constant_unknown_entries_ = empty_unknown;
146 result->arbitrary_unknown_entries_ = empty_unknown;
147 } else {
148 // May alias with:
149 // - Any object with the same or unknown offset
150 KillOffset(result->fresh_entries_, num_offset, repr, zone_);
151 KillOffset(result->constant_entries_, num_offset, repr, zone_);
152 KillOffset(result->arbitrary_entries_, num_offset, repr, zone_);
153 result->fresh_unknown_entries_ = empty_unknown;
154 result->constant_unknown_entries_ = empty_unknown;
155 result->arbitrary_unknown_entries_ = empty_unknown;
156 }
157 } else {
158 ConstantOffsetInfos empty_constant(zone_, InnerMap(zone_));
159 if (Helpers::IsFreshObject(object)) {
160 // May alias with:
161 // - The same object with any known/unknown offset
162 // - Arbitrary objects with any known/unknown offset
163 for (auto map : result->fresh_entries_) {
164 // TODO(manoskouk): Consider adding a map from fresh objects to offsets
165 // to implement this efficiently.
166 InnerMap map_copy(map.second);
167 map_copy.Set(object, FieldInfo());
168 result->fresh_entries_.Set(map.first, map_copy);
169 }
170 result->fresh_unknown_entries_.Set(object, InnerMap(zone_));
171 result->arbitrary_entries_ = empty_constant;
172 result->arbitrary_unknown_entries_ = empty_unknown;
173 } else if (Helpers::IsConstantObject(object)) {
174 // May alias with:
 175 // - Constant/arbitrary objects with any known/unknown offset
176 result->constant_entries_ = empty_constant;
177 result->constant_unknown_entries_ = empty_unknown;
178 result->arbitrary_entries_ = empty_constant;
179 result->arbitrary_unknown_entries_ = empty_unknown;
180 } else {
181 // May alias with anything. Clear the state.
182 return zone_->New<HalfState>(zone_);
183 }
184 }
185
186 return result;
187}
188
190 Node* object, Node* offset, Node* value, MachineRepresentation repr) const {
191 HalfState* new_state = zone_->New<HalfState>(*this);
193 if (m.HasResolvedValue()) {
194 uint32_t offset_num = static_cast<uint32_t>(m.ResolvedValue());
196 ? new_state->fresh_entries_
198 ? new_state->constant_entries_
199 : new_state->arbitrary_entries_;
200 Update(infos, offset_num, object, FieldInfo(value, repr));
201 } else {
202 UnknownOffsetInfos& infos =
204 ? new_state->fresh_unknown_entries_
206 ? new_state->constant_unknown_entries_
207 : new_state->arbitrary_unknown_entries_;
208 Update(infos, object, offset, FieldInfo(value, repr));
209 }
210 return new_state;
211}
212
214 Node* object, Node* offset) const {
216 if (m.HasResolvedValue()) {
217 uint32_t num_offset = static_cast<uint32_t>(m.ResolvedValue());
218 const ConstantOffsetInfos& infos = Helpers::IsFreshObject(object)
219 ? fresh_entries_
221 ? constant_entries_
222 : arbitrary_entries_;
223 return infos.Get(num_offset).Get(object);
224 } else {
225 const UnknownOffsetInfos& infos = Helpers::IsFreshObject(object)
226 ? fresh_unknown_entries_
228 ? constant_unknown_entries_
229 : arbitrary_unknown_entries_;
230 return infos.Get(object).Get(offset);
231 }
232}
233
234// static
235// Kill all elements in {infos} that overlap with an element with {offset} and
236// size {ElementSizeInBytes(repr)}.
238 uint32_t offset,
240 Zone* zone) {
241 // All elements in the range [{offset}, {offset + ElementSizeInBytes(repr)})
242 // are in the killed range. We do not need to traverse the inner maps, we can
243 // just clear them.
244 for (int i = 0; i < ElementSizeInBytes(repr); i++) {
245 infos.Set(offset + i, InnerMap(zone));
246 }
247
248 // Now we have to remove all elements in earlier offsets that overlap with an
249 // element in {offset}.
250 // The earliest offset that may overlap with {offset} is
251 // {kMaximumReprSizeInBytes - 1} before.
252 uint32_t initial_offset = offset >= kMaximumReprSizeInBytes - 1
254 : 0;
255 // For all offsets from {initial_offset} to {offset}, we traverse the
256 // respective inner map, and reset all elements that are large enough to
257 // overlap with {offset}.
258 for (uint32_t i = initial_offset; i < offset; i++) {
259 InnerMap map_copy(infos.Get(i));
260 for (const std::pair<Node*, FieldInfo> info : infos.Get(i)) {
261 if (info.second.representation != MachineRepresentation::kNone &&
262 ElementSizeInBytes(info.second.representation) >
263 static_cast<int>(offset - i)) {
264 map_copy.Set(info.first, {});
265 }
266 }
267 infos.Set(i, map_copy);
268 }
269}
270
272 Node* const object, uint32_t offset, MachineRepresentation repr) {
273 for (int i = 0; i < ElementSizeInBytes(repr); i++) {
274 Update(fresh_entries_, offset + i, object, {});
275 }
276 uint32_t initial_offset = offset >= kMaximumReprSizeInBytes - 1
278 : 0;
279 for (uint32_t i = initial_offset; i < offset; i++) {
280 const FieldInfo& info = fresh_entries_.Get(i).Get(object);
281 if (info.representation != MachineRepresentation::kNone &&
282 ElementSizeInBytes(info.representation) >
283 static_cast<int>(offset - i)) {
284 Update(fresh_entries_, i, object, {});
285 }
286 }
287}
288
289// static
292 for (const auto outer_entry : infos) {
293 for (const auto inner_entry : outer_entry.second) {
294 Node* object = inner_entry.first;
295 uint32_t offset = outer_entry.first;
296 FieldInfo info = inner_entry.second;
297 PrintF(" #%d:%s+(%d) -> #%d:%s [repr=%s]\n", object->id(),
298 object->op()->mnemonic(), offset, info.value->id(),
299 info.value->op()->mnemonic(),
300 MachineReprToString(info.representation));
301 }
302 }
303}
304
305// static
308 for (const auto outer_entry : infos) {
309 for (const auto inner_entry : outer_entry.second) {
310 Node* object = outer_entry.first;
311 Node* offset = inner_entry.first;
312 FieldInfo info = inner_entry.second;
313 PrintF(" #%d:%s+#%d:%s -> #%d:%s [repr=%s]\n", object->id(),
314 object->op()->mnemonic(), offset->id(), offset->op()->mnemonic(),
315 info.value->id(), info.value->op()->mnemonic(),
316 MachineReprToString(info.representation));
317 }
318 }
319}
320
322 Print(fresh_entries_);
323 Print(constant_entries_);
324 Print(arbitrary_entries_);
325 Print(fresh_unknown_entries_);
326 Print(constant_unknown_entries_);
327 Print(arbitrary_unknown_entries_);
328}
329
330// We may encounter a mutable/immutable inconsistency if the same field offset
331// is loaded/stored from the same object both as mutable and immutable. This can
332// only happen in code where the object has been cast to two different
333// incompatible types, i.e. in unreachable code. For safety, we introduce an
334// Unreachable node before the load/store.
336 Node* effect = NodeProperties::GetEffectInput(node);
337 Node* control = NodeProperties::GetControlInput(node);
338 Node* unreachable =
339 graph()->NewNode(jsgraph()->common()->Unreachable(), effect, control);
340 return Replace(unreachable);
341}
342
344 ObjectAccess const& access) {
345 DCHECK(node->opcode() == IrOpcode::kLoadFromObject ||
346 node->opcode() == IrOpcode::kLoadImmutableFromObject);
347 Node* object = NodeProperties::GetValueInput(node, 0);
349 Node* effect = NodeProperties::GetEffectInput(node);
350 AbstractState const* state = node_states_.Get(effect);
351 if (state == nullptr) return NoChange();
352 bool is_mutable = node->opcode() == IrOpcode::kLoadFromObject;
353 // We can only find the field in the wrong half-state in unreachable code.
354 if (!(is_mutable ? &state->immutable_state : &state->mutable_state)
355 ->Lookup(object, offset)
356 .IsEmpty()) {
357 Node* control = NodeProperties::GetControlInput(node);
358 Node* unreachable =
359 graph()->NewNode(jsgraph()->common()->Unreachable(), effect, control);
360 auto rep = ObjectAccessOf(node->op()).machine_type.representation();
361 Node* dead_value =
362 graph()->NewNode(jsgraph()->common()->DeadValue(rep), unreachable);
363 ReplaceWithValue(node, dead_value, unreachable, control);
364 node->Kill();
365 return Replace(dead_value);
366 }
367 HalfState const* half_state =
368 is_mutable ? &state->mutable_state : &state->immutable_state;
369
370 MachineRepresentation representation = access.machine_type.representation();
371 FieldInfo lookup_result = half_state->Lookup(object, offset);
372 if (!lookup_result.IsEmpty()) {
373 // Make sure we don't reuse values that were recorded with a different
374 // representation or resurrect dead {replacement} nodes.
375 MachineRepresentation from = lookup_result.representation;
376 if (Helpers::Subsumes(from, representation) &&
377 !lookup_result.value->IsDead()) {
378 Node* replacement =
379 TruncateAndExtend(lookup_result.value, from, access.machine_type);
380 ReplaceWithValue(node, replacement, effect);
381 // This might have opened an opportunity for escape analysis to eliminate
382 // the object altogether.
383 Revisit(object);
384 return Replace(replacement);
385 }
386 }
387 half_state = half_state->AddField(object, offset, node, representation);
388
389 AbstractState const* new_state =
390 is_mutable
391 ? zone()->New<AbstractState>(*half_state, state->immutable_state)
392 : zone()->New<AbstractState>(state->mutable_state, *half_state);
393
394 return UpdateState(node, new_state);
395}
396
398 ObjectAccess const& access) {
399 DCHECK(node->opcode() == IrOpcode::kStoreToObject ||
400 node->opcode() == IrOpcode::kInitializeImmutableInObject);
401 Node* object = NodeProperties::GetValueInput(node, 0);
403 Node* value = NodeProperties::GetValueInput(node, 2);
404 Node* effect = NodeProperties::GetEffectInput(node);
405 AbstractState const* state = node_states_.Get(effect);
406 if (state == nullptr) return NoChange();
407 MachineRepresentation repr = access.machine_type.representation();
408 if (node->opcode() == IrOpcode::kStoreToObject) {
409 // We can only find the field in the wrong half-state in unreachable code.
410 if (!(state->immutable_state.Lookup(object, offset).IsEmpty())) {
411 return AssertUnreachable(node);
412 }
413 HalfState const* mutable_state =
414 state->mutable_state.KillField(object, offset, repr);
415 mutable_state = mutable_state->AddField(object, offset, value, repr);
416 AbstractState const* new_state =
417 zone()->New<AbstractState>(*mutable_state, state->immutable_state);
418 return UpdateState(node, new_state);
419 } else {
420 // We can only find the field in the wrong half-state in unreachable code.
421 if (!(state->mutable_state.Lookup(object, offset).IsEmpty())) {
422 return AssertUnreachable(node);
423 }
424 // We should not initialize the same immutable field twice.
425 DCHECK(state->immutable_state.Lookup(object, offset).IsEmpty());
426 HalfState const* immutable_state =
427 state->immutable_state.AddField(object, offset, value, repr);
428 AbstractState const* new_state =
429 zone()->New<AbstractState>(state->mutable_state, *immutable_state);
430 return UpdateState(node, new_state);
431 }
432}
433
435 Node* const effect0 = NodeProperties::GetEffectInput(node, 0);
436 Node* const control = NodeProperties::GetControlInput(node);
437 AbstractState const* state0 = node_states_.Get(effect0);
438 if (state0 == nullptr) return NoChange();
439 if (control->opcode() == IrOpcode::kLoop) {
440 // Here we rely on having only reducible loops:
441 // The loop entry edge always dominates the header, so we can just take
442 // the state from the first input, and compute the loop state based on it.
443 AbstractState const* state = ComputeLoopState(node, state0);
444 return UpdateState(node, state);
445 }
446 DCHECK_EQ(IrOpcode::kMerge, control->opcode());
447
448 // Shortcut for the case when we do not know anything about some input.
449 int const input_count = node->op()->EffectInputCount();
450 for (int i = 1; i < input_count; ++i) {
451 Node* const effect = NodeProperties::GetEffectInput(node, i);
452 if (node_states_.Get(effect) == nullptr) return NoChange();
453 }
454
455 // Make a copy of the first input's state and intersect it with the state
456 // from other inputs.
457 // TODO(manoskouk): Consider computing phis for at least a subset of the
458 // state.
459 AbstractState* state = zone()->New<AbstractState>(*state0);
460 for (int i = 1; i < input_count; ++i) {
461 Node* const input = NodeProperties::GetEffectInput(node, i);
462 state->IntersectWith(node_states_.Get(input));
463 }
464 return UpdateState(node, state);
465}
466
470
472 Node* value = NodeProperties::GetValueInput(node, 0);
474 if (m.Is(ExternalReference::check_object_type())) {
475 return PropagateInputState(node);
476 }
477 return ReduceOtherNode(node);
478}
479
481 if (node->op()->EffectInputCount() == 1 &&
482 node->op()->EffectOutputCount() == 1) {
483 Node* const effect = NodeProperties::GetEffectInput(node);
484 AbstractState const* state = node_states_.Get(effect);
485 // If we do not know anything about the predecessor, do not propagate just
486 // yet because we will have to recompute anyway once we compute the
487 // predecessor.
488 if (state == nullptr) return NoChange();
489 // If this {node} has some uncontrolled side effects, set its state to
490 // the immutable half-state of its input state, otherwise to its input
491 // state.
492 return UpdateState(
493 node, node->op()->HasProperty(Operator::kNoWrite)
494 ? state
495 : zone()->New<AbstractState>(HalfState(zone()),
496 state->immutable_state));
497 }
498 DCHECK_EQ(0, node->op()->EffectOutputCount());
499 return NoChange();
500}
501
503 AbstractState const* state) {
504 AbstractState const* original = node_states_.Get(node);
505 // Only signal that the {node} has Changed, if the information about {state}
506 // has changed wrt. the {original}.
507 if (state != original) {
508 if (original == nullptr || !state->Equals(original)) {
509 node_states_.Set(node, state);
510 return Changed(node);
511 }
512 }
513 return NoChange();
514}
515
517 Node* const effect = NodeProperties::GetEffectInput(node);
518 AbstractState const* state = node_states_.Get(effect);
519 if (state == nullptr) return NoChange();
520 return UpdateState(node, state);
521}
522
524 Node* node, AbstractState const* state) const {
525 DCHECK_EQ(node->opcode(), IrOpcode::kEffectPhi);
526 std::queue<Node*> queue;
527 std::unordered_set<Node*> visited;
528 visited.insert(node);
529 for (int i = 1; i < node->InputCount() - 1; ++i) {
530 queue.push(node->InputAt(i));
531 }
532 while (!queue.empty()) {
533 Node* const current = queue.front();
534 queue.pop();
535 if (visited.insert(current).second) {
536 if (current->opcode() == IrOpcode::kStoreToObject) {
537 Node* object = NodeProperties::GetValueInput(current, 0);
540 ObjectAccessOf(current->op()).machine_type.representation();
541 const HalfState* new_mutable_state =
542 state->mutable_state.KillField(object, offset, repr);
543 state = zone()->New<AbstractState>(*new_mutable_state,
544 state->immutable_state);
545 } else if (current->opcode() == IrOpcode::kInitializeImmutableInObject) {
546#if DEBUG
547 // We are not allowed to reset an immutable (object, offset) pair.
548 Node* object = NodeProperties::GetValueInput(current, 0);
550 CHECK(state->immutable_state.Lookup(object, offset).IsEmpty());
551#endif
552 } else if (!current->op()->HasProperty(Operator::kNoWrite)) {
553 return zone()->New<AbstractState>(HalfState(zone()),
554 state->immutable_state);
555 }
556 for (int i = 0; i < current->op()->EffectInputCount(); ++i) {
557 queue.push(NodeProperties::GetEffectInput(current, i));
558 }
559 }
560 }
561 return state;
562}
563
566 MachineType to) {
567 DCHECK(Helpers::Subsumes(from, to.representation()));
568 DCHECK_GE(ElementSizeInBytes(from), ElementSizeInBytes(to.representation()));
569
570 if (to == MachineType::Int8() || to == MachineType::Int16()) {
571 // 1st case: We want to eliminate a signed 8/16-bit load using the value
572 // from a previous subsuming load or store. Since that value might be
573 // outside 8/16-bit range, we first truncate it accordingly. Then we
574 // sign-extend the result to 32-bit.
575 DCHECK_EQ(to.semantic(), MachineSemantic::kInt32);
576 if (from == MachineRepresentation::kWord64) {
577 node = graph()->NewNode(machine()->TruncateInt64ToInt32(), node);
578 }
579 int shift = 32 - 8 * ElementSizeInBytes(to.representation());
580 return graph()->NewNode(machine()->Word32Sar(),
581 graph()->NewNode(machine()->Word32Shl(), node,
582 jsgraph()->Int32Constant(shift)),
583 jsgraph()->Int32Constant(shift));
584 } else if (to == MachineType::Uint8() || to == MachineType::Uint16()) {
585 // 2nd case: We want to eliminate an unsigned 8/16-bit load using the value
586 // from a previous subsuming load or store. Since that value might be
587 // outside 8/16-bit range, we first truncate it accordingly.
588 if (from == MachineRepresentation::kWord64) {
589 node = graph()->NewNode(machine()->TruncateInt64ToInt32(), node);
590 }
591 int mask = (1 << 8 * ElementSizeInBytes(to.representation())) - 1;
592 return graph()->NewNode(machine()->Word32And(), node,
593 jsgraph()->Int32Constant(mask));
594 } else if (from == MachineRepresentation::kWord64 &&
595 to.representation() == MachineRepresentation::kWord32) {
596 // 3rd case: Truncate 64-bits into 32-bits.
597 return graph()->NewNode(machine()->TruncateInt64ToInt32(), node);
598 } else {
599 // 4th case: No need for truncation.
600 DCHECK((from == to.representation() &&
602 from == MachineRepresentation::kWord64 || !IsIntegral(from))) ||
603 (IsAnyTagged(from) && IsAnyTagged(to.representation())));
604 return node;
605 }
606}
607
611
615
617
619
620} // namespace compiler
621} // namespace internal
622} // namespace v8
static constexpr MachineType Uint8()
static constexpr MachineType Uint16()
static constexpr MachineType Int16()
static constexpr MachineType Int8()
T * New(Args &&... args)
Definition zone.h:114
HalfState const * KillField(Node *object, Node *offset, MachineRepresentation repr) const
static void KillOffset(ConstantOffsetInfos &infos, uint32_t offset, MachineRepresentation repr, Zone *zone)
HalfState const * AddField(Node *object, Node *offset, Node *value, MachineRepresentation repr) const
void KillOffsetInFresh(Node *object, uint32_t offset, MachineRepresentation repr)
FieldInfo Lookup(Node *object, Node *offset) const
Reduction ReduceLoadFromObject(Node *node, ObjectAccess const &access)
Reduction ReduceStoreToObject(Node *node, ObjectAccess const &access)
NodeAuxData< AbstractState const * > node_states_
Reduction UpdateState(Node *node, AbstractState const *state)
AbstractState const * ComputeLoopState(Node *node, AbstractState const *state) const
Node * TruncateAndExtend(Node *node, MachineRepresentation from, MachineType to)
Isolate * isolate() const
Definition js-graph.h:106
CommonOperatorBuilder * common() const
MachineOperatorBuilder * machine() const
static Node * GetEffectInput(Node *node, int index=0)
static Node * GetValueInput(Node *node, int index)
static Node * GetControlInput(Node *node, int index=0)
constexpr IrOpcode::Value opcode() const
Definition node.h:52
const Value & Get(const Key &key) const
Node * NewNode(const Operator *op, int input_count, Node *const *inputs, bool incomplete=false)
int32_t offset
Node * node
ZoneVector< RpoNumber > & result
LiftoffAssembler::CacheState state
uint32_t const mask
int m
Definition mul-fft.cc:294
bool Subsumes(MachineRepresentation from, MachineRepresentation to)
const ObjectAccess & ObjectAccessOf(const Operator *op)
constexpr int kMaximumReprSizeInBytes
void PrintF(const char *format,...)
Definition utils.cc:39
constexpr bool IsAnyTagged(MachineRepresentation rep)
void Print(Tagged< Object > obj)
Definition objects.h:774
const char * MachineReprToString(MachineRepresentation rep)
V8_EXPORT_PRIVATE FlagValues v8_flags
V8_EXPORT_PRIVATE constexpr int ElementSizeInBytes(MachineRepresentation)
constexpr bool IsIntegral(MachineRepresentation rep)
#define CHECK(condition)
Definition logging.h:124
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485