v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
wasm-address-reassociation.cc
// Copyright 2023 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/wasm-address-reassociation.h"

#include "src/base/logging.h"
#include "src/compiler/common-operator.h"
#include "src/compiler/js-graph.h"
#include "src/compiler/machine-operator.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/compiler/node.h"
#include "src/compiler/opcodes.h"
#include "src/zone/zone.h"

namespace v8 {
namespace internal {
namespace compiler {

// Wasm address reassociation.
//
// wasm32 load and store operations use a 32-bit dynamic offset along with a
// 32-bit static index to create a 33-bit effective address. This means that,
// to use a static index greater than zero, the producer needs to prove that
// the addition of the index won't overflow. However, if we're performing
// address computations with 64 bits, we should be able to more readily use
// immediate indexes.
//
// So, the purpose of this transform is to pattern match certain address
// computations and reorganize the operands for more efficient code generation.
//
// Many addresses will be computed in a form like this:
// - ProtectedLoad (IntPtrAdd (base_reg, immediate_offset), register_offset)
// - ProtectedStore (IntPtrAdd (base_reg, immediate_offset), register_offset)
//
// And this pass aims to transform them into:
// - ProtectedLoad (IntPtrAdd (base_reg, register_offset), immediate_offset)
// - ProtectedStore (IntPtrAdd (base_reg, register_offset), immediate_offset)
//
// This allows the reuse of a base pointer across multiple instructions, each of
// which then has the opportunity to use an immediate offset.
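//
// As an illustrative example (node shapes sketched here, not taken from a
// real graph), two accesses on the same effect chain such as:
//   ProtectedLoad (Int64Add (base, #8),  index)
//   ProtectedLoad (Int64Add (base, #16), index)
// can both be rewritten against a single shared Int64Add (base, index), with
// the constants #8 and #16 becoming immediate offsets of the loads, so only
// one addition remains in the graph.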

WasmAddressReassociation::WasmAddressReassociation(JSGraph* jsgraph, Zone* zone)
    : graph_(jsgraph->graph()),
      common_(jsgraph->common()),
      machine_(jsgraph->machine()),
      candidate_base_addrs_(zone),
      candidates_(zone),
      zone_(zone) {}

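// For each candidate key with enough users (see ShouldTryOptimize),
// materialize the shared base address once and rewrite every associated
// memory operation to use it together with an immediate offset.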
void WasmAddressReassociation::Optimize() {
  for (auto& candidate : candidates_) {
    const CandidateAddressKey& key = candidate.first;
    if (!ShouldTryOptimize(key)) continue;
    // We've found multiple instances of addresses in the form
    // object(base + imm_offset), reg_offset
    // So, create a new object for these operations to share and then use an
    // immediate offset:
    // object(base, reg_offset), imm_offset
    Node* new_object = CreateNewBase(key);
    CandidateMemOps& mem_ops = candidate.second;
    size_t num_nodes = mem_ops.GetNumNodes();
    for (size_t i = 0; i < num_nodes; ++i) {
      Node* mem_op = mem_ops.mem_op(i);
      Node* imm_offset =
          graph_->NewNode(common_->Int64Constant(mem_ops.imm_offset(i)));
      ReplaceInputs(mem_op, new_object, imm_offset);
    }
  }
}

bool WasmAddressReassociation::ShouldTryOptimize(
    const CandidateAddressKey& key) const {
  // We already process the graph in terms of effect chains in an attempt to
  // reduce the risk of creating large live-ranges, but we also set a lower
  // bound on the number of required users so that the benefits are more
  // likely to outweigh any detrimental effects, such as additions being
  // shared and so the number of operations increasing. Benchmarking showed
  // that two or more users was a good heuristic.
  return candidates_.at(key).GetNumNodes() > 1;
}

Node* WasmAddressReassociation::CreateNewBase(const CandidateAddressKey& key) {
  CandidateBaseAddr& candidate_base_addr = candidate_base_addrs_.at(key);
  Node* base = candidate_base_addr.base();
  Node* reg_offset = candidate_base_addr.offset();
  return graph_->NewNode(machine_->Int64Add(), base, reg_offset);
}

void WasmAddressReassociation::ReplaceInputs(Node* mem_op, Node* base,
                                             Node* offset) {
  DCHECK_GT(mem_op->InputCount(), 1);
  DCHECK(NodeProperties::IsConstant(offset));
  mem_op->ReplaceInput(0, base);
  mem_op->ReplaceInput(1, offset);
}

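// Pattern-matches the address inputs of a protected load or store. Three
// shapes are recognized below; in each case the constant parts are folded
// into a single immediate offset and the remaining nodes become the
// candidate base register and register offset.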
void WasmAddressReassociation::VisitProtectedMemOp(Node* node,
                                                   NodeId effect_chain) {
  DCHECK(node->opcode() == IrOpcode::kProtectedLoad ||
         node->opcode() == IrOpcode::kProtectedStore);

  Node* base(node->InputAt(0));
  Node* offset(node->InputAt(1));

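  // Case 1: both address inputs are Int64Adds of a non-constant and a
  // constant; the two constants are folded into a single immediate offset.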
  if (base->opcode() == IrOpcode::kInt64Add &&
      offset->opcode() == IrOpcode::kInt64Add) {
    Int64BinopMatcher base_add(base);
    Int64BinopMatcher offset_add(offset);
    if (base_add.right().HasResolvedValue() &&
        !base_add.left().HasResolvedValue() &&
        offset_add.right().HasResolvedValue() &&
        !offset_add.left().HasResolvedValue()) {
      Node* base_reg = base_add.left().node();
      Node* reg_offset = offset_add.left().node();
      int64_t imm_offset =
          base_add.right().ResolvedValue() + offset_add.right().ResolvedValue();
      return AddCandidate(node, base_reg, reg_offset, imm_offset, effect_chain);
    }
  }
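  // Case 2: only the base is such an Int64Add; the original offset input is
  // kept as the register offset.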
  if (base->opcode() == IrOpcode::kInt64Add) {
    Int64BinopMatcher base_add(base);
    if (base_add.right().HasResolvedValue() &&
        !base_add.left().HasResolvedValue()) {
      Node* base_reg = base_add.left().node();
      Node* reg_offset = node->InputAt(1);
      int64_t imm_offset = base_add.right().ResolvedValue();
      return AddCandidate(node, base_reg, reg_offset, imm_offset, effect_chain);
    }
  }
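  // Case 3: only the offset is such an Int64Add; the original base input is
  // kept as the base register.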
  if (offset->opcode() == IrOpcode::kInt64Add) {
    Int64BinopMatcher offset_add(offset);
    if (offset_add.right().HasResolvedValue() &&
        !offset_add.left().HasResolvedValue()) {
      Node* base_reg = node->InputAt(0);
      Node* reg_offset = offset_add.left().node();
      int64_t imm_offset = offset_add.right().ResolvedValue();
      return AddCandidate(node, base_reg, reg_offset, imm_offset, effect_chain);
    }
  }
}

void WasmAddressReassociation::AddCandidate(Node* mem_op, Node* base,
                                            Node* reg_offset,
                                            int64_t imm_offset,
                                            NodeId effect_chain) {
  // Sort base and offset so that the key is the same for either permutation.
  if (base->id() > reg_offset->id()) {
    std::swap(base, reg_offset);
  }
  CandidateAddressKey key =
      std::make_tuple(base->id(), reg_offset->id(), effect_chain);
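  // candidate_base_addrs_ and candidates_ are kept in step: a key is inserted
  // into both maps when first seen, so each (base, offset, effect chain) key
  // maps to exactly one CandidateBaseAddr and one list of memory operations.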
  bool is_new =
      candidate_base_addrs_.emplace(key, CandidateBaseAddr(base, reg_offset))
          .second;
  auto it = is_new ? candidates_.emplace(key, CandidateMemOps(zone_)).first
                   : candidates_.find(key);
  it->second.AddCandidate(mem_op, imm_offset);
}

bool WasmAddressReassociation::HasCandidateBaseAddr(
    const CandidateAddressKey& key) const {
  return candidate_base_addrs_.count(key);
}

void WasmAddressReassociation::CandidateMemOps::AddCandidate(
    Node* mem_op, int64_t imm_offset) {
  DCHECK(mem_op->opcode() == IrOpcode::kProtectedLoad ||
         mem_op->opcode() == IrOpcode::kProtectedStore);
  mem_ops_.push_back(mem_op);
  imm_offsets_.push_back(imm_offset);
}

size_t WasmAddressReassociation::CandidateMemOps::GetNumNodes() const {
  DCHECK_EQ(mem_ops_.size(), imm_offsets_.size());
  return mem_ops_.size();
}

Node* WasmAddressReassociation::CandidateMemOps::mem_op(size_t i) const {
  return mem_ops_[i];
}

int64_t WasmAddressReassociation::CandidateMemOps::imm_offset(size_t i) const {
  return imm_offsets_[i];
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8