v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
state-values-utils.cc
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/state-values-utils.h"

#include "src/compiler/bytecode-liveness-map.h"
#include "src/compiler/common-operator.h"

namespace v8 {
namespace internal {
namespace compiler {

StateValuesCache::StateValuesCache(JSGraph* js_graph)
    : js_graph_(js_graph),
      hash_map_(AreKeysEqual, ZoneHashMap::kDefaultHashMapCapacity,
                ZoneAllocationPolicy(zone())),
      working_space_(zone()),
      empty_state_values_(nullptr) {}

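// hash_map_ keys are either StateValuesKeys (value lists that have not been
// interned into a node yet, node == nullptr) or NodeKeys wrapping
// already-created StateValues nodes, so key equality has to handle every
// combination of the two.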
// static
bool StateValuesCache::AreKeysEqual(void* key1, void* key2) {
  NodeKey* node_key1 = reinterpret_cast<NodeKey*>(key1);
  NodeKey* node_key2 = reinterpret_cast<NodeKey*>(key2);

  if (node_key1->node == nullptr) {
    if (node_key2->node == nullptr) {
      return AreValueKeysEqual(reinterpret_cast<StateValuesKey*>(key1),
                               reinterpret_cast<StateValuesKey*>(key2));
    } else {
      return IsKeysEqualToNode(reinterpret_cast<StateValuesKey*>(key1),
                               node_key2->node);
    }
  } else {
    if (node_key2->node == nullptr) {
      // If the nodes are already processed, they must be the same.
      return IsKeysEqualToNode(reinterpret_cast<StateValuesKey*>(key2),
                               node_key1->node);
    } else {
      return node_key1->node == node_key2->node;
    }
  }
  UNREACHABLE();
}

// static
bool StateValuesCache::IsKeysEqualToNode(StateValuesKey* key, Node* node) {
  if (key->count != static_cast<size_t>(node->InputCount())) {
    return false;
  }

  DCHECK_EQ(IrOpcode::kStateValues, node->opcode());
  SparseInputMask node_mask = SparseInputMaskOf(node->op());

  if (node_mask != key->mask) {
    return false;
  }

  // Comparing real inputs rather than sparse inputs, since we already know the
  // sparse input masks are the same.
  for (size_t i = 0; i < key->count; i++) {
    if (key->values[i] != node->InputAt(static_cast<int>(i))) {
      return false;
    }
  }
  return true;
}

// static
bool StateValuesCache::AreValueKeysEqual(StateValuesKey* key1,
                                         StateValuesKey* key2) {
  if (key1->count != key2->count) {
    return false;
  }
  if (key1->mask != key2->mask) {
    return false;
  }
  for (size_t i = 0; i < key1->count; i++) {
    if (key1->values[i] != key2->values[i]) {
      return false;
    }
  }
  return true;
}

Node* StateValuesCache::GetEmptyStateValues() {
  if (empty_state_values_ == nullptr) {
    empty_state_values_ =
        graph()->NewNode(common()->StateValues(0, SparseInputMask::Dense()));
  }
  return empty_state_values_;
}

StateValuesCache::WorkingBuffer* StateValuesCache::GetWorkingSpace(
    size_t level) {
  if (working_space_.size() <= level) {
    working_space_.resize(level + 1);
  }
  return &working_space_[level];
}

namespace {

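// Combines the ids of the input nodes into a hash so that structurally
// identical value lists map to the same bucket; nullptr entries hash as zero.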
int StateValuesHashKey(Node** nodes, size_t count) {
  size_t hash = count;
  for (size_t i = 0; i < count; i++) {
    hash = hash * 23 + (nodes[i] == nullptr ? 0 : nodes[i]->id());
  }
  return static_cast<int>(hash & 0x7FFFFFFF);
}

}  // namespace

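// Returns the interned StateValues node for the given inputs and sparse input
// mask, creating and caching a new node on a miss.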
Node* StateValuesCache::GetValuesNodeFromCache(Node** nodes, size_t count,
                                               SparseInputMask mask) {
  StateValuesKey key(count, mask, nodes);
  int hash = StateValuesHashKey(nodes, count);
  ZoneHashMap::Entry* lookup = hash_map_.LookupOrInsert(&key, hash);
  DCHECK_NOT_NULL(lookup);
  Node* node;
  if (lookup->value == nullptr) {
    int node_count = static_cast<int>(count);
    node = graph()->NewNode(common()->StateValues(node_count, mask), node_count,
                            nodes);
    NodeKey* new_key = zone()->New<NodeKey>(node);
    lookup->key = new_key;
    lookup->value = node;
  } else {
    node = reinterpret_cast<Node*>(lookup->value);
  }
  return node;
}

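// Copies live values into node_buffer and records which virtual inputs are
// live in the returned sparse input mask; stops once kMaxInputCount real
// inputs or SparseInputMask::kMaxSparseInputs virtual inputs are reached.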
SparseInputMask::BitMaskType StateValuesCache::FillBufferWithValues(
    WorkingBuffer* node_buffer, size_t* node_count, size_t* values_idx,
    Node** values, size_t count, const BytecodeLivenessState* liveness) {
  SparseInputMask::BitMaskType input_mask = 0;

  // Virtual nodes are the live nodes plus the implicit optimized out nodes,
  // which are implied by the liveness mask.
  size_t virtual_node_count = *node_count;

  while (*values_idx < count && *node_count < kMaxInputCount &&
         virtual_node_count < SparseInputMask::kMaxSparseInputs) {
    DCHECK_LE(*values_idx, static_cast<size_t>(INT_MAX));

    if (liveness == nullptr ||
        liveness->RegisterIsLive(static_cast<int>(*values_idx))) {
      input_mask |= 1 << (virtual_node_count);
      (*node_buffer)[(*node_count)++] = values[*values_idx];
    }
    virtual_node_count++;

    (*values_idx)++;
  }

  DCHECK_GE(StateValuesCache::kMaxInputCount, *node_count);
  DCHECK_GE(SparseInputMask::kMaxSparseInputs, virtual_node_count);

  // Add the end marker at the end of the mask.
  input_mask |= SparseInputMask::kEndMarker << virtual_node_count;

  return input_mask;
}

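// Recursively packs the values into a tree of StateValues nodes in which no
// node has more than kMaxInputCount inputs; level is the remaining height of
// the subtree being built.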
Node* StateValuesCache::BuildTree(size_t* values_idx, Node** values,
                                  size_t count,
                                  const BytecodeLivenessState* liveness,
                                  size_t level) {
  WorkingBuffer* node_buffer = GetWorkingSpace(level);
  size_t node_count = 0;
  SparseInputMask::BitMaskType input_mask = SparseInputMask::kDenseBitMask;

  if (level == 0) {
    input_mask = FillBufferWithValues(node_buffer, &node_count, values_idx,
                                      values, count, liveness);
    // Make sure we returned a sparse input mask.
    DCHECK_NE(input_mask, SparseInputMask::kDenseBitMask);
  } else {
    while (*values_idx < count && node_count < kMaxInputCount) {
      if (count - *values_idx < kMaxInputCount - node_count) {
        // If we have fewer values remaining than inputs remaining, dump the
        // remaining values into this node.
        // TODO(leszeks): We could optimise this further by only counting
        // remaining live nodes.

        size_t previous_input_count = node_count;
        input_mask = FillBufferWithValues(node_buffer, &node_count, values_idx,
                                          values, count, liveness);
        // Make sure we have exhausted our values.
        DCHECK_EQ(*values_idx, count);
        // Make sure we returned a sparse input mask.
        DCHECK_NE(input_mask, SparseInputMask::kDenseBitMask);

        // Make sure we haven't touched inputs below previous_input_count in the
        // mask.
        DCHECK_EQ(input_mask & ((1 << previous_input_count) - 1), 0u);
        // Mark all previous inputs as live.
        input_mask |= ((1 << previous_input_count) - 1);

        break;

      } else {
        // Otherwise, add the values to a subtree and add that as an input.
        Node* subtree =
            BuildTree(values_idx, values, count, liveness, level - 1);
        (*node_buffer)[node_count++] = subtree;
        // Don't touch the bitmask, so that it stays dense.
      }
    }
  }

  if (node_count == 1 && input_mask == SparseInputMask::kDenseBitMask) {
    // Elide the StateValue node if there is only one, dense input. This will
    // only happen if we built a single subtree (as nodes with values are always
    // sparse), and so we can replace ourselves with it.
    DCHECK_EQ((*node_buffer)[0]->opcode(), IrOpcode::kStateValues);
    return (*node_buffer)[0];
  } else {
    return GetValuesNodeFromCache(node_buffer->data(), node_count,
                                  SparseInputMask(input_mask));
  }
}

#if DEBUG
namespace {

void CheckTreeContainsValues(Node* tree, Node** values, size_t count,
                             const BytecodeLivenessState* liveness) {
  DCHECK_EQ(count, StateValuesAccess(tree).size());

  int i;
  auto access = StateValuesAccess(tree);
  auto it = access.begin();
  auto itend = access.end();
  for (i = 0; it != itend; ++it, ++i) {
    if (liveness == nullptr || liveness->RegisterIsLive(i)) {
      DCHECK_EQ(it.node(), values[i]);
    } else {
      DCHECK_NULL(it.node());
    }
  }
  DCHECK_EQ(static_cast<size_t>(i), count);
}

}  // namespace
#endif

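// Entry point: builds (or reuses) a StateValues tree for the given values,
// encoding registers that the liveness bitvector marks dead as optimized-out
// gaps in the sparse input masks.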
Node* StateValuesCache::GetNodeForValues(
    Node** values, size_t count, const BytecodeLivenessState* liveness) {
#if DEBUG
  // Check that the values represent actual values, and not a tree of values.
  for (size_t i = 0; i < count; i++) {
    if (values[i] != nullptr) {
      DCHECK_NE(values[i]->opcode(), IrOpcode::kStateValues);
      DCHECK_NE(values[i]->opcode(), IrOpcode::kTypedStateValues);
    }
  }
  if (liveness != nullptr) {
    DCHECK_LE(count, static_cast<size_t>(liveness->register_count()));

    for (size_t i = 0; i < count; i++) {
      if (liveness->RegisterIsLive(static_cast<int>(i))) {
        DCHECK_NOT_NULL(values[i]);
      }
    }
  }
#endif

  if (count == 0) {
    return GetEmptyStateValues();
  }

  // This is a worst-case tree height estimate, assuming that all values are
  // live. We could get a better estimate by counting zeroes in the liveness
  // vector, but there's no point -- any excess height in the tree will be
  // collapsed by the single-input elision at the end of BuildTree.
  size_t height = 0;
  size_t max_inputs = kMaxInputCount;
  while (count > max_inputs) {
    height++;
    max_inputs *= kMaxInputCount;
  }

  size_t values_idx = 0;
  Node* tree = BuildTree(&values_idx, values, count, liveness, height);
  // The values should be exhausted by the end of BuildTree.
  DCHECK_EQ(values_idx, count);

  // The 'tree' must be rooted with a state value node.
  DCHECK_EQ(tree->opcode(), IrOpcode::kStateValues);

#if DEBUG
  CheckTreeContainsValues(tree, values, count, liveness);
#endif

  return tree;
}

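// StateValuesAccess::iterator walks a (possibly nested) tree of StateValues
// nodes, yielding one entry per virtual input and nullptr for optimized-out
// slots.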
StateValuesAccess::iterator::iterator(Node* node) : current_depth_(0) {
  stack_[current_depth_] =
      SparseInputMaskOf(node->op()).IterateOverInputs(node);
  EnsureValid();
}

SparseInputMask::InputIterator* StateValuesAccess::iterator::Top() {
  DCHECK_LE(0, current_depth_);
  DCHECK_GT(kMaxInlineDepth, current_depth_);
  return &(stack_[current_depth_]);
}

void StateValuesAccess::iterator::Push(Node* node) {
  current_depth_++;
  CHECK_GT(kMaxInlineDepth, current_depth_);
  stack_[current_depth_] =
      SparseInputMaskOf(node->op()).IterateOverInputs(node);
}

void StateValuesAccess::iterator::Pop() {
  DCHECK_LE(0, current_depth_);
  current_depth_--;
}

void StateValuesAccess::iterator::Advance() {
  Top()->Advance();
  EnsureValid();
}

size_t StateValuesAccess::iterator::AdvanceTillNotEmpty() {
  size_t count = 0;
  while (!done() && Top()->IsEmpty()) {
    count += Top()->AdvanceToNextRealOrEnd();
    EnsureValid();
  }
  return count;
}

void StateValuesAccess::iterator::EnsureValid() {
  while (true) {
    SparseInputMask::InputIterator* top = Top();

    if (top->IsEmpty()) {
      // We are on a valid (albeit optimized out) node.
      return;
    }

    if (top->IsEnd()) {
      // We have hit the end of this iterator. Pop the stack and move to the
      // next sibling iterator.
      Pop();
      if (done()) {
        // Stack is exhausted, we have reached the end.
        return;
      }
      Top()->Advance();
      continue;
    }

    // At this point the value is known to be live and within our input nodes.
    Node* value_node = top->GetReal();

    if (value_node->opcode() == IrOpcode::kStateValues ||
        value_node->opcode() == IrOpcode::kTypedStateValues) {
      // Nested state, we need to push to the stack.
      Push(value_node);
      continue;
    }

    // We are on a valid node, we can stop the iteration.
    return;
  }
}

Node* StateValuesAccess::iterator::node() {
  DCHECK(!done());
  return Top()->Get(nullptr);
}

MachineType StateValuesAccess::iterator::type() {
  Node* parent = Top()->parent();
  DCHECK(!Top()->IsEmpty());
  if (parent->opcode() == IrOpcode::kStateValues) {
    return MachineType::AnyTagged();
  } else {
    DCHECK_EQ(IrOpcode::kTypedStateValues, parent->opcode());

    ZoneVector<MachineType> const* types = MachineTypesOf(parent->op());
    return (*types)[Top()->real_index()];
  }
}

bool StateValuesAccess::iterator::operator!=(iterator const& other) const {
  // We only allow comparison with end().
  CHECK(other.done());
  return !done();
}

StateValuesAccess::iterator& StateValuesAccess::iterator::operator++() {
  DCHECK(!done());
  Advance();
  return *this;
}


StateValuesAccess::TypedNode StateValuesAccess::iterator::operator*() {
  return TypedNode(node(), type());
}

size_t StateValuesAccess::size() const {
  size_t count = 0;
  SparseInputMask mask = SparseInputMaskOf(node_->op());

  SparseInputMask::InputIterator iterator = mask.IterateOverInputs(node_);

  for (; !iterator.IsEnd(); iterator.Advance()) {
    if (iterator.IsEmpty()) {
      count++;
    } else {
      Node* value = iterator.GetReal();
      if (value->opcode() == IrOpcode::kStateValues ||
          value->opcode() == IrOpcode::kTypedStateValues) {
        count += StateValuesAccess(value).size();
      } else {
        count++;
      }
    }
  }

  return count;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8