v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
register-allocator-verifier.cc
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/backend/register-allocator-verifier.h"

#include <optional>

#include "src/compiler/backend/instruction.h"
#include "src/utils/ostreams.h"

namespace v8 {
namespace internal {
namespace compiler {

namespace {

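// Total number of operands an instruction carries: inputs, outputs and temps.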
size_t OperandCount(const Instruction* instr) {
  return instr->InputCount() + instr->OutputCount() + instr->TempCount();
}

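// Checks that no gap position of the instruction holds a ParallelMove yet.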
void VerifyEmptyGaps(const Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    CHECK_NULL(instr->GetParallelMove(inner_pos));
  }
}

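// Checks that every non-redundant move in the instruction's gaps has an
// allocated (or constant) source and an allocated destination.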
void VerifyAllocatedGaps(const Instruction* instr, const char* caller_info) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    const ParallelMove* moves = instr->GetParallelMove(inner_pos);
    if (moves == nullptr) continue;
    for (const MoveOperands* move : *moves) {
      if (move->IsRedundant()) continue;
      CHECK_WITH_MSG(
          move->source().IsAllocated() || move->source().IsConstant(),
          caller_info);
      CHECK_WITH_MSG(move->destination().IsAllocated(), caller_info);
    }
  }
}

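// Folds the various immediate encodings down to a plain int so that
// constraints can store and compare the value uniformly.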
int GetValue(const ImmediateOperand* imm) {
  switch (imm->type()) {
    case ImmediateOperand::INLINE_INT32:
      return imm->inline_int32_value();
    case ImmediateOperand::INLINE_INT64:
      return static_cast<int>(imm->inline_int64_value());
    case ImmediateOperand::INDEXED_RPO:
    case ImmediateOperand::INDEXED_IMM:
      return imm->indexed_value();
  }
}

}  // namespace

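// Phase one, run before the register allocator: record the constraint of
// every operand of every instruction. kSameAsInput outputs are resolved to
// their input's constraint here, so the check phase never sees them.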
RegisterAllocatorVerifier::RegisterAllocatorVerifier(
    Zone* zone, const RegisterConfiguration* config,
    const InstructionSequence* sequence, const Frame* frame)
    : zone_(zone),
      config_(config),
      sequence_(sequence),
      constraints_(zone),
      assessments_(zone),
      outstanding_assessments_(zone),
      spill_slot_delta_(frame->GetTotalFrameSlotCount() -
                        frame->GetSpillSlotCount()) {
  constraints_.reserve(sequence->instructions().size());
  // TODO(dcarney): model unique constraints.
  // Construct OperandConstraints for all InstructionOperands, eliminating
  // kSameAsInput along the way.
  for (const Instruction* instr : sequence->instructions()) {
    // All gaps should be totally unallocated at this point.
    VerifyEmptyGaps(instr);
    const size_t operand_count = OperandCount(instr);
    OperandConstraint* op_constraints =
        zone->AllocateArray<OperandConstraint>(operand_count);
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      BuildConstraint(instr->InputAt(i), &op_constraints[count]);
      VerifyInput(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      BuildConstraint(instr->TempAt(i), &op_constraints[count]);
      VerifyTemp(op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      BuildConstraint(instr->OutputAt(i), &op_constraints[count]);
      if (op_constraints[count].type_ == kSameAsInput) {
        int input_index = op_constraints[count].value_;
        CHECK_LT(input_index, instr->InputCount());
        op_constraints[count].type_ = op_constraints[input_index].type_;
        op_constraints[count].value_ = op_constraints[input_index].value_;
      }
      VerifyOutput(op_constraints[count]);
    }
    InstructionConstraint instr_constraint = {instr, operand_count,
                                              op_constraints};
    constraints()->push_back(instr_constraint);
  }
}

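// Sanity checks on freshly built constraints: kSameAsInput must already have
// been resolved, temps may not be immediates or constants, and non-immediate
// operands must carry a valid virtual register.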
void RegisterAllocatorVerifier::VerifyInput(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsInput, constraint.type_);
  if (constraint.type_ != kImmediate) {
    CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
             constraint.virtual_register_);
  }
}

void RegisterAllocatorVerifier::VerifyTemp(
    const OperandConstraint& constraint) {
  CHECK_NE(kSameAsInput, constraint.type_);
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(kConstant, constraint.type_);
}

void RegisterAllocatorVerifier::VerifyOutput(
    const OperandConstraint& constraint) {
  CHECK_NE(kImmediate, constraint.type_);
  CHECK_NE(InstructionOperand::kInvalidVirtualRegister,
           constraint.virtual_register_);
}

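// Phase two, part one, run after allocation: every operand of every
// instruction must satisfy the constraint recorded for it in phase one, and
// all remaining gap moves must be fully allocated.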
void RegisterAllocatorVerifier::VerifyAssignment(const char* caller_info) {
  caller_info_ = caller_info;
  CHECK(sequence()->instructions().size() == constraints()->size());
  auto instr_it = sequence()->begin();
  for (const auto& instr_constraint : *constraints()) {
    const Instruction* instr = instr_constraint.instruction_;
    // All gaps should be totally allocated at this point.
    VerifyAllocatedGaps(instr, caller_info_);
    const size_t operand_count = instr_constraint.operand_constaints_size_;
    const OperandConstraint* op_constraints =
        instr_constraint.operand_constraints_;
    CHECK_EQ(instr, *instr_it);
    CHECK(operand_count == OperandCount(instr));
    size_t count = 0;
    for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
      CheckConstraint(instr->InputAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
      CheckConstraint(instr->TempAt(i), &op_constraints[count]);
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
      CheckConstraint(instr->OutputAt(i), &op_constraints[count]);
    }
    ++instr_it;
  }
}

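// Maps an operand's allocation policy (constant, immediate, fixed slot,
// fixed register, must-have-register, ...) onto the OperandConstraint that
// will be checked against the allocator's output.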
void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op,
                                                OperandConstraint* constraint) {
  constraint->value_ = kMinInt;
  constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister;
  if (op->IsConstant()) {
    constraint->type_ = kConstant;
    constraint->value_ = ConstantOperand::cast(op)->virtual_register();
    constraint->virtual_register_ = constraint->value_;
  } else if (op->IsImmediate()) {
    const ImmediateOperand* imm = ImmediateOperand::cast(op);
    constraint->type_ = kImmediate;
    constraint->value_ = GetValue(imm);
  } else {
    CHECK(op->IsUnallocated());
    const UnallocatedOperand* unallocated = UnallocatedOperand::cast(op);
    int vreg = unallocated->virtual_register();
    constraint->virtual_register_ = vreg;
    if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) {
      constraint->type_ = kFixedSlot;
      constraint->value_ = unallocated->fixed_slot_index();
    } else {
      switch (unallocated->extended_policy()) {
        case UnallocatedOperand::REGISTER_OR_SLOT:
        case UnallocatedOperand::NONE:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kRegisterOrSlotFP;
          } else {
            constraint->type_ = kRegisterOrSlot;
          }
          break;
        case UnallocatedOperand::REGISTER_OR_SLOT_OR_CONSTANT:
          DCHECK(!sequence()->IsFP(vreg));
          constraint->type_ = kRegisterOrSlotOrConstant;
          break;
        case UnallocatedOperand::FIXED_REGISTER:
          if (unallocated->HasSecondaryStorage()) {
            constraint->type_ = kRegisterAndSlot;
            constraint->spilled_slot_ = unallocated->GetSecondaryStorage();
          } else {
            constraint->type_ = kFixedRegister;
          }
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::FIXED_FP_REGISTER:
          constraint->type_ = kFixedFPRegister;
          constraint->value_ = unallocated->fixed_register_index();
          break;
        case UnallocatedOperand::MUST_HAVE_REGISTER:
          if (sequence()->IsFP(vreg)) {
            constraint->type_ = kFPRegister;
          } else {
            constraint->type_ = kRegister;
          }
          break;
        case UnallocatedOperand::MUST_HAVE_SLOT:
          constraint->type_ = kSlot;
          constraint->value_ =
              ElementSizeLog2Of(sequence()->GetRepresentation(vreg));
          break;
        case UnallocatedOperand::SAME_AS_INPUT:
          constraint->type_ = kSameAsInput;
          constraint->value_ = unallocated->input_index();
          break;
      }
    }
  }
}

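// Checks a single post-allocation operand against its recorded constraint.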
void RegisterAllocatorVerifier::CheckConstraint(
    const InstructionOperand* op, const OperandConstraint* constraint) {
  switch (constraint->type_) {
    case kConstant:
      CHECK_WITH_MSG(op->IsConstant(), caller_info_);
      CHECK_EQ(ConstantOperand::cast(op)->virtual_register(),
               constraint->value_);
      return;
    case kImmediate: {
      CHECK_WITH_MSG(op->IsImmediate(), caller_info_);
      const ImmediateOperand* imm = ImmediateOperand::cast(op);
      int value = GetValue(imm);
      CHECK_EQ(value, constraint->value_);
      return;
    }
    case kRegister:
      CHECK_WITH_MSG(op->IsRegister(), caller_info_);
      return;
    case kFPRegister:
      CHECK_WITH_MSG(op->IsFPRegister(), caller_info_);
      return;
    case kFixedRegister:
    case kRegisterAndSlot:
      CHECK_WITH_MSG(op->IsRegister(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedFPRegister:
      CHECK_WITH_MSG(op->IsFPRegister(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->register_code(), constraint->value_);
      return;
    case kFixedSlot:
      CHECK_WITH_MSG(op->IsStackSlot() || op->IsFPStackSlot(), caller_info_);
      CHECK_EQ(LocationOperand::cast(op)->index(), constraint->value_);
      return;
    case kSlot:
      CHECK_WITH_MSG(op->IsStackSlot() || op->IsFPStackSlot(), caller_info_);
      CHECK_EQ(ElementSizeLog2Of(LocationOperand::cast(op)->representation()),
               constraint->value_);
      return;
    case kRegisterOrSlot:
      CHECK_WITH_MSG(op->IsRegister() || op->IsStackSlot(), caller_info_);
      return;
    case kRegisterOrSlotFP:
      CHECK_WITH_MSG(op->IsFPRegister() || op->IsFPStackSlot(), caller_info_);
      return;
    case kRegisterOrSlotOrConstant:
      CHECK_WITH_MSG(op->IsRegister() || op->IsStackSlot() || op->IsConstant(),
                     caller_info_);
      return;
    case kSameAsInput:
      CHECK_WITH_MSG(false, caller_info_);
      return;
  }
}

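// BlockAssessments tracks, per location, which virtual register the location
// currently holds. PerformMoves applies an instruction's START and END gap
// moves to that state.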
void BlockAssessments::PerformMoves(const Instruction* instruction) {
  const ParallelMove* first =
      instruction->GetParallelMove(Instruction::GapPosition::START);
  PerformParallelMoves(first);
  const ParallelMove* last =
      instruction->GetParallelMove(Instruction::GapPosition::END);
  PerformParallelMoves(last);
}

void BlockAssessments::PerformParallelMoves(const ParallelMove* moves) {
  if (moves == nullptr) return;

  CHECK(map_for_moves_.empty());
  for (MoveOperands* move : *moves) {
    if (move->IsEliminated() || move->IsRedundant()) continue;
    auto it = map_.find(move->source());
    // The RHS of a parallel move should have been already assessed.
    CHECK(it != map_.end());
    // The LHS of a parallel move should not have been assigned in this
    // parallel move.
    CHECK(map_for_moves_.find(move->destination()) == map_for_moves_.end());
    // The RHS of a parallel move should not be a stale reference.
    CHECK(!IsStaleReferenceStackSlot(move->source()));
    // Copy the assessment to the destination.
    map_for_moves_[move->destination()] = it->second;
  }
  for (auto pair : map_for_moves_) {
    // Re-insert the existing key for the new assignment so that it has the
    // correct representation (which is ignored by the canonicalizing map
    // comparator).
    InstructionOperand op = pair.first;
    map_.erase(op);
    map_.insert(pair);
    // Destination is no longer a stale reference.
    stale_ref_stack_slots().erase(op);
  }
  map_for_moves_.clear();
}

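// Calls clobber all registers, so forget any value currently assessed in a
// register (see the IsCall() handling in VerifyGapMoves below).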
void BlockAssessments::DropRegisters() {
  for (auto iterator = map().begin(), end = map().end(); iterator != end;) {
    auto current = iterator;
    ++iterator;
    InstructionOperand op = current->first;
    if (op.IsAnyRegister()) map().erase(current);
  }
}

void BlockAssessments::CheckReferenceMap(const ReferenceMap* reference_map) {
  // First mark all existing reference stack spill slots as stale.
  for (auto pair : map()) {
    InstructionOperand op = pair.first;
    if (op.IsStackSlot()) {
      const LocationOperand* loc_op = LocationOperand::cast(&op);
      // Only mark slots that are spill slots as stale; the reference map
      // doesn't track arguments or fixed stack slots, which are implicitly
      // tracked by the GC.
      if (CanBeTaggedOrCompressedPointer(loc_op->representation()) &&
          loc_op->index() >= spill_slot_delta()) {
        stale_ref_stack_slots().insert(op);
      }
    }
  }

  // Now remove any stack spill slots in the reference map from the list of
  // stale slots.
  for (auto ref_map_operand : reference_map->reference_operands()) {
    if (ref_map_operand.IsStackSlot()) {
      auto pair = map().find(ref_map_operand);
      CHECK(pair != map().end());
      stale_ref_stack_slots().erase(pair->first);
    }
  }
}

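// A reference-typed spill slot becomes stale when a reference map omits it
// (see CheckReferenceMap above); using it afterwards would read a pointer
// the GC no longer tracks.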
bool BlockAssessments::IsStaleReferenceStackSlot(InstructionOperand op,
                                                 std::optional<int> vreg) {
  if (!op.IsStackSlot()) return false;
  if (vreg.has_value() && !sequence_->IsReference(*vreg)) return false;

  const LocationOperand* loc_op = LocationOperand::cast(&op);
  return CanBeTaggedOrCompressedPointer(loc_op->representation()) &&
         stale_ref_stack_slots().find(op) != stale_ref_stack_slots().end();
}

void BlockAssessments::Print() const {
  StdoutStream os;
  for (const auto& pair : map()) {
    const InstructionOperand op = pair.first;
    const Assessment* assessment = pair.second;
    // Use operator<< so we can write the assessment on the same line.
    os << op << " : ";
    if (assessment->kind() == AssessmentKind::Final) {
      os << "v" << FinalAssessment::cast(assessment)->virtual_register();
    } else {
      os << "P";
    }
    if (stale_ref_stack_slots().find(op) != stale_ref_stack_slots().end()) {
      os << " (stale reference)";
    }
    os << std::endl;
  }
  os << std::endl;
}

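// Computes the entry assessments of a block from its predecessors: a block
// with a single predecessor and no phis copies the predecessor's state; at
// merge points every incoming operand becomes a PendingAssessment, resolved
// lazily when the operand is first used.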
BlockAssessments* RegisterAllocatorVerifier::CreateForBlock(
    const InstructionBlock* block) {
  RpoNumber current_block_id = block->rpo_number();

  BlockAssessments* ret =
      zone()->New<BlockAssessments>(zone(), spill_slot_delta(), sequence_);
  if (block->PredecessorCount() == 0) {
    // TODO(mtrofin): the following check should hold, however, in certain
    // unit tests it is invalidated by the last block. Investigate and
    // normalize the CFG.
    // CHECK_EQ(0, current_block_id.ToInt());
    // The phi size test below is because we can, technically, have phi
    // instructions with one argument. Some tests expose that, too.
  } else if (block->PredecessorCount() == 1 && block->phis().empty()) {
    const BlockAssessments* prev_block = assessments_[block->predecessors()[0]];
    ret->CopyFrom(prev_block);
  } else {
    for (RpoNumber pred_id : block->predecessors()) {
      // For every operand coming from any of the predecessors, create an
      // Unfinalized assessment.
      auto iterator = assessments_.find(pred_id);
      if (iterator == assessments_.end()) {
        // This block is the head of a loop, and this predecessor is the
        // loopback arc. Validate this is a loop case; otherwise the CFG is
        // malformed.
        CHECK(pred_id >= current_block_id);
        CHECK(block->IsLoopHeader());
        continue;
      }
      const BlockAssessments* pred_assessments = iterator->second;
      CHECK_NOT_NULL(pred_assessments);
      for (auto pair : pred_assessments->map()) {
        InstructionOperand operand = pair.first;
        if (ret->map().find(operand) == ret->map().end()) {
          ret->map().insert(std::make_pair(
              operand, zone()->New<PendingAssessment>(zone(), block, operand)));
        }
      }

      // Any reference stack slots that became stale in predecessors will be
      // stale here.
      ret->stale_ref_stack_slots().insert(
          pred_assessments->stale_ref_stack_slots().begin(),
          pred_assessments->stale_ref_stack_slots().end());
    }
  }
  return ret;
}

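// Resolves a pending (merge-point) assessment: walks the predecessor states,
// following phi inputs where the virtual register is a phi, and checks that
// the expected virtual register flows in on every arc. Predecessors on loop
// back-edges that haven't been processed yet are deferred to
// outstanding_assessments_.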
void RegisterAllocatorVerifier::ValidatePendingAssessment(
    RpoNumber block_id, InstructionOperand op,
    const BlockAssessments* current_assessments,
    PendingAssessment* const assessment, int virtual_register) {
  if (assessment->IsAliasOf(virtual_register)) return;

  // When validating a pending assessment, it is possible some of the
  // assessments for the original operand (the one the assessment was first
  // created for) are also pending. To avoid recursion, we use a work list.
  // To deal with cycles, we keep a set of seen nodes.
  Zone local_zone(zone()->allocator(), ZONE_NAME);
  ZoneQueue<std::pair<const PendingAssessment*, int>> worklist(&local_zone);
  ZoneSet<RpoNumber> seen(&local_zone);
  worklist.push(std::make_pair(assessment, virtual_register));
  seen.insert(block_id);

  while (!worklist.empty()) {
    auto work = worklist.front();
    const PendingAssessment* current_assessment = work.first;
    int current_virtual_register = work.second;
    InstructionOperand current_operand = current_assessment->operand();
    worklist.pop();

    const InstructionBlock* origin = current_assessment->origin();
    CHECK(origin->PredecessorCount() > 1 || !origin->phis().empty());

    // Check if the virtual register is a phi first, instead of relying on
    // the incoming assessments. In particular, this handles the case
    // v1 = phi v0 v0, which structurally is identical to v0 having been
    // defined at the top of a diamond, and arriving at the node joining the
    // diamond's branches.
    const PhiInstruction* phi = nullptr;
    for (const PhiInstruction* candidate : origin->phis()) {
      if (candidate->virtual_register() == current_virtual_register) {
        phi = candidate;
        break;
      }
    }

    int op_index = 0;
    for (RpoNumber pred : origin->predecessors()) {
      int expected =
          phi != nullptr ? phi->operands()[op_index] : current_virtual_register;

      ++op_index;
      auto pred_assignment = assessments_.find(pred);
      if (pred_assignment == assessments_.end()) {
        CHECK(origin->IsLoopHeader());
        auto [todo_iter, inserted] = outstanding_assessments_.try_emplace(pred);
        DelayedAssessments*& set = todo_iter->second;
        if (inserted) {
          set = zone()->New<DelayedAssessments>(zone());
        }
        set->AddDelayedAssessment(current_operand, expected);
        continue;
      }

      const BlockAssessments* pred_assessments = pred_assignment->second;
      auto found_contribution = pred_assessments->map().find(current_operand);
      CHECK(found_contribution != pred_assessments->map().end());
      Assessment* contribution = found_contribution->second;

      switch (contribution->kind()) {
        case Final:
          CHECK_EQ(FinalAssessment::cast(contribution)->virtual_register(),
                   expected);
          break;
        case Pending: {
          // This happens if we have a diamond feeding into another one, with
          // the inner one never used other than for carrying the value.
          const PendingAssessment* next = PendingAssessment::cast(contribution);
          auto [it, inserted] = seen.insert(pred);
          if (inserted) {
            worklist.push({next, expected});
          }
          // Note that we do not want to finalize pending assessments at the
          // beginning of a block - which is the information we'd have
          // available here. This is because this operand may be reused to
          // define duplicate phis.
          break;
        }
      }
    }
  }
  assessment->AddAlias(virtual_register);
}

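// Checks that |op| holds |virtual_register| at this point in the block,
// resolving pending assessments on demand.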
void RegisterAllocatorVerifier::ValidateUse(
    RpoNumber block_id, BlockAssessments* current_assessments,
    InstructionOperand op, int virtual_register) {
  auto iterator = current_assessments->map().find(op);
  // We should have seen this operand before.
  CHECK(iterator != current_assessments->map().end());
  Assessment* assessment = iterator->second;

  // The operand shouldn't be a stale reference stack slot.
  CHECK(!current_assessments->IsStaleReferenceStackSlot(op, virtual_register));

  switch (assessment->kind()) {
    case Final:
      CHECK_EQ(FinalAssessment::cast(assessment)->virtual_register(),
               virtual_register);
      break;
    case Pending: {
      PendingAssessment* pending = PendingAssessment::cast(assessment);
      ValidatePendingAssessment(block_id, op, current_assessments, pending,
                                virtual_register);
      break;
    }
  }
}

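// Phase two, part two: walk the blocks in RPO, simulating each block's gap
// moves, dropping temps and call-clobbered registers, recording definitions,
// and validating every use against the simulated state. Loop-header
// assessments deferred earlier are resolved once their back-edge block has
// been committed.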
void RegisterAllocatorVerifier::VerifyGapMoves() {
  CHECK(assessments_.empty());
  CHECK(outstanding_assessments_.empty());
  const size_t block_count = sequence()->instruction_blocks().size();
  for (size_t block_index = 0; block_index < block_count; ++block_index) {
    const InstructionBlock* block =
        sequence()->instruction_blocks()[block_index];
    BlockAssessments* block_assessments = CreateForBlock(block);

    for (int instr_index = block->code_start(); instr_index < block->code_end();
         ++instr_index) {
      const InstructionConstraint& instr_constraint = constraints_[instr_index];
      const Instruction* instr = instr_constraint.instruction_;
      block_assessments->PerformMoves(instr);

      const OperandConstraint* op_constraints =
          instr_constraint.operand_constraints_;
      size_t count = 0;
      for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
        if (op_constraints[count].type_ == kImmediate) {
          continue;
        }
        int virtual_register = op_constraints[count].virtual_register_;
        InstructionOperand op = *instr->InputAt(i);
        ValidateUse(block->rpo_number(), block_assessments, op,
                    virtual_register);
      }
      for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
        block_assessments->Drop(*instr->TempAt(i));
      }
      if (instr->IsCall()) {
        block_assessments->DropRegisters();
      }
      if (instr->HasReferenceMap()) {
        block_assessments->CheckReferenceMap(instr->reference_map());
      }
      for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
        int virtual_register = op_constraints[count].virtual_register_;
        block_assessments->AddDefinition(*instr->OutputAt(i), virtual_register);
        if (op_constraints[count].type_ == kRegisterAndSlot) {
          const AllocatedOperand* reg_op =
              AllocatedOperand::cast(instr->OutputAt(i));
          MachineRepresentation rep = reg_op->representation();
          const AllocatedOperand* stack_op = AllocatedOperand::New(
              zone(), LocationOperand::LocationKind::STACK_SLOT, rep,
              op_constraints[i].spilled_slot_);
          block_assessments->AddDefinition(*stack_op, virtual_register);
        }
      }
    }
    // Now commit the assessments for this block. If there are any delayed
    // assessments, ValidatePendingAssessment should see this block, too.
    assessments_[block->rpo_number()] = block_assessments;

    auto todo_iter = outstanding_assessments_.find(block->rpo_number());
    if (todo_iter == outstanding_assessments_.end()) continue;
    DelayedAssessments* todo = todo_iter->second;
    for (auto pair : todo->map()) {
      InstructionOperand op = pair.first;
      int vreg = pair.second;
      auto found_op = block_assessments->map().find(op);
      CHECK(found_op != block_assessments->map().end());
      // This block is a jump back to the loop header; ensure that the op
      // hasn't become a stale reference during the blocks in the loop.
      CHECK(!block_assessments->IsStaleReferenceStackSlot(op, vreg));
      switch (found_op->second->kind()) {
        case Final:
          CHECK_EQ(FinalAssessment::cast(found_op->second)->virtual_register(),
                   vreg);
          break;
        case Pending:
          ValidatePendingAssessment(block->rpo_number(), op, block_assessments,
                                    PendingAssessment::cast(found_op->second),
                                    vreg);
          break;
      }
    }
  }
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8
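
For orientation, a minimal sketch of how a client could drive this verifier follows. Only the RegisterAllocatorVerifier constructor and the VerifyAssignment/VerifyGapMoves entry points are taken from the code above; the driver function, its name, and the caller_info string are illustrative assumptions, not V8 pipeline code.

// Hypothetical driver (illustrative only): records constraints before the
// allocator runs, then checks the allocated code afterwards.
void RunAllocationWithVerification(Zone* zone,
                                   const RegisterConfiguration* config,
                                   const InstructionSequence* sequence,
                                   const Frame* frame) {
  // Phase one: snapshot every operand's constraint; gaps must still be empty.
  RegisterAllocatorVerifier verifier(zone, config, sequence, frame);

  // ... run the register allocator over |sequence| here ...

  // Phase two: operands must now satisfy their recorded constraints, and the
  // gap moves must form a consistent data flow for every virtual register.
  verifier.VerifyAssignment("end of register allocation");
  verifier.VerifyGapMoves();
}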