#define TRACE(...)                                       \
  do {                                                   \
    if (v8_flags.trace_turbo_alloc) PrintF(__VA_ARGS__); \
  } while (false)

static constexpr int kFloat32Bit =
    RepresentationBit(MachineRepresentation::kFloat32);
static constexpr int kSimd128Bit =
    RepresentationBit(MachineRepresentation::kSimd128);

const InstructionBlock* GetContainingLoop(const InstructionSequence* sequence,
                                          const InstructionBlock* block) {
  RpoNumber index = block->loop_header();
  if (!index.IsValid()) return nullptr;
  return sequence->InstructionBlockAt(index);
}

const InstructionBlock* GetInstructionBlock(const InstructionSequence* code,
                                            LifetimePosition pos) {
  return code->GetInstructionBlock(pos.ToInstructionIndex());
}

Instruction* GetLastInstruction(InstructionSequence* code,
                                const InstructionBlock* block) {
  return code->InstructionAt(block->last_instruction_index());
}
    if (a.first == b.first) {
      return a.second.Compare(b.second);
    }
    return a.first < b.first;
  bool register_beneficial = true;

      register_beneficial = false;

      register_beneficial = false;
  if (hint_ == nullptr) return false;

  *register_code = assigned_register;

  *register_code = assigned_register;
    : relative_id_(relative_id),
      top_level_(top_level),
      current_interval_(intervals_.begin()) {
void LiveRange::VerifyPositions() const {

    while (!interval->Contains(pos->pos()) && interval->end() != pos->pos()) {

void LiveRange::VerifyIntervals() const {

  LifetimePosition last_end = intervals().front().end();

       interval != intervals().end(); ++interval) {

    last_end = interval->end();
  old_next->next_ = nullptr;
  bool needs_revisit = false;

    if ((*pos_it)->HintRegister(register_index)) {

      needs_revisit = needs_revisit ||

  if (!needs_revisit) {
        return use->pos() < start;

      [](const UsePosition* pos) { return pos->RegisterIsBeneficial(); });
  if (next_use == nullptr) return End();
  return next_use->pos();

  return pos->type() == UsePositionType::kRequiresRegister ||
         pos->SpillDetrimental();

  return pos->type() == UsePositionType::kRequiresRegister;
  if (use_pos == nullptr) return true;

  if (TopLevel()->HasSpillOperand()) {
    DCHECK(!op->IsUnallocated());

        return interval.end() < position;

  if (to_start_of->start() > but_not_past) return;

        return position < interval.end();
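// LiveRange::SplitAt (fragment below): both the interval list and the use
// position span are partitioned at `position`. Judging from the two
// lower_bound comparators, a use exactly at the split position moves to the
// new child when the split lands on an interval start, and stays with the
// first part otherwise.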
  bool split_at_start = false;
  if (split_interval->start() == position) {
    split_at_start = true;
  } else if (split_interval->Contains(position)) {

  if (split_at_start) {

    split_position_it = std::lower_bound(

          return use_pos->pos() < pos;

    split_position_it = std::lower_bound(

          return use_pos->pos() <= pos;

  size_t result_size = std::distance(split_position_it, positions_span_.end());

  result->current_hint_position_index_ =

  VerifyChildStructure();
  result->VerifyChildStructure();
    if (!pos->HasOperand()) continue;
    switch (pos->type()) {

  if (start == other_start) {

    if (other->positions_span_.empty()) return true;

    UsePosition* other_pos = other->positions_span_.first();

    if (pos->pos() == other_pos->pos())
      return TopLevel()->vreg() < other->TopLevel()->vreg();
    return pos->pos() < other_pos->pos();

  return start < other_start;

    if (!pos->HasOperand()) continue;
    switch (pos->type()) {

        pos->set_assigned_register(register_index);
           interval[1].start() >= interval->start());

      [=](const UseInterval& interval) { return interval.end() >= position; });

  return interval->end();

        return interval.start() >= position;
  if (IsEmpty() || other->IsEmpty() || other->Start() > End() ||

    if (a->start() > min_end || b->start() > min_end) break;

    if (cur_intersection.IsValid()) {
      return cur_intersection;

    if (a->start() < b->start()) {

      if (a == intervals().end() || a->start() > other->End()) break;
                      bool with_children) const {

  for (const LiveRange* i = this; i != nullptr; i = i->next()) {
    os << wrapper << std::endl;
    if (!with_children) break;

  *hint = bundle->reg();
      spill_operand_(nullptr),
      spill_move_insertion_locations_(nullptr),

      spilled_in_deferred_blocks_(false),
      has_preassigned_slot_(false),

  bits_ |= SpillTypeField::encode(SpillType::kNoSpillType);
  Zone* zone = sequence->zone();

       to_spill != nullptr; to_spill = to_spill->next) {

    move->AddMove(*to_spill->operand, op);
    instr->block()->mark_needs_frame();

       to_spill != nullptr;
       previous = to_spill, to_spill = to_spill->next) {

      if (move_op->IsEliminated()) continue;
      if (move_op->source().Equals(*to_spill->operand) &&
          move_op->destination().Equals(op)) {

    instr->block()->mark_needs_frame();
  DCHECK(!operand->IsUnallocated() && !operand->IsImmediate());

    child = child->next();

    return range->End() <= pos;
void TopLevelLiveRange::Verify() const {
  VerifyChildrenInOrder();
  for (const LiveRange* child = this; child != nullptr;
       child = child->next()) {
    VerifyChildStructure();

void TopLevelLiveRange::VerifyChildrenInOrder() const {
  LifetimePosition last_end = End();
  for (const LiveRange* child = this->next(); child != nullptr;
       child = child->next()) {

    last_end = child->End();
  TRACE("Ensure live range %d in interval [%d %d[\n", vreg(), start.value(),

  if (end_ < new_end) {

  TRACE("Add to live range %d interval [%d %d[\n", vreg(), start.value(),

  if (end == first_interval.start()) {

  } else if (end < first_interval.start()) {

  TRACE("Add to live range %d use position %d\n", vreg(),

        return UsePosition::Ordering()(use_pos, pos);
  os << "Range: " << range->TopLevel()->vreg() << ":" << range->relative_id()

  if (range->TopLevel()->is_phi()) os << "phi ";
  if (range->TopLevel()->is_non_loop_phi()) os << "nlphi ";

  os << "{" << std::endl;

    if (use_pos->HasOperand()) {
      os << *use_pos->operand() << use_pos->pos() << " ";

  for (const UseInterval& interval : range->intervals()) {
    interval.PrettyPrint(os);
  for (auto block : blocks) {

        block->first_instruction_index());

        block->last_instruction_index())

    int length = end_pos.value() - start_pos.value();
    constexpr int kMaxPrefixLength = 32;
    char buffer[kMaxPrefixLength];
    int rpo_number = block->rpo_number().ToInt();
    const char* deferred_marker = block->IsDeferred() ? "(deferred)" : "";
    int max_prefix_length = std::min(length, kMaxPrefixLength);
    int prefix = snprintf(buffer, max_prefix_length, "[-B%d-%s", rpo_number,

    int remaining = length - std::min(prefix, max_prefix_length) - 1;
    for (int i = 0; i < remaining; ++i) os << '-';
  os << std::setw(3) << toplevel->vreg() << ": ";

  const char* kind_string;

  for (const LiveRange* range = toplevel; range != nullptr;
       range = range->next()) {
    for (const UseInterval& interval : range->intervals()) {

      int length = end.value() - start.value();
      constexpr int kMaxPrefixLength = 32;
      char buffer[kMaxPrefixLength];
      int max_prefix_length = std::min(length + 1, kMaxPrefixLength);

      if (range->spilled()) {
        prefix = snprintf(buffer, max_prefix_length, "|%s", kind_string);

        prefix = snprintf(buffer, max_prefix_length, "|%s",

      position += std::min(prefix, max_prefix_length - 1);

      const char line_style = range->spilled() ? '-' : '=';
  std::ostringstream os;
  PrintBlockRow(os, code()->instruction_blocks());
  for (auto const toplevel : data()->fixed_live_ranges()) {
    if (toplevel == nullptr) continue;

  for (auto toplevel : data()->live_ranges()) {

    if (rowcount++ % 10 == 0) PrintBlockRow(os, code()->instruction_blocks());

  PrintF("%s\n", os.str().c_str());
      assigned_slot_(kUnassignedSlot),

  for (const LiveRange* range = parent; range != nullptr;
       range = range->next()) {

    bool can_coalesce = last_end == interval.start();

    last_end = interval.end();
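// AreUseIntervalsIntersecting (fragment below) sweeps two sorted interval
// sequences with two iterators, advancing whichever interval ends first and
// returning the first overlapping pair. The size-based swap appears to keep
// `a` as the shorter sequence, so the initial lower_bound can cheaply skip
// the `b` intervals that end before `a` begins.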
static std::optional<std::pair<UseInterval, UseInterval>>

             std::is_sorted(b.begin(), b.end()));
  if (a.empty() || b.empty() || a.last().end() <= b.first().start() ||
      b.last().end() <= a.first().start()) {

  if (a.size() > b.size()) {

  auto a_it = a.begin();

  auto b_it = std::lower_bound(

        return interval.end() < position;

  while (a_it != a.end() && b_it != b.end()) {
    if (a_it->end() <= b_it->start()) {

    } else if (b_it->end() <= a_it->start()) {

      return std::make_pair(*a_it, *b_it);
template <typename ContainerA, typename ContainerB>

                                      const ContainerA& a,
                                      const ContainerB& b) {

  if (HasSlot() || other->HasSlot()) return false;
  if (byte_width() != other->byte_width()) return false;
  other->intervals_.clear();

    DCHECK(range->GetSpillRange() == other);
    range->SetSpillRange(this);

  ranges_.insert(ranges_.end(), other->ranges_.begin(), other->ranges_.end());
  other->ranges_.clear();

  os << "{" << std::endl;

    os << range->vreg() << " ";

    interval.PrettyPrint(os);

  os << "}" << std::endl;
      incoming_operands_(zone),

  incoming_operands_.push_back(operand);

                  this->config()->num_general_registers(),

                  this->config()->num_double_registers(),

                  this->config()->num_simd128_registers(),

                  this->config()->num_simd128_registers(),

  for (int i = 0; i < code->VirtualRegisterCount(); ++i) {

  return moves->AddMove(from, to);
    int virtual_register) {
  DCHECK_LT(virtual_register, code()->VirtualRegisterCount());

  phi_map_.insert(std::make_pair(phi->virtual_register(), map_value));

    int virtual_register) {
  auto it = phi_map_.find(virtual_register);

  PrintF("Register allocator error: live v%d reached first block.\n",

  PrintF("  (first use is at position %d in instruction %d)\n",
         range->positions().first()->pos().value(),
         range->positions().first()->pos().ToInstructionIndex());
  const size_t live_ranges_size = live_ranges().size();

    if (range->IsEmpty() ||

            ->GetInstructionBlock(range->Start().ToInstructionIndex())

    for (const UseInterval& interval : range->intervals()) {
      int first = interval.FirstGapIndex();
      int last = interval.LastGapIndex();

        if (!block->IsDeferred()) return false;
  DCHECK(!range->HasSpillOperand());

  SpillRange* spill_range = range->GetAllocatedSpillRange();
  if (spill_range == nullptr) {

      (range->spill_type() != SpillType::kSpillRange)) {
    range->set_spill_type(SpillType::kDeferredSpillRange);

    range->set_spill_type(SpillType::kSpillRange);
    int alias_base_index = -1;
    int aliases = config()->GetAliases(

    DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));

      int aliased_reg = alias_base_index + aliases;

    int alias_base_index = -1;
    int aliases = config()->GetAliases(

    DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));
    while (aliases-- && !result) {
      int aliased_reg = alias_base_index + aliases;

    int alias_base_index = -1;
    int aliases = config()->GetAliases(

    DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));

      int aliased_reg = alias_base_index + aliases;
  return pos.IsFullStart() &&
         (static_cast<size_t>(pos.ToInstructionIndex()) ==

          code()->GetInstructionBlock(pos.ToInstructionIndex())->code_start() ==
              pos.ToInstructionIndex());
      data()->config()->IsAllocatableGeneralCode(

      data()->config()->IsAllocatableFloatCode(

      data()->config()->IsAllocatableDoubleCode(

      data()->config()->IsAllocatableSimd128Code(

  if (is_input && allocated.IsAnyRegister()) {

    TRACE("Fixed reg is tagged at %d\n", pos);
    if (instr->HasReferenceMap()) {
  int start = block->first_instruction_index();
  int end = block->last_instruction_index();

  int end = block->last_instruction_index();

  DCHECK(!output_operand->IsConstant());

  int output_vreg = output->virtual_register();

  bool assigned = false;
  if (output->HasFixedPolicy()) {

    if (output->IsStackSlot()) {

                data()->frame()->GetSpillSlotCount());

      range->SetSpillStartIndex(end);

  for (const RpoNumber& succ : block->successors()) {

  for (const RpoNumber& succ : block->successors()) {

      range->SetSpillStartIndex(gap_index);
  for (size_t i = 0; i < first->TempCount(); i++) {

  for (size_t i = 0; i < first->OutputCount(); i++) {

    if (output->IsConstant()) {
      int output_vreg = ConstantOperand::cast(output)->virtual_register();

      range->SetSpillOperand(output);

      bool assigned = false;

        range->MarkHasPreassignedSlot();

      AllocateFixed(first_output, instr_index, is_tagged, false, true);

               data()->frame()->GetTotalFrameSlotCount());

        range->SetSpillStartIndex(instr_index + 1);

        range->SetSpillStartIndex(instr_index + 1);
  for (size_t i = 0; i < second->InputCount(); i++) {

    if (input->IsImmediate()) {

    if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {
      bool already_spilled = false;
      if (spilled_consts == nullptr) {

            std::find(spilled_consts->begin(), spilled_consts->end(), range);
        already_spilled = it != spilled_consts->end();

      if (it == data()->slot_for_const_range().end()) {
        DCHECK(!already_spilled);

                                             range->representation(), index);

      if (!already_spilled) {
        auto* slot = it->second;

      AllocateFixed(cur_input, instr_index, is_tagged, true, false);

  for (size_t i = 0; i < second->OutputCount(); i++) {

    if (!output->IsUnallocated()) continue;

                                   input_copy, *cur_input);

    if (code()->IsReference(input_vreg) && !code()->IsReference(output_vreg)) {
      if (second->HasReferenceMap()) {
    int phi_vreg = phi->virtual_register();

    for (size_t i = 0; i < phi->operands().size(); ++i) {

                                 phi->operands()[i]);

         ->HasReferenceMap());

    int gap_index = block->first_instruction_index();

    : data_(data), phi_hints_(local_zone) {}
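// ComputeLiveOut (fragment below): a block's live-out set is the union of the
// live-in sets of its successors, plus the successor phi inputs that flow in
// along this edge. Back edges (succ <= block in RPO) are skipped here; loop
// liveness is patched up separately when the loop header is processed.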
  size_t block_index = block->rpo_number().ToSize();

  if (live_out == nullptr) {

    Zone* zone = data->allocation_zone();

    for (const RpoNumber& succ : block->successors()) {

      if (succ <= block->rpo_number()) continue;

      if (live_in != nullptr) live_out->Union(*live_in);

      live_out->Add(phi->operands()[index]);

    data->live_out_sets()[block_index] = live_out;

      block->first_instruction_index());

      block->last_instruction_index())

  for (int operand_index : *live_out) {
  int offset = spill_mode == SpillMode::kSpillAtDefinition
                   ? 0
                   : config()->num_general_registers();

    result->set_assigned_register(index);

    if (spill_mode == SpillMode::kSpillDeferred) {
      result->set_deferred_fixed();

  int num_regs = config()->num_double_registers();

    num_regs = config()->num_float_registers();

    num_regs = config()->num_simd128_registers();

  int offset = spill_mode == SpillMode::kSpillAtDefinition ? 0 : num_regs;

  DCHECK(index < num_regs);

    result->set_assigned_register(index);

    if (spill_mode == SpillMode::kSpillDeferred) {
      result->set_deferred_fixed();

  int num_regs = config()->num_simd128_registers();

  int offset = spill_mode == SpillMode::kSpillAtDefinition ? 0 : num_regs;

  DCHECK(index < num_regs);

    result->set_assigned_register(index);

    if (spill_mode == SpillMode::kSpillDeferred) {
      result->set_deferred_fixed();
  if (operand->IsUnallocated()) {

        UnallocatedOperand::cast(operand)->virtual_register());
  } else if (operand->IsConstant()) {

        ConstantOperand::cast(operand)->virtual_register());

  if (range == nullptr) return nullptr;

  if (range->IsEmpty() || range->Start() > position) {

  if (!operand->IsUnallocated()) return nullptr;

  if (range == nullptr) return nullptr;

  if (operand->IsUnallocated()) {
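// ProcessInstructions (fragment below) walks each block's instructions
// backwards, from the last index down to block_start: outputs are removed
// from the live set as definitions and inputs are added back as uses, so the
// block's live-in falls out of its live-out.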
  int block_start = block->first_instruction_index();

  bool fixed_float_live_ranges = false;
  bool fixed_simd128_live_ranges = false;

    fixed_float_live_ranges = (mask & kFloat32Bit) != 0;
    fixed_simd128_live_ranges = (mask & kSimd128Bit) != 0;

    fixed_simd128_live_ranges = (mask & kSimd128Bit) != 0;

  for (int index = block->last_instruction_index(); index >= block_start;

    for (size_t i = 0; i < instr->OutputCount(); i++) {

      if (output->IsUnallocated()) {

        DCHECK(!UnallocatedOperand::cast(output)->HasSlotPolicy());
        int out_vreg = UnallocatedOperand::cast(output)->virtual_register();
        live->Remove(out_vreg);
      } else if (output->IsConstant()) {
        int out_vreg = ConstantOperand::cast(output)->virtual_register();
        live->Remove(out_vreg);

      if (block->IsHandler() && index == block_start && output->IsAllocated() &&
          output->IsRegister() &&

      Define(curr_position, output, spill_mode);
  if (instr->ClobbersRegisters()) {
    for (int i = 0; i < config()->num_allocatable_general_registers(); ++i) {

        int code = config()->GetAllocatableGeneralCode(i);

        range->AddUseInterval(curr_position, curr_position.End(),

  if (instr->ClobbersDoubleRegisters()) {
    for (int i = 0; i < config()->num_allocatable_double_registers(); ++i) {

        int code = config()->GetAllocatableDoubleCode(i);

        range->AddUseInterval(curr_position, curr_position.End(),

    if (fixed_float_live_ranges) {
      for (int i = 0; i < config()->num_allocatable_float_registers();

        int code = config()->GetAllocatableFloatCode(i);

        range->AddUseInterval(curr_position, curr_position.End(),

    if (fixed_simd128_live_ranges) {
      for (int i = 0; i < config()->num_allocatable_simd128_registers();

        int code = config()->GetAllocatableSimd128Code(i);

        range->AddUseInterval(curr_position, curr_position.End(),

    if (fixed_simd128_live_ranges) {
      for (int i = 0; i < config()->num_allocatable_simd128_registers();

        int code = config()->GetAllocatableSimd128Code(i);

        range->AddUseInterval(curr_position, curr_position.End(),
  for (size_t i = 0; i < instr->InputCount(); i++) {

    if (input->IsImmediate()) {

    if (input->IsUnallocated() &&
        UnallocatedOperand::cast(input)->IsUsedAtStart()) {
      use_pos = curr_position;

      use_pos = curr_position.End();

    if (input->IsUnallocated()) {

    Use(block_start_position, use_pos, input, spill_mode);

  for (size_t i = 0; i < instr->TempCount(); i++) {

           !UnallocatedOperand::cast(temp)->HasSlotPolicy());
    if (instr->ClobbersTemps()) {

      if (temp->IsUnallocated()) {

    Use(block_start_position, curr_position.End(), temp, spill_mode);
    Define(curr_position, temp, spill_mode);

  curr_position = curr_position.PrevStart();

    if (move == nullptr) continue;

      curr_position = curr_position.End();

      curr_position = curr_position.Start();
    if (to.IsUnallocated()) {
      int to_vreg = UnallocatedOperand::cast(to).virtual_register();

      if (to_range->is_phi()) {

        if (live->Contains(to_vreg)) {

          Define(curr_position, &to, &from,

          live->Remove(to_vreg);

        Define(curr_position, &to, spill_mode);

    UsePosition* from_use = Use(block_start_position, curr_position, &from,
                                hint, hint_type, spill_mode);

    if (from.IsUnallocated()) {
      live->Add(UnallocatedOperand::cast(from).virtual_register());

    if (to.IsAnyRegister() ||
        (to.IsUnallocated() &&
         UnallocatedOperand::cast(&to)->HasRegisterPolicy())) {

    if (to_use != nullptr && from_use != nullptr) {

    int phi_vreg = phi->virtual_register();
    live->Remove(phi_vreg);
    int hint_preference = 0;

    int predecessor_limit = 2;

    for (RpoNumber predecessor : block->predecessors()) {

      if (predecessor >= block->rpo_number()) continue;

          GetLastInstruction(code(), predecessor_block);

        if (to.IsUnallocated() &&
            UnallocatedOperand::cast(to).virtual_register() == phi_vreg) {
          predecessor_hint = &move->source();

      int predecessor_hint_preference = 0;
      const int kNotDeferredBlockPreference = (1 << 2);
      const int kMoveIsAllocatedPreference = (1 << 1);
      const int kBlockIsEmptyPreference = (1 << 0);
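      // The three flags form a small priority bitmask: coming from a
      // non-deferred block (1 << 2) outweighs any combination of the lower
      // bits, an already-allocated move source beats an empty block, and at
      // most two predecessors are examined (predecessor_limit above).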
        predecessor_hint_preference |= kNotDeferredBlockPreference;

      if (moves != nullptr) {

        if (predecessor_hint->Equals(to)) {
          if (move->source().IsAllocated()) {
            predecessor_hint_preference |= kMoveIsAllocatedPreference;

        predecessor_hint_preference |= kBlockIsEmptyPreference;

      if ((hint == nullptr) ||
          (predecessor_hint_preference > hint_preference)) {

        hint = predecessor_hint;
        hint_preference = predecessor_hint_preference;

      if (--predecessor_limit <= 0) break;

      block->first_instruction_index());

  DCHECK(block->IsLoopHeader());

      block->first_instruction_index());

      code()->LastLoopInstructionIndex(block))

  for (int operand_index : *live) {
  for (int i = block->rpo_number().ToInt() + 1; i < block->loop_end().ToInt();

  for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;

    if (range->has_slot_use() && range->HasNoSpillType()) {

          range->slot_use_kind() ==

              ? SpillMode::kSpillDeferred
              : SpillMode::kSpillAtDefinition;
    if (range->HasSpillOperand() && range->GetSpillOperand()->IsConstant()) {

        if (!pos->pos().IsGapPosition()) {

        pos->set_type(new_type, true);

    range->ResetCurrentHintPosition();

  for (auto preassigned : data()->preassigned_slot_ranges()) {

    int slot_id = preassigned.second;

                           ? range->GetSpillRange()

                               range, SpillMode::kSpillAtDefinition);

  auto res = phi_hints_.insert(std::make_pair(operand, use_pos));

  DCHECK(!it->second->IsResolved());
  it->second->ResolveHint(use_pos);
void LiveRangeBuilder::Verify() const {

    DCHECK(hint.second->IsResolved());

  for (TopLevelLiveRange* current : data()->live_ranges()) {

    if (!current->IsEmpty()) {

      if (current->intervals().size() < 2) continue;

      DCHECK(NextIntervalStartsInDifferentBlocks(*interval, *next_interval));

      for (++interval; interval != current->intervals().end(); ++interval) {

        DCHECK(IntervalStartsAtBlockBoundary(*interval));

        DCHECK(IntervalPredecessorsCoveredByRange(*interval, current));
        next_interval = interval + 1;
        if (next_interval != current->intervals().end()) {

              NextIntervalStartsInDifferentBlocks(*interval, *next_interval));
bool LiveRangeBuilder::IntervalStartsAtBlockBoundary(
    UseInterval interval) const {
  LifetimePosition start = interval.start();
  if (!start.IsFullStart()) return false;
  int instruction_index = start.ToInstructionIndex();
  const InstructionBlock* block =

bool LiveRangeBuilder::IntervalPredecessorsCoveredByRange(
    UseInterval interval, TopLevelLiveRange* range) const {
  LifetimePosition start = interval.start();
  int instruction_index = start.ToInstructionIndex();
  const InstructionBlock* block =

  for (RpoNumber pred_index : block->predecessors()) {
    const InstructionBlock* predecessor =

        predecessor->last_instruction_index());
    last_pos = last_pos.NextStart().End();
    if (!range->Covers(last_pos)) return false;

bool LiveRangeBuilder::NextIntervalStartsInDifferentBlocks(
    UseInterval interval, UseInterval next) const {
  LifetimePosition end = interval.end();
  LifetimePosition next_start = next.start();

  end = end.IsStart() ? end.PrevStart().End() : end.Start();
  int last_covered_index = end.ToInstructionIndex();
  const InstructionBlock* block =

  const InstructionBlock* next_block =

  return block->rpo_number() < next_block->rpo_number();
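// BundleBuilder::BuildBundles (fragment below) groups each phi's output range
// with its input ranges into a LiveRangeBundle so they prefer the same
// register and, later, the same spill slot. An input whose range starts after
// the phi's output comes from a back edge; merging with it is skipped and the
// interference is recorded instead.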
  TRACE("Build bundles\n");

  for (int block_id = code()->InstructionBlockCount() - 1; block_id >= 0;

    TRACE("Block B%d\n", block_id);
    for (auto phi : block->phis()) {

      if (out == nullptr) {

      TRACE("Processing phi for v%d with %d:%d\n", phi->virtual_register(),

      bool phi_interferes_with_backedge_input = false;
      for (auto input : phi->operands()) {

        TRACE("Input value v%d with range %d:%d\n", input,

        if (input_bundle != nullptr) {

          if (merged != nullptr) {

            TRACE("Merged %d and %d to %d\n", phi->virtual_register(), input,

          } else if (input_range->Start() > out_range->Start()) {

            phi_interferes_with_backedge_input = true;

          if (out->TryAddRange(input_range)) {
            TRACE("Added %d and %d to %d\n", phi->virtual_register(), input,

          } else if (input_range->Start() > out_range->Start()) {

            phi_interferes_with_backedge_input = true;

      if (phi_interferes_with_backedge_input)

    TRACE("Done block B%d\n", block_id);
  ranges_.insert(range_insert_it, 1, range);
  range->set_bundle(this);

           *interval_insert_it != interval);
  intervals_.insert(interval_insert_it, 1, interval);

  if (rhs == lhs) return lhs;

  auto [interval1, interval2] = *found;
  TRACE("No merge %d:%d %d:%d\n", interval1.start().value(),
        interval1.end().value(), interval2.start().value(),
        interval2.end().value());
    std::swap(lhs, rhs);

    if (range->TopLevel()->HasSpillRange()) {
      SpillRange* current = range->TopLevel()->GetSpillRange();
      if (target == nullptr) {

      } else if (target != current) {
        target->TryMerge(current);

      num_allocatable_registers_(

      allocatable_register_codes_(

      check_fp_aliasing_(false) {
       (kFloat32Bit | kSimd128Bit)) != 0;

    const LiveRange* range, int instruction_index) {

  if (range->Start() >= ret || ret >= range->End()) {

  for (size_t i = 0; i < initial_range_count; ++i) {
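// SplitAndSpillRangesDefinedByMemoryOperand (fragment below): a range that is
// defined by a spill operand does not need a register at its definition. If
// it has no register-beneficial use it is spilled outright; otherwise it is
// split just before the first such use and only the head is spilled.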
    if (range->HasNoSpillType() ||
        (range->HasSpillRange() && !range->has_non_deferred_slot_use())) {

    TRACE("Live range %d:%d is defined by a spill operand.\n",
          range->TopLevel()->vreg(), range->relative_id());

    UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);

    if (pos == nullptr) {
      Spill(range, SpillMode::kSpillAtDefinition);
    } else if (pos->pos() > range->Start().NextStart()) {

          range, pos->pos().ToInstructionIndex());

      if (!split_pos.IsValid()) continue;

      Spill(range, SpillMode::kSpillAtDefinition);
  DCHECK(!range->TopLevel()->IsFixed());
  TRACE("Splitting live range %d:%d at %d\n", range->TopLevel()->vreg(),
        range->relative_id(), pos.value());

         (GetInstructionBlock(code(), pos)->last_instruction_index() !=
          pos.ToInstructionIndex()));

  DCHECK(!range->TopLevel()->IsFixed());
  TRACE("Splitting live range %d:%d in position between [%d, %d]\n",
        range->TopLevel()->vreg(), range->relative_id(), start.value(),

  int end_instr = end.ToInstructionIndex();

  if (start_instr == end_instr) return end;

  if (end_block == start_block) {

  if (loop == nullptr ||
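// FindOptimalSpillingPos (fragment below) walks up the loop nest from the
// intended spill position: if the range is live at a loop header and has no
// spill-detrimental use before that point, the spill is hoisted to the
// header, so the store happens once outside the loop rather than on every
// iteration.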
      block->first_instruction_index());

  *begin_spill_out = range;

  if (spill_mode == SpillMode::kSpillDeferred) return pos;

  if (loop_header == nullptr) return pos;

  while (loop_header != nullptr) {

    if (range->TopLevel()->Start() > loop_start ||
        (range->TopLevel()->Start() == loop_start &&
         range->TopLevel()->SpillAtLoopHeaderNotBeneficial()))

    if (live_at_header != nullptr && !live_at_header->spilled()) {
      for (const LiveRange* check_use = live_at_header;
           check_use != nullptr && check_use->Start() < pos;
           check_use = check_use->next()) {

            check_use->NextUsePositionSpillDetrimental(loop_start);

        if (next_use != nullptr && next_use->pos() <= pos) {

      *begin_spill_out = live_at_header;

    loop_header = GetContainingLoop(code(), loop_header);
  DCHECK(!range->spilled());
  DCHECK(spill_mode == SpillMode::kSpillAtDefinition ||
         GetInstructionBlock(code(), range->Start())->IsDeferred());

  TRACE("Spilling live range %d:%d mode %d\n", first->vreg(),
        range->relative_id(), spill_mode);

  TRACE("Starting spill type is %d\n", static_cast<int>(first->spill_type()));
  if (first->HasNoSpillType()) {
    TRACE("New spill range needed\n");

    if ((spill_mode == SpillMode::kSpillAtDefinition) &&
        (first->spill_type() ==

      TRACE("Upgrading\n");

  TRACE("Final spill type is %d\n", static_cast<int>(first->spill_type()));
      unhandled_live_ranges_(local_zone),
      active_live_ranges_(local_zone),

  if (begin_range != end_range) {

    if (!begin_range->spilled()) {
      SpillAfter(begin_range, begin_pos, SpillMode::kSpillAtDefinition);

    for (LiveRange* range = begin_range->next(); range != end_range;
         range = range->next()) {
      if (!range->spilled()) {

  if (range->next() != nullptr && range->next()->ShouldRecombine()) {

    TRACE("Recombining %d:%d with %d\n", range->TopLevel()->vreg(),

    range->AttachToNext(zone);
  } else if (range->next() != nullptr) {
    TRACE("No recombine for %d:%d to %d\n", range->TopLevel()->vreg(),
          range->relative_id(), range->next()->relative_id());
  auto found = to_be_live.find(toplevel);
  if (found == to_be_live.end()) {

    TRACE("Keeping reactivated fixed range for %s\n",

    TRACE("Putting back %d:%d\n", toplevel->vreg(),

    if (next_use != nullptr) {

      TRACE("Next use at %d\n", revisit_at.value());
      if (!data()->IsBlockBoundary(revisit_at)) {

      Spill(split, spill_mode);
      TRACE("Marking %d:%d to recombine\n", toplevel->vreg(),

      Spill(split, spill_mode);

    int expected_register = found->second;
    to_be_live.erase(found);

    TRACE("Keeping %d:%d in %s\n", toplevel->vreg(),

    TRACE("Scheduling %d:%d\n", toplevel->vreg(),

    split->set_controlflow_hint(expected_register);
  for (int cur_reg = 0; cur_reg < num_registers(); ++cur_reg) {

          !data()->config()->AreAliases(cur_inactive->representation(), cur_reg,
                                        range->representation(), reg)) {

      if (new_end <= cur_inactive->NextStart()) {

      auto next_intersection = cur_inactive->FirstIntersection(range);
      if (!next_intersection.IsValid()) continue;
      new_end = std::min(new_end, next_intersection);

  if (new_end != range->End()) {
    TRACE("Found new end for %d:%d at %d\n", range->TopLevel()->vreg(),
          range->relative_id(), new_end.value());
  for (auto [range, reg] : to_be_live) {

    if (to_resurrect == nullptr) {

      TRACE("No candidate for %d at %d\n", range->vreg(), position.value());

      TRACE("Reload %d:%d starting at %d itself\n", range->vreg(),

      if (to_resurrect->spilled()) {

      TRACE("Reload %d:%d starting at %d as %d\n", range->vreg(),
            to_resurrect->relative_id(), split->Start().value(),
            split->relative_id());

      split->set_controlflow_hint(reg);
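// ChooseOneOfTwoPredecessorStates (fragment below): when a block has exactly
// two predecessors with different register states, ranges unique to the left
// state count +1 and ranges unique to the right count -1; whichever side has
// more upcoming register-beneficial uses (falling back to counting any use on
// a tie) supplies the state that allocation continues with.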
  struct RangeUseCount {

    int use_count_delta;

  auto left_it = left_set.find(parent);
  bool range_is_shared_left_and_right = left_it != left_set.end();
  if (range_is_shared_left_and_right) {
    left_set.erase(left_it);

    if (child != nullptr) {
      unique_range_use_counts.push_back({child, -1});

  for (auto [parent, _] : left_set) {
    LiveRange* child = parent->GetChildCovers(boundary);
    if (child != nullptr) {
      unique_range_use_counts.push_back({child, +1});

  int use_count_difference = 0;
  for (auto [range, use_count] : unique_range_use_counts) {
    if (range->NextUsePositionRegisterIsBeneficial(boundary) != nullptr) {
      use_count_difference += use_count;

  if (use_count_difference == 0) {

    TRACE("Looking at only uses\n");
    for (auto [range, use_count] : unique_range_use_counts) {
      if (range->NextUsePosition(boundary) != range->positions().end()) {
        use_count_difference += use_count;

  TRACE("Left predecessor has %d more uses than right\n",
        use_count_difference);
  return use_count_difference > 0 ? current_block->predecessors()[0]
  for (auto [range, expected_reg] : to_be_live) {
    if (data()->config()->AreAliases(range->representation(), expected_reg, rep,

    int used_registers[RegisterConfiguration::kMaxRegisters];
    explicit Vote(int reg) : count(1), used_registers{0} {
      used_registers[reg] = 1;

  struct TopLevelLiveRangeComparator {

  using RangeVoteMap =

  static_assert(sizeof(RangeVoteMap) < 4096, "too large stack allocation");
  int deferred_blocks = 0;

    if (!range->HasRegisterAssigned()) continue;

    auto [it, inserted] =

      it->second.used_registers[range->assigned_register()]++;

  const size_t majority =

  auto assign_to_live = [this, majority, &counts](

                            bool* taken_registers) {
    bool check_aliasing =

    for (const auto& val : counts) {
      if (!filter(val.first)) continue;
      if (val.second.count >= majority) {
        int register_max = 0;

        bool conflict = false;

        for (int idx = 0; idx < num_regs; idx++) {
          int uses = val.second.used_registers[idx];
          if (uses == 0) continue;
          if (uses > register_max || (conflict && uses == register_max)) {

            register_max = uses;

                         : taken_registers[reg];

        } else if (!check_aliasing) {
          taken_registers[reg] = true;

        TRACE("Reset %d as live due vote %zu in %s\n", val.first->vreg(),

        auto [_, inserted] = to_be_live.emplace(val.first, reg);
  return (predecessor < current_block->rpo_number()) &&

          !code()->InstructionBlockAt(predecessor)->IsDeferred());
  if (spill_mode == SpillMode::kSpillDeferred) {

    auto add_to_inactive = [this, max](LiveRange* range) {

      if (other->TopLevel()->IsFixed()) return;

      int reg = range->assigned_register();

      if (other->assigned_register() != reg) {

      if (!data()->config()->AreAliases(range->representation(), reg,
                                        other->representation(),
                                        other->assigned_register())) {

      if (!next_start.IsValid() || (next_start > max)) {

      TRACE("Resolving conflict of %d with deferred fixed for register %s\n",
            other->TopLevel()->vreg(),

      update_caches(other);

      split_conflicting(range, active, [this](LiveRange* updated) {

          reg != range->assigned_register()) {

        if (inactive->NextStart() > max) break;
        split_conflicting(range, inactive, [this](LiveRange* updated) {

    if (current != nullptr) {
      if (current->IsDeferredFixed()) {
        add_to_inactive(current);

    if (current != nullptr) {
      if (current->IsDeferredFixed()) {
        add_to_inactive(current);

    if (current != nullptr) {
      if (current->IsDeferredFixed()) {
        add_to_inactive(current);

    if (current != nullptr) {
      if (current->IsDeferredFixed()) {
        add_to_inactive(current);

    if (current != nullptr) {
      if (current->IsDeferredFixed()) {
        add_to_inactive(current);
    if ((*it)->TopLevel()->IsDeferredFixed()) {

  if (block->IsDeferred()) return true;
  if (block->PredecessorCount() == 0) return true;
  bool pred_is_deferred = false;
  for (auto pred : block->predecessors()) {
    if (pred.IsNext(block->rpo_number())) {

  return !pred_is_deferred;

  for (auto pred : block->predecessors()) {

  for (LiveRange* to_add = range; to_add != nullptr;
       to_add = to_add->next()) {
    if (!to_add->spilled()) {
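// Main allocation loop (fragments below): live ranges are processed in order
// of increasing start position. Whenever the allocation finger crosses a
// block boundary, the expected register state (to_be_live) is reconstructed
// from the predecessor blocks, and ranges are spilled or reloaded so the
// actual state matches it before allocation continues.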
  if (current != nullptr) {
    if (current->IsDeferredFixed()) continue;

  if (current != nullptr) {
    if (current->IsDeferredFixed()) continue;

  if (current != nullptr) {
    if (current->IsDeferredFixed()) continue;

  if (current != nullptr) {
    if (current->IsDeferredFixed()) continue;

  if (current != nullptr) {
    if (current->IsDeferredFixed()) continue;

          ->InstructionBlockAt(last_block)
          ->last_instruction_index())

  SpillMode spill_mode = SpillMode::kSpillAtDefinition;

        current ? current->Start() : next_block_boundary;

    if (position >= next_block_boundary) {
      TRACE("Processing boundary at %d leaving %d\n",
            next_block_boundary.value(), last_block.ToInt());

      if ((spill_mode == SpillMode::kSpillDeferred) !=

              ? SpillMode::kSpillDeferred
              : SpillMode::kSpillAtDefinition;

      allocation_finger_ = next_block_boundary;

      allocation_finger_ = next_block_boundary;
  bool no_change_required = false;

  auto pick_state_from = [this, current_block](

    TRACE("Using information from B%d\n", pred.ToInt());

      TRACE("Not a fallthrough. Adding %zu elements...\n",
            spill_state.size());

          this->code()->InstructionBlockAt(pred)->code_end());

      for (const auto range : spill_state) {

        if (range->End() < pred_end || !range->HasRegisterAssigned()) continue;

        auto [_, inserted] = to_be_live.emplace(range->TopLevel(),
                                                range->assigned_register());

    TRACE("Single predecessor for B%d\n",

    no_change_required =
        pick_state_from(current_block->predecessors()[0], to_be_live);

    TRACE("Two predecessors for B%d\n",

        current_block, next_block_boundary);

    no_change_required = pick_state_from(chosen_predecessor, to_be_live);
  if (!no_change_required) {

  TRACE("Processing interval %d:%d start=%d\n", current->TopLevel()->vreg(),
        current->relative_id(), position.value());

  DCHECK(!current->HasRegisterAssigned() && !current->spilled());

  range->set_assigned_register(reg);
  range->SetUseHints(reg);
  range->UpdateBundleRegister(reg);
  if (range->IsTopLevel() && range->TopLevel()->is_phi()) {

  TRACE("Add live range %d:%d in %s to active\n", range->TopLevel()->vreg(),
        range->relative_id(), RegisterName(range->assigned_register()));

  TRACE("Add live range %d:%d to inactive\n", range->TopLevel()->vreg(),
        range->relative_id());

  DCHECK(range->HasRegisterAssigned());

      .insert(std::upper_bound(
  if (range == nullptr || range->IsEmpty()) return;
  DCHECK(!range->HasRegisterAssigned() && !range->spilled());
  DCHECK(allocation_finger_ <= range->Start());

  TRACE("Add live range %d:%d to unhandled\n", range->TopLevel()->vreg(),
        range->relative_id());

  TRACE("Moving live range %d:%d from active to handled\n",
        (*it)->TopLevel()->vreg(), (*it)->relative_id());

  TRACE("Moving live range %d:%d from active to inactive\n",
        (range)->TopLevel()->vreg(), range->relative_id());

  DCHECK(range->HasRegisterAssigned());

      .insert(std::upper_bound(

  TRACE("Moving live range %d:%d from inactive to handled\n",
        range->TopLevel()->vreg(), range->relative_id());
  int reg = range->assigned_register();

  TRACE("Moving live range %d:%d from inactive to active\n",
        range->TopLevel()->vreg(), range->relative_id());

  int reg = range->assigned_register();
  while ((start->rpo_number() < last_block)) {

  return start->last_instruction_index();

                                             int* num_regs, int* num_codes,
                                             const int** codes) const {

      *num_regs = data()->config()->num_float_registers();
      *num_codes = data()->config()->num_allocatable_float_registers();
      *codes = data()->config()->allocatable_float_codes();

      *num_regs = data()->config()->num_simd128_registers();
      *num_codes = data()->config()->num_allocatable_simd128_registers();
      *codes = data()->config()->allocatable_simd128_codes();

      *num_regs = data()->config()->num_simd256_registers();
      *num_codes = data()->config()->num_allocatable_simd256_registers();
      *codes = data()->config()->allocatable_simd256_codes();

                                            const int** codes) const {

  *num_regs = data()->config()->num_simd128_registers();
  *num_codes = data()->config()->num_allocatable_simd128_registers();
  *codes = data()->config()->allocatable_simd128_codes();
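// FindFreeRegistersForRange (fragment below) fills free_until_pos: a register
// held by an active range is unavailable, while a register held by an
// inactive range (live only in a lifetime hole) stays usable up to that
// range's next intersection with the current one. With FP aliasing, the same
// bound is propagated to every aliased register.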
  DCHECK_GE(positions.length(), num_regs);

  for (int i = 0; i < num_regs; ++i) {

    int cur_reg = cur_active->assigned_register();

      TRACE("Register %s is free until pos %d (1) due to %d\n",

            cur_active->TopLevel()->vreg());

      int alias_base_index = -1;

          cur_active->representation(), cur_reg, rep, &alias_base_index);
      DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));

        int aliased_reg = alias_base_index + aliases;

  for (int cur_reg = 0; cur_reg < num_regs; ++cur_reg) {

      DCHECK_GT(cur_inactive->End(), range->Start());
      DCHECK_EQ(cur_inactive->assigned_register(), cur_reg);

          (positions[cur_reg] <= cur_inactive->NextStart() ||
           range->End() <= cur_inactive->NextStart())) {

          cur_inactive->FirstIntersection(range);
      if (!next_intersection.IsValid()) continue;

      positions[cur_reg] = std::min(positions[cur_reg], next_intersection);

            positions[cur_reg].value());

      int alias_base_index = -1;

          cur_inactive->representation(), cur_reg, rep, &alias_base_index);
      DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));

        int aliased_reg = alias_base_index + aliases;
        positions[aliased_reg] =
            std::min(positions[aliased_reg], next_intersection);
  if (current->HasRegisterAssigned()) {

  if (current->RegisterFromControlFlow(&hint_register) ||
      current->RegisterFromFirstHint(&hint_register) ||
      current->RegisterFromBundle(&hint_register)) {

        "Found reg hint %s (free until [%d) for live range %d:%d (end %d[).\n",

        current->TopLevel()->vreg(), current->relative_id(),
        current->End().value());

    if (free_until_pos[hint_register] >= current->End()) {
      TRACE("Assigning preferred reg %s to live range %d:%d\n",
            RegisterName(hint_register), current->TopLevel()->vreg(),
            current->relative_id());

  int current_free = free_until_pos[reg].ToInstructionIndex();
  for (int i = 0; i < num_codes; ++i) {
    int code = codes[i];

    int candidate_free = free_until_pos[code].ToInstructionIndex();

    if ((candidate_free > current_free) ||
        (candidate_free == current_free && reg != hint_reg &&
         (data()->HasFixedUse(current->representation(), reg) &&
          !data()->HasFixedUse(current->representation(), code)))) {

      current_free = candidate_free;

      current->RegisterFromControlFlow(&hint_reg) ||
      current->RegisterFromFirstHint(&hint_reg) ||
      current->RegisterFromBundle(&hint_reg);

  if (gap_pos <= current->Start()) return false;

        current->TopLevel()->vreg(), current->relative_id());

  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == nullptr) {

      current, current->Start(), spill_mode, &begin_spill);

  Spill(current, spill_mode);
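// AllocateBlockedReg (fragments below): a range with no use that requires a
// register is spilled outright until its first such use. Otherwise two
// per-register positions are computed: use_pos, the earliest point where the
// register's current holder needs it again, and block_pos, where a fixed
// (unspillable) range pins it. The register that stays usable longest is
// taken, and the ranges that held it are split and spilled around the
// conflict.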
    int cur_reg = range->assigned_register();
    bool is_fixed_or_cant_spill =
        range->TopLevel()->IsFixed() || !range->CanBeSpilled(current->Start());

    if (is_fixed_or_cant_spill) {
      block_pos[cur_reg] = use_pos[cur_reg] =

          block_pos[cur_reg]);

          range->NextLifetimePositionRegisterIsBeneficial(current->Start());

      int alias_base_index = -1;

          range->representation(), cur_reg, rep, &alias_base_index);
      DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));

        int aliased_reg = alias_base_index + aliases;
        if (is_fixed_or_cant_spill) {
          block_pos[aliased_reg] = use_pos[aliased_reg] =

          use_pos[aliased_reg] =
              std::min(block_pos[aliased_reg],
                       range->NextLifetimePositionRegisterIsBeneficial(

  for (int cur_reg = 0; cur_reg < num_registers(); ++cur_reg) {

      DCHECK(range->End() > current->Start());
      DCHECK_EQ(range->assigned_register(), cur_reg);
      bool is_fixed = range->TopLevel()->IsFixed();

      DCHECK_LE(use_pos[cur_reg], block_pos[cur_reg]);
      if (block_pos[cur_reg] <= range->NextStart()) break;
      if (!is_fixed && use_pos[cur_reg] <= range->NextStart()) continue;

      if (!next_intersection.IsValid()) continue;

      block_pos[cur_reg] = std::min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = std::min(block_pos[cur_reg], use_pos[cur_reg]);

      use_pos[cur_reg] = std::min(use_pos[cur_reg], next_intersection);

      int alias_base_index = -1;

          range->representation(), cur_reg, rep, &alias_base_index);
      DCHECK(aliases > 0 || (aliases == 0 && alias_base_index == -1));

        int aliased_reg = alias_base_index + aliases;

        block_pos[aliased_reg] =
            std::min(block_pos[aliased_reg], next_intersection);
        use_pos[aliased_reg] =
            std::min(block_pos[aliased_reg], use_pos[aliased_reg]);

        use_pos[aliased_reg] =
            std::min(use_pos[aliased_reg], next_intersection);
      current->RegisterFromControlFlow(&hint_reg) ||

      current->RegisterFromBundle(&hint_reg);

  if (use_pos[reg] < register_use->pos()) {

                                 register_use->pos())) {
      SpillBetween(current, current->Start(), register_use->pos(), spill_mode);

    if (spill_mode == SpillMode::kSpillDeferred) {

  if (block_pos[reg] < new_end) {

    new_end = block_pos[reg].Start();

  if (new_end == current->Start()) {

  if (new_end != current->End()) {

  DCHECK(block_pos[reg] >= current->End());

        current->TopLevel()->vreg(), current->relative_id());
  DCHECK(current->HasRegisterAssigned());
  int reg = current->assigned_register();

    if (range->assigned_register() != reg) {

    if (!data()->config()->AreAliases(current->representation(), reg,
                                      range->representation(),
                                      range->assigned_register())) {

    UsePosition* next_pos = range->NextRegisterPosition(current->Start());

    if (next_pos == nullptr) {

  for (int cur_reg = 0; cur_reg < num_registers(); ++cur_reg) {

    if (cur_reg != reg) continue;

        !data()->config()->AreAliases(current->representation(), reg,
                                      range->representation(), cur_reg)) {

      DCHECK(range->End() > current->Start());
      if (range->TopLevel()->IsFixed()) {

      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == nullptr) {

          next_intersection = std::min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection, spill_mode);
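// TryReuseSpillForPhi (fragment below): if strictly more than half of a phi's
// inputs are already spilled, the phi appears to be spilled at its definition
// as well (reusing the inputs' spill slot where the ranges can be merged),
// instead of competing for a register at the merge point.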
  if (!range->is_phi()) return false;

  DCHECK(!range->HasSpillOperand());

  size_t spilled_count = 0;
  for (size_t i = 0; i < phi->operands().size(); i++) {
    int op = phi->operands()[i];

    if (op_range_child != nullptr && op_range_child->spilled()) {

  if (spilled_count * 2 <= phi->operands().size()) {

  UsePosition* pos = range->NextUsePositionRegisterIsBeneficial(next_pos);
  if (pos == nullptr) {
    Spill(range, SpillMode::kSpillAtDefinition);

  } else if (pos->pos() > range->Start().NextStart()) {

                    SpillMode::kSpillAtDefinition);

  Spill(second_part, spill_mode);
    if (data()->IsBlockBoundary(end.Start())) {
      third_part_end = std::max(split_start, end.Start());

    SplitBetween(second_part, split_start, third_part_end);

    if (GetInstructionBlock(data()->code(), second_part->Start())

      TRACE("Setting control flow hint for %d:%d to %s\n",

    if (third_part != second_part) {
      Spill(second_part, spill_mode);
  for (auto range : data()->live_ranges()) {

    if (range->IsSpilledOnlyInDeferredBlocks(data())) {

      if (GetInstructionBlock(data()->code(), range->Start())->IsDeferred()) {
        TRACE("Live range %d is spilled and alive in deferred code only\n",

        range->TransitionRangeToSpillAtDefinition();

        TRACE("Live range %d is spilled deferred code only but alive outside\n",

        range->TransitionRangeToDeferredSpill(data()->allocation_zone());

    if (range->HasSpillRange()) {

      spill_ranges.push_back(range->GetSpillRange());

            std::set(spill_ranges.begin(), spill_ranges.end()).size());

    if (range->get_bundle() != nullptr) {
      range->get_bundle()->MergeSpillRangesAndClear();
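// AssignSpillSlots (fragment below) first merges compatible spill ranges
// pairwise so distinct virtual registers can share one stack slot. The scan
// is quadratic in the worst case; after kMaxRetries failed merge attempts it
// appears to back off by widening the stride between candidates (the line
// that updates `stride` is not part of this excerpt).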
      std::remove_if(spill_ranges.begin(), spill_ranges.end(),
                     [](const SpillRange* range) { return range->IsEmpty(); });
  for (SpillRange** range_it = spill_ranges.begin(); range_it < end_nonempty;

    DCHECK(!range->IsEmpty());
    constexpr size_t kMaxRetries = 1000;
    size_t retries = kMaxRetries;

    for (SpillRange** other_it = range_it + 1; other_it < end_nonempty;
         other_it += stride) {

      DCHECK(!other->IsEmpty());
      if (range->TryMerge(other)) {
        DCHECK(other->IsEmpty());
        std::iter_swap(other_it, --end_nonempty);
      } else if (--retries == 0) {
        retries = kMaxRetries;

  spill_ranges.erase(end_nonempty, spill_ranges.end());

    DCHECK(!range->IsEmpty());
    if (!range->HasSlot()) {

      int width = range->byte_width();

      range->set_assigned_slot(index);
    if (top_range->IsEmpty()) continue;

    if (top_range->HasSpillOperand()) {

      if (it != data()->slot_for_const_range().end()) {
        spill_operand = *it->second;

        spill_operand = *top_range->GetSpillOperand();

    } else if (top_range->HasSpillRange()) {
      spill_operand = top_range->GetSpillRangeOperand();

    if (top_range->is_phi()) {

                                  top_range->GetAssignedOperand());

    for (LiveRange* range = top_range; range != nullptr;
         range = range->next()) {

      DCHECK(!assigned.IsUnallocated());
      range->ConvertUsesToOperand(assigned, spill_operand);

    if (!spill_operand.IsInvalid()) {

      if (!top_range->IsSpilledOnlyInDeferredBlocks(data()) &&
          !top_range->HasGeneralSpillRange()) {

      top_range->FilterSpillMoves(data(), spill_operand);
      top_range->CommitSpillMoves(data(), spill_operand);
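// PopulateReferenceMaps (fragment below): live ranges of GC-reference vregs
// are matched against the sorted safe points. For each safe point covered by
// a range, the spill slot is recorded once the value has been spilled by that
// point, and the register location is recorded while the value is live in a
// register, so the GC can find and update tagged pointers.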
  if (safe_point > map->instruction_position()) return false;

  safe_point = map->instruction_position();

       data()->delayed_references()) {
    delayed_reference.map->RecordReference(

  int last_range_start = 0;

    if (!data()->code()->IsReference(range->vreg())) continue;

    if (range->IsEmpty()) continue;
    if (range->has_preassigned_slot()) continue;

  std::sort(candidate_ranges.begin(), candidate_ranges.end(),

    int start = range->Start().ToInstructionIndex();
    int end = range->Children().back()->End().ToInstructionIndex();

    last_range_start = start;
    USE(last_range_start);

    for (; first_it != reference_maps->end(); ++first_it) {

      if (map->instruction_position() >= start) break;

    if (((range->HasSpillOperand() &&
          !range->GetSpillOperand()->IsConstant()) ||
         range->HasSpillRange())) {
      if (range->HasSpillOperand()) {
        spill_operand = *range->GetSpillOperand();

        spill_operand = range->GetSpillRangeOperand();

    for (auto it = first_it; it != reference_maps->end(); ++it) {

      if (safe_point - 1 > end) break;

      if (cur == nullptr) {
        cur = range->GetChildCovers(safe_point_pos);
        found = cur != nullptr;

        if (cur->Covers(safe_point_pos)) {

          if (next == nullptr || next->Start() > safe_point_pos) {

      int spill_index = range->IsSpilledOnlyInDeferredBlocks(data()) ||
                                range->LateSpillingSelected()

                            : range->spill_start_index();

      if (!spill_operand.IsInvalid() && safe_point >= spill_index) {
        TRACE("Pointer for range %d (spilled at %d) at safe point %d\n",
              range->vreg(), spill_index, safe_point);

            "Pointer in register for range %d:%d (start at %d) "
            "at safe point %d\n",
  if (block->PredecessorCount() != 1) return false;
  return block->predecessors()[0].IsNext(block->rpo_number());

  for (int vreg : *live) {

        block->first_instruction_index());

    if (cur_range->spilled()) continue;

    for (const RpoNumber& pred : block->predecessors()) {

      if (cur_range->CanCover(pred_end)) continue;

      if (pred_op.Equals(cur_op)) continue;

    if (cur_range->End() < block_end &&
        (successor == nullptr || successor->spilled())) {

      bool uses_reg = false;

           use_pos_it != cur_range->positions().end(); ++use_pos_it) {
        if ((*use_pos_it)->operand()->IsAnyRegister()) {

      if (!uses_reg) continue;

      TRACE("Adding B%d to list of spill blocks for %d\n",

         code()->GetInstructionBlock(move_loc)->IsDeferred());
    if (top->IsEmpty()) continue;

    if (top->IsSpilledOnlyInDeferredBlocks(data())) {

    } else if (top->HasGeneralSpillRange()) {
      spill_placer.Add(top);

  if (block->PredecessorCount() == 1) {
    gap_index = block->first_instruction_index();

    if (last->IsDeoptimizeCall()) {

    for (size_t i = 0; i < last->InputCount(); ++i) {
      DCHECK(last->InputAt(i)->IsImmediate());

         ->HasReferenceMap());
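// ConnectRanges (fragment below) inserts the moves that stitch split siblings
// back together: when one child ends where the next begins in a different
// location, a gap move is emitted. Moves that would land in the START
// position of a gap appear to be buffered in delayed_insertion_map and
// committed in a second pass, one ParallelMove at a time, so insertions do
// not disturb a move list that is still being processed.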
    bool connect_spilled = top_range->IsSpilledOnlyInDeferredBlocks(data());

    for (LiveRange *second_range = first_range->next(); second_range != nullptr;
         first_range = second_range, second_range = second_range->next()) {

      if (second_range->spilled()) continue;
      if (first_range->End() != pos) continue;
      if (data()->IsBlockBoundary(pos) &&

      if (prev_operand.Equals(cur_operand)) continue;

      bool delay_insertion = false;

      int gap_index = pos.ToInstructionIndex();

        DCHECK(block->IsDeferred());

        top_range->GetListOfBlocksRequiringSpillOperands(data())->Add(
            block->rpo_number().ToInt());

      if (pos.IsGapPosition()) {

        if (pos.IsStart()) {
          delay_insertion = true;

             code()->GetInstructionBlock(gap_index)->IsDeferred());

      if (!delay_insertion) {
        move->AddMove(prev_operand, cur_operand);

        delayed_insertion_map.insert(
            std::make_pair(std::make_pair(move, prev_operand), cur_operand));

  if (delayed_insertion_map.empty()) return;

  ParallelMove* moves = delayed_insertion_map.begin()->first.first;
  for (auto it = delayed_insertion_map.begin();; ++it) {
    bool done = it == delayed_insertion_map.end();
    if (done || it->first.first != moves) {

      moves->push_back(move);

      to_eliminate.clear();

      moves = it->first.first;
  DCHECK(range->IsSpilledOnlyInDeferredBlocks(data()));
  DCHECK(!range->spilled());

  TRACE("Live Range %d will be spilled only in deferred blocks.\n",

  for (const LiveRange* child = range; child != nullptr;
       child = child->next()) {

      range->AddBlockRequiringSpillOperand(
          code->GetInstructionBlock(pos->pos().ToInstructionIndex())

  for (int block_id : *range->GetListOfBlocksRequiringSpillOperands(data())) {
    worklist.push(block_id);

  while (!worklist.empty()) {
    int block_id = worklist.front();

    if (done_blocks.Contains(block_id)) continue;
    done_blocks.Add(block_id);

    LiveRange* child_range = range->GetChildCovers(pred_end);

    if (done_moves.find(std::make_pair(spill_block_number, range->vreg())) ==
        done_moves.end()) {
      TRACE("Spilling deferred spill for range %d at B%d\n", range->vreg(),
            spill_block_number.ToInt());

      done_moves.insert(std::make_pair(spill_block_number, range->vreg()));
LinearScanAllocator(RegisterAllocationData *data, RegisterKind kind, Zone *local_zone)
void GetFPRegisterSet(MachineRepresentation rep, int *num_regs, int *num_codes, const int **codes) const
bool BlockIsDeferredOrImmediatePredecessorIsNotDeferred(const InstructionBlock *block)
void SplitAndSpillIntersecting(LiveRange *range, SpillMode spill_mode)
ZoneVector< LiveRange * >::iterator ActiveToHandled(ZoneVector< LiveRange * >::iterator it)
void SetLiveRangeAssignedRegister(LiveRange *range, int reg)
void ReloadLiveRanges(RangeRegisterSmallMap const &to_be_live, LifetimePosition position)
void PrintRangeOverview()
void ComputeStateFromManyPredecessors(InstructionBlock *current_block, RangeRegisterSmallMap &to_be_live)
bool HasNonDeferredPredecessor(InstructionBlock *block)
UsePosition * Use(LifetimePosition block_start, LifetimePosition position, InstructionOperand *operand, void *hint, UsePositionHintType hint_type, SpillMode spill_mode)
static SparseBitVector * ComputeLiveOut(const InstructionBlock *block, RegisterAllocationData *data)
static int FixedLiveRangeID(int index)
void ProcessPhis(const InstructionBlock *block, SparseBitVector *live)
Zone * allocation_zone() const
RegisterAllocationData * data() const
UsePosition * NewUsePosition(LifetimePosition pos, InstructionOperand *operand, void *hint, UsePositionHintType hint_type)
SpillMode SpillModeForBlock(const InstructionBlock *block) const
InstructionSequence * code() const
static constexpr int kNumberOfFixedRangesPerRegister
TopLevelLiveRange * LiveRangeFor(InstructionOperand *operand, SpillMode spill_mode)
TopLevelLiveRange * FixedLiveRangeFor(int index, SpillMode spill_mode)
void ProcessInstructions(const InstructionBlock *block, SparseBitVector *live)
TopLevelLiveRange * FixedFPLiveRangeFor(int index, MachineRepresentation rep, SpillMode spill_mode)
void MapPhiHint(InstructionOperand *operand, UsePosition *use_pos)
ZoneVector< SparseBitVector * > & live_in_sets() const
void ProcessLoopHeader(const InstructionBlock *block, SparseBitVector *live)
UsePosition * Define(LifetimePosition position, InstructionOperand *operand, void *hint, UsePositionHintType hint_type, SpillMode spill_mode)
TopLevelLiveRange * FixedSIMD128LiveRangeFor(int index, SpillMode spill_mode)
int FixedFPLiveRangeID(int index, MachineRepresentation rep)
void AddInitialIntervals(const InstructionBlock *block, SparseBitVector *live_out)
void ResolvePhiHint(InstructionOperand *operand, UsePosition *use_pos)
const RegisterConfiguration * config() const
RegisterAllocationData *const data_
ZoneMap< InstructionOperand *, UsePosition * > phi_hints_
LiveRangeBuilder(RegisterAllocationData *data, Zone *local_zone)
ZoneVector< UseInterval > intervals_
void MergeSpillRangesAndClear()
void AddRange(TopLevelLiveRange *range)
ZoneVector< TopLevelLiveRange * > ranges_
bool TryAddRange(TopLevelLiveRange *range)
static LiveRangeBundle * TryMerge(LiveRangeBundle *lhs, LiveRangeBundle *rhs)
bool CanEagerlyResolveControlFlow(const InstructionBlock *block) const
RegisterAllocationData * data() const
void ResolveControlFlow(Zone *local_zone)
InstructionSequence * code() const
void ConnectRanges(Zone *local_zone)
LiveRangeConnector(RegisterAllocationData *data)
void CommitSpillsInDeferredBlocks(TopLevelLiveRange *range, Zone *temp_zone)
base::Vector< UsePosition * > positions() const
TopLevelLiveRange * top_level_
UseIntervalVector::iterator FirstSearchIntervalForPosition(LifetimePosition position)
friend class TopLevelLiveRange
RegisterKind kind() const
UsePosition * current_hint_position() const
void Print(const RegisterConfiguration *config, bool with_children) const
base::Vector< UsePosition * > positions_span_
UsePosition *const * NextUsePosition(LifetimePosition start) const
LifetimePosition NextLifetimePositionRegisterIsBeneficial(const LifetimePosition &start) const
void AdvanceLastProcessedMarker(UseIntervalVector::iterator to_start_of, LifetimePosition but_not_past)
LifetimePosition next_start_
void set_controlflow_hint(int reg)
LifetimePosition NextStartAfter(LifetimePosition position)
void set_spilled(bool value)
LifetimePosition FirstIntersection(LiveRange *other)
TopLevelLiveRange * TopLevel()
LiveRange(const LiveRange &)=delete
LifetimePosition NextEndAfter(LifetimePosition position)
int assigned_register() const
const UseIntervalVector & intervals() const
MachineRepresentation representation() const
bool HasRegisterAssigned() const
void AttachToNext(Zone *zone)
LifetimePosition Start() const
void SetUseHints(int register_index)
void ConvertUsesToOperand(const InstructionOperand &op, const InstructionOperand &spill_op)
size_t current_hint_position_index_
void UpdateBundleRegister(int reg) const
bool ShouldBeAllocatedBefore(const LiveRange *other) const
void UnsetAssignedRegister()
bool RegisterFromBundle(int *hint) const
bool RegisterFromFirstHint(int *register_index)
bool CanBeSpilled(LifetimePosition pos) const
LiveRange * SplitAt(LifetimePosition position, Zone *zone)
bool CanCover(LifetimePosition position) const
int controlflow_hint() const
UsePosition * NextUsePositionSpillDetrimental(LifetimePosition start) const
InstructionOperand GetAssignedOperand() const
void set_assigned_register(int reg)
UsePosition * NextUsePositionRegisterIsBeneficial(LifetimePosition start) const
bool Covers(LifetimePosition position)
UseIntervalVector intervals_
UsePosition * NextRegisterPosition(LifetimePosition start) const
LifetimePosition End() const
UseIntervalVector::iterator current_interval_
Register GetRegister() const
MachineRepresentation representation() const
static LocationOperand * cast(InstructionOperand *op)
static bool IsSupportedRepresentation(MachineRepresentation rep)
int register_code() const
const InstructionOperand & source() const
OperandAssigner(RegisterAllocationData *data)
RegisterAllocationData * data() const
void DecideSpillingMode()
void PrepareInsertAfter(MoveOperands *move, ZoneVector< MoveOperands * > *to_eliminate) const
MoveOperands * AddMove(const InstructionOperand &from, const InstructionOperand &to)
ReferenceMapPopulator(RegisterAllocationData *data)
void PopulateReferenceMaps()
RegisterAllocationData * data() const
bool SafePointsAreInOrder() const
int instruction_position() const
PhiMapValue(PhiInstruction *phi, const InstructionBlock *block, Zone *zone)
int assigned_register() const
ZoneVector< InstructionOperand * > incoming_operands_
void CommitAssignment(const InstructionOperand &operand)
const PhiInstruction * phi() const
void AddOperand(InstructionOperand *operand)
void set_assigned_register(int register_code)
const InstructionBlock * block() const
const char *const debug_name_
SpillRange * AssignSpillRangeToLiveRange(TopLevelLiveRange *range, SpillMode spill_mode)
const RegisterConfiguration * config() const
BitVector * fixed_fp_register_use_
const char * debug_name() const
ZoneVector< TopLevelLiveRange * > & fixed_double_live_ranges()
RegisterAllocationData(const RegisterAllocationData &)=delete
ZoneVector< TopLevelLiveRange * > live_ranges_
TopLevelLiveRange * GetLiveRangeFor(int index)
const ZoneVector< TopLevelLiveRange * > & fixed_live_ranges() const
ZoneVector< SparseBitVector * > & live_in_sets()
ZoneVector< TopLevelLiveRange * > & fixed_float_live_ranges()
void RememberSpillState(RpoNumber block, const ZoneVector< LiveRange * > &state)
ZoneMap< TopLevelLiveRange *, AllocatedOperand * > slot_for_const_range_
DelayedReferences & delayed_references()
RangesWithPreassignedSlots & preassigned_slot_ranges()
ZoneMap< TopLevelLiveRange *, AllocatedOperand * > & slot_for_const_range()
BitVector * fixed_register_use_
ZoneVector< ZoneVector< LiveRange * > > spill_state_
ZoneVector< LiveRange * > & GetSpillState(RpoNumber block)
BitVector * assigned_registers_
MoveOperands * AddGapMove(int index, Instruction::GapPosition position, const InstructionOperand &from, const InstructionOperand &to)
ZoneVector< TopLevelLiveRange * > fixed_double_live_ranges_
const RegisterConfiguration *const config_
ZoneVector< TopLevelLiveRange * > fixed_float_live_ranges_
bool HasFixedUse(MachineRepresentation rep, int index)
bool IsBlockBoundary(LifetimePosition pos) const
TickCounter * tick_counter()
const ZoneVector< TopLevelLiveRange * > & live_ranges() const
ZoneVector< SparseBitVector * > live_in_sets_
Zone *const allocation_zone_
void MarkFixedUse(MachineRepresentation rep, int index)
PhiMapValue * GetPhiMapValueFor(TopLevelLiveRange *top_range)
InstructionSequence *const code_
TopLevelLiveRange * NewLiveRange(int index, MachineRepresentation rep)
MachineRepresentation RepresentationFor(int virtual_register)
PhiMapValue * InitializePhiMap(const InstructionBlock *block, PhiInstruction *phi)
BitVector * fixed_simd128_register_use_
ZoneVector< TopLevelLiveRange * > fixed_live_ranges_
int virtual_register_count_
void MarkAllocated(MachineRepresentation rep, int index)
BitVector * assigned_double_registers_
static constexpr int kNumberOfFixedRangesPerRegister
BitVector * assigned_simd128_registers_
RangesWithPreassignedSlots preassigned_slot_ranges_
ZoneVector< TopLevelLiveRange * > fixed_simd128_live_ranges_
Zone * allocation_zone() const
ZoneVector< TopLevelLiveRange * > & fixed_simd128_live_ranges()
bool ExistsUseWithoutDefinition()
DelayedReferences delayed_references_
ZoneVector< SparseBitVector * > live_out_sets_
bool RangesDefinedInDeferredStayInDeferred()
TickCounter *const tick_counter_
InstructionSequence * code() const
LifetimePosition FindOptimalSpillingPos(LiveRange *range, LifetimePosition pos, SpillMode spill_mode, LiveRange **begin_spill_out)
RegisterAllocationData * data() const
void Spill(LiveRange *range, SpillMode spill_mode)
bool CanProcessRange(LiveRange *range) const
LiveRange * SplitBetween(LiveRange *range, LifetimePosition start, LifetimePosition end)
const char * RegisterName(int allocation_index) const
RegisterAllocator(RegisterAllocationData *data, RegisterKind kind)
LifetimePosition GetSplitPositionForInstruction(const LiveRange *range, int instruction_index)
const int * allocatable_register_codes() const
int num_registers() const
LifetimePosition FindOptimalSplitPos(LifetimePosition start, LifetimePosition end)
InstructionSequence * code() const
void SplitAndSpillRangesDefinedByMemoryOperand()
RegisterKind mode() const
bool check_fp_aliasing() const
LiveRange * SplitRangeAt(LiveRange *range, LifetimePosition pos)
int num_allocatable_registers() const
Zone * allocation_zone() const
static RpoNumber Invalid()
bool IsNext(const RpoNumber other) const
static RpoNumber FromInt(int index)
void Add(TopLevelLiveRange *range)
bool TryMerge(SpillRange *other)
void set_assigned_slot(int index)
int assigned_slot() const
SpillRange(TopLevelLiveRange *range, Zone *zone)
ZoneVector< UseInterval > intervals_
ZoneVector< TopLevelLiveRange * > ranges_
void RecordSpillLocation(Zone *zone, int gap_index, InstructionOperand *operand)
void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone *zone)
bool IsSpilledOnlyInDeferredBlocks(const RegisterAllocationData *data) const
bool HasNoSpillType() const
LiveRange * GetChildCovers(LifetimePosition pos)
bool has_preassigned_slot() const
bool has_slot_use() const
void set_spilling_at_loop_header_not_beneficial()
bool HasSpillRange() const
void set_is_non_loop_phi(bool value)
void SetSpillOperand(InstructionOperand *operand)
void set_is_phi(bool value)
void ShortenTo(LifetimePosition start)
void register_slot_use(SlotUseKind value)
void CommitSpillMoves(RegisterAllocationData *data, const InstructionOperand &operand)
AllocatedOperand GetSpillRangeOperand() const
LiveRangeBundle * get_bundle() const
bool HasSpillOperand() const
SpillMoveInsertionList * GetSpillMoveInsertionLocations(const RegisterAllocationData *data) const
SpillRange * spill_range_
void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone *zone)
bool HasGeneralSpillRange() const
ZoneVector< LiveRange * > children_
void AddUsePosition(UsePosition *pos, Zone *zone)
void SetSpillRange(SpillRange *spill_range)
UsePositionVector positions_
InstructionOperand * GetSpillOperand() const
SparseBitVector * GetListOfBlocksRequiringSpillOperands(const RegisterAllocationData *data) const
SpillMoveInsertionList * spill_move_insertion_locations_
void SetSpillStartIndex(int start)
bool is_non_loop_phi() const
void SetLateSpillingSelected(bool late_spilling_selected)
InstructionOperand * spill_operand_
SpillRange * GetSpillRange() const
void set_spill_type(SpillType value)
SpillType spill_type() const
void FilterSpillMoves(RegisterAllocationData *data, const InstructionOperand &operand)
int32_t virtual_register() const
bool HasRegisterOrSlotOrConstantPolicy() const
bool HasSecondaryStorage() const
bool HasRegisterOrSlotPolicy() const
bool HasFixedRegisterPolicy() const
int GetSecondaryStorage() const
bool HasSlotPolicy() const
bool HasRegisterPolicy() const
bool HasFixedPolicy() const
bool HasSameAsInputPolicy() const
int fixed_register_index() const
int fixed_slot_index() const
bool HasFixedFPRegisterPolicy() const
bool HasFixedSlotPolicy() const
LifetimePosition start() const
void set_start(LifetimePosition start)
UseInterval SplitAt(LifetimePosition pos)
LifetimePosition end() const
void set_end(LifetimePosition end)
UsePosition(LifetimePosition pos, InstructionOperand *operand, void *hint, UsePositionHintType hint_type)
bool HintRegister(int *register_code) const
InstructionOperand * operand() const
void ResolveHint(UsePosition *use_pos)
UsePositionHintType hint_type() const
static UsePositionHintType HintTypeForOperand(const InstructionOperand &op)
InstructionOperand *const operand_
LifetimePosition pos() const
LifetimePosition const pos_
void set_type(UsePositionType type, bool register_beneficial)
void SetHint(UsePosition *use_pos)
void set_spill_detrimental()
RecordWriteMode const mode_
std::vector< std::unique_ptr< InstanceTypeTree > > children
std::optional< TNode< JSArray > > a
ZoneVector< RpoNumber > & result
constexpr Vector< T > VectorOf(T *start, size_t size)
template const Signature< wasm::ValueType > bool
const int * GetAllocatableRegisterCodes(const RegisterConfiguration *config, RegisterKind kind)
std::pair< ParallelMove *, InstructionOperand > DelayedInsertionMapKey
static const int32_t kUnassignedRegister
static std::optional< std::pair< UseInterval, UseInterval > > AreUseIntervalsIntersectingVector(base::Vector< const UseInterval > a, base::Vector< const UseInterval > b)
@ kRegisterOrSlotOrConstant
int GetAllocatableRegisterCount(const RegisterConfiguration *config, RegisterKind kind)
int GetRegisterCount(const RegisterConfiguration *config, RegisterKind kind)
int ByteWidthForStackSlot(MachineRepresentation rep)
ZoneVector< InstructionBlock * > InstructionBlocks
std::ostream & operator<<(std::ostream &os, AccessMode access_mode)
std::optional< std::pair< UseInterval, UseInterval > > AreUseIntervalsIntersecting(const ContainerA &a, const ContainerB &b)
Node::Uses::const_iterator begin(const Node::Uses &uses)
constexpr AliasingKind kFPAliasing
void PrintF(const char *format,...)
constexpr bool CanBeTaggedOrCompressedPointer(MachineRepresentation rep)
V8_EXPORT_PRIVATE constexpr int RepresentationBit(MachineRepresentation rep)
constexpr bool IsFloatingPoint(MachineRepresentation rep)
constexpr Register kReturnRegister0
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr bool IsSimd128(MachineRepresentation rep)
refactor address components for immediate indexing make OptimizeMaglevOnNextCall optimize to turbofan instead of maglev filter for tracing turbofan compilation nullptr
ZoneUnorderedMap< int, BytecodeSequenceNode * > children_
#define DCHECK_LE(v1, v2)
#define CHECK_GE(lhs, rhs)
#define DCHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2)
#define DCHECK_NE(v1, v2)
#define DCHECK_GE(v1, v2)
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)
#define DCHECK_GT(v1, v2)
bool operator()(const DelayedInsertionMapKey &a, const DelayedInsertionMapKey &b) const
const RegisterConfiguration * register_configuration_
InstructionOperand *const operand
SpillMoveInsertionList(int gap_index, InstructionOperand *operand, SpillMoveInsertionList *next)
SpillMoveInsertionList * next