void InstructionScheduler::SchedulingQueueBase::AddNode(
    ScheduleGraphNode* node) {
  // We keep the ready list sorted by total latency so that we can quickly
  // find the next best candidate to schedule.
  auto it = nodes_.begin();
  while ((it != nodes_.end()) &&
         ((*it)->total_latency() >= node->total_latency())) {
    ++it;
  }
  nodes_.insert(it, node);
}
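
// Keeping nodes_ sorted by decreasing total latency makes insertion O(n),
// but it lets PopBestCandidate below find the best ready node with a single
// scan from the front of the list.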

InstructionScheduler::ScheduleGraphNode*
InstructionScheduler::CriticalPathFirstQueue::PopBestCandidate(int cycle) {
  DCHECK(!IsEmpty());
  auto candidate = nodes_.end();
  for (auto iterator = nodes_.begin(); iterator != nodes_.end(); ++iterator) {
    // We only consider instructions that have all their operands ready.
    if (cycle >= (*iterator)->start_cycle()) {
      candidate = iterator;
      break;
    }
  }

  if (candidate != nodes_.end()) {
    ScheduleGraphNode* result = *candidate;
    nodes_.erase(candidate);
    return result;
  }

  return nullptr;
}
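
// Because the list is ordered by decreasing total latency, the first node
// whose start_cycle has been reached is the ready instruction with the
// longest remaining dependency chain: classic critical-path-first list
// scheduling. Returning nullptr tells Schedule() that nothing is ready in
// the current cycle.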

InstructionScheduler::ScheduleGraphNode*
InstructionScheduler::StressSchedulerQueue::PopBestCandidate(int cycle) {
  DCHECK(!IsEmpty());
  // Choose a random element from the ready list.
  auto candidate = nodes_.begin();
  std::advance(candidate, random_number_generator()->NextInt(
                              static_cast<int>(nodes_.size())));
  ScheduleGraphNode* result = *candidate;
  nodes_.erase(candidate);
  return result;
}
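
// Popping a random ready node is meant to stress-test the dependency graph:
// any order the graph permits must produce correct code, so a randomized
// schedule (seeded via --random-seed for reproducibility) helps expose
// missing dependency edges.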

InstructionScheduler::ScheduleGraphNode::ScheduleGraphNode(Zone* zone,
                                                           Instruction* instr)
    : instr_(instr),
      successors_(zone),
      unscheduled_predecessors_count_(0),
      latency_(GetInstructionLatency(instr)),
      total_latency_(-1),
      start_cycle_(-1) {}

void InstructionScheduler::ScheduleGraphNode::AddSuccessor(
    ScheduleGraphNode* node) {
  successors_.push_back(node);
  node->unscheduled_predecessors_count_++;
}
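
// unscheduled_predecessors_count_ is the node's in-degree among
// not-yet-scheduled instructions; a node only becomes ready for scheduling
// once this count drops to zero, so the graph is consumed in topological
// order.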

InstructionScheduler::InstructionScheduler(Zone* zone,
                                           InstructionSequence* sequence)
    : zone_(zone),
      sequence_(sequence),
      graph_(zone),
      last_side_effect_instr_(nullptr),
      pending_loads_(zone),
      last_live_in_reg_marker_(nullptr),
      last_deopt_or_trap_(nullptr),
      operands_map_(zone) {
  if (v8_flags.turbo_stress_instruction_scheduling) {
    random_number_generator_ =
        std::optional<base::RandomNumberGenerator>(v8_flags.random_seed);
  }
}

void InstructionScheduler::EndBlock(RpoNumber rpo) {
  if (v8_flags.turbo_stress_instruction_scheduling) {
    Schedule<StressSchedulerQueue>();
  } else {
    Schedule<CriticalPathFirstQueue>();
  }
  sequence()->EndBlock(rpo);
}
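
// The queue type chosen here fixes the scheduling policy for the whole
// block: critical-path-first in normal compiles, randomized under stress
// testing.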

void InstructionScheduler::AddTerminator(Instruction* instr) {
  ScheduleGraphNode* new_node = zone()->New<ScheduleGraphNode>(zone(), instr);
  // Make sure that basic block terminators are not moved by adding them as
  // successor of every instruction.
  for (ScheduleGraphNode* node : graph_) {
    node->AddSuccessor(new_node);
  }
  graph_.push_back(new_node);
}
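
// With an edge from every other node, the terminator is the unique sink of
// the dependency graph: it can only be scheduled last, so no instruction
// ever moves past the end of its basic block.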

void InstructionScheduler::AddInstruction(Instruction* instr) {
  if (IsBarrier(instr)) {
    if (v8_flags.turbo_stress_instruction_scheduling) {
      Schedule<StressSchedulerQueue>();
    } else {
      Schedule<CriticalPathFirstQueue>();
    }
    sequence()->AddInstruction(instr);
    return;
  }

  ScheduleGraphNode* new_node = zone()->New<ScheduleGraphNode>(zone(), instr);

  // We should not have branches in the middle of a block.
  DCHECK_NE(instr->flags_mode(), kFlags_branch);

  // ...

  // Instructions with side effects can't be reordered with respect to each
  // other, nor with loads issued since the last side-effecting instruction.
  if (HasSideEffect(instr)) {
    if (last_side_effect_instr_ != nullptr) {
      last_side_effect_instr_->AddSuccessor(new_node);
    }
    for (ScheduleGraphNode* load : pending_loads_) {
      load->AddSuccessor(new_node);
    }
    pending_loads_.clear();
    last_side_effect_instr_ = new_node;
  } else if (IsLoadOperation(instr)) {
    // Loads can't be reordered with side-effecting instructions, but
    // independent loads can be reordered with respect to each other.
    if (last_side_effect_instr_ != nullptr) {
      last_side_effect_instr_->AddSuccessor(new_node);
    }
    pending_loads_.push_back(new_node);
  }
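
  // The net effect is that load->store, store->load and store->store pairs
  // keep their relative order, while independent loads separated by no side
  // effect remain unordered and may be swapped by the scheduler.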

  // Look for operand dependencies.
  for (size_t i = 0; i < instr->InputCount(); ++i) {
    const InstructionOperand* input = instr->InputAt(i);
    if (input->IsUnallocated()) {
      int32_t vreg = UnallocatedOperand::cast(input)->virtual_register();
      auto it = operands_map_.find(vreg);
      if (it != operands_map_.end()) {
        it->second->AddSuccessor(new_node);
      }
    }
  }

  // Record the virtual registers defined by this instruction.
  for (size_t i = 0; i < instr->OutputCount(); ++i) {
    const InstructionOperand* output = instr->OutputAt(i);
    if (output->IsUnallocated()) {
      operands_map_[UnallocatedOperand::cast(output)->virtual_register()] =
          new_node;
    } else if (output->IsConstant()) {
      operands_map_[ConstantOperand::cast(output)->virtual_register()] =
          new_node;
    }
  }

  graph_.push_back(new_node);
}
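
// Example of the def-use chaining above (hypothetical vreg numbers): if i1
// defines v7 and a later i2 consumes v7, operands_map_[7] still points at
// i1's node when i2 is added, so i1 gains i2 as a successor and is always
// scheduled first.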

template <typename QueueType>
void InstructionScheduler::Schedule() {
  QueueType ready_list(this);

  // Compute total latencies so that we can schedule the critical path first.
  ComputeTotalLatencies();

  // Add nodes which don't have dependencies to the ready list.
  for (ScheduleGraphNode* node : graph_) {
    if (!node->HasUnscheduledPredecessor()) {
      ready_list.AddNode(node);
    }
  }

  // Go through the ready list and schedule the instructions.
  int cycle = 0;
  while (!ready_list.IsEmpty()) {
    ScheduleGraphNode* candidate = ready_list.PopBestCandidate(cycle);

    if (candidate != nullptr) {
      sequence()->AddInstruction(candidate->instruction());

      for (ScheduleGraphNode* successor : candidate->successors()) {
        successor->DropUnscheduledPredecessor();
        successor->set_start_cycle(
            std::max(successor->start_cycle(), cycle + candidate->latency()));

        if (!successor->HasUnscheduledPredecessor()) {
          ready_list.AddNode(successor);
        }
      }
    }

    cycle++;
  }
}
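
// Example trace with hypothetical latencies: two independent adds (latency
// 1) both feed a multiply (latency 4). The adds are popped at cycles 0 and
// 1, the multiply's start_cycle becomes max(0 + 1, 1 + 1) = 2, and it is
// popped at cycle 2. When no candidate is ready, PopBestCandidate returns
// nullptr and the loop simply advances the cycle, modelling a stall.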

int InstructionScheduler::GetInstructionFlags(const Instruction* instr) const {
  switch (instr->arch_opcode()) {
    case kArchNop:
    case kArchStackCheckOffset:
    case kArchFramePointer:
    case kArchParentFramePointer:
    case kArchStackSlot:
    case kArchComment:
    case kArchDeoptimize:
    case kArchJmp:
    case kArchBinarySearchSwitch:
    case kArchRet:
    case kArchTableSwitch:
    case kArchThrowTerminator:
      return kNoOpcodeFlags;

    case kArchTruncateDoubleToI:
    case kIeee754Float64Acos:
    case kIeee754Float64Acosh:
    case kIeee754Float64Asin:
    case kIeee754Float64Asinh:
    case kIeee754Float64Atan:
    case kIeee754Float64Atanh:
    case kIeee754Float64Atan2:
    case kIeee754Float64Cbrt:
    case kIeee754Float64Cos:
    case kIeee754Float64Cosh:
    case kIeee754Float64Exp:
    case kIeee754Float64Expm1:
    case kIeee754Float64Log:
    case kIeee754Float64Log1p:
    case kIeee754Float64Log10:
    case kIeee754Float64Log2:
    case kIeee754Float64Pow:
    case kIeee754Float64Sin:
    case kIeee754Float64Sinh:
    case kIeee754Float64Tan:
    case kIeee754Float64Tanh:
      return kNoOpcodeFlags;

    case kArchStackPointerGreaterThan:
      // The ArchStackPointerGreaterThan instruction loads the current stack
      // pointer value and must not be reordered with instructions with side
      // effects.
      return kIsLoadOperation;

#if V8_ENABLE_WEBASSEMBLY
    case kArchStackPointer:
    case kArchSetStackPointer:
      // Instructions that load or set the stack pointer must not be
      // reordered with instructions with side effects or with each other.
      return kHasSideEffect;
#endif  // V8_ENABLE_WEBASSEMBLY

    case kArchPrepareCallCFunction:
    case kArchPrepareTailCall:
    case kArchTailCallCodeObject:
    case kArchTailCallAddress:
#if V8_ENABLE_WEBASSEMBLY
    case kArchTailCallWasm:
    case kArchTailCallWasmIndirect:
#endif  // V8_ENABLE_WEBASSEMBLY
    case kArchAbortCSADcheck:
      return kHasSideEffect;

    case kArchDebugBreak:
      return kIsBarrier;

    case kArchSaveCallerRegisters:
    case kArchRestoreCallerRegisters:
      return kIsBarrier;

    case kArchCallCFunction:
    case kArchCallCFunctionWithFrameState:
    case kArchCallCodeObject:
    case kArchCallJSFunction:
#if V8_ENABLE_WEBASSEMBLY
    case kArchCallWasmFunction:
    case kArchCallWasmFunctionIndirect:
#endif  // V8_ENABLE_WEBASSEMBLY
    case kArchCallBuiltinPointer:
      // Calls can cause GC and GC may relocate objects. If a pure
      // instruction operates on a tagged pointer that was cast to a word
      // then it may be incorrect to move the instruction across the call.
      // Hence we mark all (non-tail-)calls as barriers.
      return kIsBarrier;

    case kArchStoreWithWriteBarrier:
    case kArchAtomicStoreWithWriteBarrier:
    case kArchStoreIndirectWithWriteBarrier:
      return kHasSideEffect;

    case kAtomicLoadInt8:
    case kAtomicLoadUint8:
    case kAtomicLoadInt16:
    case kAtomicLoadUint16:
    case kAtomicLoadWord32:
      return kIsLoadOperation;

    case kAtomicStoreWord8:
    case kAtomicStoreWord16:
    case kAtomicStoreWord32:
      return kHasSideEffect;

    case kAtomicExchangeInt8:
    case kAtomicExchangeUint8:
    case kAtomicExchangeInt16:
    case kAtomicExchangeUint16:
    case kAtomicExchangeWord32:
    case kAtomicCompareExchangeInt8:
    case kAtomicCompareExchangeUint8:
    case kAtomicCompareExchangeInt16:
    case kAtomicCompareExchangeUint16:
    case kAtomicCompareExchangeWord32:
    case kAtomicAddInt8:
    case kAtomicAddUint8:
    case kAtomicAddInt16:
    case kAtomicAddUint16:
    case kAtomicAddWord32:
    case kAtomicSubInt8:
    case kAtomicSubUint8:
    case kAtomicSubInt16:
    case kAtomicSubUint16:
    case kAtomicSubWord32:
    case kAtomicAndInt8:
    case kAtomicAndUint8:
    case kAtomicAndInt16:
    case kAtomicAndUint16:
    case kAtomicAndWord32:
    case kAtomicOrInt8:
    case kAtomicOrUint8:
    case kAtomicOrInt16:
    case kAtomicOrUint16:
    case kAtomicOrWord32:
    case kAtomicXorInt8:
    case kAtomicXorUint8:
    case kAtomicXorInt16:
    case kAtomicXorUint16:
    case kAtomicXorWord32:
      return kHasSideEffect;

#define CASE(Name) case k##Name:
      TARGET_ARCH_OPCODE_LIST(CASE)
#undef CASE
      // Target specific.
      return GetTargetInstructionFlags(instr);
  }
  UNREACHABLE();
}
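
// These flags drive the dependency edges added in AddInstruction:
// kIsBarrier flushes the pending graph and emits the instruction in place,
// kHasSideEffect serializes the instruction with the previous side effect
// and all pending loads, and kIsLoadOperation orders the load after the
// last side effect while leaving independent loads free to reorder among
// themselves.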

void InstructionScheduler::ComputeTotalLatencies() {
  // Visit the graph backwards so that the total latency of every successor
  // is already known when a node is processed.
  for (ScheduleGraphNode* node : base::Reversed(graph_)) {
    int max_latency = 0;

    for (ScheduleGraphNode* successor : node->successors()) {
      DCHECK_NE(-1, successor->total_latency());
      if (successor->total_latency() > max_latency) {
        max_latency = successor->total_latency();
      }
    }

    node->set_total_latency(max_latency + node->latency());
  }
}
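
// Worked example (hypothetical latencies): for the graph
//
//   A(1) --> B(2) --> D(1)
//        \-> C(4) -/
//
// the backwards pass computes total(D) = 1, total(B) = 2 + 1 = 3,
// total(C) = 4 + 1 = 5, and total(A) = 1 + max(3, 5) = 6, so the critical
// path runs through C and C is scheduled ahead of B when both are ready.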