#ifndef V8_COMPILER_BACKEND_INSTRUCTION_H_
#define V8_COMPILER_BACKEND_INSTRUCTION_H_
class RegisterConfiguration;
class SourcePositionTable;
#if defined(V8_CC_MSVC) && defined(V8_TARGET_ARCH_IA32)
// Two alternative definitions: no explicit alignment on MSVC/ia32,
// ALIGNAS(8) everywhere else.
#define INSTRUCTION_OPERAND_ALIGN
#else
#define INSTRUCTION_OPERAND_ALIGN ALIGNAS(8)
#endif
static const int kInvalidVirtualRegister = -1;

// Enumerator of the operand Kind enum; location kinds come last:
FIRST_LOCATION_OPERAND_KIND = ALLOCATED
#define INSTRUCTION_OPERAND_PREDICATE(name, type) \
  bool Is##name() const { return kind() == type; }
#undef INSTRUCTION_OPERAND_PREDICATE
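// Example expansion (illustrative pair, not from this excerpt): the macro
// above generates one kind predicate per operand kind, e.g.
//
//   INSTRUCTION_OPERAND_PREDICATE(Constant, CONSTANT)
//   // expands to:
//   bool IsConstant() const { return kind() == CONSTANT; }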
inline bool IsAnyLocationOperand() const;
inline bool IsLocationOperand() const;
inline bool IsFPLocationOperand() const;
inline bool IsAnyRegister() const;
inline bool IsRegister() const;
inline bool IsFPRegister() const;
inline bool IsFloatRegister() const;
inline bool IsDoubleRegister() const;
inline bool IsSimd128Register() const;
inline bool IsSimd256Register() const;
inline bool IsAnyStackSlot() const;
inline bool IsStackSlot() const;
inline bool IsFPStackSlot() const;
inline bool IsFloatStackSlot() const;
inline bool IsDoubleStackSlot() const;
inline bool IsSimd128StackSlot() const;
inline bool IsSimd256StackSlot() const;
template <typename SubKindOperand>
static SubKindOperand* New(Zone* zone, const SubKindOperand& op) {
  return zone->New<SubKindOperand>(op);
}
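// Usage sketch (assumed call site, not part of this excerpt): a stack
// operand is built by value, then copied into zone storage when a
// heap-allocated operand is required.
//
//   ConstantOperand tmp(vreg);  // vreg: some virtual register number
//   ConstantOperand* op = InstructionOperand::New(zone, tmp);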
return this == &that;  // Equals(): pending operands compare by identity

return this->GetCanonicalizedValue() == that.GetCanonicalizedValue();

return this->GetCanonicalizedValue() < that.GetCanonicalizedValue();

return Equals(other);

return !Equals(other);

inline uint64_t GetCanonicalizedValue() const;
#define INSTRUCTION_OPERAND_CASTS(OperandType, OperandKind)      \
  static OperandType* cast(InstructionOperand* op) {             \
    DCHECK_EQ(OperandKind, op->kind());                          \
    return static_cast<OperandType*>(op);                        \
  }                                                              \
  static const OperandType* cast(const InstructionOperand* op) { \
    DCHECK_EQ(OperandKind, op->kind());                          \
    return static_cast<const OperandType*>(op);                  \
  }                                                              \
  static OperandType cast(const InstructionOperand& op) {        \
    DCHECK_EQ(OperandKind, op.kind());                           \
    return *static_cast<const OperandType*>(&op);                \
  }
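// Hedged usage sketch: each concrete operand class is expected to
// instantiate the macro with its own kind tag, yielding DCHECK-guarded
// down-casts.
//
//   // inside class ConstantOperand:
//   INSTRUCTION_OPERAND_CASTS(ConstantOperand, CONSTANT)
//
//   // at a call site:
//   ConstantOperand* c = ConstantOperand::cast(op);  // DCHECKs op->kind()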
value_ |= static_cast<uint64_t>(static_cast<int64_t>(index))

return static_cast<int>(static_cast<int64_t>(value_) >>
enum ImmediateType { INLINE_INT32, INLINE_INT64, INDEXED_RPO, INDEXED_IMM };

value_ |= TypeField::encode(type);
value_ |= static_cast<uint64_t>(static_cast<int64_t>(value))
          << ValueField::kShift;
return static_cast<int64_t>(value_) >> ValueField::kShift;

return static_cast<int64_t>(value_) >> ValueField::kShift;

return static_cast<int64_t>(value_) >> ValueField::kShift;
static_assert(TypeField::kLastUsedBit < 32);
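// Illustrative round trip (a sketch; assumes the payload accessor follows
// the pattern above, with ValueField occupying the high bits):
//
//   ImmediateOperand imm(ImmediateOperand::INLINE_INT32, -42);
//   int32_t v = imm.inline_int32_value();  // arithmetic shift keeps the sign
//   DCHECK_EQ(-42, v);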
set_next(next_operand);

uintptr_t shifted_value =
    reinterpret_cast<uintptr_t>(next) >> kPointerShift;
DCHECK_EQ(reinterpret_cast<uintptr_t>(next),
          shifted_value << kPointerShift);
value_ |= NextOperandField::encode(static_cast<uint64_t>(shifted_value));

uintptr_t shifted_value =
    static_cast<uint64_t>(NextOperandField::decode(value_));
return reinterpret_cast<PendingOperand*>(shifted_value << kPointerShift);
static const uint64_t kPointerShift = 3;
static_assert(NextOperandField::kLastUsedBit == 63);
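// Why a shift of 3 loses nothing (explanatory note): PendingOperand
// instances are zone-allocated with at least 8-byte alignment, so the low
// three pointer bits are always zero; set_next() drops them before packing
// the pointer into NextOperandField, and its DCHECK_EQ verifies the
// round trip.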
DCHECK(IsSupportedRepresentation(rep));
value_ |= LocationKindField::encode(location_kind);
value_ |= RepresentationField::encode(rep);
value_ |= static_cast<uint64_t>(static_cast<int64_t>(index))
          << IndexField::kShift;
DCHECK(IsStackSlot() || IsFPStackSlot());
return static_cast<int64_t>(value_) >> IndexField::kShift;

DCHECK(IsRegister() || IsFPRegister());
return static_cast<int64_t>(value_) >> IndexField::kShift;
return Register::from_code(register_code());

DCHECK(IsFloatRegister());
return FloatRegister::from_code(register_code());

return DoubleRegister::from_code(register_code());

DCHECK(IsSimd128Register());
return Simd128Register::from_code(register_code());
#if defined(V8_TARGET_ARCH_X64)
// x64 only: read the 256-bit register's code back as a 128-bit register.
DCHECK(IsSimd256Register());
return Simd128Register::from_code(register_code());

DCHECK(IsSimd256Register());
return Simd256Register::from_code(register_code());
return LocationKindField::decode(value_);

return RepresentationField::decode(value_);
case MachineRepresentation::kWord32:
case MachineRepresentation::kWord64:
case MachineRepresentation::kFloat32:
case MachineRepresentation::kFloat64:
case MachineRepresentation::kSimd128:
case MachineRepresentation::kSimd256:
case MachineRepresentation::kTaggedSigned:
case MachineRepresentation::kTaggedPointer:
case MachineRepresentation::kTagged:
case MachineRepresentation::kCompressedPointer:
case MachineRepresentation::kCompressed:
case MachineRepresentation::kProtectedPointer:
case MachineRepresentation::kSandboxedPointer:
  return true;
case MachineRepresentation::kBit:
case MachineRepresentation::kWord8:
case MachineRepresentation::kWord16:
case MachineRepresentation::kFloat16:
case MachineRepresentation::kNone:
  return false;
case MachineRepresentation::kMapWord:
case MachineRepresentation::kIndirectPointer:
case MachineRepresentation::kFloat16RawBits:
static_assert(RepresentationField::kLastUsedBit < 32);
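// Sketch of the intended guard (assumption: this predicate gates
// AllocatedOperand construction, as the constructor's DCHECK above shows):
//
//   if (LocationOperand::IsSupportedRepresentation(rep)) {
//     AllocatedOperand op(LocationOperand::REGISTER, rep, reg_code);
//   }
//   // Narrow representations (kBit, kWord8, kWord16, ...) must be widened
//   // before they can live in a register or stack slot.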
#undef INSTRUCTION_OPERAND_CASTS
return a.CompareCanonicalized(b);
CheckPointerCompressionConsistency();

#if DEBUG && V8_COMPRESS_POINTERS
if (!source_.IsLocationOperand()) return;
if (!destination_.IsLocationOperand()) return;
using MR = MachineRepresentation;
MR dest_rep = LocationOperand::cast(&destination_)->representation();
if (dest_rep == MR::kTagged || dest_rep == MR::kTaggedPointer) {
  MR src_rep = LocationOperand::cast(&source_)->representation();
  DCHECK_NE(src_rep, MR::kCompressedPointer);
}
CheckPointerCompressionConsistency();

destination_ = operand;
CheckPointerCompressionConsistency();

return destination_.IsInvalid() && !source_.IsInvalid();
DCHECK_IMPLIES(!destination_.IsInvalid(), !destination_.IsConstant());
return IsEliminated() || source_.EqualsCanonicalized(destination_);

if (IsRedundant() && that.IsRedundant()) return true;
return source_.Equals(that.source_) &&
       destination_.Equals(that.destination_);
return AddMove(from, to, zone());

MoveOperands* AddMove(const InstructionOperand& from,
                      const InstructionOperand& to,
                      Zone* operand_allocation_zone) {
  if (from.EqualsCanonicalized(to)) return nullptr;
  if (empty()) reserve(4);
bool IsRedundant() const;
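// Worked sketch of the redundancy rules above (operand names illustrative):
// a move whose source and destination canonicalize to the same location is
// dropped at insertion time, keeping gap-move lists minimal.
//
//   moves->AddMove(reg_op, reg_op);   // returns nullptr, nothing recorded
//   moves->AddMove(reg_op, slot_op);  // recorded, e.g. a spill move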
std::ostream& operator<<(std::ostream&, const ParallelMove&);
: reference_operands_(zone), instruction_position_(-1) {}

return reference_operands_;

instruction_position_ = pos;
// Operand layout: all outputs first, then inputs, then temps.
return &operands_[i];

return &operands_[i];

return &operands_[OutputCount() + i];

return &operands_[OutputCount() + i];

return &operands_[OutputCount() + InputCount() + i];

return &operands_[OutputCount() + InputCount() + i];
return AddressingModeField::decode(opcode());

return FlagsConditionField::decode(opcode());

int misc() const { return MiscField::decode(opcode()); }

return compiler::HasMemoryAccessMode(arch_opcode());

return AccessModeField::decode(opcode());
return New(zone, opcode, 0, nullptr, 0, nullptr, 0, nullptr);
DCHECK(output_count == 0 || outputs != nullptr);
DCHECK(input_count == 0 || inputs != nullptr);
DCHECK(temp_count == 0 || temps != nullptr);

CHECK(InputCountField::is_valid(input_count));

size_t total_extra_ops = output_count + input_count + temp_count;
if (total_extra_ops != 0) total_extra_ops--;
int size = static_cast<int>(

opcode, output_count, outputs, input_count, inputs, temp_count, temps);
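// Explanatory note (not source text): total_extra_ops is decremented
// because the Instruction object itself embeds storage for one operand;
// the remaining operands live in a trailing array allocated immediately
// after the object, which the OutputAt/InputAt/TempAt accessors above
// index as one contiguous block.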
DCHECK(NeedsReferenceMap());
reference_map_ = map;

opcode_ = ArchOpcodeField::encode(kArchNop);
reference_map_ = nullptr;
bool IsNop() const { return arch_opcode() == kArchNop; }

return arch_opcode() == ArchOpcode::kArchDeoptimize ||
       FlagsModeField::decode(opcode()) == kFlags_deoptimize;

return FlagsModeField::decode(opcode()) == kFlags_trap;

bool IsJump() const { return arch_opcode() == ArchOpcode::kArchJmp; }
bool IsRet() const { return arch_opcode() == ArchOpcode::kArchRet; }
#if V8_ENABLE_WEBASSEMBLY
return arch_opcode() <= ArchOpcode::kArchTailCallWasmIndirect;
#else
return arch_opcode() <= ArchOpcode::kArchTailCallAddress;
#endif

return arch_opcode() == ArchOpcode::kArchThrowTerminator;
return arch_opcode <= ArchOpcode::kArchCallBuiltinPointer;

return IsCallWithDescriptorFlags(arch_opcode());

DCHECK(IsCallWithDescriptorFlags());
static_assert(CallDescriptor::kFlagsBitsEncodedInInstructionCode == 10);
static constexpr int kInstructionCodeFlagsMask =
    ((1 << CallDescriptor::kFlagsBitsEncodedInInstructionCode) - 1);
DCHECK_EQ(static_cast<int>(flag) & kInstructionCodeFlagsMask, flag);
return MiscField::decode(opcode()) & flag;
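// Usage sketch (assumption: descriptor flags occupy the low ten bits of
// MiscField for call instructions, per the mask above):
//
//   if (instr->IsCallWithDescriptorFlags() &&
//       instr->HasCallDescriptorFlag(CallDescriptor::kHasExceptionHandler)) {
//     // The call carries an exception-handler input at the end.
//   }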
#ifdef V8_ENABLE_WEBASSEMBLY
size_t WasmSignatureHashInputIndex() const {
  switch (arch_opcode()) {
    case kArchCallWasmFunctionIndirect:
      return InputCount() -
             (HasCallDescriptorFlag(CallDescriptor::kHasExceptionHandler)
                  ? 3
                  : 2);
    case kArchTailCallWasmIndirect:
      return InputCount() - 3;
switch (arch_opcode()) {
  case kArchCallCodeObject:
    return InputCount() -
           (HasCallDescriptorFlag(CallDescriptor::kHasExceptionHandler)
                ? 3
                : 2);
  case kArchTailCallCodeObject:
    return InputCount() - 3;
if (HasCallDescriptorFlag(CallDescriptor::kHasExceptionHandler)) {
  return InputCount() - 2;
}
return InputCount() - 1;
FIRST_GAP_POSITION = START,
LAST_GAP_POSITION = END
if (parallel_moves_[pos] == nullptr) {
  parallel_moves_[pos] = zone->New<ParallelMove>(zone);
}
return parallel_moves_[pos];

return parallel_moves_[pos];

return parallel_moves_[pos];
bool AreMovesRedundant() const;

static const size_t kMaxOutputCount = OutputCountField::kMax;
static const size_t kMaxInputCount = InputCountField::kMax;
static const size_t kMaxTempCount = TempCountField::kMax;

static const int kInvalidRpoNumber = -1;
return static_cast<size_t>(index_);

return other.index_ == this->index_ + 1;
: type_(kExternalReference),
  value_(base::bit_cast<intptr_t>(ref.raw())) {}
return value_ >= std::numeric_limits<int32_t>::min() &&
       value_ <= std::numeric_limits<int32_t>::max();

const int32_t value = static_cast<int32_t>(value_);
return base::bit_cast<float>(static_cast<int32_t>(value_));

return Float32::FromBits(static_cast<uint32_t>(value_));

return base::bit_cast<uint32_t>(static_cast<int32_t>(value_));

return ExternalReference::FromRawAddress(static_cast<Address>(value_));

return RpoNumber::FromInt(static_cast<int>(value_));
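// Float32 round-trip sketch (assumption: Constant(float) stores the raw
// IEEE bits in value_, which is what the bit_cast accessors above rely on):
//
//   Constant c(0.5f);
//   uint32_t bits = c.ToFloat32AsInt();
//   float f = base::bit_cast<float>(bits);  // exact, no rounding involved
//   DCHECK_EQ(0.5f, f);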
// (Bodies of the StateValueDescriptor factory helpers; each constructs its
// descriptor with MachineType::AnyTagged().)
return kind_ == StateValueKind::kArgumentsElements;

return kind_ == StateValueKind::kArgumentsLength;

return kind_ == StateValueKind::kNestedObject ||
       kind_ == StateValueKind::kStringConcat;

DCHECK(kind_ == StateValueKind::kDuplicate ||
       kind_ == StateValueKind::kNestedObject ||
       kind_ == StateValueKind::kStringConcat);

DCHECK(kind_ == StateValueKind::kArgumentsElements);
void Print(std::ostream& os) const;

size_t size() { return fields_.size(); }
: desc(desc), nested(nested) {}

return field_iterator != other.field_iterator;

return field_iterator == other.field_iterator;

if (field_iterator->IsNested()) {

StateValueDescriptor* desc = &(*field_iterator);
StateValueList* nested = desc->IsNested() ? *nested_iterator : nullptr;
return Value(desc, nested);

: field_iterator(it), nested_iterator(nested) {}
: start_position(start), fields_count(fields) {}
fields_.push_back(StateValueDescriptor::Recursive(id));
StateValueList* nested = zone->New<StateValueList>(zone);
nested_.push_back(nested);

fields_.push_back(StateValueDescriptor::StringConcat(id));
StateValueList* nested = zone->New<StateValueList>(zone);
nested_.push_back(nested);

fields_.push_back(StateValueDescriptor::ArgumentsElements(type));

fields_.push_back(StateValueDescriptor::ArgumentsLength());

fields_.push_back(StateValueDescriptor::RestLength());

fields_.push_back(StateValueDescriptor::Duplicate(id));

fields_.push_back(StateValueDescriptor::Plain(type));

fields_.insert(fields_.end(), num, StateValueDescriptor::OptimizedOut());
DCHECK(!HasNestedFieldsAfter(values_start));
size_t fields_count = fields_.size() - values_start;
return Slice(fields_.begin() + values_start, fields_count);

auto it = fields_.begin() + values_start;
for (; it != fields_.end(); it++) {
  if (it->IsNested()) return true;
}
return false;
uint16_t max_arguments, size_t locals_count, size_t stack_count,

uint32_t wasm_liftoff_frame_size = std::numeric_limits<uint32_t>::max(),
uint32_t wasm_function_index = std::numeric_limits<uint32_t>::max());
return shared_info_;

return bytecode_array_;
#if V8_ENABLE_WEBASSEMBLY
       type_ != FrameStateType::kLiftoffFunction &&
#endif
       type_ != FrameStateType::kConstructInvokeStub;

return FrameStateFunctionInfo::IsJSFunctionType(type_) ||
       type_ == FrameStateType::kBuiltinContinuation ||
#if V8_ENABLE_WEBASSEMBLY
       type_ == FrameStateType::kJSToWasmBuiltinContinuation ||
       type_ == FrameStateType::kWasmInlinedIntoJS ||
#endif
       type_ == FrameStateType::kConstructCreateStub ||
       type_ == FrameStateType::kConstructInvokeStub;
size_t GetHeight() const;

return total_conservative_frame_size_in_bytes_;

size_t GetSize() const;
size_t GetTotalSize() const;
size_t GetFrameCount() const;
size_t GetJSFrameCount() const;

DCHECK(wasm_function_index_ != std::numeric_limits<uint32_t>::max());
return wasm_function_index_;
static const int kImpossibleValue = 0xdead;
#if V8_ENABLE_WEBASSEMBLY

JSToWasmFrameStateDescriptor(

    size_t locals_count, size_t stack_count,

std::optional<wasm::ValueKind> return_kind() const { return return_kind_; }

std::optional<wasm::ValueKind> return_kind_;
feedback_(feedback) {

NodeId node_id() const { return node_id_; }
void SetInput(size_t offset, int virtual_register);
void RenameInput(size_t offset, int virtual_register);
return code_end_ - 1;

return loop_header_alignment_;

size_t PredecessorIndexOf(RpoNumber rpo_number) const;

int32_t code_end_ = -1;
int NextVirtualRegister();

return *instruction_blocks_;

return static_cast<int>(instruction_blocks_->size());

return instruction_blocks_->at(rpo_number.ToSize());

return instruction_blocks_->at(block->loop_end().ToSize() - 1)
    ->last_instruction_index();

return instruction_blocks_->at(rpo_number.ToSize());

return instructions()[instruction_index]->block();

return MachineType::PointerRepresentation();
bool IsFP(int virtual_register) const {

constexpr int kFPRepMask =
    RepresentationBit(MachineRepresentation::kFloat32) |
    RepresentationBit(MachineRepresentation::kFloat64) |
    RepresentationBit(MachineRepresentation::kSimd128) |
    RepresentationBit(MachineRepresentation::kSimd256);
return (representation_mask() & kFPRepMask) != 0;

constexpr int kSimd128RepMask =
    RepresentationBit(MachineRepresentation::kSimd128);
return (representation_mask() & kSimd128RepMask) != 0;
return static_cast<int>(instructions().size()) - 1;

return instructions_[index];
DCHECK_NE(Constant::kRpoNumber, constant.type());
DCHECK(virtual_register >= 0 && virtual_register < next_virtual_register_);
DCHECK(constants_.find(virtual_register) == constants_.end());
constants_.emplace(virtual_register, constant);

auto it = constants_.find(virtual_register);
DCHECK(it != constants_.end());
return it->second;
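// Usage sketch matching the DCHECKs above (flow is assumed): each virtual
// register gets at most one constant, registered before lookup.
//
//   int vreg = sequence->NextVirtualRegister();
//   sequence->AddConstant(vreg, Constant(int32_t{42}));
//   Constant c = sequence->GetConstant(vreg);  // returns the 42 constant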
if (RelocInfo::IsNoInfo(constant.rmode())) {
  if (constant.type() == Constant::kRpoNumber) {
    RpoNumber rpo_number = constant.ToRpoNumber();
    DCHECK(!rpo_immediates().at(rpo_number.ToSize()).IsValid() ||
           rpo_immediates().at(rpo_number.ToSize()) == rpo_number);
    rpo_immediates()[rpo_number.ToSize()] = rpo_number;
    return ImmediateOperand(ImmediateOperand::INDEXED_RPO,
                            rpo_number.ToInt());
  } else if (constant.type() == Constant::kInt32) {
    return ImmediateOperand(ImmediateOperand::INLINE_INT32,
                            constant.ToInt32());
  } else if (constant.type() == Constant::kInt64 &&
             constant.FitsInInt32()) {
    return ImmediateOperand(ImmediateOperand::INLINE_INT64,
                            constant.ToInt32());
  }
}
int index = static_cast<int>(immediates_.size());
immediates_.push_back(constant);
return ImmediateOperand(ImmediateOperand::INDEXED_IMM, index);
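// Summary of the four ImmediateType encodings produced above
// (interpretation, not source text): small int32/int64 payloads are inlined
// directly into the operand (INLINE_INT32 / INLINE_INT64); RPO block numbers
// go through the rpo_immediates_ side table (INDEXED_RPO) so jump threading
// can rewrite them; everything else lands in immediates_ (INDEXED_IMM) and
// the operand stores only the vector index.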
switch (op->type()) {
  case ImmediateOperand::INLINE_INT32:
    return Constant(op->inline_int32_value());
  case ImmediateOperand::INLINE_INT64:
    return Constant(op->inline_int64_value());
  case ImmediateOperand::INDEXED_RPO: {
    int index = op->indexed_value();
    DCHECK_GT(rpo_immediates_.size(), index);
    return Constant(rpo_immediates_[index]);
  }
  case ImmediateOperand::INDEXED_IMM: {
    int index = op->indexed_value();
    return immediates_[index];
  }
return static_cast<int>(deoptimization_entries_.size());

if (instr->IsCall()) return true;
void PrintBlock(int block_id) const;

void ValidateEdgeSplitForm() const;
void ValidateDeferredBlockExitPaths() const;
void ValidateDeferredBlockEntryPaths() const;
void ValidateSSA() const;
static void SetRegisterConfigurationForTesting(
    const RegisterConfiguration* config);

void RecomputeAssemblyOrderForTesting();

rpo_immediates().resize(rpo_count);

void ComputeAssemblyOrder();
#undef INSTRUCTION_OPERAND_ALIGN

#endif  // V8_COMPILER_BACKEND_INSTRUCTION_H_