#define TRACE(...)                            \
  if (v8_flags.trace_maglev_graph_building) { \
    std::cout << __VA_ARGS__ << std::endl;    \
  }

// Body of the failure-reporting helper macro built on top of TRACE:
  TRACE("Failed " << __func__ << ":" << __LINE__ << ": " << __VA_ARGS__); \
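// Hypothetical usage sketch: TRACE("- Visiting " << offset) prints only when
// the --trace-maglev-graph-building flag is enabled.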
 
enum class CpuOperation {

bool IsSupported(CpuOperation op) {
  switch (op) {
    case CpuOperation::kFloat64Round:
#if defined(V8_TARGET_ARCH_X64) || defined(V8_TARGET_ARCH_IA32)

#elif defined(V8_TARGET_ARCH_ARM)

#elif defined(V8_TARGET_ARCH_ARM64) || defined(V8_TARGET_ARCH_PPC64) ||   \
    defined(V8_TARGET_ARCH_S390X) || defined(V8_TARGET_ARCH_RISCV64) ||   \
    defined(V8_TARGET_ARCH_RISCV32) || defined(V8_TARGET_ARCH_LOONG64) || \
    defined(V8_TARGET_ARCH_MIPS64)

#else
#error "V8 does not support this architecture."
#endif
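// Whether Float64Round can be lowered to a native rounding instruction is
// architecture-dependent; unknown target architectures fail the build via
// #error rather than silently losing support.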
class FunctionContextSpecialization final : public AllStatic {
 public:
  static compiler::OptionalContextRef TryToRef(
      const MaglevCompilationUnit* unit, ValueNode* context, size_t* depth) {
    DCHECK(unit->info()->specialize_to_function_context());
    if (Constant* n = context->TryCast<Constant>()) {
      return n->ref().AsContext().previous(unit->broker(), depth);
    }
 
    return n->context().node();

    return alloc->object()->get(

    switch (n->function_id()) {
      case Runtime::kPushBlockContext:
      case Runtime::kPushCatchContext:
      case Runtime::kNewFunctionContext:
        return n->context().node();

    if (parent_context == nullptr) return;
    *context = parent_context;

    alloc->ForceEscaping();
 
 
        args_(reglist.register_count()),

    return args_.size() - 1;

    DCHECK(!node->properties().is_conversion());

      DCHECK(!arg->properties().is_conversion());
 
 
 
    saved_ = builder_->current_speculation_feedback_;
    if (IsSpeculationAllowed(builder_->broker(), feedback_source)) {
      builder->current_speculation_feedback_ = feedback_source;
    } else {
      builder->current_speculation_feedback_ = compiler::FeedbackSource();
    }

    builder_->current_speculation_feedback_ = saved_;

    if (!feedback_source.IsValid()) return false;

        broker->GetFeedbackForCall(feedback_source);

           SpeculationMode::kAllowSpeculation;
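// Speculation is only permitted when the call site's feedback reports
// SpeculationMode::kAllowSpeculation; the scope restores the previously
// active feedback source on destruction.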
 
 
 
      previous_(builder->lazy_deopt_result_location_scope_),
      result_location_(result_location),
      result_size_(result_size) {

  builder_->lazy_deopt_result_location_scope_ = previous_;
 
 
                  compiler::OptionalJSFunctionRef maybe_js_target = {})

        parent_(builder->current_deopt_scope_),

            continuation, {}, builder->GetContext(), maybe_js_target}) {
    builder_->current_interpreter_frame().virtual_objects().Snapshot();
    builder_->current_deopt_scope_ = this;

        data_.get<DeoptFrame::BuiltinContinuationFrameData>().context);
    DCHECK(data_.get<DeoptFrame::BuiltinContinuationFrameData>()
               .parameters.empty());
 
 
                  compiler::OptionalJSFunctionRef maybe_js_target,

    builder_->current_deopt_scope_ = this;

    if (parameters.size() > 0) {
      if (InlinedAllocation* receiver =
              parameters[0]->TryCast<InlinedAllocation>()) {

        receiver->ForceEscaping();

      builder_->AddDeoptUse(node);
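// Note: the receiver allocation is forced to escape so it can still be
// materialized if this continuation frame is ever used for a lazy deopt.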
 
 
        parent_(builder->current_deopt_scope_),

            *builder->compilation_unit(), builder->current_source_position_,

    builder_->current_deopt_scope_ = this;

    builder_->latest_checkpointed_frame_.reset();
 
 
      case Builtin::kGetIteratorWithFeedbackLazyDeoptContinuation:
      case Builtin::kCallIteratorWithFeedbackLazyDeoptContinuation:
      case Builtin::kArrayForEachLoopLazyDeoptContinuation:
      case Builtin::kArrayMapLoopLazyDeoptContinuation:
      case Builtin::kGenericLazyDeoptContinuation:
      case Builtin::kToBooleanLazyDeoptContinuation:
 
 
 
            sub_builder->builder_->zone()->New<compiler::BytecodeLivenessState>(

        std::initializer_list<Variable*> vars)
 
 
 
      : sub_builder_(sub_builder) {
    sub_builder_->TakeKnownNodeAspectsAndVOsFromParent();

    sub_builder_->MoveKnownNodeAspectsAndVOsToParent();
 
 
 
      if (data.patch_accumulator_scope &&

  auto& data = data_.bytecode_target;

        data.patch_accumulator_scope->jump_type_ == jump_type));
  } else if (data.patch_accumulator_scope->jump_type_ == jump_type) {

        builder_->GetRootConstant(data.patch_accumulator_scope->root_index_));

    builder_->SetAccumulatorInBranch(data.patch_accumulator_scope->node_);
 
 
    case kBytecodeJumpTarget:
      return &builder_->jump_targets_[data_.bytecode_target.jump_target_offset];
    case kLabelJumpTarget:
      return &data_.label_target.jump_label->ref_;

    case kBytecodeJumpTarget:
      return &builder_->jump_targets_[data_.bytecode_target.fallthrough_offset];
    case kLabelJumpTarget:
      return &data_.label_target.fallthrough;
 
 
    case kBytecodeJumpTarget: {

      builder_->MarkBranchDeadAndJumpIfNeeded(jump_type_ ==
                                              type_if_need_to_jump);

    case kLabelJumpTarget:
 
 
template <typename ControlNodeT, typename... Args>

    std::initializer_list<ValueNode*> control_inputs, Args&&... args) {

      control_inputs, std::forward<Args>(args)..., true_target(),

    std::initializer_list<Variable*> loop_vars) {

  MoveKnownNodeAspectsAndVOsToParent();

  builder_->ProcessMergePointPredecessors(*loop_state, loop_header_ref);
  builder_->StartNewBlock(nullptr, loop_state, loop_header_ref);
 
 
template <typename ControlNodeT, typename... Args>

    Label* true_target, std::initializer_list<ValueNode*> control_inputs,

      control_inputs, std::forward<Args>(args)..., &true_target->ref_,

  MergeIntoLabel(true_target, block);

  builder_->StartNewBlock(block, nullptr, fallthrough_ref);
 
 
template <typename ControlNodeT, typename... Args>

    Label* false_target, std::initializer_list<ValueNode*> control_inputs,

      control_inputs, std::forward<Args>(args)..., &fallthrough_ref,
      &false_target->ref_);

  MergeIntoLabel(false_target, block);

  builder_->StartNewBlock(block, nullptr, fallthrough_ref);
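// GotoIfTrue/GotoIfFalse emit a conditional control node: the taken edge is
// merged into the target label, while the fallthrough edge starts a fresh
// current block.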
 
 
  if (builder_->current_block_ == nullptr) {
    ReducePredecessorCount(label);

  MergeIntoLabel(label, block);

  label->predecessor_count_ -= num;
  if (label->merge_state_ != nullptr) {

  if (builder_->current_block_ == nullptr) {

                                        pseudo_frame_, block);

    int predecessors_so_far = label->merge_state_ == nullptr
                                ? 0  // (middle line elided; 0 is presumed)
                                : label->merge_state_->predecessors_so_far();

  ReducePredecessorCount(label,
                         label->predecessor_count_ - predecessors_so_far);

  MoveKnownNodeAspectsAndVOsToParent();

           label->predecessor_count_);
 
 
template <typename FCond, typename FTrue, typename FFalse>

    std::initializer_list<MaglevSubGraphBuilder::Variable*> vars, FCond cond,
    FTrue if_true, FFalse if_false) {
 
 
template <typename FCond, typename FTrue, typename FFalse>

  subgraph.set(ret_val, if_true());
  subgraph.Goto(&done);
  subgraph.Bind(&else_branch);
  subgraph.set(ret_val, if_false());
  subgraph.Goto(&done);
  subgraph.Bind(&done);
  return subgraph.get(ret_val);
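// A standard if/else diamond built from subgraph labels: both arms assign the
// same variable, jump to a shared "done" label, and the merged value is read
// back via get().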
 
 
template <typename FCond, typename FTrue, typename FFalse>

  CHECK(result_if_true.IsDone());
  if (result_if_true.IsDoneWithValue()) {
    subgraph.set(ret_val, result_if_true.value());

  subgraph.Bind(&else_branch);

  if (result_if_true.IsDoneWithAbort() && result_if_false.IsDoneWithAbort()) {

    subgraph.set(ret_val, result_if_false.value());

  subgraph.Bind(&done);
  return subgraph.get(ret_val);
 
 
  DCHECK(pseudo_frame_.virtual_objects().is_empty());
  pseudo_frame_.set_known_node_aspects(
      builder_->current_interpreter_frame_.known_node_aspects());
  pseudo_frame_.set_virtual_objects(
      builder_->current_interpreter_frame_.virtual_objects());

  builder_->current_interpreter_frame_.set_known_node_aspects(
      pseudo_frame_.known_node_aspects());
  pseudo_frame_.clear_known_node_aspects();
  builder_->current_interpreter_frame_.set_virtual_objects(
      pseudo_frame_.virtual_objects());
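// These two helpers shuttle known-node-aspects and virtual-object state
// between the parent builder's interpreter frame and the subgraph's pseudo
// frame, so type facts survive entering and leaving the subgraph.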
 
 
  if (label->merge_state_ == nullptr) {

        predecessor, label->liveness_);

          bytecode().length() + (is_inline() ? 1 : 0))),

          bytecode().length() + 1)),
 
    CHECK(it.CurrentBytecodeIsValidOSREntry());

    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "- Non-standard entrypoint @" << entrypoint_

  if (closure == nullptr &&

  int register_index = 0;

  if (new_target_or_generator_register.is_valid()) {
    int new_target_index = new_target_or_generator_register.index();
    for (; register_index < new_target_index; register_index++) {

        new_target_or_generator_register,
 
 
  while (offset_and_info != end && offset_and_info->first < entrypoint_) {

  for (; offset_and_info != end; ++offset_and_info) {
    int offset = offset_and_info->first;

    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "- Creating loop merge state at @" << offset << std::endl;
 
  if (bytecode().handler_table_size() > 0) {

    for (int i = 0; i < table.NumberOfRangeEntries(); i++) {
      const int offset = table.GetRangeHandler(i);
      const bool was_used = table.HandlerWasUsed(i);

      if (v8_flags.trace_maglev_graph_building) {
        std::cout << "- Creating exception merge state at @" << offset
                  << (was_used ? "" : " (never used)") << ", context register r"
                  << context_reg.index() << std::endl;
 
 
struct GetResultLocationAndSizeHelper;

struct GetResultLocationAndSizeHelper<index> {
  static std::pair<interpreter::Register, int> GetResultLocationAndSize(

  static bool HasOutputRegisterOperand() { return false; }

struct GetResultLocationAndSizeHelper<index, interpreter::OperandType::kRegOut,

  static std::pair<interpreter::Register, int> GetResultLocationAndSize(
      const interpreter::BytecodeArrayIterator& iterator) {

    return {iterator.GetRegisterOperand(index), 1};

  static bool HasOutputRegisterOperand() { return true; }

struct GetResultLocationAndSizeHelper<
    index, interpreter::OperandType::kRegOutPair, operands...> {
  static std::pair<interpreter::Register, int> GetResultLocationAndSize(
      const interpreter::BytecodeArrayIterator& iterator) {

    return {iterator.GetRegisterOperand(index), 2};

  static bool HasOutputRegisterOperand() { return true; }

struct GetResultLocationAndSizeHelper<
    index, interpreter::OperandType::kRegOutTriple, operands...> {
  static std::pair<interpreter::Register, int> GetResultLocationAndSize(
      const interpreter::BytecodeArrayIterator& iterator) {

    DCHECK(!(GetResultLocationAndSizeHelper<
             index + 1, operands...>::HasOutputRegisterOperand()));
    return {iterator.GetRegisterOperand(index), 3};

  static bool HasOutputRegisterOperand() { return true; }

struct GetResultLocationAndSizeHelper<
    index, interpreter::OperandType::kRegOutList, operands...> {
  static std::pair<interpreter::Register, int> GetResultLocationAndSize(
      const interpreter::BytecodeArrayIterator& iterator) {
    interpreter::RegisterList list = iterator.GetRegisterListOperand(index);
    return {list.first_register(), list.register_count()};

  static bool HasOutputRegisterOperand() { return true; }

struct GetResultLocationAndSizeHelper<index, operand, operands...> {
  static std::pair<interpreter::Register, int> GetResultLocationAndSize(
      const interpreter::BytecodeArrayIterator& iterator) {
    return GetResultLocationAndSizeHelper<
        index + 1, operands...>::GetResultLocationAndSize(iterator);

  static bool HasOutputRegisterOperand() {
    return GetResultLocationAndSizeHelper<
        index + 1, operands...>::HasOutputRegisterOperand();

std::pair<interpreter::Register, int> GetResultLocationAndSizeForBytecode(
    const interpreter::BytecodeArrayIterator& iterator) {

    DCHECK(!(GetResultLocationAndSizeHelper<
             0, operands...>::HasOutputRegisterOperand()));

  return GetResultLocationAndSizeHelper<
      0, operands...>::GetResultLocationAndSize(iterator);
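// Compile-time recursion over the bytecode's operand types: the first
// kRegOut/kRegOutPair/kRegOutTriple/kRegOutList operand supplies the result
// register and size (1, 2, 3, or the list's register_count()); the
// empty-pack base case reports that there is no output operand.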
 
std::pair<interpreter::Register, int>

#define CASE(Name, ...)                                           \
  case Bytecode::k##Name:                                         \
    return GetResultLocationAndSizeForBytecode<Bytecode::k##Name, \
                                               __VA_ARGS__>(iterator_);
 
                          operand_reg.index() + operand_range)) {

  if (static_cast<int>(args.size()) != unit->parameter_count()) {
 
 
        zone()->New<CompactInterpreterFrameState>(

    while (deopt_scope != nullptr &&

      deopt_scope = deopt_scope->parent();

    if (deopt_scope != nullptr) {

      if (deopt_scope->data().tag() ==

          int stack_parameter_count =

          deopt_scope->data(),
 
 
  if (scope == nullptr) {

      mark_accumulator_dead = false;

      for (int i = 0; i < result_size; i++) {

      int stack_parameter_count =

                        result_location, result_size, scope->parent(),

      zone()->New<CompactInterpreterFrameState>(

  (*entry_stack_check_frame_)

  AddDeoptUse((*entry_stack_check_frame_).closure());
 
 
      as_int32_constant && Smi::IsValid(as_int32_constant->value())) {

  if (ValueNode* alt = alternative.tagged()) {

  switch (representation) {

      if (NodeTypeIsSmi(node_info->type())) {

      if (NodeTypeIsSmi(node_info->type())) {

      if (NodeTypeIsSmi(node_info->type())) {
 
 
      value->properties().value_representation();

  if (ValueNode* alt = alternative.tagged()) {

      conversion_node->SetMode(

  switch (representation) {

      if (NodeTypeIsSmi(node_info->type())) {

      if (NodeTypeIsSmi(node_info->type())) {
 
 
  return NodeTypeIs(type, NodeType::kAnyHeapObject)

  if (CheckType(node, NodeType::kInternalizedString, &old_type)) return node;

    if (CheckType(node, NodeType::kInternalizedString, &old_type)) return node;

  if (!NodeTypeIs(old_type, NodeType::kString)) {

  known_info->alternative().set_checked_value(maybe_unwrapping_node);

  return maybe_unwrapping_node;
 
 
      value->properties().value_representation();

  switch (value->opcode()) {
    case Opcode::kConstant: {
      compiler::ObjectRef object = value->Cast<Constant>()->object();
      if (!object.IsHeapNumber()) break;

    case Opcode::kSmiConstant:

    case Opcode::kRootConstant: {

      int32_t truncated_value =

    case Opcode::kFloat64Constant: {
      int32_t truncated_value =

  if (ValueNode* alt = alternative.int32()) {

  if (ValueNode* alt = alternative.truncated_int32_to_number()) {

  switch (representation) {

      EnsureType(value, allowed_input_type, &old_type);
      if (NodeTypeIsSmi(old_type)) {

      if (allowed_input_type == NodeType::kSmi) {

      if (NodeTypeIs(old_type, allowed_input_type)) {
        return alternative.set_truncated_int32_to_number(

      return alternative.set_truncated_int32_to_number(

      return alternative.set_truncated_int32_to_number(

      return alternative.set_truncated_int32_to_number(
 1737      return alternative.set_truncated_int32_to_number(
 
 
  switch (value->opcode()) {
    case Opcode::kInt32Constant:

    case Opcode::kUint32Constant: {

      if (uint32_value <= INT32_MAX) {
        return static_cast<int32_t>(uint32_value);

    case Opcode::kSmiConstant:

    case Opcode::kFloat64Constant: {
      double double_value =

  switch (value->opcode()) {
    case Opcode::kInt32Constant: {

      if (int32_value >= 0) {
        return static_cast<uint32_t>(int32_value);

    case Opcode::kUint32Constant:

    case Opcode::kSmiConstant: {

      if (smi_value >= 0) {
        return static_cast<uint32_t>(smi_value);

    case Opcode::kFloat64Constant: {
      double double_value =
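// Range checks gate the folds: a Uint32 constant becomes an Int32 constant
// only when it is at most INT32_MAX, and an Int32/Smi constant becomes a
// Uint32 constant only when it is non-negative.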
 
 
                                        bool can_be_heap_number) {

      value->properties().value_representation();

  if (ValueNode* alt = alternative.int32()) {

  switch (representation) {

      if (can_be_heap_number && !CheckType(value, NodeType::kSmi)) {

      if (node_info->is_smi()) {
        return alternative.set_int32(

      return alternative.set_int32(
 
 
  switch (value->opcode()) {
    case Opcode::kConstant: {
      compiler::ObjectRef object = value->Cast<Constant>()->object();
      if (object.IsHeapNumber()) {
        return object.AsHeapNumber().value();

      DCHECK(!IsOddball(*object.object()));

    case Opcode::kInt32Constant:

    case Opcode::kSmiConstant:

    case Opcode::kFloat64Constant:

    case Opcode::kRootConstant: {

          IsBoolean(root_object)) {

          IsOddball(root_object)) {

      if (IsHeapNumber(root_object)) {
 
 
  if (ValueNode* alt = alternative.float64()) {

  switch (representation) {

      auto combined_type = CombineType(allowed_input_type, node_info->type());
      if (NodeTypeIs(combined_type, NodeType::kSmi)) {

      if (NodeTypeIs(combined_type, NodeType::kNumber)) {

            value, NodeType::kNumber,

      if (NodeTypeIs(combined_type, NodeType::kNumberOrOddball)) {

      return alternative.set_float64(

      switch (allowed_input_type) {
        case NodeType::kSmi:
        case NodeType::kNumber:
        case NodeType::kNumberOrBoolean:

          return alternative.set_float64(

        case NodeType::kNumberOrOddball:

      return alternative.set_float64(
 
 
      value->properties().value_representation();

int32_t ClampToUint8(int32_t value) {
  if (value < 0) return 0;
  if (value > 255) return 255;
  return value;
}

  switch (value->properties().value_representation()) {

      if (info && info->alternative().int32()) {
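// ClampToUint8 mirrors Uint8Clamped store semantics: out-of-range values are
// saturated into [0, 255] instead of being wrapped modulo 256.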
 
 
template <Operation kOperation>
struct NodeForOperationHelper;

#define NODE_FOR_OPERATION_HELPER(Name)               \
  template <>                                         \
  struct NodeForOperationHelper<Operation::k##Name> { \
    using generic_type = Generic##Name;               \
  };
#undef NODE_FOR_OPERATION_HELPER

template <Operation kOperation>
using GenericNodeForOperation =
    typename NodeForOperationHelper<kOperation>::generic_type;
 
template <Operation kOperation>
constexpr bool BinaryOperationIsBitwiseInt32() {
  switch (kOperation) {
    case Operation::kBitwiseNot:
    case Operation::kBitwiseAnd:
    case Operation::kBitwiseOr:
    case Operation::kBitwiseXor:
    case Operation::kShiftLeft:
    case Operation::kShiftRight:
    case Operation::kShiftRightLogical:
      return true;
    default:
      return false;
  }
}
 
#define MAP_BINARY_OPERATION_TO_INT32_NODE(V) \
  V(Add, Int32AddWithOverflow, 0)             \
  V(Subtract, Int32SubtractWithOverflow, 0)   \
  V(Multiply, Int32MultiplyWithOverflow, 1)   \
  V(Divide, Int32DivideWithOverflow, 1)       \
  V(Modulus, Int32ModulusWithOverflow, {})    \
  V(BitwiseAnd, Int32BitwiseAnd, ~0)          \
  V(BitwiseOr, Int32BitwiseOr, 0)             \
  V(BitwiseXor, Int32BitwiseXor, 0)           \
  V(ShiftLeft, Int32ShiftLeft, 0)             \
  V(ShiftRight, Int32ShiftRight, 0)           \
  V(ShiftRightLogical, Int32ShiftRightLogical, {})

#define MAP_UNARY_OPERATION_TO_INT32_NODE(V) \
  V(BitwiseNot, Int32BitwiseNot)             \
  V(Increment, Int32IncrementWithOverflow)   \
  V(Decrement, Int32DecrementWithOverflow)   \
  V(Negate, Int32NegateWithOverflow)

#define MAP_OPERATION_TO_FLOAT64_NODE(V) \
  V(Add, Float64Add)                     \
  V(Subtract, Float64Subtract)           \
  V(Multiply, Float64Multiply)           \
  V(Divide, Float64Divide)               \
  V(Modulus, Float64Modulus)             \
  V(Exponentiate, Float64Exponentiate)
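// The third V(...) argument is the operation's right identity (x op id == x),
// presumably used to strength-reduce trivial cases; {} marks operations
// without a usable identity (modulus, logical shift right).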
 
template <Operation kOperation>

#define CASE(op, _, identity) \
  case Operation::k##op:      \

template <Operation kOperation>
struct Int32NodeForHelper;
#define SPECIALIZATION(op, OpNode, ...)         \
  template <>                                   \
  struct Int32NodeForHelper<Operation::k##op> { \
    using type = OpNode;                        \
  };
#undef SPECIALIZATION

template <Operation kOperation>
using Int32NodeFor = typename Int32NodeForHelper<kOperation>::type;

template <Operation kOperation>
struct Float64NodeForHelper;
#define SPECIALIZATION(op, OpNode)                \
  template <>                                     \
  struct Float64NodeForHelper<Operation::k##op> { \
    using type = OpNode;                          \
  };
#undef SPECIALIZATION

template <Operation kOperation>
using Float64NodeFor = typename Float64NodeForHelper<kOperation>::type;
typename Float64NodeForHelper<kOperation>::type;
 
template <Operation kOperation>

template <Operation kOperation>

template <Operation kOperation>

template <Operation kOperation>

  if (!cst.has_value()) return {};
  switch (kOperation) {
    case Operation::kBitwiseNot:

    case Operation::kIncrement:
      if (cst.value() < INT32_MAX) {

    case Operation::kDecrement:
      if (cst.value() > INT32_MIN) {

    case Operation::kNegate:
      if (cst.value() == 0) {

      if (cst.value() != INT32_MIN) {
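// Overflow guards for unary folding: increment only below INT32_MAX,
// decrement only above INT32_MIN, and negation is rejected for 0 (the result
// would be -0, which is not an int32) and for INT32_MIN.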
 
 
template <Operation kOperation>

  static_assert(!BinaryOperationIsBitwiseInt32<kOperation>());

  using OpNodeT = Int32NodeFor<kOperation>;

                                   allowed_input_type, conversion_type);
 
 
template <Operation kOperation>

  if (!cst_right.has_value()) return {};

template <Operation kOperation>

  if (!cst_left.has_value()) return {};
  switch (kOperation) {
    case Operation::kAdd: {
      int64_t result = static_cast<int64_t>(cst_left.value()) +
                       static_cast<int64_t>(cst_right);

    case Operation::kSubtract: {
      int64_t result = static_cast<int64_t>(cst_left.value()) -
                       static_cast<int64_t>(cst_right);

    case Operation::kMultiply: {
      int64_t result = static_cast<int64_t>(cst_left.value()) *
                       static_cast<int64_t>(cst_right);

    case Operation::kModulus:

    case Operation::kDivide:

    case Operation::kBitwiseAnd:

    case Operation::kBitwiseOr:

    case Operation::kBitwiseXor:

    case Operation::kShiftLeft:

                              << (static_cast<uint32_t>(cst_right) % 32));
    case Operation::kShiftRight:

                              (static_cast<uint32_t>(cst_right) % 32));
    case Operation::kShiftRightLogical:

                               (static_cast<uint32_t>(cst_right) % 32));
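// Add/Subtract/Multiply are folded in int64_t so that int32 overflow can be
// detected before committing to a constant; shift counts are reduced mod 32,
// matching JavaScript's five-bit shift-count semantics.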
 
 
template <Operation kOperation>

  static_assert(!BinaryOperationIsBitwiseInt32<kOperation>());

  using OpNodeT = Int32NodeFor<kOperation>;

template <Operation kOperation>

  static_assert(BinaryOperationIsBitwiseInt32<kOperation>());

        allowed_input_type, conversion_type);

        allowed_input_type, conversion_type);

                                     allowed_input_type, conversion_type);

template <Operation kOperation>

  static_assert(!BinaryOperationIsBitwiseInt32<kOperation>());

  using OpNodeT = Int32NodeFor<kOperation>;

template <Operation kOperation>

  static_assert(BinaryOperationIsBitwiseInt32<kOperation>());

                                   allowed_input_type, conversion_type);
 
 
template <Operation kOperation>

  if (!cst.has_value()) return {};
  switch (kOperation) {
    case Operation::kNegate:

    case Operation::kIncrement:

    case Operation::kDecrement:

template <Operation kOperation>

  if (!cst_right.has_value()) return {};

      conversion_type, left, cst_right.value());

template <Operation kOperation>

  if (!cst_left.has_value()) return {};
  switch (kOperation) {
    case Operation::kAdd:

    case Operation::kSubtract:

    case Operation::kMultiply:

    case Operation::kDivide:

    case Operation::kModulus:

    case Operation::kExponentiate:
 
 
template <Operation kOperation>

template <Operation kOperation>

    case Operation::kNegate:

    case Operation::kIncrement:

    case Operation::kDecrement:

template <Operation kOperation>

std::tuple<NodeType, TaggedToFloat64ConversionType>

      return std::make_tuple(NodeType::kSmi,

      return std::make_tuple(NodeType::kNumber,

      return std::make_tuple(NodeType::kNumberOrOddball,
 
template <Operation kOperation>

  switch (feedback_hint) {

          DeoptimizeReason::kInsufficientTypeFeedbackForBinaryOperation);

      auto [allowed_input_type, conversion_type] =
          BinopHintToNodeTypeAndConversionType(feedback_hint);
      if constexpr (BinaryOperationIsBitwiseInt32<kOperation>()) {
        static_assert(kOperation == Operation::kBitwiseNot);

          allowed_input_type, conversion_type);
 
 
  auto GetIsTwoByteAndMap = [&](ValueNode* input) -> Result {
    if (auto maybe_constant =

      bool two_byte = maybe_constant->map(broker()).IsTwoByteStringMap();
      return {true, two_byte,

                                       : RootIndex::kConsOneByteStringMap)};

    switch (input->opcode()) {
      case Opcode::kNumberToString:
        return {true, false, GetRootConstant(RootIndex::kConsOneByteStringMap)};
      case Opcode::kInlinedAllocation: {

            return {true, cons_map->AsMap().IsTwoByteStringMap(), map};

          return {false, false, map};

    return {false, false, nullptr};

  auto left_info = GetIsTwoByteAndMap(left);
  auto right_info = GetIsTwoByteAndMap(right);
  if (left_info.static_map) {
    if (left_info.is_two_byte) {

    if (right_info.static_map) {
      if (right_info.is_two_byte) {

    if (right_info.result_map) {
      return right_info.result_map;

  } else if (left_info.result_map) {

    if (right_info.static_map && !right_info.is_two_byte) {
      return left_info.result_map;

      left_info.result_map ? left_info.result_map

      right_info.result_map
          ? right_info.result_map

    std::swap(left, right);
 
 
    if (maybe_constant->IsString()) {
      return maybe_constant->AsString().length();

  switch (string->opcode()) {
    case Opcode::kNumberToString:

    case Opcode::kInlinedAllocation:

    case Opcode::kStringConcat:
      if (max_depth == 0) return 0;

    case Opcode::kPhi: {

      if (max_depth == 0) return 0;
      auto phi = string->Cast<Phi>();
      if (phi->input_count() == 0 ||
          (phi->is_loop_phi() && phi->is_unmerged_loop_phi())) {

      size_t overall_min_length =

      for (int i = 1; i < phi->input_count(); ++i) {

        if (min < overall_min_length) {
          overall_min_length = min;

      return overall_min_length;
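// For a phi the minimum string length is the smallest minimum across all
// inputs; max_depth bounds the recursion through nested concatenations and
// phis, and unmerged loop phis are treated conservatively.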
 
 
  if (!v8_flags.maglev_cons_string_elision) {

  bool result_is_cons_string =

  if (!result_is_cons_string) {

  auto BuildConsString = [&]() {

        AssertCondition::kUnsignedLessThanEqual,
        DeoptimizeReason::kStringTooLarge,

      [&](auto& builder) {

      [&] { return right; },

            [&](auto& builder) {

            [&] { return left; }, [&] { return BuildConsString(); });

    if (root_constant->index() == RootIndex::kempty_string) {

    if (root_constant->index() == RootIndex::kempty_string) {
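// The empty-string checks presumably let concatenation with "" on either
// side skip cons-string allocation and reuse the other operand directly.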
 
 
template <Operation kOperation>

  switch (feedback_hint) {

          DeoptimizeReason::kInsufficientTypeFeedbackForBinaryOperation);

      auto [allowed_input_type, conversion_type] =
          BinopHintToNodeTypeAndConversionType(feedback_hint);
      if constexpr (BinaryOperationIsBitwiseInt32<kOperation>()) {

            allowed_input_type, conversion_type);

        if constexpr (kOperation == Operation::kExponentiate) {

            allowed_input_type, conversion_type);

      if constexpr (kOperation == Operation::kAdd) {

      if constexpr (kOperation == Operation::kAdd) {

                ->DependOnStringWrapperToPrimitiveProtector()) {
 
 
template <Operation kOperation>

  switch (feedback_hint) {

          DeoptimizeReason::kInsufficientTypeFeedbackForBinaryOperation);

      const auto [allowed_input_type, conversion_type] =
          BinopHintToNodeTypeAndConversionType(feedback_hint);
      if constexpr (BinaryOperationIsBitwiseInt32<kOperation>()) {

            kOperation>(allowed_input_type, conversion_type);

        if constexpr (kOperation == Operation::kExponentiate) {

            allowed_input_type, conversion_type);
 
 
template <Operation kOperation, typename type>

  switch (kOperation) {
    case Operation::kEqual:
    case Operation::kStrictEqual:
      return left == right;
    case Operation::kLessThan:
      return left < right;
    case Operation::kLessThanOrEqual:
      return left <= right;
    case Operation::kGreaterThan:
      return left > right;
    case Operation::kGreaterThanOrEqual:
      return left >= right;
 
 
    return MakeRef(broker, isolate->root_handle(c->index())).AsHeapObject();

    if (constant_node) *constant_node = node;

    if (auto c = info->alternative().checked_value()) {
 
 
template <Operation kOperation>

  compiler::OptionalHeapObjectRef maybe_constant = TryGetConstant(left);
  if (!maybe_constant) {

  if (!maybe_constant) return false;

  if (CheckType(other, NodeType::kBoolean)) {
    auto CompareOtherWith = [&](bool constant) {
      compiler::OptionalHeapObjectRef const_other = TryGetConstant(other);

        auto bool_other = const_other->TryGetBooleanValue(broker());
        if (bool_other.has_value()) {

    if (maybe_constant.equals(broker_->true_value())) {
      CompareOtherWith(true);

    } else if (maybe_constant.equals(broker_->false_value())) {
      CompareOtherWith(false);

    } else if (kOperation == Operation::kEqual) {

      std::optional<double> val = {};
      if (maybe_constant.value().IsSmi()) {
        val = maybe_constant.value().AsSmi();
      } else if (maybe_constant.value().IsHeapNumber()) {
        val = maybe_constant.value().AsHeapNumber().value();

          CompareOtherWith(false);
        } else if (*val == 1) {
          CompareOtherWith(true);

  if (kOperation != Operation::kStrictEqual) return false;

  if (maybe_constant->IsUndefined()) {

    } else if (right->properties().value_representation() ==

      holey_float = right;

      right->properties().value_representation() !=
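// Comparisons against true/false fold when the other operand is a known
// boolean; under loose equality the numeric constants 0 and 1 compare like
// false and true against a boolean operand.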
 
 
template <Operation kOperation>

      std::swap(left, right);

    if (left == right) {

                             kOperation == Operation::kGreaterThanOrEqual));

  auto MaybeOddballs = [&]() {

      return !CheckType(value, NodeType::kNumber);

          DeoptimizeReason::kInsufficientTypeFeedbackForCompareOperation);

      SortCommute(left, right);

      if (MaybeOddballs()) {

      if (kOperation == Operation::kStrictEqual && MaybeOddballs()) {

        SortCommute(left, right);

        allowed_input_type = NodeType::kNumberOrBoolean;

        allowed_input_type = NodeType::kNumber;

        double right_value =

      SortCommute(left, right);

        case Operation::kEqual:
        case Operation::kStrictEqual:

        case Operation::kLessThan:

              {tagged_left, tagged_right});
        case Operation::kLessThanOrEqual:

              {tagged_left, tagged_right});
        case Operation::kGreaterThan:

              {tagged_left, tagged_right});
        case Operation::kGreaterThanOrEqual:

              {tagged_left, tagged_right});
 
 
ReduceResult MaglevGraphBuilder::VisitLdaZero() {

ReduceResult MaglevGraphBuilder::VisitLdaSmi() {

ReduceResult MaglevGraphBuilder::VisitLdaUndefined() {

ReduceResult MaglevGraphBuilder::VisitLdaNull() {

ReduceResult MaglevGraphBuilder::VisitLdaTheHole() {

ReduceResult MaglevGraphBuilder::VisitLdaTrue() {

ReduceResult MaglevGraphBuilder::VisitLdaFalse() {

ReduceResult MaglevGraphBuilder::VisitLdaConstant() {
 
  if (slot_mutability == kMutable) return false;

  if (!constant) return false;

  compiler::OptionalObjectRef maybe_slot_value =

  if (!maybe_slot_value.has_value()) return false;

  compiler::ObjectRef slot_value = maybe_slot_value.value();
  if (slot_value.IsHeapObject()) {

        slot_value.AsHeapObject().map(broker()).oddball_type(broker());

        slot_value.IsTheHole()) {
 
 
  if (!context_node->Is<Constant>()) return {};

  DCHECK(context.object()->IsScriptContext());
  auto maybe_property = context.object()->GetScriptContextSideProperty(index);
  if (!maybe_property) return {};
  auto property = maybe_property.value();

      compiler::OptionalObjectRef constant = context.get(broker(), index);
      if (!constant.has_value()) {

          context, index, property, broker());

          context, index, property, broker());

      if (auto mutable_heap_number = context.get(broker(), index)) {
        if (!mutable_heap_number->IsHeapNumber()) {

            context, index, property, broker());

          context, index, property, broker());

      if (auto mutable_heap_number = context.get(broker(), index)) {
        if (!mutable_heap_number->IsHeapNumber()) {

            context, index, property, broker());

          context, index, property, broker());
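// Script context slots carry a side-table property describing how the slot
// has been used so far (constant, Smi, mutable heap number, ...); each branch
// above specializes the load to that shape and registers a dependency so the
// specialized code deopts if the shape later changes.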
 
 
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  * Reusing cached context slot "

    return cached_value;

      (v8_flags.script_context_mutable_heap_number ||

    if (cached_value) return cached_value;
    return cached_value =

    ValueNode* context, compiler::OptionalScopeInfoRef scope_info) {

  // Scope-info-based aliasing checks are currently disabled unconditionally:
  if ((true) || !v8_flags.reuse_scope_infos) return true;
  if (!scope_info.has_value()) {

  if (!other.has_value()) {

  return scope_info->equals(*other);
 
 
      context->Cast<Constant>()->ref().AsContext();

  auto maybe_property =
      context_ref.object()->GetScriptContextSideProperty(index);
  if (!maybe_property) {

  auto property = maybe_property.value();

    compiler::OptionalObjectRef constant = context_ref.get(broker(), index);
    if (!constant.has_value() ||
        (constant->IsString() && !constant->IsInternalizedString())) {

        context_ref, index, property, broker());

        value, *constant, DeoptimizeReason::kStoreToConstant);

  if (!v8_flags.script_context_mutable_heap_number) {

          context_ref, index, property, broker());

      if (auto mutable_heap_number = context_ref.get(broker(), index)) {
        if (!mutable_heap_number->IsHeapNumber()) {

          context_ref, index, property, broker());

      if (auto mutable_heap_number = context_ref.get(broker(), index)) {
        if (!mutable_heap_number->IsHeapNumber()) {

          context_ref, index, property, broker());
 
 
  Node* store = nullptr;
  if ((v8_flags.script_context_mutable_heap_number ||

    if (!store && result.IsDone()) {

  if (v8_flags.trace_maglev_graph_building) {
    std::cout << "  * Recording context slot store "

    compiler::OptionalScopeInfoRef scope_info =

    for (auto& cache : loaded_context_slots) {
      if (std::get<int>(cache.first) == offset &&
          std::get<ValueNode*>(cache.first) != context) {

            cache.second != value) {
          if (v8_flags.trace_maglev_graph_building) {
            std::cout << "  * Clearing probably aliasing value "

                                        std::get<ValueNode*>(cache.first))

          cache.second = nullptr;

  auto updated = loaded_context_slots.emplace(key, value);
  if (updated.second) {

    if (updated.first->second != value) {
      updated.first->second = value;

        last_store->second = store;
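// The loaded-slot cache is keyed by (context node, offset); a store through
// one context node may alias the same offset reached through a different
// node, so such entries are invalidated (set to nullptr) rather than updated.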
 
 
    ValueNode* context, size_t depth, int slot_index,
 
 
ReduceResult MaglevGraphBuilder::VisitLdaContextSlot() {

ReduceResult MaglevGraphBuilder::VisitLdaScriptContextSlot() {

ReduceResult MaglevGraphBuilder::VisitLdaImmutableContextSlot() {

ReduceResult MaglevGraphBuilder::VisitLdaCurrentContextSlot() {

ReduceResult MaglevGraphBuilder::VisitLdaCurrentScriptContextSlot() {

ReduceResult MaglevGraphBuilder::VisitLdaImmutableCurrentContextSlot() {

ReduceResult MaglevGraphBuilder::VisitStaContextSlot() {

ReduceResult MaglevGraphBuilder::VisitStaCurrentContextSlot() {

ReduceResult MaglevGraphBuilder::VisitStaScriptContextSlot() {

ReduceResult MaglevGraphBuilder::VisitStaCurrentScriptContextSlot() {

ReduceResult MaglevGraphBuilder::VisitStar() {

#define SHORT_STAR_VISITOR(Name, ...)                                          \
  ReduceResult MaglevGraphBuilder::Visit##Name() {                             \
    MoveNodeBetweenRegisters(                                                  \
        interpreter::Register::virtual_accumulator(),                          \
        interpreter::Register::FromShortStar(interpreter::Bytecode::k##Name)); \
    return ReduceResult::Done();                                               \
  }

#undef SHORT_STAR_VISITOR

ReduceResult MaglevGraphBuilder::VisitMov() {

ReduceResult MaglevGraphBuilder::VisitPushContext() {

ReduceResult MaglevGraphBuilder::VisitPopContext() {

  if (tagged_lhs == tagged_rhs) {
 
 
ReduceResult MaglevGraphBuilder::VisitTestReferenceEqual() {

  if (value->properties().value_representation() ==

  } else if (value->properties().value_representation() !=

    auto map = maybe_constant.value().map(broker());

  if (CheckType(value, NodeType::kSmi, &node_type)) {

    if (info.possible_maps_are_known()) {

      DCHECK_GT(info.possible_maps().size(), 0);
      bool first_is_undetectable = info.possible_maps()[0].is_undetectable();
      bool all_the_same_value =
          std::all_of(info.possible_maps().begin(), info.possible_maps().end(),

                        bool is_undetectable = map.is_undetectable();
                        return (first_is_undetectable && is_undetectable) ||
                               (!first_is_undetectable && !is_undetectable);

      if (all_the_same_value) {

  enum CheckType type = GetCheckType(node_type);
 
 
  switch (result->opcode()) {
    case Opcode::kRootConstant:

        case RootIndex::kTrueValue:
        case RootIndex::kUndefinedValue:
        case RootIndex::kNullValue:

    case Opcode::kHoleyFloat64IsHole:

    case Opcode::kTestUndetectable:
 
 
ReduceResult MaglevGraphBuilder::VisitTestUndetectable() {

ReduceResult MaglevGraphBuilder::VisitTestNull() {

ReduceResult MaglevGraphBuilder::VisitTestUndefined() {

template <typename Function>

      value, {NodeType::kBoolean, NodeType::kNumber, NodeType::kString,
              NodeType::kSymbol, NodeType::kCallable, NodeType::kJSArray})) {
    case NodeType::kBoolean:
      return GetResult(TypeOfLiteralFlag::kBoolean, RootIndex::kboolean_string);
    case NodeType::kNumber:
      return GetResult(TypeOfLiteralFlag::kNumber, RootIndex::knumber_string);
    case NodeType::kString:
      return GetResult(TypeOfLiteralFlag::kString, RootIndex::kstring_string);
    case NodeType::kSymbol:
      return GetResult(TypeOfLiteralFlag::kSymbol, RootIndex::ksymbol_string);
    case NodeType::kCallable:

          [&](auto& builder) {

            return GetResult(TypeOfLiteralFlag::kUndefined,
                             RootIndex::kundefined_string);

            return GetResult(TypeOfLiteralFlag::kFunction,
                             RootIndex::kfunction_string);

    case NodeType::kJSArray:

      return GetResult(TypeOfLiteralFlag::kObject, RootIndex::kobject_string);

  if (IsNullValue(value)) {
    return GetResult(TypeOfLiteralFlag::kObject, RootIndex::kobject_string);

  if (IsUndefinedValue(value)) {
    return GetResult(TypeOfLiteralFlag::kUndefined,
                     RootIndex::kundefined_string);
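// typeof resolves statically once the operand's NodeType is known; the
// kCallable case still branches at runtime, presumably to report "undefined"
// for undetectable callables such as document.all.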
 
 
  if (literal == TypeOfLiteralFlag::kOther) {

                                               : RootIndex::kFalseValue);

  if (global_access_feedback.immutable()) return {};
 
 
  compiler::ObjectRef property_cell_value = property_cell.value(broker());
  if (property_cell_value.IsPropertyCellHole()) {

        DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);

          value, property_cell_value, DeoptimizeReason::kStoreToConstant);

      if (property_cell_value.IsHeapObject() &&
          !property_cell_value.AsHeapObject().map(broker()).is_stable()) {

      if (property_cell_value.IsHeapObject()) {

            property_cell_value.AsHeapObject().map(broker());

                            PropertyCell::kValueOffset,

                            PropertyCell::kValueOffset,
 
 
  if (!global_access_feedback.immutable()) return {};
  compiler::OptionalObjectRef maybe_slot_value =

  if (!maybe_slot_value) return {};

  compiler::ObjectRef property_cell_value = property_cell.value(broker());
  if (property_cell_value.IsPropertyCellHole()) {

        DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
 
 
  static const int kNameOperandIndex = 0;
  static const int kSlotOperandIndex = 1;

ReduceResult MaglevGraphBuilder::VisitLdaGlobalInsideTypeof() {

  static const int kNameOperandIndex = 0;
  static const int kSlotOperandIndex = 1;

  compiler::FeedbackSource feedback_source{feedback(), slot};

ReduceResult MaglevGraphBuilder::VisitStaGlobal() {

  compiler::FeedbackSource feedback_source{feedback(), slot};

  const compiler::ProcessedFeedback& access_feedback =

  if (access_feedback.IsInsufficient()) {

        DeoptimizeReason::kInsufficientTypeFeedbackForGenericGlobalAccess);

  const compiler::GlobalAccessFeedback& global_access_feedback =
ReduceResult MaglevGraphBuilder::VisitLdaLookupSlot() {

ReduceResult MaglevGraphBuilder::VisitLdaLookupContextSlot() {

ReduceResult MaglevGraphBuilder::VisitLdaLookupScriptContextSlot() {

      {name, depth, slot}));

  compiler::OptionalScopeInfoRef maybe_scope_info =

  if (!maybe_scope_info.has_value()) return false;

  for (uint32_t d = 0; d < depth; d++) {

        !broker()->dependencies()->DependOnEmptyContextExtension(scope_info)) {

      compiler::OptionalHeapObjectRef maybe_ref = TryGetConstant(context);
      if (!maybe_ref) return false;

      compiler::OptionalObjectRef extension_ref =

      if (!extension_ref) return false;
      if (!extension_ref->IsUndefined()) return false;

                             DeoptimizeReason::kUnexpectedContextExtension);
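// Fast-pathing a lookup slot requires proving that no context on the way to
// the target has an extension object: either via a compilation dependency on
// empty context extensions, or by checking that the constant context's
// extension slot is undefined, deopting with kUnexpectedContextExtension
// otherwise.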
 
 
ReduceResult MaglevGraphBuilder::VisitLdaLookupGlobalSlot() {

          {name_node, depth, slot, vector});

          {name_node, depth, slot});

ReduceResult MaglevGraphBuilder::VisitLdaLookupSlotInsideTypeof() {

ReduceResult MaglevGraphBuilder::VisitLdaLookupContextSlotInsideTypeof() {

          {name, depth, slot}));

ReduceResult MaglevGraphBuilder::VisitLdaLookupScriptContextSlotInsideTypeof() {

          {name, depth, slot}));

ReduceResult MaglevGraphBuilder::VisitLdaLookupGlobalSlotInsideTypeof() {

        {name, depth, slot, vector});

        {name, depth, slot});

  using Flags = interpreter::StoreLookupSlotFlags;
  switch (Flags::GetLanguageMode(sta_lookup_slot_flags)) {

      return Runtime::kStoreLookupSlot_Strict;

      if (Flags::IsLookupHoistingMode(sta_lookup_slot_flags)) {
        return Runtime::kStoreLookupSlot_SloppyHoisting;
      } else {
        return Runtime::kStoreLookupSlot_Sloppy;
ReduceResult MaglevGraphBuilder::VisitStaLookupSlot() {

  switch (node->properties().value_representation()) {

      return NodeType::kNumber;

      return NodeType::kNumberOrOddball;

  switch (node->opcode()) {
    case Opcode::kPhi:
      return node->Cast<Phi>()->type();
 4438    case Opcode::kCheckedSmiTagInt32:
 
 4439    case Opcode::kCheckedSmiTagUint32:
 
 4440    case Opcode::kCheckedSmiTagIntPtr:
 
 4441    case Opcode::kCheckedSmiTagFloat64:
 
 4442    case Opcode::kUnsafeSmiTagInt32:
 
 4443    case Opcode::kUnsafeSmiTagUint32:
 
 4444    case Opcode::kUnsafeSmiTagIntPtr:
 
 4445    case Opcode::kSmiConstant:
 
 4446      return NodeType::kSmi;
 
 4447    case Opcode::kInt32ToNumber:
 
 4448    case Opcode::kUint32ToNumber:
 
 4449    case Opcode::kIntPtrToNumber:
 
 4450    case Opcode::kFloat64ToTagged:
 
 4451      return NodeType::kNumber;
 
 4452    case Opcode::kHoleyFloat64ToTagged:
 
 4453      return NodeType::kNumberOrOddball;
 
 4454    case Opcode::kAllocationBlock:
 
 4455    case Opcode::kInlinedAllocation: {
 
 4457      if (obj->has_static_map()) {
 
 4460        switch (obj->type()) {
 
 4462            return NodeType::kNonThinString;
 
 4470    case Opcode::kRootConstant: {
 
 4472      switch (constant->index()) {
 
 4473        case RootIndex::kTrueValue:
 
 4474        case RootIndex::kFalseValue:
 
 4475          return NodeType::kBoolean;
 
 4476        case RootIndex::kUndefinedValue:
 
 4477        case RootIndex::kNullValue:
 
 4478          return NodeType::kOddball;
 
 4484    case Opcode::kConstant: {
 
 4489    case Opcode::kToNumberOrNumeric:
 
 4492        return NodeType::kNumber;
 
 4495      return NodeType::kUnknown;
 
 4496    case Opcode::kToString:
 
 4497    case Opcode::kNumberToString:
 
 4498    case Opcode::kUnwrapStringWrapper:
 
 4499      return NodeType::kString;
 
 4500    case Opcode::kStringConcat:
 
 4501    case Opcode::kUnwrapThinString:
 
 4502      return NodeType::kNonThinString;
 
 4503    case Opcode::kCheckedInternalizedString:
 
 4504      return NodeType::kInternalizedString;
 
 4505    case Opcode::kToObject:
 
 4506    case Opcode::kCreateObjectLiteral:
 
 4507    case Opcode::kCreateShallowObjectLiteral:
 
 4508      return NodeType::kJSReceiver;
 
 4509    case Opcode::kCreateArrayLiteral:
 
 4510    case Opcode::kCreateShallowArrayLiteral:
 
 4511      return NodeType::kJSArray;
 
 4512    case Opcode::kToName:
 
 4513      return NodeType::kName;
 
 4514    case Opcode::kFastCreateClosure:
 
 4515    case Opcode::kCreateClosure:
 
 4516      return NodeType::kCallable;
 
 4517    case Opcode::kInt32Compare:
 
 4518    case Opcode::kFloat64Compare:
 
 4519    case Opcode::kGenericEqual:
 
 4520    case Opcode::kGenericStrictEqual:
 
 4521    case Opcode::kGenericLessThan:
 
 4522    case Opcode::kGenericLessThanOrEqual:
 
 4523    case Opcode::kGenericGreaterThan:
 
 4524    case Opcode::kGenericGreaterThanOrEqual:
 
 4525    case Opcode::kLogicalNot:
 
 4526    case Opcode::kStringEqual:
 
 4527    case Opcode::kTaggedEqual:
 
 4528    case Opcode::kTaggedNotEqual:
 
 4529    case Opcode::kTestInstanceOf:
 
 4530    case Opcode::kTestTypeOf:
 
 4531    case Opcode::kTestUndetectable:
 
 4532    case Opcode::kToBoolean:
 
 4533    case Opcode::kToBooleanLogicalNot:
 
 4534    case Opcode::kIntPtrToBoolean:
 
 4535    case Opcode::kSetPrototypeHas:
 
 4536      return NodeType::kBoolean;
 
 4538#define GENERATE_CASE(Name) case Opcode::k##Name: 
 4543    case Opcode::kCreateFastArrayElements:
 
 4544    case Opcode::kTransitionElementsKind:
 
 4547    case Opcode::kIdentity:
 
 4548    case Opcode::kArgumentsElements:
 
 4549    case Opcode::kArgumentsLength:
 
 4550    case Opcode::kRestLength:
 
 4552    case Opcode::kCallBuiltin:
 
 4553    case Opcode::kCallCPPBuiltin:
 
 4554    case Opcode::kCallForwardVarargs:
 
 4555    case Opcode::kCallRuntime:
 
 4556    case Opcode::kCallWithArrayLike:
 
 4557    case Opcode::kCallWithSpread:
 
 4558    case Opcode::kCallKnownApiFunction:
 
 4559    case Opcode::kCallKnownJSFunction:
 
 4560    case Opcode::kCallSelf:
 
 4561    case Opcode::kConstruct:
 
 4562    case Opcode::kCheckConstructResult:
 
 4563    case Opcode::kCheckDerivedConstructResult:
 
 4564    case Opcode::kConstructWithSpread:
 
 4565    case Opcode::kConvertReceiver:
 
 4566    case Opcode::kConvertHoleToUndefined:
 
 4567    case Opcode::kCreateFunctionContext:
 
 4568    case Opcode::kCreateRegExpLiteral:
 
 4569    case Opcode::kDeleteProperty:
 
 4570    case Opcode::kEnsureWritableFastElements:
 
 4571    case Opcode::kExtendPropertiesBackingStore:
 
 4572    case Opcode::kForInPrepare:
 
 4573    case Opcode::kForInNext:
 
 4574    case Opcode::kGeneratorRestoreRegister:
 
 4575    case Opcode::kGetIterator:
 
 4576    case Opcode::kGetSecondReturnedValue:
 
 4577    case Opcode::kGetTemplateObject:
 
 4578    case Opcode::kHasInPrototypeChain:
 
 4579    case Opcode::kInitialValue:
 
 4580    case Opcode::kLoadTaggedField:
 
 4581    case Opcode::kLoadTaggedFieldForProperty:
 
 4582    case Opcode::kLoadTaggedFieldForContextSlot:
 
 4583    case Opcode::kLoadTaggedFieldForScriptContextSlot:
 
 4584    case Opcode::kLoadDoubleField:
 
 4585    case Opcode::kLoadFloat64:
 
 4586    case Opcode::kLoadInt32:
 
 4587    case Opcode::kLoadHeapInt32:
 
 4588    case Opcode::kLoadTaggedFieldByFieldIndex:
 
 4589    case Opcode::kLoadFixedArrayElement:
 
 4590    case Opcode::kLoadFixedDoubleArrayElement:
 
 4591    case Opcode::kLoadHoleyFixedDoubleArrayElement:
 
    case Opcode::kLoadHoleyFixedDoubleArrayElementCheckedNotHole:
    case Opcode::kLoadSignedIntDataViewElement:
    case Opcode::kLoadDoubleDataViewElement:
    case Opcode::kLoadTypedArrayLength:
    case Opcode::kLoadSignedIntTypedArrayElement:
    case Opcode::kLoadUnsignedIntTypedArrayElement:
    case Opcode::kLoadDoubleTypedArrayElement:
    case Opcode::kLoadEnumCacheLength:
    case Opcode::kLoadGlobal:
    case Opcode::kLoadNamedGeneric:
    case Opcode::kLoadNamedFromSuperGeneric:
    case Opcode::kMaybeGrowFastElements:
    case Opcode::kMigrateMapIfNeeded:
    case Opcode::kSetNamedGeneric:
    case Opcode::kDefineNamedOwnGeneric:
    case Opcode::kStoreInArrayLiteralGeneric:
    case Opcode::kStoreGlobal:
    case Opcode::kGetKeyedGeneric:
    case Opcode::kSetKeyedGeneric:
    case Opcode::kDefineKeyedOwnGeneric:
    case Opcode::kRegisterInput:
    case Opcode::kCheckedSmiSizedInt32:
    case Opcode::kCheckedSmiUntag:
    case Opcode::kUnsafeSmiUntag:
    case Opcode::kCheckedObjectToIndex:
    case Opcode::kCheckedTruncateNumberOrOddballToInt32:
    case Opcode::kCheckedInt32ToUint32:
    case Opcode::kCheckedIntPtrToUint32:
    case Opcode::kUnsafeInt32ToUint32:
    case Opcode::kCheckedUint32ToInt32:
    case Opcode::kCheckedIntPtrToInt32:
    case Opcode::kChangeInt32ToFloat64:
    case Opcode::kChangeUint32ToFloat64:
    case Opcode::kChangeIntPtrToFloat64:
    case Opcode::kCheckedTruncateFloat64ToInt32:
    case Opcode::kCheckedTruncateFloat64ToUint32:
    case Opcode::kTruncateNumberOrOddballToInt32:
    case Opcode::kCheckedNumberToInt32:
    case Opcode::kTruncateUint32ToInt32:
    case Opcode::kTruncateFloat64ToInt32:
    case Opcode::kUnsafeTruncateUint32ToInt32:
    case Opcode::kUnsafeTruncateFloat64ToInt32:
    case Opcode::kInt32ToUint8Clamped:
    case Opcode::kUint32ToUint8Clamped:
    case Opcode::kFloat64ToUint8Clamped:
    case Opcode::kCheckedNumberToUint8Clamped:
    case Opcode::kFloat64ToHeapNumberForField:
    case Opcode::kCheckedNumberOrOddballToFloat64:
    case Opcode::kUncheckedNumberOrOddballToFloat64:
    case Opcode::kCheckedNumberOrOddballToHoleyFloat64:
    case Opcode::kCheckedHoleyFloat64ToFloat64:
    case Opcode::kHoleyFloat64ToMaybeNanFloat64:
    case Opcode::kHoleyFloat64IsHole:
    case Opcode::kSetPendingMessage:
    case Opcode::kStringAt:
    case Opcode::kStringLength:
    case Opcode::kAllocateElementsArray:
    case Opcode::kUpdateJSArrayLength:
    case Opcode::kVirtualObject:
    case Opcode::kGetContinuationPreservedEmbedderData:
    case Opcode::kExternalConstant:
    case Opcode::kFloat64Constant:
    case Opcode::kInt32Constant:
    case Opcode::kUint32Constant:
    case Opcode::kTaggedIndexConstant:
    case Opcode::kTrustedConstant:
    case Opcode::kInt32AbsWithOverflow:
    case Opcode::kInt32AddWithOverflow:
    case Opcode::kInt32SubtractWithOverflow:
    case Opcode::kInt32MultiplyWithOverflow:
    case Opcode::kInt32DivideWithOverflow:
    case Opcode::kInt32ModulusWithOverflow:
    case Opcode::kInt32BitwiseAnd:
    case Opcode::kInt32BitwiseOr:
    case Opcode::kInt32BitwiseXor:
    case Opcode::kInt32ShiftLeft:
    case Opcode::kInt32ShiftRight:
    case Opcode::kInt32ShiftRightLogical:
    case Opcode::kInt32BitwiseNot:
    case Opcode::kInt32NegateWithOverflow:
    case Opcode::kInt32IncrementWithOverflow:
    case Opcode::kInt32DecrementWithOverflow:
    case Opcode::kInt32ToBoolean:
    case Opcode::kFloat64Abs:
    case Opcode::kFloat64Add:
    case Opcode::kFloat64Subtract:
    case Opcode::kFloat64Multiply:
    case Opcode::kFloat64Divide:
    case Opcode::kFloat64Exponentiate:
    case Opcode::kFloat64Modulus:
    case Opcode::kFloat64Negate:
    case Opcode::kFloat64Round:
    case Opcode::kFloat64ToBoolean:
    case Opcode::kFloat64Ieee754Unary:
    case Opcode::kCheckedSmiIncrement:
    case Opcode::kCheckedSmiDecrement:
    case Opcode::kGenericAdd:
    case Opcode::kGenericSubtract:
    case Opcode::kGenericMultiply:
    case Opcode::kGenericDivide:
    case Opcode::kGenericModulus:
    case Opcode::kGenericExponentiate:
    case Opcode::kGenericBitwiseAnd:
    case Opcode::kGenericBitwiseOr:
    case Opcode::kGenericBitwiseXor:
    case Opcode::kGenericShiftLeft:
    case Opcode::kGenericShiftRight:
    case Opcode::kGenericShiftRightLogical:
    case Opcode::kGenericBitwiseNot:
    case Opcode::kGenericNegate:
    case Opcode::kGenericIncrement:
    case Opcode::kGenericDecrement:
    case Opcode::kBuiltinStringFromCharCode:
    case Opcode::kBuiltinStringPrototypeCharCodeOrCodePointAt:
    case Opcode::kConsStringMap:
    case Opcode::kMapPrototypeGet:
    case Opcode::kMapPrototypeGetInt32Key:
      return NodeType::kUnknown;
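// Every opcode listed above is treated as producing NodeType::kUnknown by the
// static-type query: no type may be assumed for their results without
// consulting the per-node information tracked in KnownNodeAspects.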
 
 
  // ...
  if (current_type) *current_type = static_type;
  // ...
  if (old_type) *old_type = known_info->type();
  // ...
  if (auto phi = node->TryCast<Phi>()) {
  // ...

template <typename Function>
// ...
  ensure_new_type(known_info->type());
  // ...
  if (ref.IsHeapObject()) {
  // ...
                                        std::initializer_list<NodeType> types) {
  // ...
  return NodeType::kUnknown;
  // ...
  if (current_type) *current_type = it->second.type();
  // ...
  NodeType actual_type = it->second.type();
  if (auto phi = node->TryCast<Phi>()) {
    actual_type = CombineType(actual_type, phi->type());
  // ...
    DCHECK(static_type == NodeType::kSmi && actual_type == NodeType::kNumber &&
  // ...
      if (auto phi = node->TryCast<Phi>()) {
        phi->SetUseRequires31BitValue();
  // ...
  if (EnsureType(node, allowed_input_type, &old_type)) {
    if (old_type == NodeType::kSmi) {
  // ...
  if (CheckType(object, NodeType::kAnyHeapObject)) {
  // ...
  if (EnsureType(object, NodeType::kSmi) && elidable) return object;
  switch (object->value_representation()) {
  // ...
  if (EnsureType(object, NodeType::kString, &known_type))
  // ...
  if (EnsureType(object, NodeType::kStringOrStringWrapper, &known_type))
  // ...
  if (EnsureType(object, NodeType::kSymbol, &known_type))
  // ...
  if (EnsureType(object, NodeType::kJSReceiver, &known_type))
  // ...
                                FIRST_JS_RECEIVER_TYPE, LAST_JS_RECEIVER_TYPE);
  // ...
  if (EnsureType(object, NodeType::kJSReceiverOrNullOrUndefined, &known_type)) {
  // ...
                                  NodeType::kJSReceiverOrNullOrUndefined))) {
  // ...
        DeoptimizeReason::kNotAJavaScriptObjectOrNullOrUndefined);
  // ...
                                               GetCheckType(known_type));
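// KnownMapsMerger intersects the set of maps a check wants to establish
// (requested_maps_) with whatever map information is already recorded for the
// node in KnownNodeAspects. The intersection lets map checks be narrowed or
// elided entirely, and stable maps in the result are registered as
// compilation dependencies instead of being re-checked at runtime.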
 
 
class KnownMapsMerger {
  // ...
  void IntersectWithKnownNodeAspects(
      ValueNode* object, const KnownNodeAspects& known_node_aspects) {
    auto node_info_it = known_node_aspects.FindInfo(object);
    bool has_node_info = known_node_aspects.IsValid(node_info_it);
    // ...
        has_node_info ? node_info_it->second.type() : NodeType::kUnknown;
    if (has_node_info && node_info_it->second.possible_maps_are_known()) {
      // ...
           node_info_it->second.possible_maps()) {
        // ...
            InsertMap(possible_map);
      // ...
      for (compiler::MapRef map : requested_maps_) {
  // ...

  void UpdateKnownNodeAspects(ValueNode* object,
                              KnownNodeAspects& known_node_aspects) {
    // ...
    auto node_info = known_node_aspects.GetOrCreateInfoFor(
        object, broker_, broker_->local_isolate());
    node_info->SetPossibleMaps(intersect_set_, any_map_is_unstable_, node_type_,
    // ...
    if (any_map_is_unstable_) {
      known_node_aspects.any_map_for_any_node_is_unstable = true;
    // ...
    if (!existing_known_maps_found_) {
      for (compiler::MapRef map : intersect_set_) {
        if (map.is_stable()) {
          broker_->dependencies()->DependOnStableMap(map);
  // ...

  bool known_maps_are_subset_of_requested_maps() const {
 
  // ...
  void InsertMap(compiler::MapRef map) {
    if (map.is_migration_target()) {
    // ...
    if (new_type == NodeType::kHeapNumber) {
    // ...
    if (!map.is_stable()) {
  // ...
    std::optional<ValueNode*> map,
    bool has_deprecated_map_without_migration_target) {
 
  if (compiler::OptionalHeapObjectRef constant = TryGetConstant(object)) {
    // ...
    if (std::find(maps.begin(), maps.end(), constant_map) != maps.end()) {
  // ...
  KnownMapsMerger merger(broker(), zone(), maps);
  // ...
  if (merger.known_maps_are_subset_of_requested_maps()) {
    // ...
      DCHECK_NE(std::find(maps.begin(), maps.end(), possible_map), maps.end());
  // ...
  if (merger.intersect_set().is_empty()) {
  // ...
  if (merger.emit_check_with_migration()) {
    // ...
                                       GetCheckType(known_info->type()));
  } else if (has_deprecated_map_without_migration_target) {
    // ...
        {object}, merger.intersect_set(), GetCheckType(known_info->type()));
  // ...
                                              merger.intersect_set());
  // ...
                          GetCheckType(known_info->type()));
  // ...
    CHECK(!transition_source.is_migration_target());
  // ...
      {heap_object, object_map}, transition_sources, transition_target);
  // ...
  DCHECK(transition_target.IsJSReceiverMap());
 
 
    std::optional<MaglevSubGraphBuilder::Label>& if_not_matched) {
  // ...
  KnownMapsMerger merger(broker(), zone(), maps);
  // ...
  if (merger.intersect_set().is_empty()) {
  // ...
  std::optional<MaglevSubGraphBuilder::Label> map_matched;
  // ...
  if (relevant_maps.size() > 1) {
    map_matched.emplace(sub_graph, static_cast<int>(relevant_maps.size()));
    for (size_t map_index = 1; map_index < relevant_maps.size(); map_index++) {
  // ...
  if_not_matched.emplace(sub_graph, 1);
  // ...
      &*if_not_matched, {object_map, GetConstant(relevant_maps.at(0))});
  if (map_matched.has_value()) {
    sub_graph->Goto(&*map_matched);
    sub_graph->Bind(&*map_matched);
  // ...

    std::optional<MaglevSubGraphBuilder::Label>& if_not_matched) {
  // ...
      {heap_object, object_map}, transition_sources, transition_target);
  // ...
  if_not_matched.emplace(sub_graph, 1);
  // ...
      &*if_not_matched, {new_map, GetConstant(transition_target)});
  // ...
  DCHECK(transition_target.IsJSReceiverMap());
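// Write-barrier elision: a store of `value` into `object` needs no barrier
// when the value is provably a Smi, or when value and host belong to the same
// inlined allocation (tracked via GetAllocation). Barriers elided because the
// value is currently Smi-typed are flagged with
// set_elided_write_barriers_depend_on_type() so the decision can be
// re-validated if that type assumption is later widened.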
 
 
  if (object->Is<AllocationBlock>()) {
    return object->Cast<AllocationBlock>();
  // ...
      allocation == GetAllocation(value)) {
    allocation->set_elided_write_barriers_depend_on_type();
  // ...
  if (value->is_tagged()) return false;
  // ...
    DCHECK(tagged_alt->properties().is_conversion());
    return CheckType(tagged_alt, NodeType::kSmi);
  // ...
  DCHECK(value->is_tagged());
  // ...
    CHECK(escape_deps != graph()->allocations_escape_map().end());
    escape_deps->second.push_back(inlined_value);
  // ...
    auto elided_deps = elided_map.try_emplace(inlined_value, zone()).first;
    elided_deps->second.push_back(object);
    inlined_value->AddNonEscapingUses();
  // ...
  if (value_is_trusted) {
 
 
  auto it = graph->allocations_elide_map().find(alloc);
  if (it == graph->allocations_elide_map().end()) return false;
  // ...
    if (IsEscaping(graph, inner_alloc)) {
  // ...

bool VerifyIsNotEscaping(VirtualObjectList vos, InlinedAllocation* alloc) {
  for (VirtualObject* vo : vos) {
    if (vo->allocation() == alloc) continue;
    bool escaped = false;
    vo->ForEachInput([&](ValueNode* nested_value) {
      if (escaped) return;
      if (!nested_value->Is<InlinedAllocation>()) return;
      ValueNode* nested_alloc = nested_value->Cast<InlinedAllocation>();
      if (nested_alloc == alloc) {
        if (vo->allocation()->IsEscaping() ||
            !VerifyIsNotEscaping(vos, vo->allocation())) {
  // ...
    if (escaped) return false;
  // ...
  if (!v8_flags.maglev_object_tracking) return false;
  // ...
    if (IsEscaping(graph_, alloc)) return false;
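// IsEscaping answers whether an elided allocation still escapes through one
// of the allocations it was folded into; VerifyIsNotEscaping is the recursive
// debug-mode counterpart that walks every live virtual object and checks that
// any reference to `alloc` occurs only inside allocations that are themselves
// non-escaping. Object tracking as a whole is gated on
// v8_flags.maglev_object_tracking.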
 
 
  vobject->set(offset, value);
  // ...
  if (v8_flags.trace_maglev_object_tracking) {
    std::cout << "  * Setting value in virtual object "
  // ...
                 !value->properties().is_conversion());
  // ...
    if (v8_flags.maglev_pretenure_store_values) {
      // ...
        if (alloc->allocation_block()->allocation_type() ==
        // ...
            value_alloc->allocation_block()->TryPretenure();
  // ...
                 !value->properties().is_conversion());
 
 
#ifdef V8_ENABLE_SANDBOX
  // ...
  compiler::OptionalHeapObjectRef maybe_constant;
  // ...
      maybe_constant.value().IsFixedArray()) {
    // ...
        maybe_constant.value().AsFixedArray();
    if (index >= 0 && static_cast<uint32_t>(index) < fixed_array_ref.length()) {
      compiler::OptionalObjectRef maybe_value =
      // ...
      if (maybe_value) return GetConstant(*maybe_value);
  // ...
    DCHECK(vobject->map().IsFixedArrayMap());
    // ...
      if (index >= 0 && index < length.value()) {
  // ...
        {elements, index, value});
  // ...
    if (index >= 0 && static_cast<uint32_t>(index) < elements_array.length()) {
 
 
    compiler::ObjectRef receiver_prototype = receiver_map.prototype(broker());
    if (!receiver_prototype.IsJSObject() ||
        !broker()->IsArrayOrObjectPrototype(receiver_prototype.AsJSObject())) {
 
 
compiler::OptionalObjectRef
// ...
  compiler::OptionalObjectRef constant =
      access_info.holder()->GetOwnDictionaryProperty(
  // ...
  if (!constant.has_value()) return {};
  // ...
    if (!IsJSReceiverMap(*map_handle)) {
      // ...
              *map_handle, *broker()->target_native_context().object())
      // ...
      map = MakeRefAssumeMemoryFence(broker(), constructor->initial_map());
      DCHECK(IsJSObjectMap(*map.object()));
  // ...
  if (access_info.holder().has_value()) {
    return access_info.holder();
  // ...
  if (compiler::OptionalHeapObjectRef c = TryGetConstant(lookup_start_object)) {
    if (c.value().IsJSObject()) {
      return c.value().AsJSObject();
  // ...
      broker()->dependencies());
 
 
  compiler::ObjectRef constant = access_info.constant().value();
  // ...
  if (constant.IsJSFunction()) {
  // ...
    if (receiver != lookup_start_object) return {};
  // ...

  compiler::ObjectRef constant = access_info.constant().value();
  if (constant.IsJSFunction()) {
  // ...
  compiler::OptionalJSObjectRef constant_holder =
  // ...
  if (constant_holder) {
    // ...
      std::optional<Float64> constant =
      // ...
      if (constant.has_value()) {
    // ...
      compiler::OptionalObjectRef constant =
      // ...
      if (constant.has_value()) {
  // ...
  if (access_info.holder().has_value()) {
    // ...
    load_source = lookup_start_object;
  // ...
      load_source, field_index.offset(), name);
  // ...
    if (access_info.field_map().has_value() &&
        access_info.field_map().value().is_stable()) {
      // ...
      auto map = access_info.field_map().value();
      // ...
      known_info->CombineType(NodeType::kAnyHeapObject);
 
 
  if (known_length.IsDone()) {
    // ...
    return known_length.value();
  // ...
      js_array, JSArray::kLengthOffset, broker()->length_string());
  // ...
  if (map.is_stable()) {
  // ...
  int length = map.NextFreePropertyIndex() - map.GetInObjectProperties();
 
 
  compiler::OptionalMapRef original_map;
  // ...
    if (original_map->UnusedPropertyFields() == 0) {
  // ...
    if (original_map && original_map->UnusedPropertyFields() == 0) {
  // ...
  if (field_representation.IsDouble()) {
  // ...
  if (field_representation.IsSmi()) {
    // ...
      if (access_info.field_map().has_value()) {
  // ...
  if (field_representation.IsSmi()) {
    // ...
                                        field_index.offset(), store_mode);
  // ...

bool AccessInfoGuaranteedConst(
  // ...
    if (!map.is_stable()) {
  // ...
        access_info.holder().value());
  // ...
  switch (access_info.kind()) {
    // ...
                          AccessInfoGuaranteedConst(access_info),
    // ...
      compiler::OptionalObjectRef constant =
      // ...
      if (!constant.has_value()) return {};
      // ...
                                        lookup_start_object);
      // ...
          cell, Cell::kValueOffset, name);
      // ...
                          AccessInfoGuaranteedConst(access_info),
      // ...
          lookup_start_object, JSPrimitiveWrapper::kValueOffset);
      // ...
      if (receiver != lookup_start_object) {
 
 
  if (access_info.holder().has_value()) {
    // ...
        access_info.holder().value());
  // ...
  switch (access_info.kind()) {
    // ...
            AccessInfoGuaranteedConst(access_info), access_mode);
  // ...
  switch (access_mode) {
    // ...
                                   access_info, access_mode);
 
 
template <typename GenericAccessFunc>
// ...
    GenericAccessFunc&& build_generic_access) {
  // ...
  bool has_deprecated_map_without_migration_target = false;
  if (compiler::OptionalHeapObjectRef c = TryGetConstant(lookup_start_object)) {
    // ...
    if (c.value().IsJSFunction() &&
        feedback.name().equals(broker()->prototype_string())) {
      // ...
          !function.has_instance_prototype(broker()) ||
          function.PrototypeRequiresRuntimeLookup(broker()) ||
  // ...
  } else if (feedback.maps().empty()) {
    // ...
    if (receiver != lookup_start_object) return {};
    // ...
      switch (access_mode) {
  // ...
    merger.IntersectWithKnownNodeAspects(lookup_start_object,
    // ...
    inferred_maps = merger.intersect_set();
    has_deprecated_map_without_migration_target =
        feedback.has_deprecated_map_without_migration_target();
  // ...
    if (map.is_deprecated()) continue;
    // ...
    if (InstanceTypeChecker::IsAlwaysSharedSpaceJSObject(map.instance_type()) &&
    // ...
    access_infos_for_feedback.push_back(access_info);
  // ...
          access_infos_for_feedback, access_mode, &access_infos)) {
  // ...
  if (access_infos.size() == 1) {
    // ...
                         has_deprecated_map_without_migration_target));
    // ...
                                  feedback.name(), access_info, access_mode);
  // ...
        receiver, lookup_start_object, feedback, access_mode, access_infos,
        build_generic_access);
 
 
  switch (object->properties().value_representation()) {
    // ...
      } else if (CheckType(object, NodeType::kSmi, &old_type)) {
        // ...
        return alternative.get_or_set_int32(
        // ...
                                                GetCheckType(old_type));
  // ...

  switch (object->properties().value_representation()) {
    // ...
        int32_t value = constant->value().value();
      // ...
        int32_t value = constant->value();
      // ...
        double value = constant->value().get_scalar();
        uint32_t uint32_value;
 
 
      broker()->dependencies()->DependOnNoElementsProtector()) {
    // ...
        [&](auto& builder) {
          // ...
                                            positive_index, uint32_length);
        // ...
        emit_load, [&] { return GetRootConstant(RootIndex::kUndefinedValue); });
  // ...
        index, length, AssertCondition::kUnsignedLessThan,
        DeoptimizeReason::kOutOfBounds));
  // ...
  auto props_for_name = loaded_properties.find(name);
  if (props_for_name == loaded_properties.end()) return {};
  // ...
  auto it = props_for_name->second.find(lookup_start_object);
  if (it == props_for_name->second.end()) return {};
 
    case AssertCondition::kEqual:
      return lhs == rhs;
    case AssertCondition::kNotEqual:
      return lhs != rhs;
    case AssertCondition::kLessThan:
      return lhs < rhs;
    case AssertCondition::kLessThanEqual:
      return lhs <= rhs;
    case AssertCondition::kGreaterThan:
      return lhs > rhs;
    case AssertCondition::kGreaterThanEqual:
      return lhs >= rhs;
    case AssertCondition::kUnsignedLessThan:
      return static_cast<uint32_t>(lhs) < static_cast<uint32_t>(rhs);
    case AssertCondition::kUnsignedLessThanEqual:
      return static_cast<uint32_t>(lhs) <= static_cast<uint32_t>(rhs);
    case AssertCondition::kUnsignedGreaterThan:
      return static_cast<uint32_t>(lhs) > static_cast<uint32_t>(rhs);
    case AssertCondition::kUnsignedGreaterThanEqual:
      return static_cast<uint32_t>(lhs) >= static_cast<uint32_t>(rhs);
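// Note that the unsigned variants reinterpret the int32 bit pattern: e.g.
// a check of (-1, 0) under kUnsignedGreaterThan is true, since -1 becomes
// 0xFFFFFFFF under static_cast<uint32_t>.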
 
bool CompareInt32(int32_t lhs, int32_t rhs, Operation operation) {
  switch (operation) {
    case Operation::kEqual:
    case Operation::kStrictEqual:
      return lhs == rhs;
    case Operation::kLessThan:
      return lhs < rhs;
    case Operation::kLessThanOrEqual:
      return lhs <= rhs;
    case Operation::kGreaterThan:
      return lhs > rhs;
    case Operation::kGreaterThanOrEqual:
      return lhs >= rhs;
  // ...

bool CompareUint32(uint32_t lhs, uint32_t rhs, Operation operation) {
  switch (operation) {
    case Operation::kEqual:
    case Operation::kStrictEqual:
      return lhs == rhs;
    case Operation::kLessThan:
      return lhs < rhs;
    case Operation::kLessThanOrEqual:
      return lhs <= rhs;
    case Operation::kGreaterThan:
      return lhs > rhs;
    case Operation::kGreaterThanOrEqual:
      return lhs >= rhs;
  // ...
      if (CheckConditionIn32(lhs_const.value(), rhs_const.value(), condition)) {
  // ...
      if (allow_unconditional_deopt) {
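// When both comparison inputs are compile-time constants the condition is
// decided here: a passing check is dropped entirely, while a failing check
// can (when allow_unconditional_deopt is set) replace the remainder of the
// block with an unconditional eager deopt.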
 
 
  if (known_elements.IsDone()) {
    // ...
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  * Reusing non-constant [Elements] "
    // ...
    return known_elements.value();
  // ...
  DCHECK_EQ(JSObject::kElementsOffset, JSArray::kElementsOffset);
  // ...
  if (!is_variable_length) {
  // ...
  if (!is_variable_length) {
 
 
#define BUILD_AND_RETURN_LOAD_TYPED_ARRAY(Type)                     \
  return AddNewNode<Load##Type##TypedArrayElement>({object, index}, \
  /* ... */

  switch (elements_kind) {
    // ...
    case UINT32_ELEMENTS:
      // ...
    case FLOAT32_ELEMENTS:
    case FLOAT64_ELEMENTS:
      // ...
#undef BUILD_AND_RETURN_LOAD_TYPED_ARRAY

#define BUILD_STORE_TYPED_ARRAY(Type, value)                           \
  AddNewNode<Store##Type##TypedArrayElement>({object, index, (value)}, \
  /* ... */

  switch (elements_kind) {
    // ...
    case UINT32_ELEMENTS:
      // ...
                   NodeType::kNumberOrOddball,
      // ...
    case FLOAT32_ELEMENTS:
    case FLOAT64_ELEMENTS:
      // ...
                      NodeType::kNumberOrOddball,
      // ...
#undef BUILD_STORE_TYPED_ARRAY
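// Both macros expand once per element type and dispatch on elements_kind to
// the matching Load/Store##Type##TypedArrayElement node. On the store side the
// value is first coerced from NodeType::kNumberOrOddball into the
// representation the element kind expects (int32 for the int kinds, float64
// for the float kinds).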
 
  if (elements_kind == FLOAT16_ELEMENTS ||
      elements_kind == BIGUINT64_ELEMENTS ||
      elements_kind == BIGINT64_ELEMENTS) {
  // ...
      !IsSupported(CpuOperation::kFloat64Round)) {
  // ...
  if (!broker()->dependencies()->DependOnArrayBufferDetachingProtector()) {
  // ...
          elements_array, index,
  // ...
        [&](auto& builder) {
          // ...
                                            positive_index, uint32_length);
        // ...
        emit_load, [&] { return GetRootConstant(RootIndex::kUndefinedValue); });
  // ...
        index, length, AssertCondition::kUnsignedLessThan,
        DeoptimizeReason::kOutOfBounds));
 
 
    ValueNode* elements_array_length = nullptr;
    // ...
      length = elements_array_length =
    // ...
      if (elements_array_length == nullptr) {
      // ...
                    {elements_array_length,
      // ...
              : elements_array_length;
      // ...
          index, limit, AssertCondition::kUnsignedLessThan,
          DeoptimizeReason::kOutOfBounds));
      // ...
          {elements_array, object, index, elements_array_length},
    // ...
          index, length, AssertCondition::kUnsignedLessThan,
          DeoptimizeReason::kOutOfBounds));
  // ...
          object, index_object,
  // ...
                                                     elements_kind, keyed_mode);
 
 
template <typename GenericAccessFunc>
// ...
    GenericAccessFunc&& build_generic_access) {
  // ...
  if (feedback.transition_groups().empty()) {
  // ...
      access_infos.empty()) {
  // ...
            !receiver_map.PrototypesElementsDoNotHaveAccessorsOrThrow(
                broker(), &prototype_maps)) {
        // ...
        if (InstanceTypeChecker::IsAlwaysSharedSpaceJSObject(
                receiver_map.instance_type())) {
  // ...
  if (access_infos.size() == 1) {
    // ...
                InstanceType::HEAP_NUMBER_TYPE);
      // ...
      for (auto& transition_source : transition_sources) {
        DCHECK_NE(transition_source.instance_type(),
                  InstanceType::HEAP_NUMBER_TYPE);
      // ...
          object, object_map, transition_sources, transition_target));
    // ...
                                               access_info, keyed_mode);
    // ...
                                                    access_info, keyed_mode);
  // ...
                                            access_infos, build_generic_access);
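// Monomorphic element accesses are emitted directly, possibly preceded by an
// elements-kind transition from the recorded transition sources; polymorphic
// feedback falls through to the polymorphic access builder below, and only as
// a last resort to the generic access builder.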
 
 
template <typename GenericAccessFunc>
// ...
    GenericAccessFunc&& build_generic_access) {
  // ...
  const int access_info_count = static_cast<int>(access_infos.size());
  // ...
  std::optional<MaglevSubGraphBuilder::Variable> ret_val;
  std::optional<MaglevSubGraphBuilder::Label> done;
  std::optional<MaglevSubGraphBuilder::Label> generic_access;
  // ...
  for (int i = 0; i < access_info_count; i++) {
    // ...
    std::optional<MaglevSubGraphBuilder::Label> check_next_map;
    // ...
    if (i == access_info_count - 1) {
      if (handle_transitions) {
    // ...
      if (handle_transitions) {
        // ...
            transition_target, &sub_graph, check_next_map);
    // ...
                                                 access_info, keyed_mode);
      // ...
          object, index_object, access_info, keyed_mode);
    // ...
        if (!done.has_value()) {
          // ...
          const int possible_predecessors = access_info_count - i + 1;
          // ...
            done.emplace(&sub_graph, possible_predecessors);
          // ...
                &sub_graph, possible_predecessors,
                std::initializer_list<MaglevSubGraphBuilder::Variable*>{
        // ...
        if (!is_any_store) {
          sub_graph.set(*ret_val, result.value());
        // ...
        sub_graph.Goto(&*done);
      // ...
        if (!generic_access.has_value()) {
          // ...
          generic_access.emplace(&sub_graph, access_info_count - i);
        // ...
        sub_graph.Goto(&*generic_access);
    // ...
    if (check_next_map.has_value()) {
      sub_graph.Bind(&*check_next_map);
  // ...
  if (generic_access.has_value() &&
  // ...
    if (!done.has_value()) {
    // ...
    if (!is_any_store) {
      sub_graph.set(*ret_val, generic_result.value());
    // ...
    sub_graph.Goto(&*done);
  // ...
  if (done.has_value()) {
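// The polymorphic dispatch is a chain of map checks built inside a
// MaglevSubGraphBuilder: each case either produces a value (merged through
// the ret_val variable into the shared `done` label) or jumps to
// `generic_access`. Label predecessor counts are computed up front
// (access_info_count - i, plus one for the fallthrough) so each merge point
// knows how many incoming edges to expect.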
 
 
template <typename GenericAccessFunc>
// ...
    GenericAccessFunc&& build_generic_access) {
  // ...
  const int access_info_count = static_cast<int>(access_infos.size());
  int number_map_index = -1;
  // ...
  bool needs_migration = false;
  bool has_deprecated_map_without_migration_target =
      feedback.has_deprecated_map_without_migration_target();
  for (int i = 0; i < access_info_count; i++) {
    // ...
      if (map.is_migration_target()) {
        needs_migration = true;
      // ...
      if (map.IsHeapNumberMap()) {
        // ...
        merger.IntersectWithKnownNodeAspects(lookup_start_object,
        // ...
        if (!merger.intersect_set().is_empty()) {
          // ...
          number_map_index = i;
  // ...
  std::optional<MaglevSubGraphBuilder::Variable> ret_val;
  std::optional<MaglevSubGraphBuilder::Label> done;
  std::optional<MaglevSubGraphBuilder::Label> is_number;
  std::optional<MaglevSubGraphBuilder::Label> generic_access;
  // ...
  if (number_map_index >= 0) {
    is_number.emplace(&sub_graph, 2);
  // ...
  if (needs_migration &&
      !v8_flags.maglev_skip_migration_check_for_polymorphic_access) {
    // ...
        {lookup_start_object_map, lookup_start_object});
  // ...
  for (int i = 0; i < access_info_count; i++) {
    // ...
    std::optional<MaglevSubGraphBuilder::Label> check_next_map;
    // ...
    if (i == access_info_count - 1) {
      // ...
                         has_deprecated_map_without_migration_target);
    // ...
    if (i == number_map_index) {
      DCHECK(is_number.has_value());
      sub_graph.Goto(&*is_number);
      sub_graph.Bind(&*is_number);
    // ...
                                     feedback.name(), access_info, access_mode);
      // ...
                                    feedback.name(), access_info);
    // ...
        if (!done.has_value()) {
          // ...
          const int possible_predecessors = access_info_count - i + 1;
          // ...
            done.emplace(&sub_graph, possible_predecessors);
          // ...
                &sub_graph, possible_predecessors,
                std::initializer_list<MaglevSubGraphBuilder::Variable*>{
        // ...
        if (!is_any_store) {
          sub_graph.set(*ret_val, result.value());
        // ...
        sub_graph.Goto(&*done);
      // ...
        if (!generic_access.has_value()) {
          // ...
          generic_access.emplace(&sub_graph, access_info_count - i);
        // ...
        sub_graph.Goto(&*generic_access);
    // ...
    if (check_next_map.has_value()) {
      sub_graph.Bind(&*check_next_map);
  // ...
  if (generic_access.has_value() &&
  // ...
    if (!done.has_value()) {
    // ...
    if (!is_any_store) {
      sub_graph.set(*ret_val, generic_result.value());
    // ...
    sub_graph.Goto(&*done);
  // ...
  if (done.has_value()) {
 
 
  DCHECK(!value->properties().is_conversion());
  // ...
  auto& props_for_key =
      loaded_properties.try_emplace(key, zone()).first->second;
  // ...
  if (!is_const && IsAnyStore(access_mode)) {
    // ...
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  * Removing all non-constant cached ";
      switch (key.type()) {
        // ...
          std::cout << "properties with name " << *key.name().object();
        // ...
          std::cout << "Elements";
        // ...
          std::cout << "TypedArray length";
        // ...
          std::cout << "String length";
      // ...
      std::cout << std::endl;
    // ...
    props_for_key.clear();
  // ...
  if (v8_flags.trace_maglev_graph_building) {
    std::cout << "  * Recording " << (is_const ? "constant" : "non-constant")
              << " known property "
    // ...
    switch (key.type()) {
      // ...
        std::cout << *key.name().object();
      // ...
        std::cout << "Elements";
      // ...
        std::cout << "TypedArray length";
      // ...
        std::cout << "String length";
  // ...
    auto updated = props_for_key.emplace(lookup_start_object, value);
    if (updated.second) {
    // ...
    } else if (updated.first->second != value) {
      updated.first->second = value;
  // ...
    props_for_key[lookup_start_object] = value;
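// loaded_properties is a two-level cache: property key -> (lookup-start
// object -> cached value). A non-constant store under a key invalidates every
// non-constant entry cached for that key, since any aliasing object may have
// been written through.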
 
 
    if (v8_flags.trace_maglev_graph_building && result.IsDoneWithValue()) {
      std::cout << "  * Reusing non-constant loaded property "
    // ...
                                lookup_start_object, name);
    // ...
    if (v8_flags.trace_maglev_graph_building && result.IsDoneWithValue()) {
      std::cout << "  * Reusing constant loaded property "
  // ...
      return vo_string->object()->string_length();
  // ...
    if (const_string->IsString()) {
  // ...
    ValueNode* input = wrapper->value_input().node();
  // ...
    if (v8_flags.trace_maglev_graph_building && result.IsDoneWithValue()) {
      std::cout << "  * Reusing constant [String length]"
 
template <typename GenericAccessFunc>
// ...
    GenericAccessFunc&& build_generic_access) {
  // ...
  switch (processed_feedback.kind()) {
    // ...
          DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
  // ...
  auto build_generic_access = [this, &receiver, &name, &feedback_source]() {
  // ...
                                   build_generic_access);
// ...

ReduceResult MaglevGraphBuilder::VisitGetNamedProperty() {
  // ...
  if (IsThinString(*constant.object())) {
    constant = MakeRefAssumeMemoryFence(
  // ...
  if (root_index.has_value()) {
 
#ifdef V8_ENABLE_SANDBOX
// ...

ReduceResult MaglevGraphBuilder::VisitGetNamedPropertyFromSuper() {
  // ...
  ValueNode* home_object_map =
  // ...
  ValueNode* lookup_start_object =
  // ...
  auto build_generic_access = [this, &receiver, &lookup_start_object, &name,
                               &feedback_source]() {
  // ...
                                feedback_source, build_generic_access),
  // ...
    bool speculating_receiver_map_matches = false;
    // ...
      if (processed_feedback.kind() !=
      // ...
      speculating_receiver_map_matches = true;
    // ...
        speculating_receiver_map_matches) {
      // ...
          DeoptimizeReason::kWrongMapDynamic);
 
 
      object, feedback_source, processed_feedback));
  // ...
  auto build_generic_access = [this, object, &feedback_source]() {
  // ...
  switch (processed_feedback.kind()) {
    // ...
          DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
    // ...
          build_generic_access);
    // ...
          key, name, DeoptimizeReason::kKeyedAccessChanged));
    // ...
          object, object, processed_feedback.AsNamedAccess(), feedback_source,
// ...

ReduceResult MaglevGraphBuilder::VisitGetKeyedProperty() {
  // ...
  if (processed_feedback->kind() ==
  // ...
        constant.has_value() && constant->IsName()) {
      // ...
      if (name.IsUniqueName() && !name.object()->IsArrayIndex()) {
        processed_feedback =
  // ...

ReduceResult MaglevGraphBuilder::VisitGetEnumeratedKeyedProperty() {
  // ...
  compiler::FeedbackSource feedback_source{feedback(), slot};
  // ...
  const compiler::ProcessedFeedback& processed_feedback =
 
ReduceResult MaglevGraphBuilder::VisitLdaModuleVariable() {
  // ...
  ValueNode* exports_or_imports;
  if (cell_index > 0) {
    exports_or_imports =
    // ...
    exports_or_imports =
    // ...
    cell_index = -cell_index - 1;
  // ...
    compiler::OptionalContextRef maybe_ref =
    // ...
    if (maybe_ref.has_value()) {
  // ...
  for (size_t i = 0; i < depth; i++) {
// ...

ReduceResult MaglevGraphBuilder::VisitStaModuleVariable() {
  // ...
                                AbortReason::kUnsupportedModuleOperation))});
  // ...
  ValueNode* exports =
  // ...
        DeoptimizeReason::kInsufficientTypeFeedbackForGenericGlobalAccess);
 
 
ReduceResult MaglevGraphBuilder::VisitSetNamedProperty() {
  // ...
  auto build_generic_access = [this, object, &name, &feedback_source]() {
  // ...
  switch (processed_feedback.kind()) {
    // ...
          DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
    // ...
          object, object, processed_feedback.AsNamedAccess(), feedback_source,
  // ...
  return build_generic_access();
}

ReduceResult MaglevGraphBuilder::VisitDefineNamedOwnProperty() {
  // ...
  compiler::FeedbackSource feedback_source{feedback(), slot};
  // ...
  const compiler::ProcessedFeedback& processed_feedback =
  // ...
  auto build_generic_access = [this, object, &name, &feedback_source]() {
  // ...
  switch (processed_feedback.kind()) {
    // ...
          DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
    // ...
          object, object, processed_feedback.AsNamedAccess(), feedback_source,
  // ...
  return build_generic_access();
}

ReduceResult MaglevGraphBuilder::VisitSetKeyedProperty() {
  // ...
  compiler::FeedbackSource feedback_source{feedback(), slot};
  // ...
  const compiler::ProcessedFeedback& processed_feedback =
  // ...
  auto build_generic_access = [this, object, &feedback_source]() {
  // ...
  switch (processed_feedback.kind()) {
    // ...
          DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
    // ...
          object, index, processed_feedback.AsElementAccess(), feedback_source,
          build_generic_access));
  // ...
  return build_generic_access();
}

ReduceResult MaglevGraphBuilder::VisitDefineKeyedOwnProperty() {
  // ...
  compiler::FeedbackSource feedback_source{feedback(), slot};
  // ...

ReduceResult MaglevGraphBuilder::VisitStaInArrayLiteral() {
  // ...
  compiler::FeedbackSource feedback_source{feedback(), slot};
  // ...
  const compiler::ProcessedFeedback& processed_feedback =
  // ...
  auto build_generic_access = [this, object, index, &feedback_source]() {
  // ...
  switch (processed_feedback.kind()) {
    // ...
          DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess);
    // ...
          object, index, processed_feedback.AsElementAccess(), feedback_source,
          build_generic_access));
  // ...
  return build_generic_access();
}
 
ReduceResult MaglevGraphBuilder::VisitDefineKeyedOwnPropertyInLiteral() {
  // ...

// The bodies below are one-line trampolines; they follow the standard
// VisitBinaryOperation / VisitBinarySmiOperation / VisitUnaryOperation
// pattern implied by the surrounding code.
ReduceResult MaglevGraphBuilder::VisitAdd() {
  return VisitBinaryOperation<Operation::kAdd>();
}
ReduceResult MaglevGraphBuilder::VisitSub() {
  return VisitBinaryOperation<Operation::kSubtract>();
}
ReduceResult MaglevGraphBuilder::VisitMul() {
  return VisitBinaryOperation<Operation::kMultiply>();
}
ReduceResult MaglevGraphBuilder::VisitDiv() {
  return VisitBinaryOperation<Operation::kDivide>();
}
ReduceResult MaglevGraphBuilder::VisitMod() {
  return VisitBinaryOperation<Operation::kModulus>();
}
ReduceResult MaglevGraphBuilder::VisitExp() {
  return VisitBinaryOperation<Operation::kExponentiate>();
}
ReduceResult MaglevGraphBuilder::VisitBitwiseOr() {
  return VisitBinaryOperation<Operation::kBitwiseOr>();
}
ReduceResult MaglevGraphBuilder::VisitBitwiseXor() {
  return VisitBinaryOperation<Operation::kBitwiseXor>();
}
ReduceResult MaglevGraphBuilder::VisitBitwiseAnd() {
  return VisitBinaryOperation<Operation::kBitwiseAnd>();
}
ReduceResult MaglevGraphBuilder::VisitShiftLeft() {
  return VisitBinaryOperation<Operation::kShiftLeft>();
}
ReduceResult MaglevGraphBuilder::VisitShiftRight() {
  return VisitBinaryOperation<Operation::kShiftRight>();
}
ReduceResult MaglevGraphBuilder::VisitShiftRightLogical() {
  return VisitBinaryOperation<Operation::kShiftRightLogical>();
}

ReduceResult MaglevGraphBuilder::VisitAddSmi() {
  return VisitBinarySmiOperation<Operation::kAdd>();
}
ReduceResult MaglevGraphBuilder::VisitSubSmi() {
  return VisitBinarySmiOperation<Operation::kSubtract>();
}
ReduceResult MaglevGraphBuilder::VisitMulSmi() {
  return VisitBinarySmiOperation<Operation::kMultiply>();
}
ReduceResult MaglevGraphBuilder::VisitDivSmi() {
  return VisitBinarySmiOperation<Operation::kDivide>();
}
ReduceResult MaglevGraphBuilder::VisitModSmi() {
  return VisitBinarySmiOperation<Operation::kModulus>();
}
ReduceResult MaglevGraphBuilder::VisitExpSmi() {
  return VisitBinarySmiOperation<Operation::kExponentiate>();
}
ReduceResult MaglevGraphBuilder::VisitBitwiseOrSmi() {
  return VisitBinarySmiOperation<Operation::kBitwiseOr>();
}
ReduceResult MaglevGraphBuilder::VisitBitwiseXorSmi() {
  return VisitBinarySmiOperation<Operation::kBitwiseXor>();
}
ReduceResult MaglevGraphBuilder::VisitBitwiseAndSmi() {
  return VisitBinarySmiOperation<Operation::kBitwiseAnd>();
}
ReduceResult MaglevGraphBuilder::VisitShiftLeftSmi() {
  return VisitBinarySmiOperation<Operation::kShiftLeft>();
}
ReduceResult MaglevGraphBuilder::VisitShiftRightSmi() {
  return VisitBinarySmiOperation<Operation::kShiftRight>();
}
ReduceResult MaglevGraphBuilder::VisitShiftRightLogicalSmi() {
  return VisitBinarySmiOperation<Operation::kShiftRightLogical>();
}

ReduceResult MaglevGraphBuilder::VisitInc() {
  return VisitUnaryOperation<Operation::kIncrement>();
}
ReduceResult MaglevGraphBuilder::VisitDec() {
  return VisitUnaryOperation<Operation::kDecrement>();
}
ReduceResult MaglevGraphBuilder::VisitNegate() {
  return VisitUnaryOperation<Operation::kNegate>();
}
ReduceResult MaglevGraphBuilder::VisitBitwiseNot() {
  return VisitUnaryOperation<Operation::kBitwiseNot>();
}
 
ReduceResult MaglevGraphBuilder::VisitToBooleanLogicalNot() {
  // ...
  switch (value->opcode()) {
    // ...
  case Opcode::k##Name: {                                  \
    return GetBooleanConstant(                             \
        !value->Cast<Name>()->ToBoolean(local_isolate())); \
  // ...

ReduceResult MaglevGraphBuilder::VisitTypeOf() {
  // ...
          DeoptimizeReason::kInsufficientTypeFeedbackForTypeOf);
  // ...
                                          GetCheckType(GetType(value)));
 
ReduceResult MaglevGraphBuilder::VisitDeletePropertyStrict() {
  // ...

ReduceResult MaglevGraphBuilder::VisitDeletePropertySloppy() {
  // ...

ReduceResult MaglevGraphBuilder::VisitGetSuperConstructor() {
  // ...
  if (compiler::OptionalHeapObjectRef constant =
  // ...
    compiler::MapRef map = constant->map(broker());
    if (map.is_stable()) {
  // ...
  return initial_map.GetConstructor(broker()).equals(constructor);
// ...
    std::pair<interpreter::Register, interpreter::Register> result) {
  // ...
  compiler::OptionalHeapObjectRef maybe_constant =
  // ...
  if (!maybe_constant) return false;
  // ...
    if (!current.IsJSFunction()) return false;
    // ...
            .ClassScopeHasPrivateBrand()) {
      // ...
      if (!broker()->dependencies()->DependOnArrayIteratorProtector()) {
      // ...
      compiler::OptionalHeapObjectRef new_target_function =
      // ...
        if (new_target_function && new_target_function->IsJSFunction() &&
        // ...
                               current_function)) {
 
ReduceResult MaglevGraphBuilder::VisitFindNonDefaultConstructorOrConstruct() {
  // ...

void ForceEscapeIfAllocation(ValueNode* value) {
  if (InlinedAllocation* alloc = value->TryCast<InlinedAllocation>()) {
    alloc->ForceEscaping();
  // ...
  if (v8_flags.maglev_print_inlined &&
      // ...
       v8_flags.trace_maglev_inlining_verbose)) {
    std::cout << "== Inlining " << Brief(*shared.object()) << std::endl;
    // ...
    if (v8_flags.maglev_print_feedback) {
      i::Print(*feedback.object(), std::cout);
    // ...
  } else if (v8_flags.trace_maglev_graph_building) {
    std::cout << "== Inlining " << shared.object() << std::endl;
  // ...
          shared.object(), bytecode.object(), call_site_position));
  if (feedback.object()->invocation_count_before_stable(kRelaxedLoad) >
      v8_flags.invocation_count_for_early_optimization) {
  // ...
  for (int i = 0; i < formal_parameter_count; i++) {
  // ...
      if (v8_flags.trace_maglev_graph_building) {
        std::cout << "== Finished inlining (abort) " << shared.object()
  // ...
  if (v8_flags.trace_maglev_graph_building) {
    std::cout << "== Finished inlining " << shared.object() << std::endl;
 
 
#define TRACE_INLINING(...)                       \
  /* ... */                                       \
    if (v8_flags.trace_maglev_inlining)           \
      StdoutStream{} << __VA_ARGS__ << std::endl; \
  /* ... */

#define TRACE_CANNOT_INLINE(...) \
  TRACE_INLINING("  cannot inline " << shared << ": " << __VA_ARGS__)

// ...
                                       float call_frequency) {
  if (graph()->total_inlined_bytecode_size() >
  // ...
                        << v8_flags.max_maglev_hard_inline_depth << ")");
  // ...
      shared.GetInlineability(broker());
  // ...
  if (bytecode.incoming_new_target_or_generator_register().is_valid()) {
  // ...
                                           << ") < minimum threshold ("
  // ...
                        << bytecode.length() << ") >= max-size ("
  // ...
      ->shared_function_info()
      // ...
      ->PassesFilter(v8_flags.maglev_print_filter);
// ...
                   << shared << ": small function, skipping max-depth");
// ...
#ifdef V8_ENABLE_LEAPTIERING
  // ...
  float feedback_frequency = 0.0f;
  if (feedback_source.IsValid()) {
    // ...
    feedback_frequency =
        feedback.IsInsufficient() ? 0.0f : feedback.AsCall().frequency();
  // ...
                                feedback_cell, args, call_frequency);
  // ...
                        << v8_flags.max_maglev_inline_depth << ")");
  // ...
                                feedback_cell, args, call_frequency);
// ...
#ifdef V8_ENABLE_LEAPTIERING
  // ...
  float score = call_frequency / bytecode.length();
  // ...
          arguments, &generic_call->lazy_deopt_info()->top_frame(),
          // ...
           false, call_frequency},
      generic_call, feedback_cell, score);
  // ...
  return generic_call;
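// Call sites that are not inlined immediately are scored as
// score = call_frequency / bytecode.length(), so hot call sites with small
// callees rank highest; the candidate is remembered alongside the generic
// call node, which suggests a later decision can still promote it to an
// inlined call.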
 
 
    float call_frequency) {
  // ...
  if (catch_block_details.ref &&
  // ...
      arguments_vector, deopt_frame,
      // ...
       true, call_frequency);
  // ...
  if (result.IsDoneWithAbort()) {
// ...
  *kind_return = maps.at(0).elements_kind();
  // ...
    if (!map.supports_fast_array_iteration(broker) ||
MaybeReduceResult MaglevGraphBuilder::TryReduceArrayIsArray(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
  ValueNode* node = args[0];
  // ...
  if (CheckType(node, NodeType::kJSArray)) {
  // ...
  if (node_info && node_info->possible_maps_are_known()) {
    bool has_array_map = false;
    bool has_proxy_map = false;
    bool has_other_map = false;
    for (compiler::MapRef map : node_info->possible_maps()) {
      // ...
      if (InstanceTypeChecker::IsJSArray(type)) {
        has_array_map = true;
      } else if (InstanceTypeChecker::IsJSProxy(type)) {
        has_proxy_map = true;
      // ...
        has_other_map = true;
    // ...
    if ((has_array_map ^ has_other_map) && !has_proxy_map) {
      if (has_array_map) node_info->CombineType(NodeType::kJSArray);
 
MaybeReduceResult MaglevGraphBuilder::TryReduceArrayForEach(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
  if (args.count() < 1) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.forEach - not enough "
  // ...
  if (!node_info || !node_info->possible_maps_are_known()) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.forEach - receiver "
  // ...
  if (!CanInlineArrayIteratingBuiltin(broker(), node_info->possible_maps(),
  // ...
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.forEach - doesn't "
                   "support fast array iteration or incompatible maps"
  // ...
  if (!broker()->dependencies()->DependOnNoElementsProtector()) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.forEach - invalidated "
                   "no elements protector"
  // ...
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.forEach - callback is "
  // ...
  auto get_lazy_deopt_scope =
      // ...
        return DeoptFrameScope(
            this, Builtin::kArrayForEachLoopLazyDeoptContinuation, target,
  // ...
  auto get_eager_deopt_scope =
      // ...
        return DeoptFrameScope(
            this, Builtin::kArrayForEachLoopEagerDeoptContinuation, target,
  // ...
      "Array.prototype.forEach", target, args, get_eager_deopt_scope,
      get_lazy_deopt_scope);
  if (builtin_result.IsFail() || builtin_result.IsDoneWithAbort()) {
    return builtin_result;
  // ...
  DCHECK(builtin_result.IsDoneWithoutValue());
 
MaybeReduceResult MaglevGraphBuilder::TryReduceArrayMap(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
  if (!broker()->dependencies()->DependOnArraySpeciesProtector()) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.map - invalidated "
                   "array species protector"
  // ...
  compiler::MapRef holey_smi_map =
  // ...
  compiler::MapRef holey_map =
  // ...
  compiler::MapRef holey_double_map =
  // ...
  ValueNode* result_array = nullptr;
  // ...
  auto initial_callback = [this, &result_array,
                           holey_smi_map](ValueNode* length_smi) {
    // ...
    VirtualObject* array;
    // ...
        array, CreateJSArray(holey_smi_map, holey_smi_map.instance_size(),
    // ...
    array->set(JSArray::kElementsOffset, elements);
  // ...
  auto process_element_callback = [this, &holey_map, &holey_double_map,
                                   &result_array](ValueNode* index_int32,
                                                  ValueNode* element) {
    // ...
        {result_array, index_int32, element}, holey_map, holey_double_map);
  // ...
  auto get_lazy_deopt_scope = [this, &result_array](
                                  compiler::JSFunctionRef target,
                                  // ...
                                  ValueNode* this_arg, ValueNode* index_int32,
                                  ValueNode* next_index_int32,
                                  // ...
    return DeoptFrameScope(
        this, Builtin::kArrayMapLoopLazyDeoptContinuation, target,
  // ...
  auto get_eager_deopt_scope = [this, &result_array](
                                   compiler::JSFunctionRef target,
                                   // ...
                                   ValueNode* this_arg, ValueNode* index_int32,
                                   ValueNode* next_index_int32,
                                   // ...
    return DeoptFrameScope(
        this, Builtin::kArrayMapLoopEagerDeoptContinuation, target,
  // ...
      "Array.prototype.map", target, args, get_eager_deopt_scope,
      get_lazy_deopt_scope, initial_callback, process_element_callback);
  if (builtin_result.IsFail() || builtin_result.IsDoneWithAbort()) {
    return builtin_result;
  // ...
  DCHECK(builtin_result.IsDoneWithoutValue());
  // ...
  return result_array;
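// The map() reduction pre-allocates the result as a HOLEY_SMI_ELEMENTS array
// of the receiver's length (initial_callback), then stores each callback
// result with a transitioning element store that can migrate the result to
// the holey or holey-double map as needed (process_element_callback). The
// eager/lazy DeoptFrameScopes resume in the matching ArrayMapLoop builtin
// continuations if a deopt happens mid-loop.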
 
    const std::optional<InitialCallback>& initial_callback,
    const std::optional<ProcessElementCallback>& process_element_callback) {
  DCHECK_EQ(initial_callback.has_value(), process_element_callback.has_value());
  // ...
  if (args.count() < 1) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce " << name << " - not enough arguments"
  // ...
  if (!node_info || !node_info->possible_maps_are_known()) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce " << name
                << " - receiver map is unknown" << std::endl;
  // ...
  if (!CanInlineArrayIteratingBuiltin(broker(), node_info->possible_maps(),
  // ...
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce " << name
                << " - doesn't support fast array iteration or incompatible"
                << " maps" << std::endl;
  // ...
  if (!broker()->dependencies()->DependOnNoElementsProtector()) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce " << name
                << " - invalidated no elements protector" << std::endl;
  // ...
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce " << name
                << " - callback is untagged value" << std::endl;
  // ...
  if (initial_callback) {
  // ...
  bool receiver_maps_were_unstable = node_info->possible_maps_are_unstable();
  PossibleMaps receiver_maps_before_loop(node_info->possible_maps());
  // ...
      sub_builder.BeginLoop({&var_index, &var_length});
  // ...
  if (receiver_maps_were_unstable) {
    node_info->SetPossibleMaps(receiver_maps_before_loop,
                               receiver_maps_were_unstable,
                               // ...
                               NodeType::kUnknown, broker());
  // ...
    DCHECK_EQ(node_info->possible_maps().size(),
              receiver_maps_before_loop.size());
  // ...
                      sub_builder.get(var_length), false,
  // ...
  Phi* index_tagged = sub_builder.get(var_index)->Cast<Phi>();
  // ...
      &loop_end, {index_int32, original_length_int32}, Operation::kLessThan);
  // ...
    EnsureType(next_index_int32, NodeType::kSmi);
  // ...
  std::optional<MaglevSubGraphBuilder::Label> skip_call;
  // ...
        std::initializer_list<MaglevSubGraphBuilder::Variable*>{&var_length});
    // ...
                                                   RootIndex::kTheHoleValue);
  // ...
  if (!result.IsDoneWithAbort()) {
    if (process_element_callback) {
      // ...
      (*process_element_callback)(index_int32, value);
  // ...
    bool recheck_maps_after_call = receiver_maps_were_unstable;
    if (recheck_maps_after_call) {
      // ...
      if (auto receiver_info_after_call =
      // ...
        if (receiver_info_after_call &&
            receiver_info_after_call->possible_maps_are_known()) {
          recheck_maps_after_call = !receiver_maps_before_loop.contains(
              receiver_info_after_call->possible_maps());
  // ...
    if (recheck_maps_after_call) {
      // ...
      bool emit_check_with_migration = std::any_of(
          receiver_maps_before_loop.begin(), receiver_maps_before_loop.end(),
          // ...
      if (emit_check_with_migration) {
        // ...
                                           receiver_maps_before_loop,
  // ...
    sub_builder.set(var_length, current_length);
  // ...
                                      AssertCondition::kUnsignedLessThanEqual,
                                      DeoptimizeReason::kArrayLengthChanged));
  // ...
  if (skip_call.has_value()) {
    // ...
    sub_builder.Bind(&*skip_call);
  // ...
  sub_builder.set(var_index, next_index_int32);
  sub_builder.EndLoop(&loop_header);
  // ...
  sub_builder.Bind(&loop_end);
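// Because the user callback can mutate the receiver, the loop re-establishes
// its invariants after every call: if the receiver maps were unstable before
// the loop they are rechecked (with migration if any map before the loop was
// a migration target), and the cached length is re-validated against the
// current length, deopting with kArrayLengthChanged if the array was shrunk.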
 
 
  if (!receiver->Is<InlinedAllocation>()) return {};
  VirtualObject* iterator = receiver->Cast<InlinedAllocation>()->object();
  if (!iterator->map().IsJSArrayIteratorMap()) {
    FAIL("iterator is not a JS array iterator object");
  // ...
  ValueNode* iterated_object =
      iterator->get(JSArrayIterator::kIteratedObjectOffset);
  // ...
  base::SmallVector<compiler::MapRef, 4> maps;
  if (iterated_object->Is<InlinedAllocation>()) {
    VirtualObject* array = iterated_object->Cast<InlinedAllocation>()->object();
    // ...
    if (iterated_object->Cast<InlinedAllocation>()->IsEscaping()) {
      FAIL("allocation is escaping, map could have been changed");
    // ...
      FAIL("we're inside a loop, iterated object map could change");
    // ...
    auto map = array->map();
    if (!map.supports_fast_array_iteration(broker())) {
      FAIL("no fast array iteration support");
    // ...
    elements_kind = map.elements_kind();
    maps.push_back(map);
  // ...
    if (!node_info || !node_info->possible_maps_are_known()) {
      FAIL("iterated object is unknown");
    // ...
    if (!CanInlineArrayIteratingBuiltin(broker(), node_info->possible_maps(),
    // ...
      FAIL("no fast array iteration support or incompatible maps");
    // ...
    for (auto map : node_info->possible_maps()) {
      maps.push_back(map);
  // ...
    FAIL("no typed arrays support");
  // ...
      !broker()->dependencies()->DependOnNoElementsProtector()) {
    FAIL("no elements protector");
  // ...
  ValueNode* uint32_index;
  // ...
  ValueNode* uint32_length;
  // ...
                                              : NodeType::kNumber)));
  // ...
  MaglevSubGraphBuilder subgraph(this, 2);
  MaglevSubGraphBuilder::Variable is_done(0);
  MaglevSubGraphBuilder::Variable ret_value(1);
  // ...
      {&is_done, &ret_value},
      [&](auto& builder) {
        return BuildBranchIfUint32Compare(builder, Operation::kLessThan,
                                          uint32_index, uint32_length);
      // ...
        ValueNode* int32_index = GetInt32(uint32_index);
        subgraph.set(is_done, GetBooleanConstant(false));
        // ...
            iterator->get(JSArrayIterator::kKindOffset)->Is<Int32Constant>());
        IterationKind iteration_kind = static_cast<IterationKind>(
            iterator->get(JSArrayIterator::kKindOffset)
                ->Cast<Int32Constant>()
        // ...
        if (iteration_kind == IterationKind::kKeys) {
          subgraph.set(ret_value, index);
        // ...
              TryBuildElementLoadOnJSArrayOrJSObject(
                  iterated_object, int32_index, base::VectorOf(maps),
                  elements_kind, KeyedAccessLoadMode::kHandleOOBAndHoles));
          if (iteration_kind == IterationKind::kEntries) {
            ValueNode* key_value_array;
            GET_VALUE_OR_ABORT(key_value_array,
                               BuildAndAllocateKeyValueArray(index, value));
            subgraph.set(ret_value, key_value_array);
          // ...
            subgraph.set(ret_value, value);
        // ...
        ValueNode* next_index = AddNewNode<Int32AddWithOverflow>(
            {int32_index, GetInt32Constant(1)});
        EnsureType(next_index, NodeType::kSmi);
        // ...
        BuildStoreTaggedFieldNoWriteBarrier(receiver, next_index,
                                            JSArrayIterator::kNextIndexOffset,
      // ...
        subgraph.set(is_done, GetBooleanConstant(true));
        subgraph.set(ret_value, GetRootConstant(RootIndex::kUndefinedValue));
  // ...
                                JSArrayIterator::kNextIndexOffset,
  // ...
  compiler::MapRef map =
      broker()->target_native_context().iterator_result_map(broker());
  VirtualObject* iter_result = CreateJSIteratorResult(
      map, subgraph.get(ret_value), subgraph.get(is_done));
  ValueNode* allocation =
 
MaybeReduceResult MaglevGraphBuilder::TryReduceArrayPrototypeEntries(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...

MaybeReduceResult MaglevGraphBuilder::TryReduceArrayPrototypeKeys(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...

MaybeReduceResult MaglevGraphBuilder::TryReduceArrayPrototypeValues(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...

MaybeReduceResult MaglevGraphBuilder::TryReduceStringFromCharCode(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
  if (args.count() != 1) return {};
  // ...
      args[0], NodeType::kNumberOrOddball,
  // ...

MaybeReduceResult MaglevGraphBuilder::TryReduceStringPrototypeCharCodeAt(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
  if (args.count() == 0) {
  // ...
    if (cst->IsString() && index->Is<Int32Constant>()) {
      compiler::StringRef str = cst->AsString();
      int idx = index->Cast<Int32Constant>()->value();
      if (idx >= 0 && static_cast<uint32_t>(idx) < str.length()) {
        if (std::optional<uint16_t> value = str.GetChar(broker(), idx)) {
  // ...
      index, length, AssertCondition::kUnsignedLessThan,
      DeoptimizeReason::kOutOfBounds));
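// With a constant string receiver and a constant in-range index, charCodeAt
// folds to a constant char code via str.GetChar(); otherwise the reduction
// emits an unsigned bounds assertion against the string length (deopting
// with kOutOfBounds) before loading the character.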
 
MaybeReduceResult MaglevGraphBuilder::TryReduceStringPrototypeCodePointAt(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
  if (args.count() == 0) {
  // ...
      index, length, AssertCondition::kUnsignedLessThan,
      DeoptimizeReason::kOutOfBounds));
 
MaybeReduceResult MaglevGraphBuilder::TryReduceStringPrototypeIterator(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
  compiler::MapRef map =
  // ...
  ValueNode* allocation =
  // ...

#ifdef V8_INTL_SUPPORT
MaybeReduceResult MaglevGraphBuilder::TryReduceStringPrototypeLocaleCompareIntl(
    compiler::JSFunctionRef target, CallArguments& args) {
  if (args.count() < 1 || args.count() > 3) return {};
  // ...
  compiler::ObjectRef undefined_ref = broker()->undefined_value();
  // ...
  DirectHandle<Object> locales_handle;
  ValueNode* locales_node = nullptr;
  if (args.count() > 1) {
    // ...
    if (!maybe_locales) return {};
    compiler::HeapObjectRef locales = maybe_locales.value();
    if (locales.equals(undefined_ref)) {
      locales_handle = factory->undefined_value();
    // ...
      if (!locales.IsString()) return {};
      compiler::StringRef sref = locales.AsString();
      std::optional<Handle<String>> maybe_locales_handle =
          sref.ObjectIfContentAccessible(broker());
      if (!maybe_locales_handle) return {};
      locales_handle = *maybe_locales_handle;
      locales_node = args[1];
  // ...
    locales_handle = factory->undefined_value();
  // ...
  if (args.count() > 2) {
    // ...
    if (!maybe_options) return {};
    if (!maybe_options.value().equals(undefined_ref)) return {};
  // ...
  DCHECK(!locales_handle.is_null());
  // ...
                                     factory->undefined_value()) !=
 
#ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA
MaybeReduceResult
MaglevGraphBuilder::TryReduceGetContinuationPreservedEmbedderData(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
}

MaybeReduceResult
MaglevGraphBuilder::TryReduceSetContinuationPreservedEmbedderData(
    compiler::JSFunctionRef target, CallArguments& args) {
  if (args.count() == 0) return {};
  // ...
}
#endif  // V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA

template <typename LoadNode>
// ...
  if (!broker()->dependencies()->DependOnArrayBufferDetachingProtector()) {
    // ...
  }
  // ...
                                JS_DATA_VIEW_TYPE, JS_DATA_VIEW_TYPE);
  // ...

template <typename StoreNode, typename Function>
// ...
  if (!broker()->dependencies()->DependOnArrayBufferDetachingProtector()) {
    // ...
  }
  // ...
                                JS_DATA_VIEW_TYPE, JS_DATA_VIEW_TYPE);
  // ...
 
 
MaybeReduceResult MaglevGraphBuilder::TryReduceDataViewPrototypeSetInt8(
    // ...

MaybeReduceResult MaglevGraphBuilder::TryReduceDataViewPrototypeGetInt16(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
}

MaybeReduceResult MaglevGraphBuilder::TryReduceDataViewPrototypeSetInt16(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
}

MaybeReduceResult MaglevGraphBuilder::TryReduceDataViewPrototypeGetInt32(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
}

MaybeReduceResult MaglevGraphBuilder::TryReduceDataViewPrototypeSetInt32(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
}

MaybeReduceResult MaglevGraphBuilder::TryReduceDataViewPrototypeGetFloat64(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
}

MaybeReduceResult MaglevGraphBuilder::TryReduceDataViewPrototypeSetFloat64(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
                           value, NodeType::kNumberOrOddball,
                           // ...
                           std::numeric_limits<double>::quiet_NaN());
  // ...
}
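// The DataView get/set reducers above all funnel through the two templates
// earlier in this file (parameterized on <LoadNode> / <StoreNode>): both
// bail out unless the ArrayBufferDetaching protector can be depended on,
// then check the receiver is a JS_DATA_VIEW_TYPE before emitting a raw
// typed load or store. Illustrative JS (a sketch of the fast path, not the
// full set of conditions):
//   new DataView(buf).setFloat64(0, x)  // x is coerced as NumberOrOddball;
//                                       // holey inputs become quiet NaN.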
 
MaybeReduceResult MaglevGraphBuilder::TryReduceFunctionPrototypeCall(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
  SaveCallSpeculationScope saved(this);
  // ...
}

MaybeReduceResult MaglevGraphBuilder::TryReduceFunctionPrototypeApply(
    compiler::JSFunctionRef target, CallArguments& args) {
  compiler::OptionalHeapObjectRef maybe_receiver;
  // ...
    const compiler::ProcessedFeedback& processed_feedback =
        // ...
    const compiler::CallFeedback& call_feedback = processed_feedback.AsCall();
    if (call_feedback.call_feedback_content() ==
        // ...
      maybe_receiver = call_feedback.target();
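      // If the call-site feedback names a concrete target, the apply
      // receiver can be specialized: e.g. for `f.apply(obj, arr)` with
      // monomorphic feedback for `f`, the generic apply dispatch can be
      // replaced by a checked direct call (a sketch of the intent; the
      // full conditions are in the elided code below).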
 
template <size_t MaxKindCount, typename KindsToIndexFunc>
bool CanInlineArrayResizingBuiltin(
    // ...
    std::array<SmallZoneVector<compiler::MapRef, 2>, MaxKindCount>& map_kinds,
    KindsToIndexFunc&& elements_kind_to_index, int* unique_kind_count,
    // ...
  uint8_t kind_bitmap = 0;
  for (compiler::MapRef map : possible_maps) {
    if (!map.supports_fast_array_resize(broker)) {
      // ...
    }
    // ...
    uint8_t kind_index = elements_kind_to_index(kind);
    kind_bitmap |= 1 << kind_index;
    map_kinds[kind_index].push_back(map);
  }
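  // Presumably the elided tail derives *unique_kind_count from the bitmap:
  // the number of distinct element-kind buckets that received at least one
  // map. A minimal sketch, assuming a popcount-style count (hypothetical;
  // the elided lines may differ):
  //   *unique_kind_count = base::bits::CountPopulation(kind_bitmap);
  //   return kind_bitmap != 0;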
 
template <typename MapKindsT, typename IndexToElementsKindFunc,
          typename BuildKindSpecificFunc>
// ...
    std::optional<MaglevSubGraphBuilder::Label>& do_return,
    int unique_kind_count, IndexToElementsKindFunc&& index_to_elements_kind,
    BuildKindSpecificFunc&& build_kind_specific) {
  // ...
  int emitted_kind_checks = 0;
  bool any_successful = false;
  for (size_t kind_index = 0; kind_index < map_kinds.size(); kind_index++) {
    const auto& maps = map_kinds[kind_index];
    // ...
    if (maps.empty()) continue;
    // ...
    if (++emitted_kind_checks < unique_kind_count) {
      // ...
      std::optional<MaglevSubGraphBuilder::Label> do_push;
      if (maps.size() > 1) {
        do_push.emplace(&sub_graph, static_cast<int>(maps.size()));
        for (size_t map_index = 1; map_index < maps.size(); map_index++) {
          // ...
              &*do_push, {receiver_map, GetConstant(maps[map_index])});
        }
      }
      // ...
          &check_next_map, {receiver_map, GetConstant(maps[0])});
      if (do_push.has_value()) {
        sub_graph.Goto(&*do_push);
        sub_graph.Bind(&*do_push);
      }
      // ...
      if (!build_kind_specific(kind).IsDoneWithAbort()) {
        any_successful = true;
      }
      DCHECK(do_return.has_value());
      // ...
      sub_graph.Bind(&check_next_map);
    } else {
      // ...
      if (!build_kind_specific(kind).IsDoneWithAbort()) {
        any_successful = true;
      }
      if (do_return.has_value()) {
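        // Shape of the emitted sub-graph: for every element-kind bucket
        // except the last, compare the receiver map against each map in the
        // bucket (jumping to do_push on a match, falling through to
        // check_next_map otherwise); the last bucket needs no check because
        // the earlier map checks already guarantee it. any_successful tracks
        // whether at least one kind-specific body was built without
        // aborting.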
 
 
// ...
    return f(map.instance_type());
  // ...
  return std::all_of(maps.begin(), maps.end(), instance_type);
// ...
  CHECK(!InstanceTypeChecker::IsString(type));
  return AllOfInstanceTypesUnsafe(
      maps, [type](InstanceType other) { return type == other; });
 
MaybeReduceResult MaglevGraphBuilder::TryReduceMapPrototypeGet(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Map.prototype.get - no receiver"
                // ...
    }
    // ...
  if (args.count() != 1) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Map.prototype.get - invalid "
                   // ...
    }
    // ...
  }
  // ...
  if (!receiver_info || !receiver_info->possible_maps_are_known()) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout
          << "  ! Failed to reduce Map.prototype.get - unknown receiver map"
          // ...
    }
    // ...
  }
  const PossibleMaps& possible_receiver_maps = receiver_info->possible_maps();
  // ...
  if (possible_receiver_maps.is_empty()) {
    // ...
  }
  if (!AllOfInstanceTypesAre(possible_receiver_maps, JS_MAP_TYPE)) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout
          << "  ! Failed to reduce Map.prototype.get - wrong receiver maps "
          // ...
    }
    // ...
  }
  // ...
  if (key_info && key_info->alternative().int32()) {
    // ...
        {table, key_info->alternative().int32()});
  }
  // ...
 
MaybeReduceResult MaglevGraphBuilder::TryReduceSetPrototypeHas(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
  if (args.count() != 1) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Set.prototype.has - invalid "
                   // ...
    }
    // ...
  }
  // ...
  if (!node_info || !node_info->possible_maps_are_known()) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Set.prototype.has"
                << " - receiver map is unknown" << std::endl;
    }
    // ...
  }
  const PossibleMaps& possible_receiver_maps = node_info->possible_maps();
  // ...
  if (possible_receiver_maps.is_empty()) {
    // ...
  }
  if (!AllOfInstanceTypesAre(possible_receiver_maps, JS_SET_TYPE)) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout
          << "  ! Failed to reduce Set.prototype.has - wrong receiver maps "
          // ...
    }
    // ...
  }
  // ...
MaybeReduceResult MaglevGraphBuilder::TryReduceArrayPrototypePush(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.push - no receiver"
                // ...
    }
    // ...
  if (args.count() != 1) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.push - invalid "
                   // ...
    }
    // ...
  }
  // ...
  if (!node_info || !node_info->possible_maps_are_known()) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout
          << "  ! Failed to reduce Array.prototype.push - unknown receiver map"
          // ...
    }
    // ...
  }
  const PossibleMaps& possible_maps = node_info->possible_maps();
  // ...
  if (possible_maps.is_empty()) {
    // ...
  }
  if (!broker()->dependencies()->DependOnNoElementsProtector()) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.push - "
                   "NoElementsProtector invalidated"
                // ...
    }
    // ...
  }
  // ...
  std::array<SmallZoneVector<compiler::MapRef, 2>, 3> map_kinds = {
      SmallZoneVector<compiler::MapRef, 2>(zone()),
      SmallZoneVector<compiler::MapRef, 2>(zone()),
      SmallZoneVector<compiler::MapRef, 2>(zone())};
  // ...
    return static_cast<uint8_t>(kind) / 2;
  // ...
  auto index_to_elements_kind = [&](uint8_t kind_index) {
    // ...
  };
  int unique_kind_count;
  if (!CanInlineArrayResizingBuiltin(broker(), possible_maps, map_kinds,
                                     elements_kind_to_index, &unique_kind_count,
                                     // ...
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.push - Map doesn't "
                   "support fast resizing"
                // ...
    }
    // ...
  }
  MaglevSubGraphBuilder sub_graph(this, 0);
  // ...
  std::optional<MaglevSubGraphBuilder::Label> do_return;
  if (unique_kind_count > 1) {
    do_return.emplace(&sub_graph, unique_kind_count);
  }
  // ...
  ValueNode* old_array_length_smi;
  // ...
  ValueNode* old_array_length =
      // ...
  ValueNode* new_array_length_smi =
      // ...
        {elements_array, receiver, old_array_length, elements_array_length},
        // ...
                                               JSArray::kLengthOffset,
                                               // ...
                                        old_array_length, value);
  // ...
      receiver, map_kinds, sub_graph, do_return, unique_kind_count,
      index_to_elements_kind, build_array_push));
  if (do_return.has_value()) {
    sub_graph.Bind(&*do_return);
  }
  // ...
  return new_array_length_smi;
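  // End-to-end effect of the push reduction (illustrative): for
  //   const a = [1, 2]; a.push(3);
  // with stable fast-elements maps, the builtin call is replaced by an
  // inlined length load, an elements-capacity check (with a growth path),
  // a store of the new element, and a length update, returning the new
  // Smi length.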
 
}

MaybeReduceResult MaglevGraphBuilder::TryReduceArrayPrototypePop(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.pop - no receiver"
                // ...
    }
    // ...
  if (!node_info || !node_info->possible_maps_are_known()) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout
          << "  ! Failed to reduce Array.prototype.pop - unknown receiver map"
          // ...
    }
    // ...
  }
  const PossibleMaps& possible_maps = node_info->possible_maps();
  // ...
  if (possible_maps.is_empty()) {
    // ...
  }
  if (!broker()->dependencies()->DependOnNoElementsProtector()) {
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.pop - "
                   "NoElementsProtector invalidated"
                // ...
    }
    // ...
  }
  // ...
  constexpr int max_kind_count = 4;
  std::array<SmallZoneVector<compiler::MapRef, 2>, max_kind_count> map_kinds = {
      SmallZoneVector<compiler::MapRef, 2>(zone()),
      SmallZoneVector<compiler::MapRef, 2>(zone()),
      SmallZoneVector<compiler::MapRef, 2>(zone()),
      SmallZoneVector<compiler::MapRef, 2>(zone())};
  // ...
    uint8_t kind_int = static_cast<uint8_t>(kind);
    uint8_t kind_index = ((kind_int & 0x4) >> 1) | (kind_int & 0x1);
    // ...
  auto index_to_elements_kind = [&](uint8_t kind_index) {
    // ...
    kind_int = ((kind_index & 0x2) << 1) | (kind_index & 0x1);
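    // The bit tricks above pack the supported element kinds into a dense
    // 0..3 index. Assuming the standard ElementsKind numbering
    // (PACKED_SMI_ELEMENTS = 0, HOLEY_SMI_ELEMENTS = 1,
    // PACKED_DOUBLE_ELEMENTS = 4, HOLEY_DOUBLE_ELEMENTS = 5), bit 2 of the
    // kind selects double vs. tagged storage and bit 0 selects packed vs.
    // holey, so kinds 0/1/4/5 map to indices 0/1/2/3 (tagged object kinds
    // share the tagged buckets), and index_to_elements_kind inverts the
    // mapping.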
 
  // ...
  int unique_kind_count;
  if (!CanInlineArrayResizingBuiltin(broker(), possible_maps, map_kinds,
                                     elements_kind_to_index, &unique_kind_count,
                                     // ...
    if (v8_flags.trace_maglev_graph_building) {
      std::cout << "  ! Failed to reduce Array.prototype.pop - Map doesn't "
                   "support fast resizing"
                // ...
    }
    // ...
  }
  MaglevSubGraphBuilder sub_graph(this, 2);
  MaglevSubGraphBuilder::Variable var_value(0);
  MaglevSubGraphBuilder::Variable var_new_array_length(1);
  // ...
  std::optional<MaglevSubGraphBuilder::Label> do_return =
      std::make_optional<MaglevSubGraphBuilder::Label>(
          &sub_graph, unique_kind_count + 1,
          std::initializer_list<MaglevSubGraphBuilder::Variable*>{
              &var_value, &var_new_array_length});
  MaglevSubGraphBuilder::Label empty_array(&sub_graph, 1);
  // ...
  ValueNode* old_array_length_smi;
  // ...
  sub_graph.GotoIfTrue<BranchIfReferenceEqual>(
      // ...
  ValueNode* new_array_length_smi =
      // ...
  ValueNode* new_array_length =
      // ...
  sub_graph.set(var_new_array_length, new_array_length_smi);
  // ...
    ValueNode* writable_elements_array =
        // ...
                                               JSArray::kLengthOffset,
                                               // ...
          writable_elements_array, new_array_length,
          // ...
    sub_graph.set(var_value, value);
  // ...
      receiver, map_kinds, sub_graph, do_return, unique_kind_count,
      index_to_elements_kind, build_array_pop));
  // ...
  sub_graph.Bind(&empty_array);
  // ...
  sub_graph.set(var_value, GetRootConstant(RootIndex::kUndefinedValue));
  sub_graph.Goto(&*do_return);
  // ...
  sub_graph.Bind(&*do_return);
  // ...
                      sub_graph.get(var_new_array_length), false,
                      // ...
  return sub_graph.get(var_value);
 
}

MaybeReduceResult MaglevGraphBuilder::TryReduceFunctionPrototypeHasInstance(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
  if (args.count() != 1) {
    // ...
  }
  compiler::OptionalHeapObjectRef maybe_receiver_constant =
      // ...
  if (!maybe_receiver_constant) {
    // ...
  }
  compiler::HeapObjectRef receiver_object = maybe_receiver_constant.value();
  if (!receiver_object.IsJSObject() ||
      !receiver_object.map(broker()).is_callable()) {
    // ...
  }
 
MaybeReduceResult MaglevGraphBuilder::TryReduceObjectPrototypeHasOwnProperty(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
      auto* receiver_map =
          // ...
          DeoptimizeReason::kWrongMapDynamic);
  // ...
  compiler::OptionalMapRef maybe_receiver_map;
  // ...
  if (receiver_ref.has_value()) {
    compiler::HeapObjectRef receiver_object = receiver_ref.value();
    compiler::MapRef receiver_map = receiver_object.map(broker());
    maybe_receiver_map = receiver_map;
  } else {
    // ...
    if (known_info->possible_maps_are_known()) {
      // ...
      if (possible_maps.size() == 1) {
        compiler::MapRef receiver_map = *(possible_maps.begin());
        maybe_receiver_map = receiver_map;
      }
    }
  }
  if (!maybe_receiver_map.has_value()) return {};
  // ...
  compiler::MapRef receiver_map = maybe_receiver_map.value();
  InstanceType instance_type = receiver_map.instance_type();
  int const nof = receiver_map.NumberOfOwnDescriptors();
  // ...
      receiver_map.is_dictionary_map()) {
    // ...
  }
  MaglevSubGraphBuilder sub_graph(this, 1);
  MaglevSubGraphBuilder::Variable var_result(0);
  MaglevSubGraphBuilder::Label done(
      &sub_graph, nof + 1,
      std::initializer_list<MaglevSubGraphBuilder::Variable*>{&var_result});
  const compiler::DescriptorArrayRef descriptor_array =
      receiver_map.instance_descriptors(broker());
  for (InternalIndex key_index : InternalIndex::Range(nof)) {
    compiler::NameRef receiver_key =
        descriptor_array.GetPropertyKey(broker(), key_index);
    // ...
    sub_graph.GotoIfTrue<BranchIfReferenceEqual>(&done, {lhs, args[0]});
  }
  // ...
  sub_graph.Goto(&done);
  sub_graph.Bind(&done);
  return sub_graph.get(var_result);
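  // The descriptor loop above unrolls hasOwnProperty over the receiver's
  // own property names: each key is compared by reference against args[0],
  // branching to `done` (with var_result set on the matching path) on a
  // hit. This only pays off because nof is bounded by the map's own
  // descriptor count; e.g. ({a: 1, b: 2}).hasOwnProperty(k) becomes two
  // reference compares (illustrative).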
 
}

// ...
  if (!info || !info->possible_maps_are_known()) {
    // ...
  }
  auto& possible_maps = info->possible_maps();
  if (possible_maps.is_empty()) {
    // ...
  }
  auto it = possible_maps.begin();
  // ...
  DCHECK(!map.IsPrimitiveMap() && map.IsJSReceiverMap());
  // ...
  for (; it != possible_maps.end(); ++it) {
    // ...
        !proto.equals(map.prototype(broker()))) {
      // ...
    }
    DCHECK(!map.IsPrimitiveMap() && map.IsJSReceiverMap());
  }
  // ...

  if (args.count() != 0) return {};
  // ...

MaybeReduceResult MaglevGraphBuilder::TryReduceObjectGetPrototypeOf(
    compiler::JSFunctionRef target, CallArguments& args) {
  if (args.count() != 1) return {};
  // ...
}

MaybeReduceResult MaglevGraphBuilder::TryReduceReflectGetPrototypeOf(
    compiler::JSFunctionRef target, CallArguments& args) {
  return TryReduceObjectGetPrototypeOf(target, args);
}
 
MaybeReduceResult MaglevGraphBuilder::TryReduceMathRound(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
}

MaybeReduceResult MaglevGraphBuilder::TryReduceNumberParseInt(
    compiler::JSFunctionRef target, CallArguments& args) {
  if (args.count() == 0) {
    // ...
  }
  if (args.count() != 1) {
    // ...
      if (root_cst->index() != RootIndex::kUndefinedValue) {
        // ...
      }
    // ...
      if (smi_cst->value().value() != 10 && smi_cst->value().value() != 0) {
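        // parseInt is only reduced when the radix argument is statically
        // known to be absent/undefined, 10, or 0 (0 behaves like an
        // unspecified radix for non-prefixed input), since only then does
        // parseInt(x) agree with the number conversions on the fast paths
        // below. E.g. parseInt("42") === 42 but parseInt("42", 16) === 66
        // (illustrative).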
 
  // ...
  ValueNode* arg = args[0];
  // ...
  switch (arg->value_representation()) {
    // ...
      switch (CheckTypes(arg, {NodeType::kSmi})) {
        case NodeType::kSmi:
          // ...
      }
    // ...
  }
}

MaybeReduceResult MaglevGraphBuilder::TryReduceMathAbs(
    compiler::JSFunctionRef target, CallArguments& args) {
  if (args.count() == 0) {
    // ...
  }
  ValueNode* arg = args[0];
  // ...
  switch (arg->value_representation()) {
    // ...
      switch (CheckTypes(arg, {NodeType::kSmi, NodeType::kNumberOrOddball})) {
        case NodeType::kSmi:
          // ...
        case NodeType::kNumberOrOddball:
          // ...
              arg, NodeType::kNumberOrOddball,
              // ...
 
MaybeReduceResult MaglevGraphBuilder::TryReduceMathFloor(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
}

MaybeReduceResult MaglevGraphBuilder::TryReduceMathCeil(
    compiler::JSFunctionRef target, CallArguments& args) {
  // ...
}

// ...
  if (args.count() == 0) {
    // ...
  }
  // ...
  if (CheckType(arg, NodeType::kSmi)) return arg;
  if (!IsSupported(CpuOperation::kFloat64Round)) {
    // ...
  }
  // ...
  if (CheckType(arg, NodeType::kNumberOrOddball)) {
    // ...
            arg, NodeType::kNumberOrOddball,
            // ...
 
 
MaybeReduceResult MaglevGraphBuilder::TryReduceStringConstructor(
    // ...
  if (args.count() == 0) {
    // ...
  }
  // ...

MaybeReduceResult MaglevGraphBuilder::TryReduceMathPow(
    compiler::JSFunctionRef target, CallArguments& args) {
  if (args.count() < 2) {
    // ...
    if (args.count() == 1 && args[0]->properties().is_tagged()) {
      // ...
    }
  }
  // ...
  if (args[0]->properties().is_tagged() && args[1]->properties().is_tagged()) {
    // ...
 
#define MATH_UNARY_IEEE_BUILTIN_REDUCER(MathName, ExtName, EnumName)          \
  MaybeReduceResult MaglevGraphBuilder::TryReduce##MathName(                  \
      compiler::JSFunctionRef target, CallArguments& args) {                  \
    if (args.count() < 1) {                                                   \
      return GetRootConstant(RootIndex::kNanValue);                           \
    }                                                                         \
    if (!CanSpeculateCall()) {                                                \
      ValueRepresentation rep = args[0]->properties().value_representation(); \
      if (rep == ValueRepresentation::kTagged ||                              \
          rep == ValueRepresentation::kHoleyFloat64) {                        \
        return {};                                                            \
      }                                                                       \
    }                                                                         \
    ValueNode* value =                                                        \
        GetFloat64ForToNumber(args[0], NodeType::kNumber,                     \
                              TaggedToFloat64ConversionType::kOnlyNumber);    \
    return AddNewNode<Float64Ieee754Unary>(                                   \
        {value}, Float64Ieee754Unary::Ieee754Function::k##EnumName);          \
  }

// ...
#undef MATH_UNARY_IEEE_BUILTIN_REDUCER
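// The macro above stamps out one reducer per Math function that is backed
// by an IEEE-754 unary operation (sin, cos, log, exp, ...). Expansion
// sketch for a single entry (hypothetical instantiation; the real list is
// applied via the elided builtin table):
//   MATH_UNARY_IEEE_BUILTIN_REDUCER(MathSin, sin, Sin)
// defines TryReduceMathSin, which emits a Float64Ieee754Unary node, and
// folds a zero-argument call like Math.sin() to NaN.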
  if (!shared.HasBuiltinId()) return {};
  if (v8_flags.trace_maglev_graph_building) {
    std::cout << "  ! Trying to reduce builtin "
              // ...
  }
  switch (shared.builtin_id()) {
#define CASE(Name, ...)  \
  case Builtin::k##Name: \
    return TryReduce##Name(target, args);
    // ...
  }
  // ...
      return args.receiver();
  // ...
        broker()->target_native_context().global_proxy_object(broker()));
  // ...
  if (compiler::OptionalHeapObjectRef maybe_constant =
          // ...
    if (constant.IsNullOrUndefined()) {
      // ...
          broker()->target_native_context().global_proxy_object(broker()));
 
 
  int arg_count = static_cast<int>(args.count());
  // ...
  for (int i = 0; i < arg_count; i++) {
    arguments[i + 1] = args[i];
  }
  // ...

template <typename CallNode, typename... Args>
// ...
                                             Args&&... extra_args) {
  size_t input_count = args.count_with_receiver() + CallNode::kFixedInputCount;
  // ...
      [&](CallNode* call) {
        // ...
        call->set_arg(arg_index++,
                      // ...
      },
      // ...
      std::forward<Args>(extra_args)...);
  // ...

  switch (args.mode()) {
    // ...
  }
  // ...
      shared.internal_formal_parameter_count_with_receiver());
 
  // ...
        for (int i = 0; i < static_cast<int>(args.count()); i++) {
          // ...
        }
  // ...
    return target.object().equals(
        // ...
  return target.object()->shared() ==
         // ...

  // ...
  compiler::OptionalObjectRef maybe_callback_data =
      // ...
  if (!maybe_callback_data.has_value()) {
    // ...
  }
  // ...
  if (maybe_shared.has_value()) {
    // ...
          ? (v8_flags.maglev_inline_api_calls
             // ...
        for (int i = 0; i < static_cast<int>(args.count()); i++) {
          // ...
        }
  // ...
  compiler::OptionalFunctionTemplateInfoRef maybe_function_template_info =
      shared.function_template_info(broker());
  if (!maybe_function_template_info.has_value()) {
    // ...
  }
  // ...
      maybe_function_template_info.value();
 
  // ...
  switch (api_holder.lookup) {
    // ...
      builtin_name = Builtin::kCallFunctionTemplate_CheckCompatibleReceiver;
      // ...
      builtin_name = Builtin::kCallFunctionTemplate_CheckAccess;
      // ...
          Builtin::kCallFunctionTemplate_CheckAccessAndCompatibleReceiver;
  }
  // ...
  int kFunctionTemplateInfo = 1;
  // ...
      kFunctionTemplateInfo + kArgc + kContext + args.count_with_receiver(),
      // ...
        call_builtin->set_arg(arg_index++, GetConstant(function_template_info));
        call_builtin->set_arg(
            arg_index++,
            GetInt32Constant(JSParameterCount(static_cast<int>(args.count()))));
        // ...
        call_builtin->set_arg(arg_index++, GetTaggedValue(receiver));
        for (int i = 0; i < static_cast<int>(args.count()); i++) {
          call_builtin->set_arg(arg_index++, GetTaggedValue(args[i]));
        }
      // ...
      builtin_name, GetTaggedValue(GetContext()));
 
 
  // ...
  if (function.native_context(broker()) != broker()->target_native_context()) {
    // ...
  }
  // ...
    DCHECK(!shared.HasBuiltinId());
  // ...
#ifdef V8_ENABLE_LEAPTIERING
        function.dispatch_handle(),
#endif
        // ...
        shared, function.raw_feedback_cell(broker()), args, feedback_source);
  // ...
#ifdef V8_ENABLE_LEAPTIERING
  // ...
#endif
        for (int i = 0; i < static_cast<int>(args.count()); i++) {
          // ...
        }
#ifdef V8_ENABLE_LEAPTIERING
  // ...
#endif
#ifdef V8_ENABLE_LEAPTIERING
  // ...
#endif
  // ...
  constexpr int kSkipReceiver = 1;
  int argcount_without_receiver =
      static_cast<int>(arguments.size()) - kSkipReceiver;
  size_t input_count =
      // ...
        for (int i = 0; i < argcount_without_receiver; i++) {
          // ...
        }
#ifdef V8_ENABLE_LEAPTIERING
  // ...
#endif
#ifdef V8_ENABLE_LEAPTIERING
  // ...
#endif
#ifdef V8_ENABLE_LEAPTIERING
  // ...
#endif
                                      shared, feedback_cell, args,
                                      // ...
#ifdef V8_ENABLE_LEAPTIERING
  // ...
#endif
                                  shared, feedback_cell, args, feedback_source);
 
 
  // ...
  DCHECK(!ref.IsHeapNumber());
  // ...
  if (compiler::OptionalHeapObjectRef maybe_constant = TryGetConstant(node)) {
    if (maybe_constant.value().equals(ref)) {
      // ...
    }
  }
  // ...
  if (ref.IsHeapObject() && !ref.IsHeapNumber()) {
    // ...
  }
  // ...
  if (!IsConstantNode(node->opcode()) && ref.IsInternalizedString()) {
    // ...
  }
  // ...
  DCHECK(ref.IsSmi() || ref.IsHeapNumber());
  // ...
    int ref_value = ref.AsSmi();
    // ...
    DCHECK(ref.IsHeapNumber());
    Float64 ref_value = Float64::FromBits(ref.AsHeapNumber().value_as_bits());
    // ...
      if (f64 == ref_value) {
        // ...
      }
    } else if (compiler::OptionalHeapObjectRef constant =
                   // ...
      if (constant.value().IsHeapNumber()) {
        // ...
            Float64::FromBits(constant.value().AsHeapNumber().value_as_bits());
        // ...
        if (f64 == ref_value) {
          // ...
        }
      }
    }
  // ...
  if (!node->is_tagged()) return node;
  compiler::OptionalHeapObjectRef maybe_constant = TryGetConstant(node);
  if (maybe_constant) {
    return maybe_constant.value().IsTheHole()
           // ...
  }
  // ...
  compiler::OptionalHeapObjectRef maybe_constant = TryGetConstant(node);
  if (maybe_constant) {
    if (maybe_constant.value().IsTheHole()) {
      // ...
    }
  }
  // ...
  if (!shared.HasBreakInfo(broker())) {
    // ...
  }
  // ...
    DCHECK(IsCallable(*target.object()));
 
 
  // ...
  if (!receiver_info || !receiver_info->possible_maps_are_known()) {
    // ...
  }
  DCHECK(!receiver_info->possible_maps().is_empty());
  // ...
                                                        first_receiver_map);
  // ...
  CHECK(first_receiver_map.IsJSReceiverMap());
  // ...
    CHECK(receiver_map.IsJSReceiverMap());
    CHECK(!receiver_map.is_access_check_needed() ||
          // ...
  // ...
      target_node, target, DeoptimizeReason::kWrongCallTarget));
  // ...
#ifdef V8_ENABLE_LEAPTIERING
  // ...
#endif
  // ...
  if (!shared.HasBreakInfo(broker())) {
    // ...
  }
  // ...
        target_context, target_node,
#ifdef V8_ENABLE_LEAPTIERING
        // ...
#endif
        shared, feedback_cell, args, feedback_source));
 
 
  // ...
  if (maybe_receiver.has_value()) {
    // ...
        function, maybe_receiver.value(), DeoptimizeReason::kWrongCallTarget));
  }
  // ...
  if (args.count() == 0) {
    // ...
    return ReduceCall(function, empty_args, feedback_source);
  }
  auto build_call_only_with_new_receiver = [&] {
    // ...
    return ReduceCall(function, new_args, feedback_source);
  };
  if (args.count() == 1 || IsNullValue(args[1]) || IsUndefinedValue(args[1])) {
    return build_call_only_with_new_receiver();
  }
  auto build_call_with_array_like = [&] {
    // ...
  };
  // ...
    return build_call_with_array_like();
  // ...
      [&](auto& builder) {
        // ...
      },
      build_call_only_with_new_receiver, build_call_with_array_like);
 
 
  // ...
        DeoptimizeReason::kInsufficientTypeFeedbackForCall);
  // ...
  if (call_feedback.target().has_value() &&
      call_feedback.target()->IsJSFunction()) {
    // ...
        call_feedback.target()->AsJSFunction();
    // ...
          target_node, apply_function, DeoptimizeReason::kWrongCallTarget));
      // ...
      feedback_target = apply_function;
    // ...
        target_node, feedback_target, DeoptimizeReason::kWrongCallTarget));
 
 
  // ...
  DCHECK(arguments_object->map().IsJSArgumentsObjectMap() ||
         arguments_object->map().IsJSArrayMap());
  args.PopArrayLikeArgument();
  // ...
      arguments_object->get(JSArgumentsObject::kElementsOffset);
  // ...
    if (compiler::OptionalHeapObjectRef maybe_constant =
            // ...
      if (maybe_constant->IsJSFunction()) {
        // ...
            maybe_constant->AsJSFunction().shared(broker());
      }
    }
    // ...
    int start_index = 0;
    // ...
                                              start_index, target_type);
    // ...
              RootIndex::kEmptyFixedArray);
    // ...
    return ReduceCall(target_node, new_args, feedback_source);
  }
  // ...
    DCHECK(constant_value->object().IsFixedArray());
    // ...
    for (int i = 0; i < static_cast<int>(args.count()); i++) {
      // ...
    }
    for (uint32_t i = 0; i < elements.length(); i++) {
      // ...
    }
    // ...
    return ReduceCall(target_node, new_args, feedback_source);
  }
  // ...
  for (int i = 0; i < static_cast<int>(args.count()); i++) {
    // ...
  }
  // ...
  return ReduceCall(target_node, new_args, feedback_source);
 
 
  // ...
  return broker->target_native_context()
      .fast_aliased_arguments_map(broker)
      // ...

std::optional<VirtualObject*>
// ...
  if (!object->has_static_map()) return {};
  // ...
  if (map.IsJSArrayMap() && object->get(JSArgumentsObject::kElementsOffset)
          // ...
  if (map.IsJSArgumentsObjectMap() &&
      !IsSloppyMappedArgumentsObject(broker(), map)) {
    // ...
  }
  // ...
  if (std::optional<VirtualObject*> arguments_object =
          // ...
        target_node, args, *arguments_object, feedback_source));
 
 
  // ...
  if (compiler::OptionalHeapObjectRef maybe_constant =
          // ...
    if (maybe_constant->IsJSFunction()) {
      // ...
          target_node, maybe_constant->AsJSFunction(), args, feedback_source);
    }
  }
  // ...
        fast_create_closure, fast_create_closure->context().node(),
#ifdef V8_ENABLE_LEAPTIERING
        fast_create_closure->feedback_cell().dispatch_handle(),
#endif
        fast_create_closure->shared_function_info(),
        fast_create_closure->feedback_cell(), args, feedback_source);
  // ...
        create_closure, create_closure->context().node(),
#ifdef V8_ENABLE_LEAPTIERING
        create_closure->feedback_cell().dispatch_handle(),
#endif
        create_closure->shared_function_info(), create_closure->feedback_cell(),
        args, feedback_source);
  // ...
  const int receiver_count =
      // ...
  const int reg_count = arg_count + receiver_count;
  // ...
  switch (reg_count) {
    // ...
  }
 
 
ReduceResult MaglevGraphBuilder::VisitCallAnyReceiver() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitCallProperty() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitCallProperty0() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitCallProperty1() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitCallProperty2() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitCallUndefinedReceiver() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitCallUndefinedReceiver0() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitCallUndefinedReceiver1() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitCallUndefinedReceiver2() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitCallWithSpread() {
  // ...
  compiler::FeedbackSource feedback_source(feedback(), slot);
  // ...
}
 
ReduceResult MaglevGraphBuilder::VisitCallRuntime() {
  // ...
      [&](CallRuntime* call_runtime) {
        for (int i = 0; i < args.register_count(); ++i) {
          // ...
        }
      },
  // ...
    return BuildAbort(AbortReason::kUnexpectedReturnFromThrow);
  // ...
}

ReduceResult MaglevGraphBuilder::VisitCallJSRuntime() {
  // ...
  ValueNode* callee =
      // ...
}

ReduceResult MaglevGraphBuilder::VisitCallRuntimeForPair() {
  // ...
      [&](CallRuntime* call_runtime) {
        for (int i = 0; i < args.register_count(); ++i) {
          // ...
        }
      },
  // ...
}

ReduceResult MaglevGraphBuilder::VisitInvokeIntrinsic() {
  // ...
  switch (intrinsic_id) {
#define CASE(Name, _, arg_count)                                         \
  case Runtime::kInline##Name:                                           \
    DCHECK_IMPLIES(arg_count != -1, arg_count == args.register_count()); \
    return VisitIntrinsic##Name(args);
    // ...
  }
}
ReduceResult MaglevGraphBuilder::VisitIntrinsicCopyDataProperties(
    interpreter::RegisterList args) {
  // ...
}

ReduceResult MaglevGraphBuilder::
    VisitIntrinsicCopyDataPropertiesWithExcludedPropertiesOnStack(
        interpreter::RegisterList args) {
  SmiConstant* excluded_property_count =
      // ...
  int kExcludedPropertyCount = 1;
  // ...
      args.register_count() + kContext + kExcludedPropertyCount,
      [&](CallBuiltin* call_builtin) {
        // ...
        call_builtin->set_arg(arg_index++, GetTaggedValue(args[0]));
        call_builtin->set_arg(arg_index++, excluded_property_count);
        for (int i = 1; i < args.register_count(); i++) {
          call_builtin->set_arg(arg_index++, GetTaggedValue(args[i]));
        }
      },
      Builtin::kCopyDataPropertiesWithExcludedProperties,
      GetTaggedValue(GetContext()));
  SetAccumulator(call_builtin);
 
ReduceResult MaglevGraphBuilder::VisitIntrinsicCreateIterResultObject(
    interpreter::RegisterList args) {
  // ...
  compiler::MapRef map =
      // ...
  ValueNode* allocation =
      // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicCreateAsyncFromSyncIterator(
    interpreter::RegisterList args) {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicCreateJSGeneratorObject(
    interpreter::RegisterList args) {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicGeneratorGetResumeMode(
    interpreter::RegisterList args) {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicGeneratorClose(
    interpreter::RegisterList args) {
  // ...
                                      JSGeneratorObject::kContinuationOffset,
                                      // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicGetImportMetaObject(
    interpreter::RegisterList args) {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicAsyncFunctionAwait(
    interpreter::RegisterList args) {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicAsyncFunctionEnter(
    interpreter::RegisterList args) {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicAsyncFunctionReject(
    interpreter::RegisterList args) {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicAsyncFunctionResolve(
    interpreter::RegisterList args) {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicAsyncGeneratorAwait(
    interpreter::RegisterList args) {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicAsyncGeneratorReject(
    interpreter::RegisterList args) {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicAsyncGeneratorResolve(
    interpreter::RegisterList args) {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitIntrinsicAsyncGeneratorYieldWithAwait(
    interpreter::RegisterList args) {
  // ...
}

// ...
        construct->set_arg(arg_index++,
                           // ...
 
 
  // ...
  array->set(JSArray::kElementsOffset, elements);
  // ...
  array->set(JSArray::kElementsOffset, elements);
  // ...
    array->set(map.GetInObjectPropertyOffset(i),
               // ...
  array->ClearSlots(map.GetInObjectPropertyOffset(
      // ...

  // ...
  compiler::OptionalHeapObjectRef maybe_constant = TryGetConstant(closure);
  if (!maybe_constant.has_value()) return {};
  if (!maybe_constant->IsJSFunction()) return {};
  // ...
  if (!function.has_initial_map(broker())) return {};
  // ...
  DCHECK(shared.HasBytecodeArray());
  // ...
  int parameter_count_no_receiver = bytecode_array.parameter_count() - 1;
  int length = parameter_count_no_receiver + bytecode_array.register_count();
  // ...
      closure, receiver, register_file);
  // ...
    generator->set(initial_map.GetInObjectPropertyOffset(i), undefined);
  // ...
  generator->ClearSlots(
      initial_map.GetInObjectPropertyOffset(
          // ...
 
compiler::OptionalMapRef GetArrayConstructorInitialMap(
    // ...
    ElementsKind elements_kind, size_t argc, std::optional<int> maybe_length) {
  // ...
  if (argc == 1 && (!maybe_length.has_value() || *maybe_length > 0)) {
    // ...
  }
  return initial_map.AsElementsKind(broker, elements_kind);
}

// ...
    compiler::OptionalAllocationSiteRef maybe_allocation_site) {
  // ...
      maybe_allocation_site.has_value()
          ? maybe_allocation_site->GetElementsKind()
          // ...
  std::optional<int> maybe_length;
  if (args.count() == 1) {
    // ...
  }
  compiler::OptionalMapRef maybe_initial_map = GetArrayConstructorInitialMap(
      broker(), array_function, elements_kind, args.count(), maybe_length);
  if (!maybe_initial_map.has_value()) return {};
  // ...
  bool can_inline_call = false;
  // ...
  if (maybe_allocation_site) {
    can_inline_call = maybe_allocation_site->CanInlineCall();
  // ...
    can_inline_call = array_constructor_protector.value(broker()).AsSmi() ==
                      // ...
  }
  // ...
  if (args.count() == 0) {
    // ...
        slack_tracking_prediction, allocation_type);
  }
  // ...
  if (maybe_length.has_value() && *maybe_length >= 0 &&
      // ...
                                   slack_tracking_prediction, allocation_type);
  }
  // ...
  if (args.count() == 1 && can_inline_call) {
    // ...
        [&](auto& builder) {
          // ...
                                           Operation::kGreaterThanOrEqual,
                                           // ...
                                         slack_tracking_prediction,
                                         // ...
              static_cast<int>(MessageTemplate::kInvalidArrayLength));
 
 
  // ...
  switch (shared_function_info.builtin_id()) {
    case Builtin::kArrayConstructor: {
      // ...
    }
    case Builtin::kObjectConstructor: {
      // ...
      if (args.count() == 0) {
        // ...
            target, builtin, DeoptimizeReason::kWrongConstructor));
      }
      // ...
    }
  }
  // ...
      target, function, DeoptimizeReason::kWrongConstructor));
  // ...
  int construct_arg_count = static_cast<int>(args.count());
  // ...
  for (int i = 0; i < construct_arg_count; i++) {
    construct_arguments_without_receiver[i] = args[i];
  }
  // ...
    args.set_receiver(implicit_receiver);
  // ...
      call_result = result.value();
    // ...
    if (CheckType(call_result, NodeType::kJSReceiver)) return call_result;
    // ...
    if (compiler::OptionalHeapObjectRef maybe_constant =
            // ...
      if (constant.IsJSReceiver()) return constant_node;
    // ...
  ValueNode* implicit_receiver = nullptr;
  if (function.has_initial_map(broker())) {
    // ...
  }
  if (implicit_receiver == nullptr) {
    // ...
  }
  EnsureType(implicit_receiver, NodeType::kJSReceiver);
  // ...
  args.set_receiver(implicit_receiver);
  // ...
    call_result = result.value();
  // ...
  if (CheckType(call_result, NodeType::kJSReceiver)) return call_result;
  // ...
  if (compiler::OptionalHeapObjectRef maybe_constant =
          // ...
    if (constant.IsJSReceiver()) return constant_node;
    return implicit_receiver;
  }
 
 
  // ...
  DCHECK(!feedback_target.IsAllocationSite());
  // ...
  if (!feedback_target.IsJSFunction()) return {};
  // ...
      function.shared(broker());
  // ...
  if (function.native_context(broker()) != broker()->target_native_context()) {
    // ...
  }
  // ...
  if (shared_function_info.HasBuiltinId()) {
    // ...
  }
  if (shared_function_info.construct_as_builtin()) {
    // ...
  }
  // ...
        DeoptimizeReason::kInsufficientTypeFeedbackForConstruct);
  // ...
  compiler::OptionalHeapObjectRef feedback_target =
      // ...
  if (feedback_target.has_value() && feedback_target->IsAllocationSite()) {
    // ...
        target, array_function, DeoptimizeReason::kWrongConstructor));
    // ...
                                           feedback_target->AsAllocationSite()),
    // ...
    if (feedback_target.has_value()) {
      // ...
    }
    if (compiler::OptionalHeapObjectRef maybe_constant =
            // ...
 
ReduceResult MaglevGraphBuilder::VisitConstructWithSpread() {
  // ...
  compiler::FeedbackSource feedback_source(feedback(), slot);
  // ...
  size_t input_count =
      // ...
      [&](ConstructWithSpread* construct) {
        // ...
        construct->set_arg(arg_index++,
                           // ...
        for (int i = 0; i < args.register_count(); i++) {
          // ...
        }
      },
  // ...
}

ReduceResult MaglevGraphBuilder::VisitConstructForwardAllArgs() {
  // ...
  compiler::FeedbackSource feedback_source{feedback(), slot};
  // ...
    base::SmallVector<ValueNode*, 8> forwarded_args(argument_count());
    // ...
                       std::move(forwarded_args));
 
ReduceResult MaglevGraphBuilder::VisitTestEqual() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitTestEqualStrict() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitTestLessThan() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitTestLessThanOrEqual() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitTestGreaterThan() {
  // ...
}
ReduceResult MaglevGraphBuilder::VisitTestGreaterThanOrEqual() {
  // ...
}

// ...
  if (!node_info || !node_info->possible_maps_are_known()) {
    // ...
  }
  if (node_info->possible_maps().is_empty()) {
    // ...
  }
  // ...
      if (!map.IsJSObjectMap()) {
        // ...
      }
      // ...
      if (map_prototype.equals(prototype)) {
        // ...
      }
      // ...
      if (!map.is_stable() || map.is_dictionary_map()) {
        // ...
      }
  // ...
    compiler::OptionalJSObjectRef last_prototype;
    // ...
      if (!prototype.IsJSObject() || !prototype.map(broker()).is_stable()) {
        // ...
      }
      last_prototype = prototype.AsJSObject();
 
 
    // ...
    ValueNode* callable_node_if_not_constant) {
  const bool is_constant = callable_node_if_not_constant == nullptr;
  if (!is_constant) return {};
  // ...
  if (callable.IsJSBoundFunction()) {
    // ...
        function.bound_target_function(broker());
    // ...
    if (bound_target_function.IsJSObject()) {
      // ...
          object, bound_target_function.AsJSObject(), nullptr));
    }
    // ...
  }
  if (callable.IsJSFunction()) {
    // ...
        !function.has_instance_prototype(broker()) ||
        function.PrototypeRequiresRuntimeLookup(broker())) {
      // ...
    }
  }
  // ...

    // ...
    ValueNode* callable_node_if_not_constant) {
  // ...
      object, callable, callable_node_if_not_constant));
  // ...
      {callable_node_if_not_constant
       // ...

    // ...
    ValueNode* callable_node_if_not_constant) {
  // ...
    if (callable_node_if_not_constant) {
      // ...
          callable_node_if_not_constant,
          // ...
    }
    // ...
                                    callable_node_if_not_constant);
  // ...
    compiler::OptionalJSObjectRef holder = access_info.holder();
    bool found_on_proto = holder.has_value();
    // ...
        found_on_proto ? holder.value() : callable;
    // ...
    compiler::OptionalObjectRef has_instance_field =
        // ...
    if (!has_instance_field.has_value() ||
        !has_instance_field->IsHeapObject() ||
        !has_instance_field->AsHeapObject().map(broker()).is_callable()) {
      // ...
    }
    // ...
    if (found_on_proto) {
      // ...
    }
    // ...
    if (callable_node_if_not_constant) {
      // ...
                                     DeoptimizeReason::kWrongValue));
      callable_node = callable_node_if_not_constant;
    }
    // ...
    if (has_instance_field->IsJSFunction()) {
      // ...
          has_instance_field->AsJSFunction().shared(broker());
      if (shared.HasBuiltinId() &&
          shared.builtin_id() == Builtin::kFunctionPrototypeHasInstance) {
        // ...
                                        callable_node_if_not_constant);
      }
    }
    // ...
                       {callable_node, object});
    // ...
          this, Builtin::kToBooleanLazyDeoptContinuation);
      // ...
      if (has_instance_field->IsJSFunction()) {
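        // The path above implements the Symbol.hasInstance protocol while
        // still staying inlined: load @@hasInstance from the callable (or
        // from the holder found on its prototype chain), verify it is
        // callable, call it, and coerce the result with ToBoolean under a
        // lazy-deopt continuation. Illustrative JS it must match:
        //   class C { static [Symbol.hasInstance](x) { return 1; } }
        //   ({} instanceof C) === true  // the result is ToBoolean'd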
 
 
template <bool flip>
// ...
  switch (value->value_representation()) {
    // ...
  }
  // ...
  if (CheckType(value, NodeType::kJSReceiver, &value_type)) {
    // ...
    if constexpr (!flip) {
      // ...
    }
  }
  if (CheckType(value, NodeType::kString)) {
    // ...
  } else if (CheckType(value, NodeType::kSmi)) {
    // ...
  }
  if (falsy_value != nullptr) {
    // ...
        {value, falsy_value});
  }
  if (CheckType(value, NodeType::kBoolean)) {
    if constexpr (flip) {
      // ...
    }
    // ...
  }
  // ...
      {value}, GetCheckType(value_type));

  // ...
  if (feedback.IsInsufficient()) {
    // ...
        DeoptimizeReason::kInsufficientTypeFeedbackForInstanceOf);
  }
  // ...
  compiler::OptionalHeapObjectRef maybe_constant;
  // ...
      maybe_constant.value().IsJSObject()) {
    // ...
  }
  if (feedback_source.IsValid()) {
    compiler::OptionalJSObjectRef callable_from_feedback =
        feedback.AsInstanceOf().value();
    if (callable_from_feedback) {
      // ...
    }
  }
  // ...

ReduceResult MaglevGraphBuilder::VisitTestInstanceOf() {
  // ...
  MaybeReduceResult result =
      // ...
}

ReduceResult MaglevGraphBuilder::VisitTestIn() {
  // ...
  compiler::FeedbackSource feedback_source{feedback(), slot};
  // ...
  USE(feedback_source);
 
ReduceResult MaglevGraphBuilder::VisitToName() {
  // ...
  if (CheckType(value, NodeType::kNumber)) {
    // ...
  }
  // ...
  switch (value->value_representation()) {
    // ...
  }
  switch (broker()->GetFeedbackForBinaryOperation(
      // ...
}

ReduceResult MaglevGraphBuilder::VisitToNumeric() {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitToObject() {
  // ...
  if (CheckType(value, NodeType::kJSReceiver, &old_type)) {
    // ...
                                                    GetCheckType(old_type)));
  }
  // ...
}

ReduceResult MaglevGraphBuilder::VisitToString() {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitToBoolean() {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitCreateRegExpLiteral() {
  // ...
  compiler::FeedbackSource feedback_source{feedback(), slot};
  compiler::ProcessedFeedback const& processed_feedback =
      // ...
  if (!processed_feedback.IsInsufficient()) {
    compiler::RegExpBoilerplateDescriptionRef literal =
        processed_feedback.AsRegExpLiteral().value();
    // ...
    compiler::MapRef map =
        // ...
  }
 
ReduceResult MaglevGraphBuilder::VisitCreateArrayLiteral() {
  // ...
  int literal_flags =
      // ...
  compiler::FeedbackSource feedback_source(feedback(), slot_index);
  // ...
  compiler::ProcessedFeedback const& processed_feedback =
      // ...
  if (processed_feedback.IsInsufficient()) {
    // ...
        DeoptimizeReason::kInsufficientTypeFeedbackForArrayLiteral);
  }
  // ...
  MaybeReduceResult result =
      // ...
        {}, constant_elements, feedback_source, literal_flags));
  // ...
        {}, constant_elements, feedback_source, literal_flags));
  // ...
}

ReduceResult MaglevGraphBuilder::VisitCreateArrayFromIterable() {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitCreateEmptyArrayLiteral() {
  // ...
  compiler::FeedbackSource feedback_source(feedback(), slot_index);
  compiler::ProcessedFeedback const& processed_feedback =
      // ...
  if (processed_feedback.IsInsufficient()) {
    // ...
        DeoptimizeReason::kInsufficientTypeFeedbackForArrayLiteral);
  }
  compiler::AllocationSiteRef site = processed_feedback.AsLiteral().value();
 
  // ...
  VirtualObject* array;
  // ...

std::optional<VirtualObject*>
// ...
    int* max_properties) {
  // ...
  if (max_depth == 0) return {};
  // ...
      boilerplate_access_guard(broker());
  // ...
    compiler::OptionalMapRef current_boilerplate_map =
        // ...
    if (!current_boilerplate_map.has_value() ||
        !current_boilerplate_map->equals(boilerplate_map)) {
      // ...
    }
  // ...
      boilerplate.map(broker()).is_dictionary_map() ||
      // ...
    compiler::ObjectRef properties =
        // ...
        properties.IsSmi() ||
        properties.equals(MakeRef(
            // ...
        properties.equals(MakeRef(
            // ...
    if (!empty) return {};
  }
  compiler::OptionalFixedArrayBaseRef maybe_elements =
      // ...
  if (!maybe_elements.has_value()) return {};
  // ...
      boilerplate, JSObject::kElementsOffset, boilerplate_elements);
  const uint32_t elements_length = boilerplate_elements.length();
  // ...
  if (boilerplate_map.IsJSArrayMap()) {
    // ...
  }
  // ...
    if ((*max_properties)-- == 0) return {};
    // ...
    compiler::OptionalObjectRef maybe_boilerplate_value =
        // ...
    if (!maybe_boilerplate_value.has_value()) return {};
    // ...
    compiler::ObjectRef boilerplate_value = maybe_boilerplate_value.value();
    // ...
    if (boilerplate_value.IsJSObject()) {
      // ...
      std::optional<VirtualObject*> maybe_object_value =
          // ...
                                           max_depth - 1, max_properties);
      if (!maybe_object_value.has_value()) return {};
      fast_literal->set(offset, maybe_object_value.value());
    // ...
                            boilerplate_value.AsHeapNumber().value_as_bits())));
    // ...
                         !boilerplate_value.IsSmi(),
                     IsUninitialized(*boilerplate_value.object()));
 
  // ...
  for (; index < inobject_properties; ++index) {
    // ...
  }
  // ...
  DCHECK_EQ(JSObject::kElementsOffset, JSArray::kElementsOffset);
  // ...
  if (boilerplate_elements.length() == 0 ||
      // ...
    fast_literal->set(JSObject::kElementsOffset,
                      // ...
  } else {
    if (boilerplate_elements.IsFixedDoubleArray()) {
      // ...
          JSObject::kElementsOffset,
          // ...
                                 boilerplate_elements.AsFixedDoubleArray()));
    } else {
      // ...
          boilerplate_elements.AsFixedArray();
      for (uint32_t i = 0; i < elements_length; ++i) {
        if ((*max_properties)-- == 0) return {};
        compiler::OptionalObjectRef element_value =
            // ...
        if (!element_value.has_value()) return {};
        if (element_value->IsJSObject()) {
          std::optional<VirtualObject*> object =
              // ...
                                               allocation, max_depth - 1,
                                               // ...
          if (!object.has_value()) return {};
          // ...
        }
      }
      // ...
      fast_literal->set(JSObject::kElementsOffset, elements);
    }
  }
  // ...
  return fast_literal;
 
 
  // ...
  old->allocation()->UpdateObject(vobject);
  // ...
  DCHECK_GT(slot_count_including_map, 0);
  uint32_t slot_count = slot_count_including_map - 1;
  // ...
  std::fill_n(slots, slot_count,
              // ...
  // ...
      elements_length, elements);
  // ...
  DCHECK(!map.is_dictionary_map());
  DCHECK(!map.IsInobjectSlackTrackingInProgress());
  int slot_count = map.instance_size() / kTaggedSize;
  // ...
  object->set(JSObject::kPropertiesOrHashOffset,
              // ...
  object->set(JSObject::kElementsOffset,
              // ...
  object->ClearSlots(JSObject::kElementsOffset,
                     // ...
  // ...
  object->set(JSArray::kPropertiesOrHashOffset,
              // ...
  object->set(JSArray::kElementsOffset,
              // ...
  object->set(JSArray::kLengthOffset, length);
  object->ClearSlots(JSArray::kLengthOffset,
                     // ...
  // ...
  int slot_count = map.instance_size() / kTaggedSize;
  // ...
  object->set(JSArrayIterator::kPropertiesOrHashOffset,
              // ...
  object->set(JSArrayIterator::kElementsOffset,
              // ...
  object->set(JSArrayIterator::kIteratedObjectOffset, iterated_object);
  // ...
  object->set(JSArrayIterator::kKindOffset,
              // ...
  // ...
  object->set(JSObject::kPropertiesOrHashOffset,
              // ...
  object->set(JSObject::kElementsOffset,
              // ...
  object->ClearSlots(JSObject::kElementsOffset,
                     // ...
 
 
  // ...
  for (; index < length; index++) {
    // ...
  }

  // ...
    std::optional<ValueNode*> callee) {
  DCHECK_EQ(JSSloppyArgumentsObject::kLengthOffset, JSArray::kLengthOffset);
  DCHECK_EQ(JSStrictArgumentsObject::kLengthOffset, JSArray::kLengthOffset);
  int slot_count = map.instance_size() / kTaggedSize;
  SBXCHECK_EQ(slot_count, callee.has_value() ? 5 : 4);
  // ...
  arguments->set(JSArray::kPropertiesOrHashOffset,
                 // ...
  arguments->set(JSArray::kElementsOffset, elements);
  // ...
  arguments->set(JSArray::kLengthOffset, length);
  if (callee.has_value()) {
    arguments->set(JSSloppyArgumentsObject::kCalleeOffset, callee.value());
  }
  DCHECK(arguments->map().IsJSArgumentsObjectMap() ||
         arguments->map().IsJSArrayMap());
  // ...
                unmapped_elements);
  // ...
  regexp->set(JSRegExp::kPropertiesOrHashOffset,
              // ...
  regexp->set(JSRegExp::kElementsOffset,
              // ...
  regexp->set(JSRegExp::kDataOffset,
              // ...
                                 kRegExpDataIndirectPointerTag));
 
 
  // ...
  DCHECK(instance_type == JS_GENERATOR_OBJECT_TYPE ||
         instance_type == JS_ASYNC_GENERATOR_OBJECT_TYPE);
  SBXCHECK_GE(slot_count, instance_type == JS_GENERATOR_OBJECT_TYPE ? 10 : 12);
  // ...
  object->set(JSGeneratorObject::kPropertiesOrHashOffset,
              // ...
  object->set(JSGeneratorObject::kElementsOffset,
              // ...
  object->set(JSGeneratorObject::kContextOffset, context);
  object->set(JSGeneratorObject::kFunctionOffset, closure);
  object->set(JSGeneratorObject::kReceiverOffset, receiver);
  object->set(JSGeneratorObject::kInputOrDebugPosOffset,
              // ...
  object->set(JSGeneratorObject::kResumeModeOffset,
              // ...
  object->set(JSGeneratorObject::kContinuationOffset,
              // ...
  object->set(JSGeneratorObject::kParametersAndRegistersOffset, register_file);
  if (instance_type == JS_ASYNC_GENERATOR_OBJECT_TYPE) {
    object->set(JSAsyncGeneratorObject::kQueueOffset,
                // ...
    object->set(JSAsyncGeneratorObject::kIsAwaitingOffset, GetInt32Constant(0));
  }
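  // These virtual-object builders mirror the heap layout of the objects
  // they fold away: every field the runtime would initialize gets an
  // explicit slot value here, and the SBXCHECK above guards that the map's
  // instance size still matches the slot count this code was written
  // against (async generators carry extra slots for their queue and
  // is-awaiting state).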
 
 
  // ...
  static_assert(JSIteratorResult::kSize == 5 * kTaggedSize);
  int slot_count = JSIteratorResult::kSize / kTaggedSize;
  // ...
  iter_result->set(JSIteratorResult::kPropertiesOrHashOffset,
                   // ...
  iter_result->set(JSIteratorResult::kElementsOffset,
                   // ...
  iter_result->set(JSIteratorResult::kValueOffset, value);
  iter_result->set(JSIteratorResult::kDoneOffset, done);
  return iter_result;
  // ...
  static_assert(JSStringIterator::kHeaderSize == 5 * kTaggedSize);
  int slot_count = JSStringIterator::kHeaderSize / kTaggedSize;
  // ...
  string_iter->set(JSStringIterator::kPropertiesOrHashOffset,
                   // ...
  string_iter->set(JSStringIterator::kElementsOffset,
                   // ...
  string_iter->set(JSStringIterator::kStringOffset, string);
  // ...
  return string_iter;
 
 
  // ...
  if (!v8_flags.maglev_escape_analysis) return;
  allocation->AddNonEscapingUses(use_count);
  // ...
              nested_allocation);
  // ...
               value->opcode() != Opcode::kArgumentsElements &&
               value->opcode() != Opcode::kArgumentsLength &&
               value->opcode() != Opcode::kRestLength) {
    // ...
  }
  // ...
      static_cast<int>(offsetof(ConsString, raw_hash_field_)));
  // ...
  DCHECK(vobject->map().IsHeapNumberMap());
  // ...
  DCHECK(vobject->map().IsFixedDoubleArrayMap());
  // ...
  switch (vobject->type()) {
    // ...
          value_to_push = node;
      // ...
        values.push_back(value_to_push);
      // ...
      for (uint32_t i = 0; i < values.size(); i++) {
        // ...
      }
  }
  // ...
  if (v8_flags.maglev_allocation_folding < 2) {
    // ...
  }
  // ...
    int mapped_count) {
  // ...
  for (; i < mapped_count; i++) {
    // ...
  }
  // ...
  return unmapped_elements;
 
 
template <CreateArgumentsType type>
// ...
              broker()->target_native_context().sloppy_arguments_map(broker()),
              // ...
              broker()->target_native_context().sloppy_arguments_map(broker()),
              // ...
          int mapped_count = std::min(param_count, length);
          // ...
              broker()->sloppy_arguments_elements_map(), mapped_count,
              // ...
          for (int i = 0; i < mapped_count; i++, param_idx_in_ctxt--) {
            // ...
          }
          // ...
              broker()->target_native_context().fast_aliased_arguments_map(
                  // ...
              broker()->sloppy_arguments_elements_map(), param_count,
              // ...
          for (int i = 0; i < param_count; i++, param_idx_in_ctxt--) {
            // ...
                [&](auto& builder) {
                  // ...
                                                   Operation::kLessThan,
                                                   // ...
                [&] { return the_hole_value; });
            // ...
          }
          // ...
              broker()->target_native_context().fast_aliased_arguments_map(
                  // ...
            broker()->target_native_context().strict_arguments_map(broker()),
            // ...
            broker()->target_native_context().strict_arguments_map(broker()),
            // ...
            broker()->target_native_context().js_array_packed_elements_map(
                // ...
            broker()->target_native_context().js_array_packed_elements_map(
                // ...
            rest_length, elements);
 
 
template <CreateArgumentsType type>
// ...
  if (!site.boilerplate(broker()).has_value()) return {};
  // ...
      *site.boilerplate(broker()), allocation_type,
      // ...
  if (!maybe_value.has_value()) return {};
  // ...

ReduceResult MaglevGraphBuilder::VisitCreateObjectLiteral() {
  // ...
  int literal_flags =
      // ...
        DeoptimizeReason::kInsufficientTypeFeedbackForObjectLiteral);
  // ...
  MaybeReduceResult result =
      // ...
        {}, boilerplate_desc, feedback_source, literal_flags));
  // ...
        {}, boilerplate_desc, feedback_source, literal_flags));
 
ReduceResult MaglevGraphBuilder::VisitCreateEmptyObjectLiteral() {
  // ...
  compiler::MapRef map =
      // ...
  DCHECK(!map.is_dictionary_map());
  DCHECK(!map.IsInobjectSlackTrackingInProgress());
  // ...
}

ReduceResult MaglevGraphBuilder::VisitCloneObject() {
  // ...
  compiler::FeedbackSource feedback_source{feedback(), slot};
  // ...
}

ReduceResult MaglevGraphBuilder::VisitGetTemplateObject() {
  // ...
  compiler::SharedFunctionInfoRef shared_function_info =
      // ...
  compiler::FeedbackSource feedback_source{feedback(), slot};
  // ...
  const compiler::ProcessedFeedback& feedback =
      // ...
  if (feedback.IsInsufficient()) {
    // ...
        {description}, shared_function_info, feedback_source));
  }
  // ...
}

ReduceResult MaglevGraphBuilder::VisitCreateClosure() {
  compiler::SharedFunctionInfoRef shared_function_info =
      // ...
  compiler::FeedbackCellRef feedback_cell =
      // ...
        {GetContext()}, shared_function_info, feedback_cell));
  // ...
        {GetContext()}, shared_function_info, feedback_cell, pretenured));
  // ...
}

// ...
  const int kContextAllocationLimit = 16;
  if (context_length > kContextAllocationLimit) return {};
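  // Contexts above this size are left to the runtime: inlining the
  // allocation means materializing one slot store per context variable, so
  // the 16-slot cap bounds the generated code size. E.g. a scope with 20
  // `let` bindings would fall back to the generic context-creation path
  // (illustrative).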
 
 
ReduceResult MaglevGraphBuilder::VisitCreateBlockContext() {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitCreateCatchContext() {
  // ...
      broker()->target_native_context().catch_context_map(broker()),
      // ...
}

ReduceResult MaglevGraphBuilder::VisitCreateFunctionContext() {
  // ...
  compiler::MapRef map =
      // ...
  auto done = [&](ValueNode* res) {
    // ...
  };
  // ...
}

ReduceResult MaglevGraphBuilder::VisitCreateEvalContext() {
  // ...
  compiler::MapRef map =
      // ...
  auto done = [&](ValueNode* res) {
    // ...
  };
  // ...
  if (slot_count <= static_cast<uint32_t>(
          // ...
}

ReduceResult MaglevGraphBuilder::VisitCreateWithContext() {
  // ...
      broker()->target_native_context().with_context_map(broker()),
      // ...
}

ReduceResult MaglevGraphBuilder::VisitCreateMappedArguments() {
  // ...
  if (!shared.object()->has_duplicate_parameters()) {
    // ...
  }
  // ...
}

ReduceResult MaglevGraphBuilder::VisitCreateUnmappedArguments() {
  // ...
}

ReduceResult MaglevGraphBuilder::VisitCreateRestParameter() {
  // ...
}
 
13818  if (
v8_flags.trace_maglev_graph_building) {
 
13819    std::cout << "  * Begin loop peeling...." << std::endl;
 
 
13836                          rem_nodes_in_call_block.size());
 
13839  for (Node* n : rem_nodes_in_call_block) {
 
13841    result->nodes().push_back(n);
 
13845  result->set_control_node(control_node);
 
 
13858  bool track_peeled_effects =
 
13860  if (track_peeled_effects) {
 
13887    if (track_peeled_effects) {
 
13904    if (start < loop_header) break;
 
13911      if (merge_state->is_exception_handler()) {
 
13914            merge_state->exception_handler_was_used(),
 
13915            merge_state->catch_block_context_register(), graph_);
 
13918        DCHECK(!merge_state->is_loop());
 
13919        merge_state = nullptr;
 
13949                 v8_flags.maglev_optimistic_peeled_loops);
 
13954  if (track_peeled_effects) {
 
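// --- Annotation (illustrative sketch, not part of the original source) ---
// Loop peeling, traced at 13818/13819 above, emits the first iteration of a
// loop separately so the loop header only merges states from genuinely
// repeated iterations, and (with track_peeled_effects) records the effects of
// that first pass. Conceptually the builder turns
//   loop: body(); if (cond) goto loop;
// into
//   body();                                  // peeled copy
//   loop: body(); if (cond) goto loop;       // steady-state loop
// A scalar sketch of the same transformation:
int SumWithPeeledFirstIterationSketch(const int* xs, int n) {
  if (n == 0) return 0;
  int sum = xs[0];                            // Peeled first iteration.
  for (int i = 1; i < n; ++i) sum += xs[i];   // Remaining iterations.
  return sum;
}
// --- End annotation ---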
 
13969      case interpreter::Bytecode::kPushContext: {
 
 
14004  const uint32_t relative_jump_bytecode_offset =
 
14011    int reduction = relative_jump_bytecode_offset *
 
14012                    v8_flags.osr_from_maglev_interrupt_scale_factor;
 
14014                                             reduction > 0 ? reduction : 1);
 
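// --- Annotation (illustrative sketch, not part of the original source) ---
// Per 14004-14014, a backward jump reduces the interrupt budget in proportion
// to the jump distance, scaled by osr_from_maglev_interrupt_scale_factor, and
// always by at least 1 so every back edge makes progress toward the budget
// check. Sketch of the clamp:
int InterruptBudgetReductionSketch(int relative_jump_bytecode_offset,
                                   int scale_factor) {
  int reduction = relative_jump_bytecode_offset * scale_factor;
  return reduction > 0 ? reduction : 1;  // Never reduce by zero.
}
// --- End annotation ---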
14026  auto FinishLoopBlock = [&]() {
 
14034                                               FinishLoopBlock)) {
 
14042    BasicBlock* block = FinishLoopBlock();
 
14045    if (is_peeled_loop) {
 
14054ReduceResult MaglevGraphBuilder::VisitJump() {
 
14055  BasicBlock* block =
 
14062ReduceResult MaglevGraphBuilder::VisitJumpConstant() { return VisitJump(); }
 
14063ReduceResult MaglevGraphBuilder::VisitJumpIfNullConstant() {
 
14064  return VisitJumpIfNull();
 
14066ReduceResult MaglevGraphBuilder::VisitJumpIfNotNullConstant() {
 
14067  return VisitJumpIfNotNull();
 
14069ReduceResult MaglevGraphBuilder::VisitJumpIfUndefinedConstant() {
 
14070  return VisitJumpIfUndefined();
 
14072ReduceResult MaglevGraphBuilder::VisitJumpIfNotUndefinedConstant() {
 
14073  return VisitJumpIfNotUndefined();
 
14075ReduceResult MaglevGraphBuilder::VisitJumpIfUndefinedOrNullConstant() {
 
14076  return VisitJumpIfUndefinedOrNull();
 
14078ReduceResult MaglevGraphBuilder::VisitJumpIfTrueConstant() {
 
14079  return VisitJumpIfTrue();
 
14081ReduceResult MaglevGraphBuilder::VisitJumpIfFalseConstant() {
 
14082  return VisitJumpIfFalse();
 
14084ReduceResult MaglevGraphBuilder::VisitJumpIfJSReceiverConstant() {
 
14085  return VisitJumpIfJSReceiver();
 
14087ReduceResult MaglevGraphBuilder::VisitJumpIfForInDoneConstant() {
 
14088  return VisitJumpIfForInDone();
 
14090ReduceResult MaglevGraphBuilder::VisitJumpIfToBooleanTrueConstant() {
 
14091  return VisitJumpIfToBooleanTrue();
 
14093ReduceResult MaglevGraphBuilder::VisitJumpIfToBooleanFalseConstant() {
 
14094  return VisitJumpIfToBooleanFalse();
 
14104    if (jumping_to_peeled_iteration) {
 
 
14126    if (merge_states_[target]->is_unmerged_unreachable_loop()) {
 
14127      if (v8_flags.trace_maglev_graph_building) {
 
14128        std::cout << "! Killing loop merge state at @" << target << std::endl;
 
 
14193      return builder.FromBool(alloc_lhs == alloc_rhs);
 
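// --- Annotation (illustrative sketch, not part of the original source) ---
// The FromBool at 14193 folds a reference-equality branch when both operands
// are known inlined allocations: the same allocation is always equal to
// itself and two distinct allocations never are. Sketch of the fold:
#include <optional>

std::optional<bool> FoldReferenceEqualSketch(const void* alloc_lhs,
                                             const void* alloc_rhs) {
  if (alloc_lhs != nullptr && alloc_rhs != nullptr) {
    return alloc_lhs == alloc_rhs;  // Both allocations known: fold.
  }
  return {};  // Otherwise keep the runtime branch.
}
// --- End annotation ---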
 
14202  if (is_jump_taken) {
 
 
14213bool IsNumberRootConstant(RootIndex root_index) {
 
14214  switch (root_index) {
 
14215#define CASE(type, name, label) case RootIndex::k##label: 
14230  DCHECK(!IsNumberRootConstant(root_index));
 
14237  if (node->properties().value_representation() ==
 
14239    if (root_index == RootIndex::kUndefinedValue) {
 
14245  if (CheckType(node, NodeType::kNumber)) {
 
14248  CHECK(node->is_tagged());
 
14250  if (root_index != RootIndex::kTrueValue &&
 
14251      root_index != RootIndex::kFalseValue &&
 
14259    node = logical_not->value().node();
 
14264    return builder.FromBool(constant->index() == root_index);
 
14267  if (root_index == RootIndex::kUndefinedValue) {
 
14269      return builder.FromBool(constant->object().IsUndefined());
 
14273  if (root_index != RootIndex::kTrueValue &&
 
14274      root_index != RootIndex::kFalseValue) {
 
14277  if (root_index == RootIndex::kFalseValue) {
 
14280  switch (node->opcode()) {
 
14281    case Opcode::kTaggedEqual:
 
14285    case Opcode::kTaggedNotEqual:
 
14291    case Opcode::kInt32Compare:
 
14296    case Opcode::kFloat64Compare:
 
14301    case Opcode::kInt32ToBoolean:
 
14307    case Opcode::kIntPtrToBoolean:
 
14313    case Opcode::kFloat64ToBoolean:
 
14319    case Opcode::kTestUndetectable:
 
14323    case Opcode::kHoleyFloat64IsHole:
 
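// --- Annotation (illustrative sketch, not part of the original source) ---
// BuildBranchIfRootConstant above folds comparisons against root constants
// (14264, 14269), restricts special handling to the true/false roots
// (14250-14274), and at 14259 strips a LogicalNot by branching on its input
// instead. A plausible sketch of that unwrap step, with a stand-in node type
// and assuming the branch direction flips per stripped negation:
struct NodeSketch {
  const NodeSketch* logical_not_input;  // Non-null iff this is a LogicalNot.
};

const NodeSketch* UnwrapLogicalNotSketch(const NodeSketch* node,
                                         bool* branch_if_true) {
  while (node->logical_not_input != nullptr) {
    node = node->logical_not_input;      // Branch on the un-negated value...
    *branch_if_true = !*branch_if_true;  // ...with the direction flipped.
  }
  return node;
}
// --- End annotation ---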
 
14350  compiler::OptionalHeapObjectRef maybe_constant = TryGetConstant(node);
 
14351  if (maybe_constant.has_value()) {
 
14352    return builder.FromBool(maybe_constant->IsNullOrUndefined());
 
14354  if (!node->is_tagged()) {
 
14355    if (node->properties().value_representation() ==
 
 
14370  if (CheckType(node, NodeType::kBoolean)) {
 
14379  bool known_to_boolean_value = false;
 
14380  bool direction_is_true = true;
 
14382    known_to_boolean_value = true;
 
14390    if (node_info && NodeTypeIs(node_info->type(), NodeType::kJSReceiver) &&
 
14392      bool all_detectable = true;
 
14393      bool all_undetectable = true;
 
14399      if (all_detectable || all_undetectable) {
 
14400        known_to_boolean_value = true;
 
14401        direction_is_true = all_detectable;
 
14405  if (known_to_boolean_value) {
 
14406    return builder.FromBool(direction_is_true);
 
14409  switch (node->value_representation()) {
 
14442  if (CheckType(node, NodeType::kBoolean, &old_type)) {
 
14449  if (CheckType(node, NodeType::kString)) {
 
14452                                               RootIndex::kempty_string);
 
 
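// --- Annotation (illustrative sketch, not part of the original source) ---
// The ToBoolean branch builder above (14379-14406) folds the branch when the
// input type already decides the answer: per 14390-14401, a JSReceiver whose
// possible maps are all detectable is truthy and one whose maps are all
// undetectable is falsy; per 14449-14452, a string is truthy iff it is not
// kempty_string. Sketch of the map-based fold:
#include <optional>
#include <vector>

std::optional<bool> FoldJSReceiverToBooleanSketch(
    const std::vector<bool>& map_is_undetectable) {
  bool all_detectable = true;
  bool all_undetectable = true;
  for (bool undetectable : map_is_undetectable) {
    if (undetectable) all_detectable = false;
    else all_undetectable = false;
  }
  if (all_detectable) return true;     // Ordinary receivers are truthy.
  if (all_undetectable) return false;  // Undetectable receivers are falsy.
  return {};                           // Mixed maps: keep the runtime check.
}
// --- End annotation ---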
14485ReduceResult MaglevGraphBuilder::VisitJumpIfToBooleanTrue() {
 
14490ReduceResult MaglevGraphBuilder::VisitJumpIfToBooleanFalse() {
 
14495ReduceResult MaglevGraphBuilder::VisitJumpIfTrue() {
 
14500ReduceResult MaglevGraphBuilder::VisitJumpIfFalse() {
 
14505ReduceResult MaglevGraphBuilder::VisitJumpIfNull() {
 
14510ReduceResult MaglevGraphBuilder::VisitJumpIfNotNull() {
 
14515ReduceResult MaglevGraphBuilder::VisitJumpIfUndefined() {
 
14520ReduceResult MaglevGraphBuilder::VisitJumpIfNotUndefined() {
 
14525ReduceResult MaglevGraphBuilder::VisitJumpIfUndefinedOrNull() {
 
14533  if (!value->is_tagged() && value->properties().value_representation() !=
 
14537  if (CheckType(value, NodeType::kJSReceiver)) {
 
 
14552          CompareInt32(lhs_const.value(), rhs_const.value(), op));
 
 
14565          CompareUint32(lhs_const.value(), rhs_const.value(), op));
 
 
14571ReduceResult MaglevGraphBuilder::VisitJumpIfJSReceiver() {
 
14577ReduceResult MaglevGraphBuilder::VisitJumpIfForInDone() {
 
14587ReduceResult MaglevGraphBuilder::VisitSwitchOnSmiNoFeedback() {
 
14589  interpreter::JumpTableTargetOffsets offsets =
 
14594  int case_value_base = (*offsets.begin()).case_value;
 
14596  for (interpreter::JumpTableTargetOffset offset : offsets) {
 
14597    BasicBlockRef* ref = &targets[offset.case_value - case_value_base];
 
14602  BasicBlock* block =
 
14605  for (interpreter::JumpTableTargetOffset offset : offsets) {
 
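// --- Annotation (illustrative sketch, not part of the original source) ---
// VisitSwitchOnSmiNoFeedback above rebases every case value against the
// smallest one (14594) so the jump table indexes a dense, zero-based array of
// block targets (14597). Sketch of the rebasing, assuming the offsets are
// sorted ascending as the use of the first element for the base suggests:
#include <vector>

std::vector<int> BuildJumpTableSketch(const std::vector<int>& case_values) {
  int case_value_base = case_values.front();  // Smallest case value.
  std::vector<int> targets(case_values.back() - case_value_base + 1, -1);
  for (int v : case_values) {
    targets[v - case_value_base] = v;  // Stand-in for a BasicBlockRef.
  }
  return targets;  // Slots left at -1 fall through to the default target.
}
// --- End annotation ---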
14612ReduceResult MaglevGraphBuilder::VisitForInEnumerate() {
 
14622ReduceResult MaglevGraphBuilder::VisitForInPrepare() {
 
14628  compiler::FeedbackSource feedback_source{feedback(), slot};
 
14632  interpreter::Register cache_array_reg{cache_type_reg.index() + 1};
 
14633  interpreter::Register cache_length_reg{cache_type_reg.index() + 2};
 
14647      auto* receiver_map =
 
14650                                    DeoptimizeReason::kWrongMapDynamic);
 
14652      auto* descriptor_array =
 
14655          descriptor_array, DescriptorArray::kEnumCacheOffset);
 
14656      auto* cache_array =
 
14662        auto* cache_indices =
 
14701ReduceResult MaglevGraphBuilder::VisitForInNext() {
 
14704  interpreter::Register cache_type_reg, cache_array_reg;
 
14705  std::tie(cache_type_reg, cache_array_reg) =
 
14710  compiler::FeedbackSource feedback_source{feedback(), slot};
 
14720      auto* receiver_map =
 
14723                                    DeoptimizeReason::kWrongMapDynamic);
 
14729      if (ToObject* to_object =
 
14742                 interpreter::Bytecode::kJumpIfUndefined ||
 
14744                 interpreter::Bytecode::kJumpIfUndefinedConstant);
 
14761ReduceResult MaglevGraphBuilder::VisitForInStep() {
 
14765                AddNewNode<Int32NodeFor<Operation::kIncrement>>({index}));
 
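// --- Annotation (illustrative sketch, not part of the original source) ---
// The for-in lowering above keeps cache_type, cache_array and cache_length in
// consecutive interpreter registers (14632/14633). ForInPrepare loads the
// enum cache from the receiver map's DescriptorArray (14652-14662), ForInNext
// re-checks the receiver map against the cached one and deopts with
// kWrongMapDynamic if it changed (14720-14723), and ForInStep is a plain
// Int32 increment (14765). A sketch of the next/step pair with stand-in
// types:
#include <optional>
#include <vector>

struct ForInStateSketch {
  const void* cache_type;                // Map the enum cache was built for.
  std::vector<const char*> cache_array;  // Enumerable property names.
};

std::optional<const char*> ForInNextSketch(const ForInStateSketch& state,
                                           const void* receiver_map,
                                           int index) {
  if (receiver_map != state.cache_type) return {};  // Map changed: deopt.
  return state.cache_array[index];
}

int ForInStepSketch(int index) { return index + 1; }  // Operation::kIncrement.
// --- End annotation ---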
14774ReduceResult MaglevGraphBuilder::VisitSetPendingMessage() {
 
14780ReduceResult MaglevGraphBuilder::VisitThrow() {
 
14784ReduceResult MaglevGraphBuilder::VisitReThrow() {
 
14789ReduceResult MaglevGraphBuilder::VisitReturn() {
 
14794                                               relative_jump_bytecode_offset);
 
14808    BasicBlock* block =
 
14818ReduceResult MaglevGraphBuilder::VisitThrowReferenceErrorIfHole() {
 
14825    if (IsTheHoleValue(value)) {
 
14834  switch (value->value_representation()) {
 
14853    auto& alt = info->alternative();
 
14854    if (alt.int32() || alt.truncated_int32_to_number() || alt.float64()) {
 
14864ReduceResult MaglevGraphBuilder::VisitThrowSuperNotCalledIfHole() {
 
14870    if (IsTheHoleValue(value)) {
 
14878ReduceResult MaglevGraphBuilder::VisitThrowSuperAlreadyCalledIfNotHole() {
 
14883    if (!IsTheHoleValue(value)) {
 
14891ReduceResult MaglevGraphBuilder::VisitThrowIfNotSuperConstructor() {
 
14899ReduceResult MaglevGraphBuilder::VisitSwitchOnGeneratorState() {
 
14903  int generator_prologue_block_offset = 1;
 
14906  interpreter::JumpTableTargetOffsets offsets =
 
14918      {maybe_generator}, RootIndex::kUndefinedValue,
 
14924  StartNewBlock(generator_prologue_block_offset, block_is_generator_undefined);
 
14927  ValueNode* generator = maybe_generator;
 
14932                                      JSGeneratorObject::kContinuationOffset,
 
14934  ValueNode* context =
 
14944  int case_value_base = (*offsets.begin()).case_value;
 
14946  for (interpreter::JumpTableTargetOffset offset : offsets) {
 
14947    BasicBlockRef* ref = &targets[offset.case_value - case_value_base];
 
14950  ValueNode* case_value =
 
14953      {case_value}, case_value_base, targets, offsets.size());
 
14954  for (interpreter::JumpTableTargetOffset offset : offsets) {
 
14960ReduceResult MaglevGraphBuilder::VisitSuspendGenerator() {
 
14974      [&](GeneratorStore* node) {
 
14977          node->set_parameters_and_registers(arg_index++,
 
14980        const compiler::BytecodeLivenessState* liveness = GetOutLiveness();
 
14981        for (int i = 0; i < args.register_count(); ++i) {
 
14982          ValueNode* value = liveness->RegisterIsLive(args[i].index())
 
14985          node->set_parameters_and_registers(arg_index++, value);
 
14989      context, generator, suspend_id, debug_pos_offset);
 
14995ReduceResult MaglevGraphBuilder::VisitResumeGenerator() {
 
14999      generator, JSGeneratorObject::kParametersAndRegistersOffset);
 
15009        {register_size, array_length}, AssertCondition::kLessThanEqual,
 
15010        AbortReason::kInvalidParametersAndRegistersInGenerator);
 
15013  const compiler::BytecodeLivenessState* liveness =
 
15016  for (int i = 0; i < registers.register_count(); ++i) {
 
15017    if (liveness->RegisterIsLive(registers[i].index())) {
 
15020                                      {array, stale}, array_index));
 
15024      generator, JSGeneratorObject::kInputOrDebugPosOffset));
 
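// --- Annotation (illustrative sketch, not part of the original source) ---
// SuspendGenerator (14980-14985) consults the out-liveness of the suspend
// point and only persists live registers into the generator's
// parameters-and-registers array, while ResumeGenerator (15016-15020)
// symmetrically reloads only the registers that are live afterwards; dead
// slots hold a stale sentinel. Sketch of the store side:
#include <vector>

std::vector<int> StoreLiveRegistersSketch(const std::vector<int>& registers,
                                          const std::vector<bool>& is_live,
                                          int stale_sentinel) {
  std::vector<int> stored(registers.size());
  for (size_t i = 0; i < registers.size(); ++i) {
    stored[i] = is_live[i] ? registers[i] : stale_sentinel;  // Dead: stale.
  }
  return stored;
}
// --- End annotation ---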
15037        this, Builtin::kGetIteratorWithFeedbackLazyDeoptContinuation, {},
 
15043      return result_load;
 
15046    iterator_method = result_load.value();
 
15048  auto throw_iterator_error = [&] {
 
15052    return throw_iterator_error();
 
15054  auto throw_symbol_iterator_invalid = [&] {
 
15057  auto call_iterator_method = [&] {
 
15059        this, Builtin::kCallIteratorWithFeedbackLazyDeoptContinuation);
 
15067    if (result_call.IsDoneWithAbort()) return result_call;
 
15068    DCHECK(result_call.IsDoneWithValue());
 
15070        [&](auto& builder) {
 
15073        [&] { return result_call; }, throw_symbol_iterator_invalid);
 
15077      [&](auto& builder) {
 
15080      throw_iterator_error, call_iterator_method);
 
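// --- Annotation (illustrative sketch, not part of the original source) ---
// The GetIterator reduction above follows the spec's GetIterator(): load
// obj[Symbol.iterator] (with the lazy-deopt continuation at 15037), throw via
// throw_iterator_error if the method cannot be called, call it (15057-15059),
// and throw SymbolIteratorInvalid if the call result is not a JSReceiver
// (15070-15073). A control-flow sketch with stand-in types:
#include <functional>
#include <stdexcept>

struct JSValueSketch {
  bool is_callable = false;
  bool is_receiver = false;
};

JSValueSketch GetIteratorSketch(const JSValueSketch& iterator_method,
                                const std::function<JSValueSketch()>& call) {
  if (!iterator_method.is_callable) {
    throw std::runtime_error("iterator error");  // throw_iterator_error
  }
  JSValueSketch result = call();  // call_iterator_method
  if (!result.is_receiver) {
    throw std::runtime_error("SymbolIteratorInvalid");  // not an object
  }
  return result;
}
// --- End annotation ---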
 
15097ReduceResult MaglevGraphBuilder::VisitDebugger() {
 
15101ReduceResult MaglevGraphBuilder::VisitIncBlockCounter() {
 
15109ReduceResult MaglevGraphBuilder::VisitAbort() {
 
15114ReduceResult MaglevGraphBuilder::VisitWide() { UNREACHABLE(); }
 
15115ReduceResult MaglevGraphBuilder::VisitExtraWide() { UNREACHABLE(); }
 
15116#define DEBUG_BREAK(Name, ...) \ 
15117  ReduceResult MaglevGraphBuilder::Visit##Name() { UNREACHABLE(); } 
 
15120ReduceResult MaglevGraphBuilder::VisitIllegal() { UNREACHABLE(); }
 
#define SHORT_STAR_VISITOR(Name,...)
#define DEBUG_BREAK(Name,...)
uint8_t data_[MAX_STACK_LENGTH]
interpreter::Bytecode bytecode
#define SHORT_STAR_BYTECODE_LIST(V)
#define DEBUG_BREAK_BYTECODE_LIST(V)
#define BYTECODE_LIST(V, V_TSA)
#define SBXCHECK_LT(lhs, rhs)
#define SBXCHECK_EQ(lhs, rhs)
#define SBXCHECK_GE(lhs, rhs)
#define SLOW_DCHECK(condition)
static constexpr T decode(U value)
constexpr const auto & get() const
constexpr Tag tag() const
constexpr size_t size() const
bool Contains(int i) const
static CallInterfaceDescriptor CallInterfaceDescriptorFor(Builtin builtin)
static V8_EXPORT_PRIVATE int GetStackParameterCount(Builtin builtin)
static V8_EXPORT_PRIVATE const char * name(Builtin builtin)
V8_EXPORT_PRIVATE void Disassemble(std::ostream &os)
static constexpr BytecodeOffset None()
constexpr int ToInt() const
int GetStackParameterCount() const
int GetParameterCount() const
static const uint32_t kMinLength
static int MaximumFunctionContextSlots()
static V8_INLINE constexpr int OffsetOfElementAt(int index)
@ MIN_CONTEXT_EXTENDED_SLOTS
static bool IsSupported(CpuFeature f)
CompareOperationHint GetCompareOperationFeedback() const
TypeOfFeedback::Result GetTypeOfFeedback() const
BinaryOperationHint GetBinaryOperationFeedback() const
static FeedbackSlot ToSlot(intptr_t index)
int property_index() const
static FieldIndex ForDetails(Tagged< Map > map, PropertyDetails details)
static FieldIndex ForInObjectOffset(int offset, Encoding encoding)
static constexpr int kMaxLength
double get_scalar() const
static constexpr int kHeaderSize
static constexpr int kMapOffset
static V8_EXPORT_PRIVATE CompareStringsOptions CompareStringsOptionsFor(IsolateT *isolate, DirectHandle< Object > locales, DirectHandle< Object > options)
static const int kInitialMaxFastElementArray
static const int kPreallocatedArrayElements
static const int kGeneratorClosed
static const int kGeneratorExecuting
static const uint32_t kMaxGap
static constexpr int Size()
static constexpr int kLastIndexOffset
static constexpr int kInitialLastIndexValue
v8::internal::LocalFactory * factory()
Tagged< Object > root(RootIndex index) const
static std::optional< Tagged< JSFunction > > GetConstructorFunction(Tagged< Map > map, Tagged< Context > native_context)
static constexpr int kEmptyHashField
static constexpr int OffsetOfElementAt(int index)
static constexpr int SizeFor(int length)
PropertyLocation location() const
Representation representation() const
PropertyCellType cell_type() const
PropertyKind kind() const
bool IsConfigurable() const
static const int kProtectorValid
constexpr bool IsHeapObject() const
constexpr bool IsTagged() const
constexpr bool IsSmi() const
constexpr bool IsDouble() const
static bool constexpr IsValid(T value)
static const int kNotInlined
static const uint32_t kMaxLength
static constexpr int OffsetOfElementAt(int index)
static constexpr int SizeFor(int capacity)
V8_INLINE constexpr int32_t value() const
const_iterator begin() const
bool contains(ZoneCompactSet< T > const &other) const
const_iterator end() const
void reserve(size_t new_cap)
void push_back(const T &value)
T * AllocateArray(size_t length)
base::Vector< T > AllocateVector(size_t length)
bool ComputeElementAccessInfos(ElementAccessFeedback const &feedback, ZoneVector< ElementAccessInfo > *access_infos) const
bool FinalizePropertyAccessInfos(ZoneVector< PropertyAccessInfo > infos, AccessMode access_mode, ZoneVector< PropertyAccessInfo > *result) const
const LoopInfo & GetLoopInfoFor(int header_offset) const
interpreter::Register incoming_new_target_or_generator_register() const
int register_count() const
uint16_t parameter_count() const
int live_value_count() const
void MarkRegisterLive(int index)
void MarkRegisterDead(int index)
void MarkAccumulatorDead()
bool AccumulatorIsLive() const
void Add(interpreter::Register r)
OptionalHeapObjectRef target() const
CallFeedbackContent call_feedback_content() const
SpeculationMode speculation_mode() const
bool DependOnNoProfilingProtector()
void DependOnConstantInDictionaryPrototypeChain(MapRef receiver_map, NameRef property_name, ObjectRef constant, PropertyKind kind)
HeapObjectRef DependOnPrototypeProperty(JSFunctionRef function)
void DependOnElementsKind(AllocationSiteRef site)
void DependOnElementsKinds(AllocationSiteRef site)
void DependOnNoSlackTrackingChange(MapRef map)
SlackTrackingPrediction DependOnInitialMapInstanceSizePrediction(JSFunctionRef function)
void DependOnGlobalProperty(PropertyCellRef cell)
void DependOnObjectSlotValue(HeapObjectRef object, int offset, ObjectRef value)
void DependOnStablePrototypeChain(MapRef receiver_maps, WhereToStart start, OptionalJSObjectRef last_prototype=OptionalJSObjectRef())
void DependOnStablePrototypeChains(ZoneVector< MapRef > const &receiver_maps, WhereToStart start, OptionalJSObjectRef last_prototype=OptionalJSObjectRef())
void DependOnStableMap(MapRef map)
bool DependOnScriptContextSlotProperty(ContextRef script_context, size_t index, ContextSidePropertyCell::Property property, JSHeapBroker *broker)
bool DependOnNoElementsProtector()
AllocationType DependOnPretenureMode(AllocationSiteRef site)
OptionalObjectRef get(JSHeapBroker *broker, int index) const
IndirectHandle< Context > object() const
bool HasOnlyStringMaps(JSHeapBroker *broker) const
KeyedAccessMode keyed_mode() const
ZoneVector< TransitionGroup > const & transition_groups() const
ElementAccessFeedback const & Refine(JSHeapBroker *broker, ZoneVector< MapRef > const &inferred_maps) const
ZoneVector< MapRef > const & lookup_start_object_maps() const
ZoneVector< MapRef > const & transition_sources() const
ElementsKind elements_kind() const
OptionalFeedbackVectorRef feedback_vector(JSHeapBroker *broker) const
FeedbackCellRef GetClosureFeedbackCell(JSHeapBroker *broker, int index) const
OptionalObjectRef TryGet(JSHeapBroker *broker, int i) const
IndirectHandle< FixedArray > object() const
Float64 GetFromImmutableFixedDoubleArray(int i) const
HolderLookupResult LookupHolderOfExpectedType(JSHeapBroker *broker, MapRef receiver_map)
bool is_signature_undefined(JSHeapBroker *broker) const
OptionalObjectRef callback_data(JSHeapBroker *broker) const
bool accept_any_receiver() const
bool IsPropertyCell() const
ContextRef script_context() const
PropertyCellRef property_cell() const
bool IsMegamorphic() const
bool IsScriptContextSlot() const
OptionalMapRef map_direct_read(JSHeapBroker *broker) const
V8_EXPORT_PRIVATE MapRef map(JSHeapBroker *broker) const
ContextRef context(JSHeapBroker *broker) const
SharedFunctionInfoRef shared(JSHeapBroker *broker) const
MapRef initial_map(JSHeapBroker *broker) const
ProcessedFeedback const & GetFeedbackForPropertyAccess(FeedbackSource const &source, AccessMode mode, OptionalNameRef static_name)
CompilationDependencies * dependencies() const
ProcessedFeedback const & GetFeedbackForArrayOrObjectLiteral(FeedbackSource const &source)
std::optional< RootIndex > FindRootIndex(HeapObjectRef object)
ProcessedFeedback const & GetFeedbackForGlobalAccess(FeedbackSource const &source)
ProcessedFeedback const & GetFeedbackForInstanceOf(FeedbackSource const &source)
ForInHint GetFeedbackForForIn(FeedbackSource const &source)
ProcessedFeedback const & GetFeedbackForCall(FeedbackSource const &source)
PropertyAccessInfo GetPropertyAccessInfo(MapRef map, NameRef name, AccessMode access_mode)
NativeContextRef target_native_context() const
ProcessedFeedback const & GetFeedbackForTemplateObject(FeedbackSource const &source)
ProcessedFeedback const & GetFeedbackForRegExpLiteral(FeedbackSource const &source)
OptionalObjectRef GetOwnFastConstantDataProperty(JSHeapBroker *broker, Representation field_representation, FieldIndex index, CompilationDependencies *dependencies) const
OptionalFixedArrayBaseRef elements(JSHeapBroker *broker, RelaxedLoadTag) const
OptionalObjectRef RawInobjectPropertyAt(JSHeapBroker *broker, FieldIndex index) const
std::optional< Float64 > GetOwnFastConstantDoubleProperty(JSHeapBroker *broker, FieldIndex index, CompilationDependencies *dependencies) const
OptionalObjectRef raw_properties_or_hash(JSHeapBroker *broker) const
bool IsElementsTenured(FixedArrayBaseRef elements)
AccessMode access_mode() const
KeyedAccessStoreMode store_mode() const
KeyedAccessLoadMode load_mode() const
PropertyDetails GetPropertyDetails(JSHeapBroker *broker, InternalIndex descriptor_index) const
IndirectHandle< Map > object() const
int GetInObjectPropertyOffset(int index) const
bool is_deprecated() const
bool is_access_check_needed() const
bool IsFixedCowArrayMap(JSHeapBroker *broker) const
HeapObjectRef prototype(JSHeapBroker *broker) const
int instance_size() const
bool is_migration_target() const
InstanceType instance_type() const
HeapObjectRef GetBackPointer(JSHeapBroker *broker) const
int NumberOfOwnDescriptors() const
bool has_prototype_slot() const
ElementsKind elements_kind() const
int GetInObjectProperties() const
MapRef GetInitialJSArrayMap(JSHeapBroker *broker, ElementsKind kind) const
ElementAccessFeedback const & AsElementAccess() const
bool IsInsufficient() const
GlobalAccessFeedback const & AsGlobalAccess() const
TemplateObjectFeedback const & AsTemplateObject() const
NamedAccessFeedback const & AsNamedAccess() const
LiteralFeedback const & AsLiteral() const
CallFeedback const & AsCall() const
Representation field_representation() const
bool IsStringLength() const
@ kDictionaryProtoAccessorConstant
@ kDictionaryProtoDataConstant
OptionalMapRef transition_map() const
bool IsDictionaryProtoAccessorConstant() const
bool IsDictionaryProtoDataConstant() const
InternalIndex dictionary_index() const
ElementsKind elements_kind() const
OptionalJSObjectRef holder() const
FieldIndex field_index() const
ZoneVector< MapRef > const & lookup_start_object_maps() const
bool HasDictionaryHolder() const
bool IsFastDataConstant() const
bool HasTransitionMap() const
OptionalObjectRef constant() const
OptionalMapRef field_map() const
void RecordDependencies(CompilationDependencies *dependencies)
void CacheAsProtector(JSHeapBroker *broker) const
V8_WARN_UNUSED_RESULT bool Cache(JSHeapBroker *broker) const
PropertyDetails property_details() const
ObjectRef value(JSHeapBroker *broker) const
bool HasOuterScopeInfo() const
bool HasContextExtensionSlot() const
ScopeInfoRef OuterScopeInfo(JSHeapBroker *broker) const
ScopeType scope_type() const
int ContextLength() const
int context_parameters_start() const
bool HasBreakInfo(JSHeapBroker *broker) const
Builtin builtin_id() const
int inobject_property_count() const
int instance_size() const
Bytecode current_bytecode() const
FeedbackSlot GetSlotOperand(int operand_index) const
Register GetRegisterOperand(int operand_index) const
int GetJumpTargetOffset() const
uint32_t GetUnsignedImmediateOperand(int operand_index) const
int32_t GetImmediateOperand(int operand_index) const
int current_offset() const
std::pair< Register, Register > GetRegisterPairOperand(int operand_index) const
void AdvanceTo(int offset)
int GetRegisterOperandRange(int operand_index) const
RegisterList GetRegisterListOperand(int operand_index) const
uint32_t GetNativeContextIndexOperand(int operand_index) const
JumpTableTargetOffsets GetJumpTableTargetOffsets() const
void SetOffset(int offset)
Bytecode next_bytecode() const
Runtime::FunctionId GetIntrinsicIdOperand(int operand_index) const
uint32_t GetIndexOperand(int operand_index) const
Runtime::FunctionId GetRuntimeIdOperand(int operand_index) const
static constexpr bool WritesImplicitRegister(ImplicitRegisterUse implicit_register_use)
static constexpr bool WritesAccumulator(ImplicitRegisterUse implicit_register_use)
static bool ClobbersAccumulator(Bytecode bytecode)
static bool WritesAccumulator(Bytecode bytecode)
static bool IsRegisterOutputOperandType(OperandType operand_type)
static OperandType GetOperandType(Bytecode bytecode, int i)
static int NumberOfOperands(Bytecode bytecode)
static bool WritesOrClobbersAccumulator(Bytecode bytecode)
int register_count() const
static constexpr Register virtual_accumulator()
static constexpr Register FromParameterIndex(int index)
constexpr int index() const
static constexpr Register current_context()
static constexpr Register receiver()
static constexpr Register invalid_value()
static constexpr Register function_closure()
constexpr bool is_valid() const
constexpr bool is_parameter() const
static LiteralFlag Decode(uint8_t raw_flag)
AllocationType allocation_type() const
void Add(InlinedAllocation *alloc)
CreateArgumentsType type() const
BasicBlock * block_ptr() const
ZoneVector< Node * > & nodes()
CallArguments(ConvertReceiverMode receiver_mode, interpreter::RegisterList reglist, const InterpreterFrameState &frame, Mode mode=kDefault)
void PopArrayLikeArgument()
base::SmallVector< ValueNode *, 8 > args_
ConvertReceiverMode receiver_mode_
CallArguments(ConvertReceiverMode receiver_mode, base::SmallVector< ValueNode *, 8 > &&args, Mode mode=kDefault)
void set_receiver(ValueNode *receiver)
ValueNode * operator[](size_t i) const
void set_arg(size_t i, ValueNode *node)
ValueNode * array_like_argument()
void CheckArgumentsAreNotConversionNodes()
size_t count_with_receiver() const
ConvertReceiverMode receiver_mode() const
void PopReceiver(ConvertReceiverMode new_receiver_mode)
CallArguments(ConvertReceiverMode receiver_mode, std::initializer_list< ValueNode * > args, Mode mode=kDefault)
ValueNode * receiver() const
CallArguments(ConvertReceiverMode receiver_mode)
static constexpr int kFixedInputCount
static constexpr int kFixedInputCount
static constexpr int kFixedInputCount
static constexpr int kFixedInputCount
static constexpr int kFixedInputCount
void set_arg(int i, ValueNode *node)
static constexpr int kFixedInputCount
const InlinedArgumentsDeoptFrame & as_inlined_arguments() const
@ kBuiltinContinuationFrame
constexpr Operation operation() const
static Builtin continuation(Kind kind)
constexpr bool flip() const
static constexpr int kFixedInputCount
void record_scope_info(ValueNode *context, compiler::OptionalScopeInfoRef scope_info)
ZoneMap< InlinedAllocation *, SmallAllocationVector > & allocations_elide_map()
compiler::ZoneRefMap< compiler::ObjectRef, Constant * > & constants()
ZoneVector< MaglevCallSiteInfo * > & inlineable_calls()
ZoneVector< InitialValue * > & osr_values()
compiler::OptionalScopeInfoRef TryGetScopeInfo(ValueNode *context, compiler::JSHeapBroker *broker)
void set_has_resumable_generator()
ZoneVector< OptimizedCompilationInfo::InlinedFunctionHolder > & inlined_functions()
compiler::ZoneRefMap< compiler::HeapObjectRef, TrustedConstant * > & trusted_constants()
void set_has_recursive_calls(bool value)
void Add(BasicBlock *block)
ZoneMap< InlinedAllocation *, SmallAllocationVector > & allocations_escape_map()
void add_inlined_bytecode_size(int size)
VirtualObject * object() const
constexpr Operation operation() const
constexpr bool flip() const
constexpr bool flip() const
const CompactInterpreterFrameState * frame_state() const
void set(interpreter::Register reg, ValueNode *value)
void set_accumulator(ValueNode *value)
void add_object(VirtualObject *vobject)
KnownNodeAspects * known_node_aspects()
void set_known_node_aspects(KnownNodeAspects *known_node_aspects)
void set_virtual_objects(const VirtualObjectList &virtual_objects)
ValueNode * get(interpreter::Register reg) const
const VirtualObjectList & virtual_objects() const
ValueNode * accumulator() const
static LoadedPropertyMapKey Elements()
static LoadedPropertyMapKey TypedArrayLength()
static LoadedPropertyMapKey StringLength()
void set_could_not_inline_all_candidates()
bool specialize_to_function_context() const
IndirectHandle< JSFunction > toplevel_function() const
MaglevCompilationUnit * toplevel_compilation_unit() const
BytecodeOffset toplevel_osr_offset() const
compiler::SharedFunctionInfoRef shared_function_info() const
int register_count() const
MaglevCompilationInfo * info() const
const MaglevCompilationUnit * GetTopLevelCompilationUnit() const
static MaglevCompilationUnit * NewInner(Zone *zone, const MaglevCompilationUnit *caller, compiler::SharedFunctionInfoRef shared_function_info, compiler::FeedbackCellRef feedback_cell)
uint16_t parameter_count() const
BytecodeOffset osr_offset() const
compiler::BytecodeArrayRef bytecode() const
int inlining_depth() const
compiler::FeedbackVectorRef feedback() const
MaglevGraphBuilder::MaglevSubGraphBuilder * sub_builder_
void StartFallthroughBlock(BasicBlock *predecessor)
BranchResult AlwaysFalse() const
BranchResult Build(std::initializer_list< ValueNode * > inputs, Args &&... args)
BasicBlockRef * true_target()
BranchResult AlwaysTrue() const
void SetBranchSpecializationMode(BranchSpecializationMode mode)
BasicBlockRef * fallthrough()
BasicBlockRef * false_target()
void SetAccumulatorInBranch(BranchType jump_type) const
MaglevGraphBuilder * builder_
BasicBlockRef * jump_target()
BranchResult FromBool(bool value) const
DeoptFrameScope(MaglevGraphBuilder *builder, Builtin continuation, compiler::OptionalJSFunctionRef maybe_js_target={})
MaglevGraphBuilder * builder_
const DeoptFrame::FrameData & data() const
DeoptFrameScope * parent_
DeoptFrameScope(MaglevGraphBuilder *builder, ValueNode *receiver)
DeoptFrameScope * parent() const
DeoptFrame::FrameData data_
bool IsLazyDeoptContinuationFrame() const
DeoptFrameScope(MaglevGraphBuilder *builder, Builtin continuation, compiler::OptionalJSFunctionRef maybe_js_target, base::Vector< ValueNode *const > parameters)
DeoptFrame::FrameData & data()
MaglevGraphBuilder * builder_
~LazyDeoptResultLocationScope()
LazyDeoptResultLocationScope(MaglevGraphBuilder *builder, interpreter::Register result_location, int result_size)
MaglevSubGraphBuilder * sub_builder_
~BorrowParentKnownNodeAspectsAndVOs()
BorrowParentKnownNodeAspectsAndVOs(MaglevSubGraphBuilder *sub_builder)
MergePointInterpreterFrameState * merge_state_
Label(MaglevSubGraphBuilder *sub_builder, int predecessor_count, std::initializer_list< Variable * > vars)
Label(MergePointInterpreterFrameState *merge_state, BasicBlock *basic_block)
Label(MaglevSubGraphBuilder *sub_builder, int predecessor_count)
compiler::BytecodeLivenessState * liveness_
MergePointInterpreterFrameState * merge_state_
BasicBlock * loop_header_
LoopLabel(MergePointInterpreterFrameState *merge_state, BasicBlock *loop_header)
interpreter::Register pseudo_register_
void set(Variable &var, ValueNode *value)
MaglevCompilationUnit * compilation_unit_
V8_NODISCARD ReduceResult TrimPredecessorsAndBind(Label *label)
void GotoIfTrue(Label *true_target, std::initializer_list< ValueNode * > control_inputs, Args &&... args)
void GotoOrTrim(Label *label)
void MergeIntoLabel(Label *label, BasicBlock *predecessor)
void GotoIfFalse(Label *false_target, std::initializer_list< ValueNode * > control_inputs, Args &&... args)
MaglevGraphBuilder * builder_
ValueNode * get(const Variable &var) const
void ReducePredecessorCount(Label *label, unsigned num=1)
void TakeKnownNodeAspectsAndVOsFromParent()
InterpreterFrameState pseudo_frame_
void EndLoop(LoopLabel *loop_label)
void MoveKnownNodeAspectsAndVOsToParent()
MaglevSubGraphBuilder(MaglevGraphBuilder *builder, int variable_count)
ReduceResult Branch(std::initializer_list< Variable * > vars, FCond cond, FTrue if_true, FFalse if_false)
LoopLabel BeginLoop(std::initializer_list< Variable * > loop_vars)
compiler::FeedbackSource saved_
const compiler::FeedbackSource & value()
SaveCallSpeculationScope(MaglevGraphBuilder *builder, compiler::FeedbackSource feedback_source=compiler::FeedbackSource())
~SaveCallSpeculationScope()
static bool IsSpeculationAllowed(compiler::JSHeapBroker *broker, compiler::FeedbackSource feedback_source)
MaglevGraphBuilder * builder_
MaybeReduceResult TryBuildPropertyCellLoad(const compiler::GlobalAccessFeedback &global_access_feedback)
ValueNode * BuildLoadTaggedField(ValueNode *object, uint32_t offset, Args &&... args)
void DecrementDeadPredecessorAndAccountForPeeling(uint32_t offset)
int argument_count() const
static compiler::OptionalHeapObjectRef TryGetConstant(compiler::JSHeapBroker *broker, LocalIsolate *isolate, ValueNode *node)
MaybeReduceResult TryFoldInt32BinaryOperation(ValueNode *left, ValueNode *right)
BranchResult BuildBranchIfIntPtrToBooleanTrue(BranchBuilder &builder, ValueNode *node)
MaybeReduceResult TryBuildPolymorphicElementAccess(ValueNode *object, ValueNode *index, const compiler::KeyedAccessMode &keyed_mode, const ZoneVector< compiler::ElementAccessInfo > &access_infos, GenericAccessFunc &&build_generic_access)
ReduceResult BuildInt32BinaryOperationNode()
ValueNode * TrySpecializeLoadScriptContextSlot(ValueNode *context, int index)
ValueNode * GetTruncatedInt32ForToNumber(ValueNode *value, NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
bool IsInsideLoop() const
ReduceResult BuildCheckString(ValueNode *object)
bool TryReduceCompareEqualAgainstConstant()
bool HasDisjointType(ValueNode *lhs, NodeType rhs_type)
BasicBlock * current_block_
MaybeReduceResult TryReduceCallForTarget(ValueNode *target_node, compiler::JSFunctionRef target, CallArguments &args, const compiler::FeedbackSource &feedback_source)
BranchResult BuildBranchIfToBooleanTrue(BranchBuilder &builder, ValueNode *node)
void BuildInitializeStore(InlinedAllocation *alloc, ValueNode *value, int offset)
ReduceResult BuildEagerInlineCall(ValueNode *context, ValueNode *function, ValueNode *new_target, compiler::SharedFunctionInfoRef shared, compiler::FeedbackCellRef feedback_cell, CallArguments &args, float call_frequency)
const compiler::BytecodeLivenessState * GetInLiveness() const
const FeedbackNexus FeedbackNexusForOperand(int slot_operand_index) const
MaybeReduceResult TryBuildLoadDataView(const CallArguments &args, ExternalArrayType type)
ValueNode * BuildGenericConstruct(ValueNode *target, ValueNode *new_target, ValueNode *context, const CallArguments &args, const compiler::FeedbackSource &feedback_source=compiler::FeedbackSource())
compiler::JSHeapBroker * broker_
Uint32Constant * GetUint32Constant(int constant)
void BuildStoreTaggedFieldNoWriteBarrier(ValueNode *object, ValueNode *value, int offset, StoreTaggedMode store_mode)
compiler::FeedbackSource current_speculation_feedback_
BranchResult BuildBranchIfTrue(BranchBuilder &builder, ValueNode *node)
void UpdatePredecessorCount(uint32_t offset, int diff)
bool in_optimistic_peeling_iteration() const
MaybeReduceResult TryBuildGetKeyedPropertyWithEnumeratedKey(ValueNode *object, const compiler::FeedbackSource &feedback_source, const compiler::ProcessedFeedback &processed_feedback)
BranchResult BuildBranchIfFloat64ToBooleanTrue(BranchBuilder &builder, ValueNode *node)
ValueNode * GetContext() const
bool HaveDisjointTypes(ValueNode *lhs, ValueNode *rhs)
std::optional< Float64 > TryFoldLoadConstantDoubleField(compiler::JSObjectRef holder, compiler::PropertyAccessInfo const &access_info)
void SetArgument(int i, ValueNode *value)
VirtualObject * DeepCopyVirtualObject(VirtualObject *vobj)
NodeT * CreateNewConstantNode(Args &&... args) const
ZoneVector< Node * > & node_buffer()
compiler::ref_traits< T >::ref_type GetRefOperand(int operand_index)
ValueNode * BuildLoadFixedArrayLength(ValueNode *fixed_array)
InlinedAllocation * ExtendOrReallocateCurrentAllocationBlock(AllocationType allocation_type, VirtualObject *value)
int bailout_for_entrypoint()
std::optional< VirtualObject * > TryGetNonEscapingArgumentsObject(ValueNode *value)
NodeType CheckTypes(ValueNode *node, std::initializer_list< NodeType > types)
bool CanTreatHoleAsUndefined(base::Vector< const compiler::MapRef > const &receiver_maps)
ValueNode * GetUint8ClampedForToNumber(ValueNode *value)
VirtualObject * CreateFixedArray(compiler::MapRef map, int length)
ReduceResult BuildInlineFunction(SourcePosition call_site_position, ValueNode *context, ValueNode *function, ValueNode *new_target)
std::optional< InterpretedDeoptFrame > entry_stack_check_frame_
const InterpreterFrameState & current_interpreter_frame() const
LocalIsolate *const local_isolate_
void BuildStoreTrustedPointerField(ValueNode *object, ValueNode *value, int offset, IndirectPointerTag tag, StoreTaggedMode store_mode)
ValueNode * BuildLogicalNot(ValueNode *value)
MaybeReduceResult TryBuildPropertyCellStore(const compiler::GlobalAccessFeedback &global_access_feedback)
ReduceResult BuildCheckStringOrStringWrapper(ValueNode *object)
ValueNode * BuildToString(ValueNode *value, ToString::ConversionMode mode)
ValueNode * GetRegisterInput(Register reg)
compiler::OptionalObjectRef TryFoldLoadDictPrototypeConstant(compiler::PropertyAccessInfo const &access_info)
ValueNode * BuildUnwrapThinString(ValueNode *input)
ZoneVector< int > decremented_predecessor_offsets_
BranchResult BuildBranchIfUint32Compare(BranchBuilder &builder, Operation op, ValueNode *lhs, ValueNode *rhs)
MaybeReduceResult TryBuildElementAccessOnString(ValueNode *object, ValueNode *index, compiler::KeyedAccessMode const &keyed_mode)
SourcePosition current_source_position_
bool TopLevelFunctionPassMaglevPrintFilter()
int register_count() const
ReduceResult StoreAndCacheContextSlot(ValueNode *context, int index, ValueNode *value, ContextKind context_kind)
bool TargetIsCurrentCompilingUnit(compiler::JSFunctionRef target)
void StartFallthroughBlock(int next_block_offset, BasicBlock *predecessor)
MaybeReduceResult TryReuseKnownPropertyLoad(ValueNode *lookup_start_object, compiler::NameRef name)
void EnsureInt32(ValueNode *value, bool can_be_heap_number=false)
uint32_t GetFlag16Operand(int operand_index) const
BranchResult BuildBranchIfReferenceEqual(BranchBuilder &builder, ValueNode *lhs, ValueNode *rhs)
MaybeReduceResult TryBuildCallKnownJSFunction(compiler::JSFunctionRef function, ValueNode *new_target, CallArguments &args, const compiler::FeedbackSource &feedback_source)
MaybeReduceResult TryBuildElementAccessOnTypedArray(ValueNode *object, ValueNode *index, const compiler::ElementAccessInfo &access_info, compiler::KeyedAccessMode const &keyed_mode)
MaybeReduceResult TryBuildPropertyStore(ValueNode *receiver, ValueNode *lookup_start_object, compiler::NameRef name, compiler::PropertyAccessInfo const &access_info, compiler::AccessMode access_mode)
MaybeReduceResult TryReduceCallForConstant(compiler::JSFunctionRef target, CallArguments &args, const compiler::FeedbackSource &feedback_source=compiler::FeedbackSource())
ValueNode * GetContextAtDepth(ValueNode *context, size_t depth)
void BuildGenericBinarySmiOperationNode()
void MergeDeadIntoFrameState(int target)
MaybeReduceResult TryBuildStoreField(compiler::PropertyAccessInfo const &access_info, ValueNode *receiver, compiler::AccessMode access_mode)
bool MayBeNullOrUndefined(ValueNode *node)
ReduceResult BuildInt32BinarySmiOperationNode()
MaybeReduceResult TryBuildElementAccessOnJSArrayOrJSObject(ValueNode *object, ValueNode *index, const compiler::ElementAccessInfo &access_info, compiler::KeyedAccessMode const &keyed_mode)
ReduceResult BuildCallFromRegisters(int argc_count, ConvertReceiverMode receiver_mode)
MaybeReduceResult TryBuildCheckInt32Condition(ValueNode *lhs, ValueNode *rhs, AssertCondition condition, DeoptimizeReason reason, bool allow_unconditional_deopt=true)
DeoptFrame * GetDeoptFrameForEagerCall(const MaglevCompilationUnit *unit, ValueNode *closure, base::Vector< ValueNode * > args)
MaglevCallerDetails * caller_details_
ValueNode * Select(FCond cond, FTrue if_true, FFalse if_false)
std::optional< VirtualObject * > TryReadBoilerplateForFastLiteral(compiler::JSObjectRef boilerplate, AllocationType allocation, int max_depth, int *max_properties)
const compiler::BytecodeLivenessState * GetInLivenessFor(int offset) const
MaybeReduceResult TryReduceFunctionPrototypeApplyCallWithReceiver(compiler::OptionalHeapObjectRef maybe_receiver, CallArguments &args, const compiler::FeedbackSource &feedback_source)
MaybeReduceResult TryBuildFastCreateObjectOrArrayLiteral(const compiler::LiteralFeedback &feedback)
VirtualObject * CreateJSStringIterator(compiler::MapRef map, ValueNode *string)
SourcePositionTableIterator source_position_iterator_
CatchBlockDetails GetCurrentTryCatchBlock()
std::optional< uint32_t > TryGetUint32Constant(ValueNode *value)
bool is_non_eager_inlining_enabled() const
size_t StringLengthStaticLowerBound(ValueNode *string, int max_depth=2)
ReduceResult BuildCheckMaps(ValueNode *object, base::Vector< const compiler::MapRef > maps, std::optional< ValueNode * > map={}, bool has_deprecated_map_without_migration_target=false)
void AddDeoptUse(ValueNode *node)
BasicBlock * FinishBlock(std::initializer_list< ValueNode * > control_inputs, Args &&... args)
DeoptFrame GetLatestCheckpointedFrame()
ValueNode * BuildLoadElements(ValueNode *object)
int max_inlined_bytecode_size()
bool IsRegisterEqualToAccumulator(int operand_index)
ValueNode * BuildNewConsStringMap(ValueNode *left, ValueNode *right)
bool CanTrackObjectChanges(ValueNode *object, TrackObjectMode mode)
ReduceResult VisitUnaryOperation()
ReduceResult ReduceCall(ValueNode *target_node, CallArguments &args, const compiler::FeedbackSource &feedback_source=compiler::FeedbackSource())
VirtualObject * CreateMappedArgumentsElements(compiler::MapRef map, int mapped_count, ValueNode *context, ValueNode *unmapped_elements)
ValueNode * GetAccumulatorTruncatedInt32ForToNumber(NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
MaybeReduceResult TryBuildGlobalStore(const compiler::GlobalAccessFeedback &global_access_feedback)
bool ShouldEmitOsrInterruptBudgetChecks()
MaybeReduceResult TryBuildCallKnownApiFunction(compiler::JSFunctionRef function, compiler::SharedFunctionInfoRef shared, CallArguments &args)
MaybeReduceResult TryBuildLoadNamedProperty(ValueNode *receiver, ValueNode *lookup_start_object, compiler::NameRef name, compiler::FeedbackSource &feedback_source, GenericAccessFunc &&build_generic_access)
compiler::OptionalJSObjectRef TryGetConstantDataFieldHolder(compiler::PropertyAccessInfo const &access_info, ValueNode *lookup_start_object)
NodeT * AddNewNode(size_t input_count, Function &&post_create_input_initializer, Args &&... args)
ValueNode * GetTrustedConstant(compiler::HeapObjectRef ref, IndirectPointerTag tag)
ValueNode * GetTaggedValue(ValueNode *value, UseReprHintRecording record_use_repr_hint=UseReprHintRecording::kRecord)
MaybeReduceResult TryReduceCallForNewClosure(ValueNode *target_node, ValueNode *target_context, compiler::SharedFunctionInfoRef shared, compiler::FeedbackCellRef feedback_cell, CallArguments &args, const compiler::FeedbackSource &feedback_source)
void StoreRegister(interpreter::Register target, NodeT *value)
AllocationBlock * current_allocation_block_
DeoptFrame * GetCallerDeoptFrame()
int argument_count_without_receiver() const
ReduceResult BuildTransitionElementsKindOrCheckMap(ValueNode *heap_object, ValueNode *object_map, const ZoneVector< compiler::MapRef > &transition_sources, compiler::MapRef transition_target)
MaybeReduceResult TryBuildFastInstanceOf(ValueNode *object, compiler::JSObjectRef callable_ref, ValueNode *callable_node)
uint32_t GetFlag8Operand(int operand_index) const
ValueNode * BuildInlinedArgumentsElements(int start_index, int length)
bool CheckType(ValueNode *node, NodeType type, NodeType *old=nullptr)
ReduceResult BuildFloat64BinaryOperationNodeForToNumber(NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
ValueNode * BuildLoadTypedArrayElement(ValueNode *object, ValueNode *index, ElementsKind elements_kind)
BasicBlock * EndPrologue()
ValueNode * GetConstant(compiler::ObjectRef ref)
BitVector loop_headers_to_peel_
NodeInfo * GetOrCreateInfoFor(ValueNode *node)
ValueNode * BuildTestUndetectable(ValueNode *value)
ValueNode * BuildInlinedUnmappedArgumentsElements(int mapped_count)
ValueNode * BuildUnwrapStringWrapper(ValueNode *input)
VirtualObject * CreateHeapNumber(Float64 value)
MaybeReduceResult BuildJSArrayBuiltinMapSwitchOnElementsKind(ValueNode *receiver, const MapKindsT &map_kinds, MaglevSubGraphBuilder &sub_graph, std::optional< MaglevSubGraphBuilder::Label > &do_return, int unique_kind_count, IndexToElementsKindFunc &&index_to_elements_kind, BuildKindSpecificFunc &&build_kind_specific)
ValueNode * GetFloat64(ValueNode *value)
void MergeIntoInlinedReturnFrameState(BasicBlock *block)
MaybeReduceResult TryBuildAndAllocateJSGeneratorObject(ValueNode *closure, ValueNode *receiver)
void CalculatePredecessorCounts()
ValueNode * GetAccumulator()
bool CanAllocateSloppyArgumentElements()
std::optional< double > TryGetFloat64Constant(ValueNode *value, TaggedToFloat64ConversionType conversion_type)
int parameter_count() const
int parameter_count_without_receiver() const
ValueNode * BuildLoadFixedDoubleArrayElement(ValueNode *elements, int index)
ReduceResult EmitUnconditionalDeopt(DeoptimizeReason reason)
compiler::BytecodeAnalysis bytecode_analysis_
MaglevGraphLabeller * graph_labeller() const
ValueNode * BuildElementsArray(int length)
void ClobberAccumulator()
DeoptFrame * AddInlinedArgumentsToDeoptFrame(DeoptFrame *deopt_frame, const MaglevCompilationUnit *unit, ValueNode *closure, base::Vector< ValueNode * > args)
interpreter::TestTypeOfFlags::LiteralFlag TypeOfLiteralFlag
ValueNode * BuildLoadStringLength(ValueNode *string)
void ProcessMergePoint(int offset, bool preserve_known_node_aspects)
ReduceResult BuildTruncatingInt32BitwiseNotForToNumber(NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
void MinimizeContextChainDepth(ValueNode **context, size_t *depth)
InlinedAllocation * BuildInlinedAllocationForConsString(VirtualObject *object, AllocationType allocation)
float GetCurrentCallFrequency()
VirtualObject * CreateContext(compiler::MapRef map, int length, compiler::ScopeInfoRef scope_info, ValueNode *previous_context, std::optional< ValueNode * > extension={})
ReduceResult BuildCheckValueByReference(ValueNode *node, compiler::HeapObjectRef ref, DeoptimizeReason reason)
ValueNode * BuildNumberOrOddballToFloat64(ValueNode *node, NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
Node * BuildStoreTaggedField(ValueNode *object, ValueNode *value, int offset, StoreTaggedMode store_mode)
ValueNode * GetAccumulatorUint8ClampedForToNumber()
void SetContext(ValueNode *context)
MaybeReduceResult TryBuildNamedAccess(ValueNode *receiver, ValueNode *lookup_start_object, compiler::NamedAccessFeedback const &feedback, compiler::FeedbackSource const &feedback_source, compiler::AccessMode access_mode, GenericAccessFunc &&build_generic_access)
ValueNode * GetNumberConstant(double constant)
void MoveNodeBetweenRegisters(interpreter::Register src, interpreter::Register dst)
LoopEffects * loop_effects_
ReduceResult BuildCheckSmi(ValueNode *object, bool elidable=true)
ReduceResult BuildCheckInternalizedStringValueOrByReference(ValueNode *node, compiler::HeapObjectRef ref, DeoptimizeReason reason)
RootConstant * GetRootConstant(RootIndex index)
BranchResult BuildBranchIfUndefined(BranchBuilder &builder, ValueNode *node)
LocalIsolate * local_isolate() const
BranchBuilder CreateBranchBuilder(BranchType jump_type=BranchType::kBranchIfTrue)
bool CanAllocateInlinedArgumentElements()
ReduceResult BuildFloat64UnaryOperationNodeForToNumber(NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
DeoptFrame GetDeoptFrameForLazyDeopt(interpreter::Register result_location, int result_size)
ZoneDeque< LoopEffects * > loop_effects_stack_
VirtualObject * CreateConsString(ValueNode *map, ValueNode *length, ValueNode *first, ValueNode *second)
bool MaglevIsTopTier() const
ReduceResult BuildCompareMaps(ValueNode *heap_object, ValueNode *object_map, base::Vector< const compiler::MapRef > maps, MaglevSubGraphBuilder *sub_graph, std::optional< MaglevSubGraphBuilder::Label > &if_not_matched)
ValueNode * GetFloat64ForToNumber(ValueNode *value, NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
bool EnsureType(ValueNode *node, NodeType type, NodeType *old=nullptr)
MaybeReduceResult TryBuildFastOrdinaryHasInstance(ValueNode *object, compiler::JSObjectRef callable, ValueNode *callable_node)
int max_inlined_bytecode_size_small()
VirtualObject * BuildVirtualArgumentsObject()
int max_inlined_bytecode_size_cumulative()
ValueNode * BuildAndAllocateArgumentsObject()
MaybeReduceResult TryBuildInlinedAllocatedContext(compiler::MapRef map, compiler::ScopeInfoRef scope, int context_length)
MaybeReduceResult TryBuildScriptContextStore(const compiler::GlobalAccessFeedback &global_access_feedback)
CallNode * AddNewCallNode(const CallArguments &args, Args &&... extra_args)
CallKnownJSFunction * BuildCallKnownJSFunction(ValueNode *context, ValueNode *function, ValueNode *new_target, compiler::SharedFunctionInfoRef shared, compiler::FeedbackCellRef feedback_cell, CallArguments &args, const compiler::FeedbackSource &feedback_source)
MaybeReduceResult TryBuildScriptContextLoad(const compiler::GlobalAccessFeedback &global_access_feedback)
BranchResult BuildBranchIfUndefinedOrNull(BranchBuilder &builder, ValueNode *node)
compiler::FeedbackVectorRef feedback() const
TaggedIndexConstant * GetTaggedIndexConstant(int constant)
MaybeReduceResult DoTryReduceMathRound(CallArguments &args, Float64Round::Kind kind)
void InitializeRegister(interpreter::Register reg, ValueNode *value)
MaybeReduceResult TryReduceGetIterator(ValueNode *receiver, int load_slot, int call_slot)
MaybeReduceResult TryBuildElementAccess(ValueNode *object, ValueNode *index, compiler::ElementAccessFeedback const &feedback, compiler::FeedbackSource const &feedback_source, GenericAccessFunc &&build_generic_access)
interpreter::BytecodeArrayIterator iterator_
InterpretedDeoptFrame GetDeoptFrameForEntryStackCheck()
void AddNonEscapingUses(InlinedAllocation *allocation, int use_count)
void BuildStoreTypedArrayElement(ValueNode *object, ValueNode *index, ElementsKind elements_kind)
constexpr bool RuntimeFunctionCanThrow(Runtime::FunctionId function_id)
void MergeDeadLoopIntoFrameState(int target)
MaybeReduceResult TryReduceConstructGeneric(compiler::JSFunctionRef function, compiler::SharedFunctionInfoRef shared_function_info, ValueNode *target, ValueNode *new_target, CallArguments &args, compiler::FeedbackSource &feedback_source)
int inline_exit_offset() const
ValueNode * BuildAndAllocateJSArrayIterator(ValueNode *array, IterationKind iteration_kind)
ReduceResult BuildTruncatingInt32BinaryOperationNodeForToNumber(NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
ValueNode * BuildSmiUntag(ValueNode *node)
base::Vector< ValueNode * > GetArgumentsAsArrayOfValueNodes(compiler::SharedFunctionInfoRef shared, const CallArguments &args)
bool ShouldEmitInterruptBudgetChecks()
BranchResult BuildBranchIfInt32Compare(BranchBuilder &builder, Operation op, ValueNode *lhs, ValueNode *rhs)
RootConstant * GetBooleanConstant(bool value)
MaybeReduceResult TryReduceCallForApiFunction(compiler::FunctionTemplateInfoRef api_callback, compiler::OptionalSharedFunctionInfoRef maybe_shared, CallArguments &args)
ValueNode * BuildGenericCall(ValueNode *target, Call::TargetType target_type, const CallArguments &args)
int next_handler_table_index_
ValueNode * BuildLoadJSArrayLength(ValueNode *js_array, NodeType length_type=NodeType::kSmi)
void BuildStoreMap(ValueNode *object, compiler::MapRef map, StoreMap::Kind kind)
CatchBlockDetails GetTryCatchBlockFromInfo(ExceptionHandlerInfo *info)
MaybeReduceResult TryReduceGetProto(ValueNode *node)
ReduceResult BuildOrdinaryHasInstance(ValueNode *object, compiler::JSObjectRef callable, ValueNode *callable_node)
VirtualObject * GetObjectFromAllocation(InlinedAllocation *allocation)
void BuildRegisterFrameInitialization(ValueNode *context=nullptr, ValueNode *closure=nullptr, ValueNode *new_target=nullptr)
ValueNode * BuildTaggedEqual(ValueNode *lhs, ValueNode *rhs)
ReduceResult BuildToNumberOrToNumeric(Object::Conversion mode)
void StartNewBlock(int offset, BasicBlock *predecessor)
VirtualObject * GetModifiableObjectFromAllocation(InlinedAllocation *allocation)
ReduceResult BuildCheckNumericalValueOrByReference(ValueNode *node, compiler::ObjectRef ref, DeoptimizeReason reason)
ReduceResult ReduceCallWithArrayLikeForArgumentsObject(ValueNode *target_node, CallArguments &args, VirtualObject *arguments_object, const compiler::FeedbackSource &feedback_source)
void BuildStoreFixedArrayElement(ValueNode *elements, ValueNode *index, ValueNode *value)
VirtualObject * CreateJSIteratorResult(compiler::MapRef map, ValueNode *value, ValueNode *done)
void BuildLoadContextSlot(ValueNode *context, size_t depth, int slot_index, ContextSlotMutability slot_mutability, ContextKind context_kind)
DeoptFrameScope * current_deopt_scope_
compiler::BytecodeArrayRef bytecode() const
InterpreterFrameState current_interpreter_frame_
ReduceResult CreateJSArray(compiler::MapRef map, int instance_size, ValueNode *length)
VirtualObject * CreateVirtualObject(compiler::MapRef map, uint32_t slot_count_including_map)
bool is_loop_effect_tracking()
const compiler::BytecodeLivenessState * GetOutLiveness() const
bool ShouldEagerInlineCall(compiler::SharedFunctionInfoRef shared)
void StoreRegisterPair(std::pair< interpreter::Register, interpreter::Register > target, NodeT *value)
DeoptFrame GetDeoptFrameForLazyDeoptHelper(interpreter::Register result_location, int result_size, DeoptFrameScope *scope, bool mark_accumulator_dead)
MaybeReduceResult TryReduceConstructBuiltin(compiler::JSFunctionRef builtin, compiler::SharedFunctionInfoRef shared_function_info, ValueNode *target, CallArguments &args)
ValueNode * LoadAndCacheContextSlot(ValueNode *context, int offset, ContextSlotMutability slot_mutability, ContextKind context_kind)
int peeled_iteration_count_
bool TryBuildFindNonDefaultConstructorOrConstruct(ValueNode *this_function, ValueNode *new_target, std::pair< interpreter::Register, interpreter::Register > result)
ReduceResult BuildCheckNumericalValue(ValueNode *node, compiler::ObjectRef ref, DeoptimizeReason reason)
MaybeReduceResult TryBuildFastHasInPrototypeChain(ValueNode *object, compiler::HeapObjectRef prototype)
void SetKnownValue(ValueNode *node, compiler::ObjectRef constant, NodeType new_node_type)
void MarkNodeDead(Node *node)
ValueNode * inlined_new_target_
bool CanInlineCall(compiler::SharedFunctionInfoRef shared, float call_frequency)
ValueNode * GetInternalizedString(interpreter::Register reg)
BranchResult BuildBranchIfJSReceiver(BranchBuilder &builder, ValueNode *value)
bool CanElideWriteBarrier(ValueNode *object, ValueNode *value)
MaybeReduceResult TryReduceConstructArrayConstructor(compiler::JSFunctionRef array_function, CallArguments &args, compiler::OptionalAllocationSiteRef maybe_allocation_site={})
LazyDeoptResultLocationScope * lazy_deopt_result_location_scope_
MaybeReduceResult TryReduceConstruct(compiler::HeapObjectRef feedback_target, ValueNode *target, ValueNode *new_target, CallArguments &args, compiler::FeedbackSource &feedback_source)
MergePointInterpreterFrameState ** merge_states_
MaybeReduceResult TryReduceTypeOf(ValueNode *value, const Function &GetResult)
MaybeReduceResult TryBuildScriptContextConstantLoad(const compiler::GlobalAccessFeedback &global_access_feedback)
std::optional< int32_t > TryGetInt32Constant(ValueNode *value)
MaglevCallerDetails * caller_details() const
MergePointInterpreterFrameState * GetCatchBlockFrameState()
BasicBlock * FinishInlinedBlockForCaller(ControlNode *control_node, ZoneVector< Node * > rem_nodes_in_call_block)
ReduceResult GetUint32ElementIndex(interpreter::Register reg)
ReduceResult BuildCheckJSReceiver(ValueNode *object)
VirtualObject * CreateArgumentsObject(compiler::MapRef map, ValueNode *length, ValueNode *elements, std::optional< ValueNode * > callee={})
ReduceResult BuildCallRuntime(Runtime::FunctionId function_id, std::initializer_list< ValueNode * > inputs)
ReduceResult BuildCheckHeapObject(ValueNode *object)
BasicBlockRef * jump_targets_
VirtualObject * CreateJSConstructor(compiler::JSFunctionRef constructor)
Float64Constant * GetFloat64Constant(double constant)
InlinedAllocation * BuildInlinedAllocationForDoubleFixedArray(VirtualObject *object, AllocationType allocation)
ValueNode * GetClosure() const
ReduceResult VisitBinarySmiOperation()
MaybeReduceResult TryFoldFloat64UnaryOperationForToNumber(TaggedToFloat64ConversionType conversion_type, ValueNode *value)
ValueNode * GetInlinedArgument(int i)
void SetAccumulator(NodeT *node)
void MergeIntoFrameState(BasicBlock *block, int target)
MaybeReduceResult TrySpecializeStoreScriptContextSlot(ValueNode *context, int index, ValueNode *value, Node **store)
ValueNode * GetInt32(ValueNode *value, bool can_be_heap_number=false)
MaglevGraphBuilder(LocalIsolate *local_isolate, MaglevCompilationUnit *compilation_unit, Graph *graph, MaglevCallerDetails *caller_details=nullptr)
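For orientation, a hedged sketch of constructing this builder for a top-level (non-inlined) compilation; local_isolate, compilation_unit, and graph are assumed to be produced by the surrounding Maglev pipeline, and this is illustrative rather than V8 source:

// Hypothetical driver snippet; all three inputs come from the pipeline.
// caller_details defaults to nullptr, i.e. a top-level, non-inlined build.
MaglevGraphBuilder builder(local_isolate, compilation_unit, graph);
DCHECK_NOT_NULL(builder.compilation_unit());
DCHECK_EQ(builder.inlining_depth(), 0);  // assumed: zero without caller details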
VirtualObject * CreateJSObject(compiler::MapRef map)
ValueNode * BuildLoadFixedArrayElement(ValueNode *elements, int index)
ValueNode * BuildLoadHoleyFixedDoubleArrayElement(ValueNode *elements, ValueNode *index, bool convert_hole)
void EndLoopEffects(int loop_header)
VirtualObject * CreateJSGeneratorObject(compiler::MapRef map, int instance_size, ValueNode *context, ValueNode *closure, ValueNode *receiver, ValueNode *register_file)
compiler::OptionalObjectRef TryFoldLoadConstantDataField(compiler::JSObjectRef holder, compiler::PropertyAccessInfo const &access_info)
ValueNode * GetAccumulatorHoleyFloat64ForToNumber(NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
ReduceResult BuildStringConcat(ValueNode *left, ValueNode *right)
void KillPeeledLoopTargets(int peelings)
ZoneStack< HandlerTableEntry > catch_block_stack_
BranchResult BuildBranchIfUndetectable(BranchBuilder &builder, ValueNode *value)
MaybeReduceResult TryFoldFloat64BinaryOperationForToNumber(TaggedToFloat64ConversionType conversion_type, ValueNode *left, ValueNode *right)
MaybeReduceResult TryBuildPropertyLoad(ValueNode *receiver, ValueNode *lookup_start_object, compiler::NameRef name, compiler::PropertyAccessInfo const &access_info)
ReduceResult BuildHasInPrototypeChain(ValueNode *object, compiler::HeapObjectRef prototype)
ValueNode * TryGetParentContext(ValueNode *node)
MaybeReduceResult TryBuildPropertySetterCall(compiler::PropertyAccessInfo const &access_info, ValueNode *receiver, ValueNode *lookup_start_object, ValueNode *value)
ReduceResult VisitCompareOperation()
compiler::HolderLookupResult TryInferApiHolderValue(compiler::FunctionTemplateInfoRef function_template_info, ValueNode *receiver)
void BuildLoopForPeeling()
ReduceResult BuildCheckNumber(ValueNode *object)
ValueNode * GetArgument(int i)
VirtualObject * CreateJSArrayIterator(compiler::MapRef map, ValueNode *iterated_object, IterationKind kind)
ReduceResult BuildCallFromRegisterList(ConvertReceiverMode receiver_mode)
void BeginLoopEffects(int loop_header)
MaybeReduceResult TryBuildElementLoadOnJSArrayOrJSObject(ValueNode *object, ValueNode *index, base::Vector< const compiler::MapRef > maps, ElementsKind kind, KeyedAccessLoadMode load_mode)
ValueNode * GetConvertReceiver(compiler::SharedFunctionInfoRef shared, const CallArguments &args)
BranchResult BuildBranchIfFloat64IsHole(BranchBuilder &builder, ValueNode *node)
MaybeReduceResult TryBuildNewConsString(ValueNode *left, ValueNode *right, AllocationType allocation_type=AllocationType::kYoung)
ReduceResult BuildLoadGlobal(compiler::NameRef name, compiler::FeedbackSource &feedback_source, TypeofMode typeof_mode)
void ClearCurrentAllocationBlock()
bool ContextMayAlias(ValueNode *context, compiler::OptionalScopeInfoRef scope_info)
ValueNode * LoadRegisterHoleyFloat64ForToNumber(int operand_index, NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
ReduceResult BuildTruncatingInt32BinarySmiOperationNodeForToNumber(NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
void VisitSingleBytecode()
int inlining_depth() const
void RecordKnownProperty(ValueNode *lookup_start_object, KnownNodeAspects::LoadedPropertyMapKey key, ValueNode *value, bool is_const, compiler::AccessMode access_mode)
std::pair< interpreter::Register, int > GetResultLocationAndSize() const
ReduceResult BuildCheckSymbol(ValueNode *object)
ReduceResult BuildCallWithFeedback(ValueNode *target_node, CallArguments &args, const compiler::FeedbackSource &feedback_source)
MaybeReduceResult TryBuildGlobalLoad(const compiler::GlobalAccessFeedback &global_access_feedback)
KnownNodeAspects & known_node_aspects()
BranchResult BuildBranchIfRootConstant(BranchBuilder &builder, ValueNode *node, RootIndex root_index)
MaybeReduceResult GetAccumulatorSmi(UseReprHintRecording record_use_repr_hint=UseReprHintRecording::kRecord)
ReduceResult BuildInt32UnaryOperationNode()
bool in_peeled_iteration() const
bool IsInsideTryBlock() const
ValueNode * BuildCallSelf(ValueNode *context, ValueNode *function, ValueNode *new_target, compiler::SharedFunctionInfoRef shared, CallArguments &args)
ReduceResult BuildAbort(AbortReason reason)
bool CheckContextExtensions(size_t depth)
ZoneUnorderedMap< KnownNodeAspects::LoadedContextSlotsKey, Node * > unobserved_context_slot_stores_
MaglevCompilationUnit * compilation_unit() const
CallBuiltin * BuildCallBuiltin(std::initializer_list< ValueNode * > inputs)
NodeType GetType(ValueNode *node)
MaybeReduceResult TryBuildFastInstanceOfWithFeedback(ValueNode *object, ValueNode *callable, compiler::FeedbackSource feedback_source)
InferHasInPrototypeChainResult
ValueNode * GetHoleyFloat64ForToNumber(ValueNode *value, NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
bool CanSpeculateCall() const
ReduceResult VisitBinaryOperation()
MaybeReduceResult TryBuildInlineCall(ValueNode *context, ValueNode *function, ValueNode *new_target, compiler::SharedFunctionInfoRef shared, compiler::FeedbackCellRef feedback_cell, CallArguments &args, const compiler::FeedbackSource &feedback_source)
static constexpr bool kLoopsMustBeEnteredThroughHeader
void BuildGenericUnaryOperationNode()
void InitializePredecessorCount(uint32_t offset, int amount)
VirtualObject * CreateDoubleFixedArray(uint32_t elements_length, compiler::FixedDoubleArrayRef elements)
ReduceResult BuildStoreContextSlot(ValueNode *context, size_t depth, int slot_index, ValueNode *value, ContextKind context_kind)
ReduceResult BuildCheckNotHole(ValueNode *node)
std::function< DeoptFrameScope( compiler::JSFunctionRef, ValueNode *, ValueNode *, ValueNode *, ValueNode *, ValueNode *, ValueNode *)> GetDeoptScopeCallback
MaybeReduceResult TryBuildPropertyGetterCall(compiler::PropertyAccessInfo const &access_info, ValueNode *receiver, ValueNode *lookup_start_object)
ReduceResult GetSmiValue(ValueNode *value, UseReprHintRecording record_use_repr_hint=UseReprHintRecording::kRecord)
ValueNode * GetFeedbackCell()
MaglevCompilationUnit *const compilation_unit_
ValueNode * BuildLoadField(compiler::PropertyAccessInfo const &access_info, ValueNode *lookup_start_object, compiler::NameRef name)
ReduceResult BuildGetKeyedProperty(ValueNode *object, const compiler::FeedbackSource &feedback_source, const compiler::ProcessedFeedback &processed_feedback)
ValueNode * GetSilencedNaN(ValueNode *value)
ReduceResult BuildLoadTypedArrayLength(ValueNode *object, ElementsKind elements_kind)
const compiler::BytecodeAnalysis & bytecode_analysis() const
ReduceResult BuildAndAllocateJSArray(compiler::MapRef map, ValueNode *length, ValueNode *elements, const compiler::SlackTrackingPrediction &slack_tracking_prediction, AllocationType allocation_type)
FeedbackSlot GetSlotOperand(int operand_index) const
MaybeReduceResult SelectReduction(FCond cond, FTrue if_true, FFalse if_false)
std::optional< ValueNode * > TryGetConstantAlternative(ValueNode *node)
void TryBuildStoreTaggedFieldToAllocation(ValueNode *object, ValueNode *value, int offset)
ForInState current_for_in_state
void BuildGenericBinaryOperationNode()
ReduceResult BuildCheckJSReceiverOrNullOrUndefined(ValueNode *object)
const compiler::BytecodeLivenessState * GetOutLivenessFor(int offset) const
ValueNode * LoadRegister(int operand_index)
ValueNode * BuildExtendPropertiesBackingStore(compiler::MapRef map, ValueNode *receiver, ValueNode *property_array)
ReduceResult ConvertForStoring(ValueNode *node, ElementsKind kind)
bool is_loop_effect_tracking_enabled()
MaybeReduceResult TryBuildElementStoreOnJSArrayOrJSObject(ValueNode *object, ValueNode *index_object, ValueNode *value, base::Vector< const compiler::MapRef > maps, ElementsKind kind, const compiler::KeyedAccessMode &keyed_mode)
MaybeReduceResult TryBuildPolymorphicPropertyAccess(ValueNode *receiver, ValueNode *lookup_start_object, compiler::NamedAccessFeedback const &feedback, compiler::AccessMode access_mode, const ZoneVector< compiler::PropertyAccessInfo > &access_infos, GenericAccessFunc &&build_generic_access)
void MarkBranchDeadAndJumpIfNeeded(bool is_jump_taken)
ValueNode * GetValueOrUndefined(ValueNode *maybe_value)
SmiConstant * GetSmiConstant(int constant) const
InferHasInPrototypeChainResult InferHasInPrototypeChain(ValueNode *receiver, compiler::HeapObjectRef prototype)
ReduceResult BuildConstruct(ValueNode *target, ValueNode *new_target, CallArguments &args, compiler::FeedbackSource &feedback_source)
ValueNode * GetInt32ElementIndex(interpreter::Register reg)
compiler::JSHeapBroker * broker() const
VirtualObject * CreateRegExpLiteralObject(compiler::MapRef map, compiler::RegExpBoilerplateDescriptionRef literal)
MaybeReduceResult TryFoldInt32UnaryOperation(ValueNode *value)
BranchResult BuildBranchIfInt32ToBooleanTrue(BranchBuilder &builder, ValueNode *node)
MaybeReduceResult TryReduceArrayIteratingBuiltin(const char *name, compiler::JSFunctionRef target, CallArguments &args, GetDeoptScopeCallback get_eager_deopt_scope, GetDeoptScopeCallback get_lazy_deopt_scope, const std::optional< InitialCallback > &initial_callback={}, const std::optional< ProcessElementCallback > &process_element_callback={})
uint32_t predecessor_count(uint32_t offset)
Int32Constant * GetInt32Constant(int32_t constant)
MaybeReduceResult TryReduceBuiltin(compiler::JSFunctionRef target, compiler::SharedFunctionInfoRef shared, CallArguments &args, const compiler::FeedbackSource &feedback_source)
bool CheckStaticType(ValueNode *node, NodeType type, NodeType *old=nullptr)
ValueNode * BuildConvertHoleToUndefined(ValueNode *node)
float min_inlining_frequency()
ReduceResult BuildAndAllocateKeyValueArray(ValueNode *key, ValueNode *value)
InlinedAllocation * BuildInlinedAllocationForHeapNumber(VirtualObject *object, AllocationType allocation)
MaybeReduceResult TryBuildStoreDataView(const CallArguments &args, ExternalArrayType type, Function &&getValue)
std::optional< DeoptFrame > latest_checkpointed_frame_
bool TrySpecializeLoadContextSlotToFunctionContext(ValueNode *context, int slot_index, ContextSlotMutability slot_mutability)
ReduceResult BuildFloat64BinarySmiOperationNodeForToNumber(NodeType allowed_input_type, TaggedToFloat64ConversionType conversion_type)
bool HasValidInitialMap(compiler::JSFunctionRef new_target, compiler::JSFunctionRef constructor)
InlinedAllocation * BuildInlinedAllocation(VirtualObject *object, AllocationType allocation)
void RecordUseReprHintIfPhi(ValueNode *node, UseRepresentation repr)
ValueNode * BuildToBoolean(ValueNode *node)
BranchResult BuildBranchIfNull(BranchBuilder &builder, ValueNode *node)
ReduceResult ReduceCallWithArrayLike(ValueNode *target_node, CallArguments &args, const compiler::FeedbackSource &feedback_source)
void BuildStoreFixedDoubleArrayElement(ValueNode *elements, ValueNode *index, ValueNode *value)
MaybeReduceResult TryBuildPropertyAccess(ValueNode *receiver, ValueNode *lookup_start_object, compiler::NameRef name, compiler::PropertyAccessInfo const &access_info, compiler::AccessMode access_mode)
ReduceResult BuildTransitionElementsKindAndCompareMaps(ValueNode *heap_object, ValueNode *object_map, const ZoneVector< compiler::MapRef > &transition_sources, compiler::MapRef transition_target, MaglevSubGraphBuilder *sub_graph, std::optional< MaglevSubGraphBuilder::Label > &if_not_matched)
static MaybeReduceResult Fail()
ValueNode * value() const
bool IsDoneWithValue() const
bool IsDoneWithAbort() const
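The accessors above define the MaybeReduceResult protocol: Fail() means "no opinion", while a value or an abort is final. A hedged sketch of the usual call shape (TryReduceNested is a hypothetical stand-in for any of the TryReduce* helpers listed here):

MaybeReduceResult TryReduceExample(ValueNode* input) {
  // Fail() lets the caller fall back to the generic lowering.
  if (input == nullptr) return MaybeReduceResult::Fail();
  MaybeReduceResult nested = TryReduceNested(input);  // hypothetical helper
  // Both a produced value and an abort are final; only Fail() keeps searching.
  if (nested.IsDoneWithValue() || nested.IsDoneWithAbort()) return nested;
  return MaybeReduceResult::Fail();
}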
void MergeDead(const MaglevCompilationUnit &compilation_unit, unsigned num=1)
friend T * Zone::New(Args &&... args)
void Merge(MaglevGraphBuilder *graph_builder, InterpreterFrameState &unmerged, BasicBlock *predecessor)
void MergeLoop(MaglevGraphBuilder *graph_builder, InterpreterFrameState &loop_end_state, BasicBlock *loop_end_block)
static MergePointInterpreterFrameState * NewForLoop(const InterpreterFrameState &start_state, const MaglevCompilationUnit &info, int merge_offset, int predecessor_count, const compiler::BytecodeLivenessState *liveness, const compiler::LoopInfo *loop_info, bool has_been_peeled=false)
void MergeDeadLoop(const MaglevCompilationUnit &compilation_unit)
void MergeThrow(MaglevGraphBuilder *handler_builder, const MaglevCompilationUnit *handler_unit, const KnownNodeAspects &known_node_aspects, const VirtualObjectList virtual_objects)
static MergePointInterpreterFrameState * NewForCatchBlock(const MaglevCompilationUnit &unit, const compiler::BytecodeLivenessState *liveness, int handler_offset, bool was_used, interpreter::Register context_register, Graph *graph)
void set_loop_effects(LoopEffects *loop_effects)
uint32_t predecessor_count() const
void InitializeLoop(MaglevGraphBuilder *graph_builder, MaglevCompilationUnit &compilation_unit, InterpreterFrameState &unmerged, BasicBlock *predecessor, bool optimistic_initial_state=false, LoopEffects *loop_effects=nullptr)
void set_owner(BasicBlock *block)
constexpr bool Is() const
static constexpr Opcode opcode_of
static Derived * New(Zone *zone, std::initializer_list< ValueNode * > inputs, Args &&... args)
constexpr Opcode opcode() const
constexpr OpProperties properties() const
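A hedged sketch of the tag helpers above, together with the free function IsConstantNode listed further down; Is<T>() is assumed to be an opcode() comparison rather than C++ RTTI, which keeps it cheap on hot paths:

bool IsFloat64ConstantNode(const NodeBase* node) {
  // Float64Constant is part of the constant-node family, so a positive
  // Is<> check implies IsConstantNode holds for the same opcode.
  DCHECK_IMPLIES(node->Is<Float64Constant>(), IsConstantNode(node->opcode()));
  return node->Is<Float64Constant>();
}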
void SetPossibleMaps(const PossibleMaps &possible_maps, bool any_map_is_unstable, NodeType possible_type, compiler::JSHeapBroker *broker)
const PossibleMaps & possible_maps() const
NodeType IntersectType(NodeType other)
bool possible_maps_are_known() const
const AlternativeNodes & alternative() const
NodeType CombineType(NodeType other)
constexpr ValueRepresentation value_representation() const
constexpr bool is_conversion() const
constexpr bool is_tagged() const
static ReduceResult DoneWithAbort()
static ReduceResult Done()
Tagged< Smi > value() const
CheckType check_type() const
Object::Conversion mode() const
IndirectPointerTag tag() const
void set_predecessor_id(int id)
constexpr ValueRepresentation value_representation() const
constexpr bool is_tagged() const
constexpr bool has_static_map() const
void set(uint32_t offset, ValueNode *value)
void ForEachInput(Function &&callback)
compiler::FixedDoubleArrayRef double_elements() const
ValueNode * get(uint32_t offset) const
VirtualObject * Clone(uint32_t new_object_id, Zone *zone, bool empty_clone=false) const
void set_allocation(InlinedAllocation *allocation)
uint32_t double_elements_length() const
const VirtualConsString & cons_string() const
compiler::MapRef map() const
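A hedged sketch of the VirtualObject slot API above; the offset parameter and the CopySlot helper are illustrative only, with slots assumed to be addressed the same way the Create* builders lay them out:

void CopySlot(VirtualObject* from, VirtualObject* to, uint32_t offset) {
  // get/set address individual object slots by offset.
  to->set(offset, from->get(offset));
  from->ForEachInput([](ValueNode* slot_value) {
    // Visits every slot value, e.g. to add deopt uses or detect escapes.
  });
}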
#define V8_MAP_PACKING_BOOL
#define V8_DICT_PROPERTY_CONST_TRACKING_BOOL
JSHeapBroker *const broker_
Handle< SharedFunctionInfo > info
Handle< Context > context_
base::Vector< const DirectHandle< Object > > args
DirectHandle< Object > new_target
#define INTRINSICS_LIST(V)
TNode< Object > original_length
SharedFunctionInfoRef shared
ZoneVector< RpoNumber > & result
MovableLabel continuation
FunctionLiteral * literal
RegListBase< RegisterT > registers
#define BUILD_AND_RETURN_LOAD_TYPED_ARRAY(Type)
#define BUILD_STORE_TYPED_ARRAY(Type, value)
#define MAP_UNARY_OPERATION_TO_INT32_NODE(V)
bool known_maps_are_subset_of_requested_maps_
bool existing_known_maps_found_
#define MAP_BINARY_OPERATION_TO_INT32_NODE(V)
bool any_map_is_unstable_
#define NODE_FOR_OPERATION_HELPER(Name)
compiler::ZoneRefSet< Map > intersect_set_
bool emit_check_with_migration_
#define MAP_OPERATION_TO_FLOAT64_NODE(V)
base::Vector< const compiler::MapRef > requested_maps_
#define SPECIALIZATION(op, OpNode,...)
#define TRACE_CANNOT_INLINE(...)
#define GENERATE_CASE(Name)
#define MATH_UNARY_IEEE_BUILTIN_REDUCER(MathName, ExtName, EnumName)
#define TRACE_INLINING(...)
#define GET_VALUE_OR_ABORT(variable, result)
#define RETURN_IF_ABORT(result)
#define PROCESS_AND_RETURN_IF_DONE(result, value_processor)
#define RETURN_IF_DONE(result)
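A hedged sketch of how these propagation macros typically chain inside a bytecode visitor; VisitExample, TryReduceFast, and BuildGenericOp are hypothetical, while the remaining calls are members listed in this index:

ReduceResult VisitExample() {
  ValueNode* object = LoadRegister(0);
  RETURN_IF_ABORT(BuildCheckHeapObject(object));  // an abort unwinds here
  ValueNode* index;
  GET_VALUE_OR_ABORT(index, GetUint32ElementIndex(interpreter::Register(1)));
  // If the fast path commits, feed its value through SetAccumulator and return.
  PROCESS_AND_RETURN_IF_DONE(TryReduceFast(object, index), SetAccumulator);
  SetAccumulator(BuildGenericOp(object, index));  // hypothetical generic path
  return ReduceResult::Done();
}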
#define MAGLEV_REDUCED_BUILTIN(V)
#define NON_VALUE_NODE_LIST(V)
#define CONTROL_NODE_LIST(V)
#define IEEE_754_UNARY_LIST(V)
#define CONSTANT_VALUE_NODE_LIST(V)
InstructionOperand destination
constexpr unsigned CountPopulation(T value)
constexpr bool IsInRange(T value, U lower_limit, U higher_limit)
constexpr Vector< T > VectorOf(T *start, size_t size)
V8_INLINE constexpr bool IsReferenceComparable(InstanceType instance_type)
constexpr size_t input_count()
const int kMaxFastLiteralDepth
bool IsAnyStore(AccessMode mode)
const int kMaxFastLiteralProperties
ref_traits< T >::ref_type MakeRefAssumeMemoryFence(JSHeapBroker *broker, Tagged< T > object)
ZoneCompactSet< typename ref_traits< T >::ref_type > ZoneRefSet
constexpr NodeType CombineType(NodeType left, NodeType right)
static constexpr std::optional< int > Int32Identity()
NodeType StaticTypeForConstant(compiler::JSHeapBroker *broker, compiler::ObjectRef ref)
bool OperationValue(type left, type right)
bool HasOnlyStringMaps(base::Vector< const compiler::MapRef > maps)
constexpr NodeType EmptyNodeType()
bool HasOnlyNumberMaps(base::Vector< const compiler::MapRef > maps)
constexpr bool NodeTypeIs(NodeType type, NodeType to_check)
constexpr bool IsEmptyNodeType(NodeType type)
bool HasOnlyJSTypedArrayMaps(base::Vector< const compiler::MapRef > maps)
constexpr NodeType IntersectType(NodeType left, NodeType right)
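A hedged sketch of the NodeType lattice helpers above, under the assumed semantics that CombineType narrows (both facts hold on one path) while IntersectType merges facts arriving from two control-flow paths:

NodeType AfterSmiCheck(NodeType known) {
  NodeType narrowed = CombineType(known, NodeType::kSmi);  // learned: it's a Smi
  if (IsEmptyNodeType(narrowed)) {
    // Contradiction with earlier knowledge: this path is unreachable.
  }
  DCHECK_IMPLIES(!IsEmptyNodeType(narrowed),
                 NodeTypeIs(narrowed, NodeType::kSmi));
  return narrowed;
}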
constexpr bool IsConstantNode(Opcode opcode)
NodeType StaticTypeForNode(compiler::JSHeapBroker *broker, LocalIsolate *isolate, ValueNode *node)
bool IsInstanceOfNodeType(compiler::MapRef map, NodeType type, compiler::JSHeapBroker *broker)
bool FromConstantToBool(LocalIsolate *local_isolate, ValueNode *node)
constexpr bool IsConditionalControlNode(Opcode opcode)
NodeType StaticTypeForMap(compiler::MapRef map, compiler::JSHeapBroker *broker)
TaggedToFloat64ConversionType
compiler::ZoneRefSet< Map > PossibleMaps
bool NodeTypeMayBeNullOrUndefined(NodeType type)
bool HasOnlyJSArrayMaps(base::Vector< const compiler::MapRef > maps)
bool HasOnlyJSObjectMaps(base::Vector< const compiler::MapRef > maps)
double pow(double x, double y)
bool IsUint32Double(double value)
kReceiverOrNullOrUndefined
constexpr int kFastElementsKindCount
V8_EXPORT_PRIVATE base::Vector< Flag > Flags()
bool TryCast(Tagged< From > value, Tagged< To > *out)
constexpr int kTaggedSize
bool StoreModeIsInBounds(KeyedAccessStoreMode store_mode)
bool StoreModeCanGrow(KeyedAccessStoreMode store_mode)
constexpr int kMaxRegularHeapObjectSize
constexpr bool IsHoleyElementsKind(ElementsKind kind)
bool IsClassConstructor(FunctionKind kind)
bool DoubleToUint32IfEqualToSelf(double value, uint32_t *uint32_value)
bool Is(IndirectHandle< U > value)
bool IsSmiDouble(double value)
bool IsSpecialReceiverInstanceType(InstanceType instance_type)
unsigned int FastD2UI(double x)
bool IsTypedArrayElementsKind(ElementsKind kind)
bool IsRabGsabTypedArrayElementsKind(ElementsKind kind)
bool IsDerivedConstructor(FunctionKind kind)
bool IsInt32Double(double value)
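The double-classification predicates above gate constant folding. A hedged sketch (the folding helper is hypothetical, and IsInt32Double is assumed to reject NaN, fractions, out-of-range values, and -0.0, since none of these survive an exact int32 round trip):

std::optional<int32_t> TryFoldToInt32(double d) {
  if (!IsInt32Double(d)) return std::nullopt;  // e.g. rejects -0.0 and NaN
  return DoubleToInt32(d);                     // exact: the round trip is lossless
}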
constexpr bool IsSmiElementsKind(ElementsKind kind)
constexpr int kFastElementsKindPackedToHoley
constexpr uint64_t kHoleNanInt64
constexpr bool IsObjectElementsKind(ElementsKind kind)
void Print(Tagged< Object > obj)
bool IsSmiOrObjectElementsKind(ElementsKind kind)
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
constexpr bool SmiValuesAre31Bits()
bool IsFastElementsKind(ElementsKind kind)
int32_t DoubleToInt32(double x)
bool IsHoleyOrDictionaryElementsKind(ElementsKind kind)
bool UnionElementsKindUptoSize(ElementsKind *a_out, ElementsKind b)
bool StoreModeIgnoresTypeArrayOOB(KeyedAccessStoreMode store_mode)
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr bool SmiValuesAre32Bits()
kEnumCacheKeysAndIndices
base::Vector< T > CloneVector(Zone *zone, base::Vector< const T > other)
bool IsTypedArrayOrRabGsabTypedArrayElementsKind(ElementsKind kind)
kDefaultDerivedConstructor
kDefaultBaseConstructor
bool LoadModeHandlesHoles(KeyedAccessLoadMode load_mode)
constexpr bool IsDoubleElementsKind(ElementsKind kind)
constexpr uint32_t kMaxUInt32
bool LoadModeHandlesOOB(KeyedAccessLoadMode load_mode)
constexpr Register kJavaScriptCallNewTargetRegister
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
static constexpr RelaxedLoadTag kRelaxedLoad
#define OPERATION_LIST(V)
BytecodeSequenceNode * parent_
#define STRONG_READ_ONLY_HEAP_NUMBER_ROOT_LIST(V)
#define DCHECK_LE(v1, v2)
#define CHECK_IMPLIES(lhs, rhs)
#define DCHECK_NOT_NULL(val)
#define CHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2)
#define DCHECK_NE(v1, v2)
#define CHECK_NE(lhs, rhs)
#define DCHECK_GE(v1, v2)
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
#define DCHECK_LT(v1, v2)
#define DCHECK_EQ(v1, v2)
#define DCHECK_GT(v1, v2)
OptionalJSObjectRef holder
CallOptimization::HolderLookup lookup
BytecodeLoopAssignments & assignments()
bool exception_handler_was_used
const base::Vector< ValueNode * > parameters
compiler::OptionalJSFunctionRef maybe_js_target
std::tuple< ValueNode *, int > LoadedContextSlotsKey
bool IsValid(NodeInfos::iterator &it)
KnownNodeAspects * Clone(Zone *zone) const
ZoneMap< std::tuple< ValueNode *, int >, ValueNode * > loaded_context_constants
bool any_map_for_any_node_is_unstable
LoadedPropertyMap loaded_properties
const NodeInfo * TryGetInfoFor(ValueNode *node) const
void UpdateMayHaveAliasingContexts(ValueNode *context)
NodeInfos::iterator FindInfo(ValueNode *node)
LoadedPropertyMap loaded_constant_properties
LoadedContextSlots loaded_context_slots
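A hedged sketch of querying per-node facts through the KnownNodeAspects accessors above; NodeInfo's exact contents are assumed rather than shown in this listing:

void ExampleQuery(KnownNodeAspects& aspects, ValueNode* node) {
  if (const NodeInfo* info = aspects.TryGetInfoFor(node)) {
    // An entry exists only once something has been learned about `node`;
    // a null result simply means "no recorded facts yet".
  }
}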
ZoneSet< InlinedAllocation * > allocations
void Merge(const LoopEffects *other)
ZoneSet< ValueNode * > objects_written
ZoneSet< KnownNodeAspects::LoadedContextSlotsKey > context_slot_written
ZoneSet< KnownNodeAspects::LoadedPropertyMapKey > keys_cleared
bool may_have_aliasing_contexts
ZoneUnorderedMap< KnownNodeAspects::LoadedContextSlotsKey, Node * > unobserved_context_slot_stores
base::Vector< ValueNode * > arguments
LoopEffects * loop_effects
ValueNode * enum_cache_indices
bool receiver_needs_map_check
BytecodeJumpTarget bytecode_target
LabelJumpTarget label_target
#define V8_LIKELY(condition)
#define V8_UNLIKELY(condition)