#define TRACE(...)                                    \
  do {                                                \
    if (v8_flags.trace_turbo_inlining)                \
      StdoutStream{} << __VA_ARGS__ << std::endl;     \
  } while (false)
 
bool IsSmall(int const size) {
  return size <= v8_flags.max_inlined_bytecode_size_small;
}
 
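// Decides whether the function behind {feedback_cell} is worth considering
// for inlining: it needs a feedback vector that stays the same once the
// bytecode has been pinned, a bytecode array, and an inlineable
// SharedFunctionInfo.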
bool CanConsiderForInlining(JSHeapBroker* broker,
                            FeedbackCellRef feedback_cell) {
  OptionalFeedbackVectorRef feedback_vector =
      feedback_cell.feedback_vector(broker);
  if (!feedback_vector.has_value()) {
    TRACE("Cannot consider " << feedback_cell
                             << " for inlining (no feedback vector)");
    return false;
  }

  SharedFunctionInfoRef shared = feedback_vector->shared_function_info(broker);
  if (!shared.HasBytecodeArray()) {
    TRACE("Cannot consider " << shared << " for inlining (no bytecode)");
    return false;
  }

  // Ensure we hold on to the bytecode array so it cannot be flushed during
  // the remaining compilation.
  shared.GetBytecodeArray(broker);

  // Read the feedback vector again in case it got flushed before the
  // bytecode was pinned above.
  OptionalFeedbackVectorRef feedback_vector_again =
      feedback_cell.feedback_vector(broker);
  if (!feedback_vector_again.has_value()) {
    TRACE("Cannot consider " << shared << " for inlining (no feedback vector)");
    return false;
  }
  if (!feedback_vector_again->equals(*feedback_vector)) {
    // A freshly allocated feedback vector is mostly uninitialized, so there
    // is little point in inlining against it now.
    TRACE("Not considering " << shared
                             << " for inlining (feedback vector changed)");
    return false;
  }

  SharedFunctionInfo::Inlineability inlineability =
      shared.GetInlineability(broker);
  if (inlineability != SharedFunctionInfo::kIsInlineable) {
    TRACE("Cannot consider "
          << shared << " for inlining (reason: " << inlineability << ")");
    return false;
  }

  TRACE("Considering " << shared << " for inlining with " << *feedback_vector);
  return true;
}
 
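// Same check, but starting from a concrete JSFunction and its feedback cell.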
bool CanConsiderForInlining(JSHeapBroker* broker, JSFunctionRef function) {
  FeedbackCellRef feedback_cell = function.raw_feedback_cell(broker);
  bool const result = CanConsiderForInlining(broker, feedback_cell);
  if (result) {
    CHECK(function.shared(broker).equals(
        feedback_cell.shared_function_info(broker).value()));
  }
  return result;
}
 
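// Collects up to {functions_size} inlining targets for the call site {node}:
// a single JSFunction constant, every input of a Phi of JSFunction constants,
// or the closure behind a CheckClosure / JSCreateClosure callee.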
JSInliningHeuristic::Candidate JSInliningHeuristic::CollectFunctions(
    Node* node, int functions_size) {
  Node* callee = node->InputAt(0);
  Candidate out;
  out.node = node;

  HeapObjectMatcher m(callee);
  if (m.HasResolvedValue() && m.Ref(broker()).IsJSFunction()) {
    JSFunctionRef function = m.Ref(broker()).AsJSFunction();
    out.functions[0] = function;
    if (CanConsiderForInlining(broker(), function)) {
      out.bytecode[0] = function.shared(broker()).GetBytecodeArray(broker());
      out.num_functions = 1;
      return out;
    }
  }
  if (m.IsPhi()) {
    int const value_input_count = m.node()->op()->ValueInputCount();
    if (value_input_count > functions_size) {
      out.num_functions = 0;
      return out;
    }
    for (int n = 0; n < value_input_count; ++n) {
      HeapObjectMatcher m2(callee->InputAt(n));
      if (!m2.HasResolvedValue() || !m2.Ref(broker()).IsJSFunction()) {
        out.num_functions = 0;
        return out;
      }

      out.functions[n] = m2.Ref(broker()).AsJSFunction();
      JSFunctionRef function = out.functions[n].value();
      if (CanConsiderForInlining(broker(), function)) {
        out.bytecode[n] = function.shared(broker()).GetBytecodeArray(broker());
      }
    }
    out.num_functions = value_input_count;
    return out;
  }
  if (m.IsCheckClosure()) {
    DCHECK(!out.functions[0].has_value());
    FeedbackCellRef feedback_cell = MakeRef(broker(), FeedbackCellOf(m.op()));
    if (CanConsiderForInlining(broker(), feedback_cell)) {
      out.shared_info = feedback_cell.shared_function_info(broker()).value();
      out.bytecode[0] = out.shared_info->GetBytecodeArray(broker());
    }
    out.num_functions = 1;
    return out;
  }
  if (m.IsJSCreateClosure()) {
    DCHECK(!out.functions[0].has_value());
    JSCreateClosureNode n(callee);
    FeedbackCellRef feedback_cell = n.GetFeedbackCellRefChecked(broker());
    if (CanConsiderForInlining(broker(), feedback_cell)) {
      out.shared_info = feedback_cell.shared_function_info(broker()).value();
      out.bytecode[0] = out.shared_info->GetBytecodeArray(broker());
      CHECK(out.shared_info->equals(n.Parameters().shared_info()));
    }
    out.num_functions = 1;
    return out;
  }
  out.num_functions = 0;
  return out;
}
 
 
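// Reduce() inspects a call site, collects its candidate targets, filters out
// unsuitable ones (no bytecode, recursive calls, disabled polymorphic
// inlining), inlines small candidates right away and queues the rest for
// Finalize().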
Reduction JSInliningHeuristic::Reduce(Node* node) {
#if V8_ENABLE_WEBASSEMBLY
  // ... (in the wasm inlining modes only JSWasmCall sites are handled)
    if (node->opcode() == IrOpcode::kJSWasmCall) {
      return inliner_.ReduceJSWasmCall(node);
    }
  // ...
#endif  // V8_ENABLE_WEBASSEMBLY

  // ... (bail out for non-inlinee opcodes, an exhausted inlining budget, or
  //      call sites that were already seen)

  // Check if the {node} is an appropriate candidate for inlining.
  Candidate candidate = CollectFunctions(node, kMaxCallPolymorphism);
  if (candidate.num_functions == 0) {
    return NoChange();
  } else if (candidate.num_functions > 1 && !v8_flags.polymorphic_inlining) {
    TRACE("Not considering call site #"
          << node->id() << ":" << node->op()->mnemonic()
          << ", because polymorphic inlining is disabled");
    return NoChange();
  }

  bool can_inline_candidate = false, candidate_is_small = true;
  candidate.total_size = 0;
  FrameState frame_state{NodeProperties::GetFrameStateInput(node)};
  FrameStateInfo const& frame_info = frame_state.frame_state_info();
  Handle<SharedFunctionInfo> frame_shared_info;
  for (int i = 0; i < candidate.num_functions; ++i) {
    if (!candidate.bytecode[i].has_value()) {
      // ... (candidates without bytecode cannot be inlined)
    }
    SharedFunctionInfoRef shared =
        candidate.functions[i].has_value()
            ? candidate.functions[i]->shared(broker())
            : candidate.shared_info.value();
    // Optimization of the candidate may have been disabled concurrently, so
    // only a weakened invariant can be checked here.
    CHECK_IMPLIES(candidate.can_inline_function[i],
                  shared.IsInlineable(broker()) ||
                      shared.GetInlineability(broker()) ==
                          SharedFunctionInfo::kHasOptimizationDisabled);

    // Don't inline a function into itself (direct recursion).
    if (frame_info.shared_info().ToHandle(&frame_shared_info) &&
        frame_shared_info.equals(shared.object())) {
      TRACE("Not considering call site #" << node->id() << ":"
                                          << node->op()->mnemonic()
                                          << ", because of recursive inlining");
      // ...
    }

    if (candidate.can_inline_function[i]) {
      can_inline_candidate = true;
      BytecodeArrayRef bytecode = candidate.bytecode[i].value();

      // Account for bytecode that existing optimized code already inlined.
      unsigned inlined_bytecode_size = 0;
      if (OptionalJSFunctionRef function = candidate.functions[i]) {
        if (OptionalCodeRef code = function->code(broker())) {
          inlined_bytecode_size = code->GetInlinedBytecodeSize();
          candidate.total_size += inlined_bytecode_size;
        }
      }
      candidate_is_small = candidate_is_small &&
                           IsSmall(bytecode.length() + inlined_bytecode_size);
    }
  }
  if (!can_inline_candidate) return NoChange();

  // Gather feedback on how often this call site has been hit before.
  if (node->opcode() == IrOpcode::kJSCall) {
    CallParameters const& p = CallParametersOf(node->op());
    candidate.frequency = p.frequency();
  } else {
    ConstructParameters const& p = ConstructParametersOf(node->op());
    candidate.frequency = p.frequency();
  }

  // ...
  seen_.insert(node->id());

  // Inline small functions right away.
  if (candidate_is_small) {
    TRACE("Inlining small function(s) at call site #"
          << node->id() << ":" << node->op()->mnemonic());
    return InlineCandidate(candidate, true);
  }

  // Otherwise remember the candidate; Finalize() decides about it later.
  candidates_.insert(candidate);
  return NoChange();
}
 
 
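// Finalize() drains the queued candidates in priority order and inlines them
// as long as the cumulative bytecode budget allows it.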
void JSInliningHeuristic::Finalize() {
  // ... (pop candidates from the queue, most promising first)
    double size_of_candidate = /* ... */;
    // ... (if inlining it would exceed the cumulative bytecode budget,
    //      record that fact and skip the candidate)
      info_->set_could_not_inline_all_candidates();
    // ... (otherwise inline it; stop once a candidate was actually inlined)
    if (reduction.Changed()) return;
  // ...
}
 
 
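// The helpers below support TryReuseDispatch: they find (and later rename)
// the uses of the callee that occur inside the StateValues chains of a frame
// state, so that cloned calls can refer to their specialized target.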
bool CollectStateValuesOwnedUses(Node* node, Node* state_values,
                                 NodeAndIndex* uses_buffer, size_t* use_count,
                                 size_t max_uses) {
  // Only accumulate states that are not shared with other users.
  if (state_values->UseCount() > 1) return true;
  for (int i = 0; i < state_values->InputCount(); i++) {
    Node* input = state_values->InputAt(i);
    if (input->opcode() == IrOpcode::kStateValues) {
      if (!CollectStateValuesOwnedUses(node, input, uses_buffer, use_count,
                                       max_uses)) {
        return false;
      }
    } else if (input == node) {
      if (*use_count >= max_uses) return false;
      uses_buffer[*use_count] = {state_values, i};
      (*use_count)++;
    }
  }
  return true;
}
 
Node* JSInliningHeuristic::DuplicateStateValuesAndRename(Node* state_values,
                                                         Node* from, Node* to,
                                                         StateCloneMode mode) {
  // Only rename in states that are not shared with other users.
  if (state_values->UseCount() > 1) return state_values;
  Node* copy = mode == kChangeInPlace ? state_values : nullptr;
  for (int i = 0; i < state_values->InputCount(); i++) {
    Node* input = state_values->InputAt(i);
    Node* processed;
    if (input->opcode() == IrOpcode::kStateValues) {
      processed = DuplicateStateValuesAndRename(input, from, to, mode);
    } else if (input == from) {
      processed = to;
    } else {
      processed = input;
    }
    if (processed != input) {
      if (!copy) copy = graph()->CloneNode(state_values);
      copy->ReplaceInput(i, processed);
    }
  }
  return copy ? copy : state_values;
}
 
 
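// Frame-state counterparts of the two helpers above: they handle the stack
// slot and the locals of a FrameState.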
bool CollectFrameStateUniqueUses(Node* node, FrameState frame_state,
                                 NodeAndIndex* uses_buffer, size_t* use_count,
                                 size_t max_uses) {
  // Only accumulate states that are not shared with other users.
  if (frame_state->UseCount() > 1) return true;
  if (frame_state.stack() == node) {
    if (*use_count >= max_uses) return false;
    uses_buffer[*use_count] = {frame_state, FrameState::kFrameStateStackInput};
    (*use_count)++;
  }
  if (!CollectStateValuesOwnedUses(node, frame_state.locals(), uses_buffer,
                                   use_count, max_uses)) {
    return false;
  }
  return true;
}
 
FrameState JSInliningHeuristic::DuplicateFrameStateAndRename(
    FrameState frame_state, Node* from, Node* to, StateCloneMode mode) {
  // Only rename in states that are not shared with other users.
  if (frame_state->UseCount() > 1) return frame_state;
  Node* copy =
      mode == kChangeInPlace ? static_cast<Node*>(frame_state) : nullptr;
  if (frame_state.stack() == from) {
    if (!copy) copy = graph()->CloneNode(frame_state);
    copy->ReplaceInput(FrameState::kFrameStateStackInput, to);
  }
  Node* locals = frame_state.locals();
  Node* new_locals = DuplicateStateValuesAndRename(locals, from, to, mode);
  if (new_locals != locals) {
    if (!copy) copy = graph()->CloneNode(frame_state);
    copy->ReplaceInput(FrameState::kFrameStateLocalsInput, new_locals);
  }
  return copy != nullptr ? FrameState{copy} : frame_state;
}
 
 
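// TryReuseDispatch attempts to reuse the branch that already computed the
// callee Phi: if the call, the Phi, an optional Checkpoint and the EffectPhi
// are the only users of the Merge, the call is cloned into each predecessor
// with the target specialized, instead of emitting a fresh dispatch.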
bool JSInliningHeuristic::TryReuseDispatch(Node* node, Node* callee,
                                           Node** if_successes, Node** calls,
                                           Node** inputs, int input_count,
                                           int* num_calls) {
  if (callee->opcode() != IrOpcode::kPhi) return false;
  // ...

  // The call, the callee phi, an optional checkpoint and the effect phi must
  // be the only users of the merge; otherwise bail out.
  Node* merge = NodeProperties::GetControlInput(callee);
  Node* checkpoint = nullptr;
  Node* effect = NodeProperties::GetEffectInput(node);
  if (effect->opcode() == IrOpcode::kCheckpoint) {
    checkpoint = effect;
    // ...
    effect = NodeProperties::GetEffectInput(effect);
  }
  if (effect->opcode() != IrOpcode::kEffectPhi) return false;
  // ...
  Node* effect_phi = effect;

  for (Node* merge_use : merge->uses()) {
    if (merge_use != effect_phi && merge_use != callee && merge_use != node &&
        merge_use != checkpoint) {
      return false;
    }
  }

  for (Node* effect_phi_use : effect_phi->uses()) {
    if (effect_phi_use != node && effect_phi_use != checkpoint) return false;
  }

  // Collect the frame-state uses of {callee} that may be renamed; bail out if
  // there are too many of them or any use that cannot be renamed.
  FrameState frame_state{NodeProperties::GetFrameStateInput(node)};
  const size_t kMaxUses = 8;
  NodeAndIndex replaceable_uses[kMaxUses];
  size_t replaceable_uses_count = 0;

  Node* checkpoint_state = nullptr;
  if (checkpoint) {
    checkpoint_state = checkpoint->InputAt(0);
    if (!CollectFrameStateUniqueUses(callee, FrameState{checkpoint_state},
                                     replaceable_uses, &replaceable_uses_count,
                                     kMaxUses)) {
      return false;
    }
  }
  if (!CollectFrameStateUniqueUses(callee, frame_state, replaceable_uses,
                                   &replaceable_uses_count, kMaxUses)) {
    return false;
  }

  // Bail out if there is a use of {callee} that is not replaceable.
  for (Edge edge : callee->use_edges()) {
    if (edge.from() == node && edge.index() == 0) continue;
    bool found = false;
    for (size_t i = 0; i < replaceable_uses_count; i++) {
      if (replaceable_uses[i].node == edge.from() &&
          replaceable_uses[i].index == edge.index()) {
        found = true;
        break;
      }
    }
    if (!found) return false;
  }

  *num_calls = callee->op()->ValueInputCount();

  // Clone the call for each branch, specializing the target and renaming the
  // frame states to refer to it.
  for (int i = 0; i < *num_calls; ++i) {
    Node* target = callee->InputAt(i);
    Node* effect_phi_effect = effect_phi->InputAt(i);
    Node* control = merge->InputAt(i);

    if (checkpoint) {
      // Duplicate the checkpoint, renamed to the specialized target.
      FrameState new_checkpoint_state = DuplicateFrameStateAndRename(
          FrameState{checkpoint_state}, callee, target,
          (i == *num_calls - 1) ? kChangeInPlace : kCloneState);
      effect_phi_effect = graph()->NewNode(
          checkpoint->op(), new_checkpoint_state, effect_phi_effect, control);
    }

    // Duplicate the call's lazy-deopt frame state, renamed likewise.
    FrameState new_lazy_frame_state = DuplicateFrameStateAndRename(
        frame_state, callee, target,
        (i == *num_calls - 1) ? kChangeInPlace : kCloneState);
    inputs[0] = target;
    inputs[input_count - 3] = new_lazy_frame_state;
    inputs[input_count - 2] = effect_phi_effect;
    inputs[input_count - 1] = control;
    calls[i] = if_successes[i] =
        graph()->NewNode(node->op(), input_count, inputs);
  }

  // Mark the control inputs of the original call dead so its phis can take
  // over its uses.
  node->ReplaceInput(input_count - 1, jsgraph()->Dead());
  // ...
  return true;
}
 
 
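// CreateOrReuseDispatch first tries TryReuseDispatch; if that fails, it builds
// an explicit chain of target comparisons and branches, cloning the call once
// per known target.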
void JSInliningHeuristic::CreateOrReuseDispatch(
    Node* node, Node* callee, Candidate const& candidate, Node** if_successes,
    Node** calls, Node** inputs, int input_count, int* num_calls) {
  // ...
  if (TryReuseDispatch(node, callee, if_successes, calls, inputs, input_count,
                       num_calls)) {
    return;
  }

  // ...
  Node* fallthrough_control = NodeProperties::GetControlInput(node);
  *num_calls = candidate.num_functions;

  // Create the appropriate control flow to dispatch to the cloned calls.
  for (int i = 0; i < *num_calls; ++i) {
    Node* target =
        jsgraph()->ConstantNoHole(candidate.functions[i].value(), broker());
    if (i != (*num_calls - 1)) {
      Node* check =
          graph()->NewNode(simplified()->ReferenceEqual(), callee, target);
      Node* branch =
          graph()->NewNode(common()->Branch(), check, fallthrough_control);
      fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
      if_successes[i] = graph()->NewNode(common()->IfTrue(), branch);
    } else {
      // The last target is reached without a check.
      if_successes[i] = fallthrough_control;
    }

    // Clone the call for this branch, specializing its target (and, for
    // JSConstruct, its new.target when it aliases the target input).
    if (node->opcode() == IrOpcode::kJSConstruct) {
      JSConstructNode n(node);
      if (inputs[n.TargetIndex()] == inputs[n.NewTargetIndex()]) {
        inputs[n.NewTargetIndex()] = target;
      }
    }
    inputs[JSCallOrConstructNode::TargetIndex()] = target;
    inputs[input_count - 1] = if_successes[i];
    calls[i] = if_successes[i] =
        graph()->NewNode(node->op(), input_count, inputs);
  }
}
 
 
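// InlineCandidate() either inlines a monomorphic call directly or, for
// polymorphic candidates, splits the call into per-target calls joined by
// Merge/EffectPhi/Phi nodes (plus an exception join when needed) and then
// inlines each cloned call while the bytecode budget lasts.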
Reduction JSInliningHeuristic::InlineCandidate(Candidate const& candidate,
                                               bool small_function) {
  int num_calls = candidate.num_functions;
  Node* const node = candidate.node;
#if V8_ENABLE_WEBASSEMBLY
  DCHECK_NE(node->opcode(), IrOpcode::kJSWasmCall);
#endif  // V8_ENABLE_WEBASSEMBLY

  // Monomorphic case: inline the single call directly.
  if (num_calls == 1) {
    Reduction const reduction = inliner_.ReduceJSCall(node);
    if (reduction.Changed()) {
      total_inlined_bytecode_size_ += candidate.bytecode[0]->length();
    }
    return reduction;
  }

  // Polymorphic case: expand the call into a dispatch on the target first.
  Node* calls[kMaxCallPolymorphism + 1];
  Node* if_successes[kMaxCallPolymorphism];
  Node* callee = NodeProperties::GetValueInput(node, 0);

  // Set up the inputs for the cloned call nodes.
  int const input_count = node->InputCount();
  Node** inputs = graph()->zone()->AllocateArray<Node*>(input_count);
  for (int i = 0; i < input_count; ++i) {
    inputs[i] = node->InputAt(i);
  }

  // Create the appropriate control flow to dispatch to the cloned calls.
  CreateOrReuseDispatch(node, callee, candidate, if_successes, calls, inputs,
                        input_count, &num_calls);

  // Check if we have an exception projection for the call {node}.
  Node* if_exception = nullptr;
  if (NodeProperties::IsExceptionalCall(node, &if_exception)) {
    Node* if_exceptions[kMaxCallPolymorphism + 1];
    for (int i = 0; i < num_calls; ++i) {
      // ... (split each cloned call into IfSuccess / IfException projections)
    }

    // Morph the {if_exception} projection into a join.
    Node* exception_control =
        graph()->NewNode(common()->Merge(num_calls), num_calls, if_exceptions);
    if_exceptions[num_calls] = exception_control;
    Node* exception_effect = graph()->NewNode(common()->EffectPhi(num_calls),
                                              num_calls + 1, if_exceptions);
    // ... (join the exception values with a Phi and ReplaceWithValue)
  }

  // Morph the original call site into a join of the dispatched call sites.
  Node* control =
      graph()->NewNode(common()->Merge(num_calls), num_calls, if_successes);
  calls[num_calls] = control;
  Node* effect =
      graph()->NewNode(common()->EffectPhi(num_calls), num_calls + 1, calls);
  // ... (likewise join the return values with a Phi and ReplaceWithValue)

  // Inline the cloned call sites, as long as the candidate may be inlined and
  // the bytecode budget is not exhausted.
  for (int i = 0; i < num_calls; ++i) {
    // ...
      Node* call = calls[i];
      Reduction const reduction = inliner_.ReduceJSCall(call);
      if (reduction.Changed()) {
        total_inlined_bytecode_size_ += candidate.bytecode[i]->length();
        // ...
      }
    // ...
  }
  // ...
}
 
 
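// Ordering of queued candidates: known call frequencies beat unknown ones,
// higher frequency-per-size scores win, and node ids break ties to keep the
// ordering strict.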
bool JSInliningHeuristic::CandidateCompare::operator()(
    const Candidate& left, const Candidate& right) const {
  constexpr bool kInlineLeftFirst = true, kInlineRightFirst = false;
  if (right.frequency.IsUnknown()) {
    if (left.frequency.IsUnknown()) {
      // Both frequencies are unknown, so fall back to the candidate sizes and
      // node ids as tie breakers.
      if (left.total_size < right.total_size) {
        return kInlineLeftFirst;
      } else if (left.total_size > right.total_size) {
        return kInlineRightFirst;
      } else {
        return left.node->id() > right.node->id();
      }
    } else {
      // Left is known, right is unknown: prefer left.
      return kInlineLeftFirst;
    }
  } else if (left.frequency.IsUnknown()) {
    // Left is unknown, right is known: prefer right.
    return kInlineRightFirst;
  }

  // Both frequencies are known: compare the frequency/size scores.
  float left_score = left.frequency.value() / left.total_size;
  float right_score = right.frequency.value() / right.total_size;
  if (left_score > right_score) {
    return kInlineLeftFirst;
  } else if (left_score < right_score) {
    return kInlineRightFirst;
  } else {
    return left.node->id() > right.node->id();
  }
}
 
 
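// Debug helper: dumps every queued candidate with its frequency, targets and
// bytecode sizes.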
void JSInliningHeuristic::PrintCandidates() {
  StdoutStream os;
  os << candidates_.size() << " candidate(s) for inlining:" << std::endl;
  for (const Candidate& candidate : candidates_) {
    os << "- candidate: " << candidate.node->op()->mnemonic() << " node #"
       << candidate.node->id() << " with frequency " << candidate.frequency
       << ", " << candidate.num_functions << " target(s):" << std::endl;
    for (int i = 0; i < candidate.num_functions; ++i) {
      SharedFunctionInfoRef shared =
          candidate.functions[i].has_value()
              ? candidate.functions[i]->shared(broker())
              : candidate.shared_info.value();
      os << "  - target: " << shared;
      if (candidate.bytecode[i].has_value()) {
        os << ", bytecode size: " << candidate.bytecode[i]->length();
        if (OptionalJSFunctionRef function = candidate.functions[i]) {
          if (OptionalCodeRef code = function->code(broker())) {
            unsigned inlined_bytecode_size = code->GetInlinedBytecodeSize();
            if (inlined_bytecode_size > 0) {
              os << ", existing opt code's inlined bytecode size: "
                 << inlined_bytecode_size;
            }
          }
        }
      } else {
        os << ", no bytecode";
      }
      os << std::endl;
    }
  }
}
 
 