// TNode<T> is covariant: a TNode of a subtype converts implicitly to a TNode
// of a supertype, but never the other way around.
static_assert(std::is_convertible_v<TNode<Number>, TNode<Object>>, /*...*/);
static_assert(
    std::is_convertible_v<TNode<Number>, TNode<UnionOf<Smi, HeapObject>>>,
    /*...*/);
static_assert(
    !std::is_convertible_v<TNode<UnionOf<Smi, HeapObject>>, TNode<Number>>,
    /*...*/);
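// Illustrative sketch (not part of the original file): what the asserts above
// mean for CSA code. Widening a TNode to a supertype is an implicit
// conversion; narrowing it back requires an explicit, checked cast. The
// helper name below is hypothetical.
TNode<Object> ExampleWiden(TNode<Number> number) {
  TNode<Object> object = number;  // Implicit upcast: a Number is an Object.
  // TNode<Number> n = object;    // Would not compile: no implicit downcast.
  return object;
}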
// CodeAssemblerState: the descriptor-based constructor delegates to the
// CallDescriptor-based one via Linkage::GetStubCallDescriptor; the latter
// sets up the RawMachineAssembler (and a JSGraph over its graph).
    // ...
          Linkage::GetStubCallDescriptor(
              zone, descriptor, descriptor.GetStackParameterCount(), /*...*/),
          kind, name, builtin) {}

    // ...
          isolate, zone->New<TFGraph>(zone), call_descriptor,
          /*...*/
      // ...
      code_generated_(false),
      // ...
          isolate, raw_assembler_->graph(), raw_assembler_->common(),
          /*...*/
          raw_assembler_->machine())) {}
void CodeAssemblerState::PrintCurrentBlock(std::ostream& os) {

// ...
  graph->AddDecorator(decorator);
void BuiltinCompilationScheduler::CompileCode(
    Isolate* isolate, std::unique_ptr<TurbofanCompilationJob> job) {
  // Use a much smaller input-zone batch under ASAN.
#ifdef V8_USE_ADDRESS_SANITIZER
  constexpr size_t kInputZoneBatchSize = 128UL * MB;
#else
  constexpr size_t kInputZoneBatchSize = 1536UL * MB;
#endif
  // ...
  DCHECK(job->compilation_info()->code_kind() == CodeKind::BUILTIN ||
         job->compilation_info()->code_kind() == CodeKind::BYTECODE_HANDLER);
  // ...
  // Flush the current batch once the accumulated zone size crosses the
  // threshold, then queue the new job.
  if (current_batch_zone_size_ >= kInputZoneBatchSize) {
    AwaitAndFinalizeCurrentBatch(isolate);
  }
  // ...
  QueueJob(isolate, std::move(job));
}
void BuiltinCompilationScheduler::QueueJob(
    Isolate* isolate, std::unique_ptr<TurbofanCompilationJob> job) {
  current_batch_zone_size_ +=
      job->compilation_info()->zone()->allocation_size();
  if (v8_flags.concurrent_builtin_generation) {
    auto* dispatcher = isolate->optimizing_compile_dispatcher();
    // ...
    // Spin until the concurrent dispatcher accepts the job.
    while (!dispatcher->TryQueueForOptimization(job)) {
      std::this_thread::yield();
    }
  } else {
    // Non-concurrent fallback: execute the job right here and queue it for
    // finalization on the main thread.
    CHECK_EQ(CompilationJob::SUCCEEDED,
             job->ExecuteJob(isolate->counters()->runtime_call_stats(),
                             isolate->main_thread_local_isolate()));
    main_thread_output_queue_.push_back(std::move(job));
  }
}

// ...
  FinalizeJobOnMainThread(isolate, job.get());

// ...
  builtins_installed_count_++;
void BuiltinCompilationScheduler::AwaitAndFinalizeCurrentBatch(
    Isolate* isolate) {
  if (v8_flags.concurrent_builtin_generation) {
    auto* dispatcher = isolate->optimizing_compile_dispatcher();
    // Wait for all outstanding jobs, then install the builtins they produced.
    dispatcher->WaitUntilCompilationJobsDone();
    builtins_installed_count_ =
        dispatcher->InstallGeneratedBuiltins(builtins_installed_count_);
  }
  // ...
  // Jobs that ran on the main thread are finalized here.
  while (!main_thread_output_queue_.empty()) {
    FinalizeJobOnMainThread(isolate, main_thread_output_queue_.front().get());
    main_thread_output_queue_.pop_front();
  }
  // ...
  current_batch_zone_size_ = 0;
}
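// Illustrative sketch (not part of the original file): how a caller might
// drive BuiltinCompilationScheduler, using only the methods shown above.
// The job container, and that the scheduler is default-constructible here,
// are assumptions.
void CompileAllBuiltinJobs(
    Isolate* isolate,
    std::vector<std::unique_ptr<TurbofanCompilationJob>>* jobs) {
  BuiltinCompilationScheduler scheduler;
  for (std::unique_ptr<TurbofanCompilationJob>& job : *jobs) {
    // CompileCode() flushes the current batch once its accumulated zone size
    // crosses kInputZoneBatchSize, then queues this job.
    scheduler.CompileCode(isolate, std::move(job));
  }
  // Finalize whatever remains in the last, partially filled batch.
  scheduler.AwaitAndFinalizeCurrentBatch(isolate);
}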
  if (isolate()->IsGeneratingEmbeddedBuiltins() &&
      !isolate()->roots_table().IsRootHandle(object, &dummy_root) &&
      !isolate()->builtins()->IsBuiltinHandle(object, &dummy_builtin) &&
      !IsInstructionStream(*object)) {
bool CodeAssembler::TryToInt32Constant(TNode<IntegralT> node,
                                       int32_t* out_value) {
  // ...
  // Accept a resolved constant only if it fits in 32 bits.
  if (m.HasResolvedValue() &&
      m.IsInRange(std::numeric_limits<int32_t>::min(),
                  std::numeric_limits<int32_t>::max())) {
    *out_value = static_cast<int32_t>(m.ResolvedValue());
    // ...
  }
  // ...
  if (m.HasResolvedValue()) {
    *out_value = m.ResolvedValue();
    // ...
  }
  // ...
}

bool CodeAssembler::TryToInt64Constant(TNode<IntegralT> node,
                                       int64_t* out_value) {
  // ...
  if (m.HasResolvedValue()) *out_value = m.ResolvedValue();
  return m.HasResolvedValue();
}
  // Look through tagging bitcasts before matching the underlying word.
  if (node->opcode() == IrOpcode::kBitcastWordToTaggedSigned) {
    node = node->InputAt(0);
  }
  // ...
  if (m.HasResolvedValue()) {
    intptr_t value = m.ResolvedValue();
    // ...
  }
  // ...
  if (node->opcode() == IrOpcode::kBitcastWordToTaggedSigned ||
      node->opcode() == IrOpcode::kBitcastWordToTagged) {
    node = node->InputAt(0);
  }
  // ...

bool CodeAssembler::TryToIntPtrConstant(TNode<IntegralT> node,
                                        intptr_t* out_value) {
  // ...
  if (m.HasResolvedValue()) *out_value = m.ResolvedValue();
  return m.HasResolvedValue();
}
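// Illustrative sketch (not part of the original file): the TryTo*Constant
// helpers above are constant-folding probes. They return true and fill
// *out_value only when the node resolves to a compile-time constant. The
// helper below is hypothetical.
void ExampleFoldOrEmit(CodeAssembler* ca, TNode<IntPtrT> node) {
  intptr_t folded;
  if (ca->TryToIntPtrConstant(node, &folded)) {
    // The value is known now; specialize without emitting a runtime check.
  } else {
    // Not a constant; emit the generic code path instead.
  }
}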
  DCHECK(call_descriptor->IsJSFunctionCall());
  // ...
      static_cast<int>(call_descriptor->JSParameterCount())));

// The Return() and PopAndReturn() overloads each DCHECK the machine
// representation of the returned value(s) against the call descriptor, e.g.:
  // ...
      raw_assembler()->call_descriptor()->GetReturnType(0).representation());
  // ...
      raw_assembler()->call_descriptor()->GetReturnType(1).representation());
  Label if_return(this), if_continue(this);

// ...
  if (!v8_flags.code_comments) return;

// ...
#if V8_ENABLE_WEBASSEMBLY
// ...
#define DEFINE_CODE_ASSEMBLER_BINARY_OP(name, ResType, Arg1Type, Arg2Type)    \
  TNode<ResType> CodeAssembler::name(TNode<Arg1Type> a, TNode<Arg2Type> b) {  \
    return UncheckedCast<ResType>(raw_assembler()->name(a, b));               \
  }
CODE_ASSEMBLER_BINARY_OP_LIST(DEFINE_CODE_ASSEMBLER_BINARY_OP)
#undef DEFINE_CODE_ASSEMBLER_BINARY_OP
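// Illustrative expansion (not part of the original file): assuming the binary
// op list contains an entry such as V(Word32Shr, Uint32T, Uint32T, Uint32T),
// the macro above generates a thin forwarding wrapper along these lines:
//
//   TNode<Uint32T> CodeAssembler::Word32Shr(TNode<Uint32T> a,
//                                           TNode<Uint32T> b) {
//     return UncheckedCast<Uint32T>(raw_assembler()->Word32Shr(a, b));
//   }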
// Int32PairAdd/Int32PairSub pass the four 32-bit halves straight through to
// the raw assembler:
  // ...
      lhs_lo_word, lhs_hi_word, rhs_lo_word, rhs_hi_word));
  // ...
      lhs_lo_word, lhs_hi_word, rhs_lo_word, rhs_hi_word));
#define CODE_ASSEMBLER_COMPARE(Name, ArgT, VarT, ToConstant, op)          \
  TNode<BoolT> CodeAssembler::Name(TNode<ArgT> left, TNode<ArgT> right) { \
    VarT lhs, rhs;                                                        \
    if (ToConstant(left, &lhs) && ToConstant(right, &rhs)) {              \
      return BoolConstant(lhs op rhs);                                    \
    }                                                                     \
    return UncheckedCast<BoolT>(raw_assembler()->Name(left, right));      \
  }

// Instantiated for the word-, 32-bit, and 64-bit (not-)equal comparisons.
#undef CODE_ASSEMBLER_COMPARE
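// Illustrative expansion (not part of the original file): assuming an
// instantiation such as
// CODE_ASSEMBLER_COMPARE(Word32Equal, Word32T, int32_t, TryToInt32Constant, ==),
// the macro above folds comparisons of two compile-time constants and
// otherwise defers to the raw assembler:
//
//   TNode<BoolT> CodeAssembler::Word32Equal(TNode<Word32T> left,
//                                           TNode<Word32T> right) {
//     int32_t lhs, rhs;
//     if (TryToInt32Constant(left, &lhs) && TryToInt32Constant(right, &rhs)) {
//       return BoolConstant(lhs == rhs);
//     }
//     return UncheckedCast<BoolT>(raw_assembler()->Word32Equal(left, right));
//   }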
#define DEFINE_CODE_ASSEMBLER_UNARY_OP(name, ResType, ArgType) \
  TNode<ResType> CodeAssembler::name(TNode<ArgType> a) {       \
    return UncheckedCast<ResType>(raw_assembler()->name(a));   \
  }
CODE_ASSEMBLER_UNARY_OP_LIST(DEFINE_CODE_ASSEMBLER_UNARY_OP)
#undef DEFINE_CODE_ASSEMBLER_UNARY_OP
Node* CodeAssembler::PackMapWord(Node* value) {
  // ...
  return BitcastWordToTaggedSigned(packed);
}

// ...
  map = PackMapWord(map);

// ...
  switch (write_barrier) {
// ...
      object, offset, tag, value,
#define ATOMIC_FUNCTION(name)                                                  \
  TNode<Word32T> CodeAssembler::Atomic##name(                                  \
      MachineType type, TNode<RawPtrT> base, TNode<UintPtrT> offset,           \
      TNode<Word32T> value) {                                                  \
    return UncheckedCast<Word32T>(                                             \
        raw_assembler()->Atomic##name(type, base, offset, value));             \
  }                                                                            \
  template <class Type>                                                        \
  TNode<Type> CodeAssembler::Atomic##name##64(                                 \
      TNode<RawPtrT> base, TNode<UintPtrT> offset, TNode<UintPtrT> value,      \
      TNode<UintPtrT> value_high) {                                            \
    return UncheckedCast<Type>(                                                \
        raw_assembler()->Atomic##name##64(base, offset, value, value_high));   \
  }                                                                            \
  template TNode<AtomicInt64> CodeAssembler::Atomic##name##64 < AtomicInt64 >  \
  (TNode<RawPtrT> base, TNode<UintPtrT> offset, TNode<UintPtrT> value,         \
   TNode<UintPtrT> value_high);                                                \
  template TNode<AtomicUint64> CodeAssembler::Atomic##name##64 <               \
      AtomicUint64 > (TNode<RawPtrT> base, TNode<UintPtrT> offset,             \
                      TNode<UintPtrT> value, TNode<UintPtrT> value_high);

// ATOMIC_FUNCTION(...) instantiations elided.
#undef ATOMIC_FUNCTION
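// Illustrative expansion (not part of the original file): assuming
// ATOMIC_FUNCTION(Exchange) is one of the elided instantiations, the 32-bit
// wrapper produced by the macro above looks like this (a matching 64-bit
// template plus its AtomicInt64/AtomicUint64 instantiations is emitted too):
//
//   TNode<Word32T> CodeAssembler::AtomicExchange(MachineType type,
//                                                TNode<RawPtrT> base,
//                                                TNode<UintPtrT> offset,
//                                                TNode<Word32T> value) {
//     return UncheckedCast<Word32T>(
//         raw_assembler()->AtomicExchange(type, base, offset, value));
//   }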
template <class Type>
TNode<Type> CodeAssembler::AtomicCompareExchange64(
    TNode<RawPtrT> base, TNode<WordT> offset, TNode<UintPtrT> old_value,
    TNode<UintPtrT> new_value, TNode<UintPtrT> old_value_high,
    TNode<UintPtrT> new_value_high) {
  return UncheckedCast<Type>(raw_assembler()->AtomicCompareExchange64(
      base, offset, old_value, old_value_high, new_value, new_value_high));
}

// ...
  DCHECK_LT(index, value->op()->ValueOutputCount());

// ...
  success.MergeVariables();
  exception.MergeVariables();
// ...
  label->AddInputs({CAST(exception_value)});
// Small fixed-capacity array used to collect the inputs of a call.
template <size_t kMaxSize>
class NodeArray {
 public:
  void Add(Node* node) {
    // ...
  }
  Node* const* data() const { return arr_; }
  int size() const { return static_cast<int>(ptr_ - arr_); }
  // ...
};
bool IsValidArgumentCountFor(const CallInterfaceDescriptor& descriptor,
                             size_t argument_count) {
  // ...
  if (descriptor.AllowVarArgs()) {
// From CallRuntimeImpl:
#if V8_ENABLE_WEBASSEMBLY
  bool switch_to_the_central_stack = /*...*/;
#else
  bool switch_to_the_central_stack = false;
#endif
  // ...
  constexpr size_t kMaxNumArgs = 7;
  // ...
  int argc = static_cast<int>(args.size());
  // ...
  // Collect the call inputs: the CEntry code object, the runtime arguments,
  // and the context.
  NodeArray<kMaxNumArgs + 4> inputs;
  inputs.Add(centry_code);
  for (const auto& arg : args) inputs.Add(arg);
  // ...
  inputs.Add(context);
  // ...
  Node* return_value = /*...*/;
  // ...
  return return_value;
#if V8_ENABLE_WEBASSEMBLY
// Helpers that switch to/from the central stack around runtime and C calls:
  // ...
      ExternalReference::wasm_switch_to_the_central_stack_for_js());
  // ...
  SetStackPointer(central_stack_sp);
  // ...
      ExternalReference::wasm_switch_from_the_central_stack_for_js());
  CodeAssemblerLabel skip(this);
  // ...
  SetStackPointer(old_sp);
  // ...
  Label no_switch(this);
  // ...
  GotoIf(is_on_central_stack_flag, &no_switch);
  // ...
  return old_sp.value();
// From TailCallRuntimeImpl:
#if V8_ENABLE_WEBASSEMBLY
  bool switch_to_the_central_stack = /*...*/;
#else
  bool switch_to_the_central_stack = false;
#endif
  // ...
  constexpr size_t kMaxNumArgs = 6;
  // ...
  int argc = static_cast<int>(args.size());
  // ...
  NodeArray<kMaxNumArgs + 4> inputs;
  inputs.Add(centry_code);
  for (const auto& arg : args) inputs.Add(arg);
  // ...
  inputs.Add(context);
Node* CodeAssembler::CallStubN(StubCallMode call_mode,
                               const CallInterfaceDescriptor& descriptor,
                               int input_count, Node* const* inputs) {
  // ...
  int argc = input_count - implicit_nodes;
  DCHECK(IsValidArgumentCountFor(descriptor, argc));
  // ...
  Node* return_value = /*...*/;
  // ...
  return return_value;
}
void CodeAssembler::TailCallStubImpl(/*...*/,
                                     std::initializer_list<Node*> args) {
  constexpr size_t kMaxNumArgs = 11;
  // ...
  DCHECK(IsValidArgumentCountFor(descriptor, args.size()));
  // ...
  NodeArray<kMaxNumArgs + 2> inputs;
  // ...
  for (auto arg : args) inputs.Add(arg);
  // ...
  inputs.Add(context);

Node* CodeAssembler::CallStubRImpl(/*...*/,
                                   std::initializer_list<Node*> args) {
  // ...
  DCHECK(IsValidArgumentCountFor(descriptor, args.size()));
  // ...
  constexpr size_t kMaxNumArgs = 10;
  // ...
  NodeArray<kMaxNumArgs + 2> inputs;
  // ...
  for (auto arg : args) inputs.Add(arg);
  // ...
  inputs.Add(context);
  // ...
  return CallStubN(call_mode, descriptor, inputs.size(), inputs.data());
}
Node* CodeAssembler::CallJSStubImpl(/*...*/,
                                    std::initializer_list<Node*> args) {
  constexpr size_t kMaxNumArgs = 10;
  // ...
  NodeArray<kMaxNumArgs + 6> inputs;
  // ...
  inputs.Add(function);
  // ...
#ifdef V8_JS_LINKAGE_INCLUDES_DISPATCH_HANDLE
  if (dispatch_handle) {
    inputs.Add(*dispatch_handle);
  }
#endif
  // ...
  for (auto arg : args) inputs.Add(arg);
  // ...
  DCHECK(IsValidArgumentCountFor(descriptor, inputs.size()));
  // ...
  inputs.Add(context);
    // ...
    std::initializer_list<Node*> args) {
  constexpr size_t kMaxNumArgs = 6;
  // ...
  DCHECK(IsValidArgumentCountFor(descriptor, args.size()));
  // ...
  int argc = static_cast<int>(args.size());
  // ...
  NodeArray<kMaxNumArgs + 2> inputs;
  // ...
  for (auto arg : args) inputs.Add(arg);
  inputs.Add(context);

template <class... TArgs>
// ...

#ifdef V8_JS_LINKAGE_INCLUDES_DISPATCH_HANDLE
  // ...
                     arg_count, dispatch_handle, context};
Node* CodeAssembler::CallCFunctionN(Signature<MachineType>* signature,
                                    int input_count, Node* const* inputs) {
  // ...

Node* CodeAssembler::CallCFunction(
    Node* function, std::optional<MachineType> return_type,
    std::initializer_list<CodeAssembler::CFunctionArg> args) {
  // ...

// CallCFunctionWithoutFunctionDescriptor forwards to the raw assembler:
    // ...
    std::initializer_list<CodeAssembler::CFunctionArg> args) {
  // ...
      function, return_type, args);

// CallCFunctionWithCallerSavedRegisters additionally passes the save mode:
    // ...
    std::initializer_list<CodeAssembler::CFunctionArg> args) {
  DCHECK(return_type.LessThanOrEqualPointerSize());
  // ...
      function, return_type, mode, args);
// GotoIf()/GotoIfNot()/Branch(): when the condition folds to a constant, only
// the taken path is emitted; otherwise a real branch is generated.
  Label false_label(this);
  // ...
  Label true_label(this);
  // ...
    return Goto(constant ? true_label : false_label);
  // ...
                             false_label->label_, branch_hint);

void CodeAssembler::Branch(/*...*/ condition,
                           const std::function<void()>& true_body,
                           const std::function<void()>& false_body) {
  // ...
    return constant ? true_body() : false_body();
  // ...
  Label vtrue(this), vfalse(this);
  // ...
}

void CodeAssembler::Branch(/*...*/ condition, Label* true_label,
                           const std::function<void()>& false_body) {
  // ...
    return constant ? Goto(true_label) : false_body();
  // ...
}

void CodeAssembler::Branch(/*...*/ condition,
                           const std::function<void()>& true_body,
                           Label* false_label) {
  // ...
    return constant ? true_body() : Goto(false_label);
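// Illustrative sketch (not part of the original file): using the lambda-based
// Branch() overload above to build a select-like helper. The helper name is
// hypothetical, and it assumes the usual Label(assembler, &variable)
// constructor for merging the variable at the join point.
TNode<Int32T> ExampleSelect(CodeAssembler* ca, TNode<BoolT> condition,
                            TNode<Int32T> if_true, TNode<Int32T> if_false) {
  TypedCodeAssemblerVariable<Int32T> result(ca);
  CodeAssemblerLabel done(ca, &result);
  ca->Branch(
      condition, [&] { result = if_true; ca->Goto(&done); },
      [&] { result = if_false; ca->Goto(&done); });
  ca->Bind(&done);
  return result.value();
}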
void CodeAssembler::Switch(Node* index, Label* default_label,
                           const int32_t* case_values, Label** case_labels,
                           size_t case_count) {
  // Translate CodeAssembler labels to raw-assembler labels, then delegate.
  // ...
  for (size_t i = 0; i < case_count; ++i) {
    labels[i] = case_labels[i]->label_;
  }
  // ...
                              labels, case_count);
}
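// Illustrative sketch (not part of the original file): Switch() takes parallel
// arrays of case values and case labels plus a default label. Names below are
// hypothetical.
void ExampleSwitch(CodeAssembler* ca, TNode<Int32T> index) {
  CodeAssemblerLabel case0(ca), case1(ca), fallback(ca), done(ca);
  const int32_t case_values[] = {0, 1};
  CodeAssemblerLabel* case_labels[] = {&case0, &case1};
  ca->Switch(index, &fallback, case_values, case_labels, 2);

  ca->Bind(&case0);
  ca->Goto(&done);
  ca->Bind(&case1);
  ca->Goto(&done);
  ca->Bind(&fallback);
  ca->Goto(&done);
  ca->Bind(&done);
}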
  debug_info_ = debug_info;
// ...
  return var_id_ < other.var_id_;

// ...
          assembler->state()->NextVariableId())),
// ...
                                             Node* initial_value)
    // ...
  Bind(initial_value);

// ...
    : impl_(assembler->zone()->New<Impl>(rep,
                                         assembler->state()->NextVariableId())),
      state_(assembler->state()) {
  impl_->set_debug_info(debug_info);
}

// ...
                                             Node* initial_value)
    : CodeAssemblerVariable(assembler, debug_info, rep) {
  impl_->set_debug_info(debug_info);
  Bind(initial_value);
}
  std::stringstream str;
  str << "#Use of unbound variable:"
      << "#\n Variable: " << *this << "#\n Current Block: ";
  state_->PrintCurrentBlock(str);
  FATAL("%s", str.str().c_str());

// ...
  std::stringstream str;
  str << "#Accessing variable value outside a block:"
      << "#\n Variable: " << *this;
  FATAL("%s", str.str().c_str());

// ...
  if (info.name) os << "V" << info;
// ...
  os << *variable.impl_;
    : state_(assembler->state()),
      // ...
  label_ = assembler->zone()->New<RawMachineLabel>(
      // ...
  for (size_t i = 0; i < vars_count; ++i) {
    // ...
    if (node != nullptr) {
      // ...
      i->second.push_back(node);

// ...
      if (find_if(i->second.begin(), i->second.end(),
                  [node](Node* e) -> bool { return node != e; }) !=
          // ...
        std::stringstream str;
        str << "Unmerged variable found when jumping to block. \n"
            << "# Variable: " << *var;
        // ...
        str << "\n# Current Block: ";
        state_->PrintCurrentBlock(str);
        FATAL("%s", str.str().c_str());
  std::stringstream str;
  str << "Cannot bind the same label twice:"
      << "\n# current: " << debug_info
      // ...
  FATAL("%s", str.str().c_str());

// ...
  if (v8_flags.enable_source_at_csa_bind) {
    // ...
        {debug_info.file, debug_info.line});
  Node* shared_value = nullptr;
  // ...
  for (auto value : i->second) {
    // ...
    if (value != shared_value) {
      if (shared_value == nullptr) {
        shared_value = value;
        // ...

  std::stringstream str;
  str << "A variable that has been marked as being merged at the label"
      << "\n# doesn't have a bound value along all of the paths that "
      << "\n# have been merged into the label up to this point."
      // ...
      << "\n# This can happen in the following cases:"
      << "\n# - By explicitly marking it so in the label constructor"
      << "\n# - By having seen different bound values at branches"
      // ...
      << " vs. found=" << (not_found ? 0 : i->second.size())
      << "\n# Variable: " << *var_impl
      // ...
  FATAL("%s", str.str().c_str());

// ...
      var.first->rep_, static_cast<int>(merge_count_), &(i->second[0]));
// ...
    var->value_ = i->second;
    // ...
    var->value_ = j->second.back();
    // ...
    var->value_ = nullptr;
  for (size_t i = 0; i < inputs.size(); ++i) {
    // ...
  for (size_t i = 0; i < inputs.size(); ++i) {
    // ...
  for (Node* input : inputs) {
    // ...
    if (input == nullptr) return nullptr;

// ...
    std::vector<MachineRepresentation> representations) {
// The two ScopedExceptionHandler constructors:
    : has_handler_(label != nullptr),
      // ...
      compatibility_label_(nullptr),
      exception_(nullptr) {

// ...
    : has_handler_(label != nullptr),
      // ...
      compatibility_label_(label),
      exception_(exception) {
  // ...
  label_ = std::make_unique<CodeAssemblerExceptionHandlerLabel>(