V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
csa-generator.cc
1// Copyright 2018 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/torque/csa-generator.h"
6
7#include <optional>
8
9#include "src/common/globals.h"
10#include "src/torque/global-context.h"
11#include "src/torque/type-oracle.h"
12#include "src/torque/types.h"
13#include "src/torque/utils.h"
14
15namespace v8::internal::torque {
16
17std::optional<Stack<std::string>> CSAGenerator::EmitGraph(
18 Stack<std::string> parameters) {
19 for (BottomOffset i = {0}; i < parameters.AboveTop(); ++i) {
20 SetDefinitionVariable(DefinitionLocation::Parameter(i.offset),
21 parameters.Peek(i));
22 }
23
24 for (Block* block : cfg_.blocks()) {
25 if (block->IsDead()) continue;
26
27 out() << " compiler::CodeAssemblerParameterizedLabel<";
28 bool first = true;
29 DCHECK_EQ(block->InputTypes().Size(), block->InputDefinitions().Size());
30 for (BottomOffset i = {0}; i < block->InputTypes().AboveTop(); ++i) {
31 if (block->InputDefinitions().Peek(i).IsPhiFromBlock(block)) {
32 if (!first) out() << ", ";
33 out() << block->InputTypes().Peek(i)->GetGeneratedTNodeTypeName();
34 first = false;
35 }
36 }
37 out() << "> " << BlockName(block) << "(&ca_, compiler::CodeAssemblerLabel::"
38 << (block->IsDeferred() ? "kDeferred" : "kNonDeferred") << ");\n";
39 }
40
41 EmitInstruction(GotoInstruction{cfg_.start()}, &parameters);
42 for (Block* block : cfg_.blocks()) {
43 if (cfg_.end() && *cfg_.end() == block) continue;
44 if (block->IsDead()) continue;
45 out() << "\n";
46
47 // Redirect the output of non-declarations into a buffer and only output
48 // declarations right away.
49 std::stringstream out_buffer;
50 std::ostream* old_out = out_;
51 out_ = &out_buffer;
52
53 out() << " if (" << BlockName(block) << ".is_used()) {\n";
54 EmitBlock(block);
55 out() << " }\n";
56
57 // All declarations have been printed now, so we can append the buffered
58 // output and redirect back to the original output stream.
59 out_ = old_out;
60 out() << out_buffer.str();
61 }
62 if (cfg_.end()) {
63 out() << "\n";
64 return EmitBlock(*cfg_.end());
65 }
66 return std::nullopt;
67}
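// Illustrative sketch (block and variable names such as "block3" are
// hypothetical) of the skeleton EmitGraph() writes: one parameterized label
// per live block, typed by that block's phi inputs, and each block body
// wrapped in an is_used() check:
//
//   compiler::CodeAssemblerParameterizedLabel<Smi, Object> block3(
//       &ca_, compiler::CodeAssemblerLabel::kNonDeferred);
//   ...
//   if (block3.is_used()) {
//     ... EmitBlock() output for block3 ...
//   }
//
// Declarations are printed immediately, while block bodies are buffered and
// appended afterwards, so every TNode variable is declared before first use.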
68
69Stack<std::string> CSAGenerator::EmitBlock(const Block* block) {
70 Stack<std::string> stack;
71 std::stringstream phi_names;
72
73 for (BottomOffset i = {0}; i < block->InputTypes().AboveTop(); ++i) {
74 const auto& def = block->InputDefinitions().Peek(i);
75 stack.Push(DefinitionToVariable(def));
76 if (def.IsPhiFromBlock(block)) {
77 decls() << " TNode<"
78 << block->InputTypes().Peek(i)->GetGeneratedTNodeTypeName()
79 << "> " << stack.Top() << ";\n";
80 phi_names << ", &" << stack.Top();
81 }
82 }
83 out() << " ca_.Bind(&" << BlockName(block) << phi_names.str() << ");\n";
84
85 for (const Instruction& instruction : block->instructions()) {
86 TorqueCodeGenerator::EmitInstruction(instruction, &stack);
87 }
88 return stack;
89}
90
91void CSAGenerator::EmitSourcePosition(SourcePosition pos, bool always_emit) {
92 const std::string& file = SourceFileMap::AbsolutePath(pos.source);
93 if (always_emit || !previous_position_.CompareStartIgnoreColumn(pos)) {
94 // Lines in Torque SourcePositions are zero-based, while the
95 // CodeStubAssembler and downwind systems are one-based.
96 out() << " ca_.SetSourcePosition(\"" << file << "\", "
97 << (pos.start.line + 1) << ");\n";
98 previous_position_ = pos;
99 }
100}
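// Example of the emitted position update for a hypothetical Torque statement
// at zero-based line 11 of /path/to/src/builtins/foo.tq:
//
//   ca_.SetSourcePosition("/path/to/src/builtins/foo.tq", 12);
//
// The "+ 1" converts Torque's zero-based line numbers to the one-based
// numbering the CodeStubAssembler side expects.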
101
102void CSAGenerator::EmitInstruction(
103 const PushUninitializedInstruction& instruction,
104 Stack<std::string>* stack) {
105 // TODO(turbofan): This can trigger an error in CSA if it is used. Instead, we
106 // should prevent usage of uninitialized in the type system. This
107 // requires "if constexpr" being evaluated at Torque time.
108 const std::string str = "ca_.Uninitialized<" +
109 instruction.type->GetGeneratedTNodeTypeName() + ">()";
110 stack->Push(str);
111 SetDefinitionVariable(instruction.GetValueDefinition(), str);
112}
113
114void CSAGenerator::EmitInstruction(
115 const PushBuiltinPointerInstruction& instruction,
116 Stack<std::string>* stack) {
117 const std::string str =
118 "ca_.UncheckedCast<BuiltinPtr>(ca_.SmiConstant(Builtin::k" +
119 instruction.external_name + "))";
120 stack->Push(str);
121 SetDefinitionVariable(instruction.GetValueDefinition(), str);
122}
123
124void CSAGenerator::EmitInstruction(
125 const NamespaceConstantInstruction& instruction,
126 Stack<std::string>* stack) {
127 const Type* type = instruction.constant->type();
128 std::vector<std::string> results;
129
130 const auto lowered = LowerType(type);
131 for (std::size_t i = 0; i < lowered.size(); ++i) {
132 results.push_back(DefinitionToVariable(instruction.GetValueDefinition(i)));
133 stack->Push(results.back());
134 decls() << " TNode<" << lowered[i]->GetGeneratedTNodeTypeName() << "> "
135 << stack->Top() << ";\n";
136 }
137
138 out() << " ";
139 if (type->StructSupertype()) {
140 out() << "std::tie(";
141 PrintCommaSeparatedList(out(), results);
142 out() << ") = ";
143 } else if (results.size() == 1) {
144 out() << results[0] << " = ";
145 }
146 out() << instruction.constant->external_name() << "(state_)";
147 if (type->StructSupertype()) {
148 out() << ".Flatten();\n";
149 } else {
150 out() << ";\n";
151 }
152}
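// Sketch of the emitted constant access (constant and variable names are
// hypothetical). A plain constant lowers to a single assignment,
//
//   tmp0 = kSomeConstant(state_);
//
// while a struct-typed constant is flattened into one TNode per slot:
//
//   std::tie(tmp0, tmp1) = kSomeStructConstant(state_).Flatten();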
153
154std::vector<std::string> CSAGenerator::ProcessArgumentsCommon(
155 const TypeVector& parameter_types,
156 std::vector<std::string> constexpr_arguments, Stack<std::string>* stack) {
157 std::vector<std::string> args;
158 for (auto it = parameter_types.rbegin(); it != parameter_types.rend(); ++it) {
159 const Type* type = *it;
160 if (type->IsConstexpr()) {
161 args.push_back(std::move(constexpr_arguments.back()));
162 constexpr_arguments.pop_back();
163 } else {
164 std::stringstream s;
165 size_t slot_count = LoweredSlotCount(type);
166 VisitResult arg = VisitResult(type, stack->TopRange(slot_count));
167 EmitCSAValue(arg, *stack, s);
168 args.push_back(s.str());
169 stack->PopMany(slot_count);
170 }
171 }
172 std::reverse(args.begin(), args.end());
173 return args;
174}
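// Worked example for ProcessArgumentsCommon(), under hypothetical inputs:
// for a signature (o: Object, n: constexpr int31) with constexpr_arguments =
// {"16"} and "tmp5" on top of the stack, the reverse iteration takes "16"
// from the constexpr arguments and pops "tmp5", printing it via
// EmitCSAValue() as "TNode<Object>{tmp5}"; after the final std::reverse the
// caller receives {"TNode<Object>{tmp5}", "16"} in declaration order.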
175
176void CSAGenerator::EmitInstruction(const CallIntrinsicInstruction& instruction,
177 Stack<std::string>* stack) {
178 TypeVector parameter_types =
179 instruction.intrinsic->signature().parameter_types.types;
180 std::vector<std::string> args = ProcessArgumentsCommon(
181 parameter_types, instruction.constexpr_arguments, stack);
182
183 Stack<std::string> pre_call_stack = *stack;
184 const Type* return_type = instruction.intrinsic->signature().return_type;
185 std::vector<std::string> results;
186
187 const auto lowered = LowerType(return_type);
188 for (std::size_t i = 0; i < lowered.size(); ++i) {
189 results.push_back(DefinitionToVariable(instruction.GetValueDefinition(i)));
190 stack->Push(results.back());
191 decls() << " TNode<" << lowered[i]->GetGeneratedTNodeTypeName() << "> "
192 << stack->Top() << ";\n";
193 }
194
195 out() << " ";
196 if (return_type->StructSupertype()) {
197 out() << "std::tie(";
198 PrintCommaSeparatedList(out(), results);
199 out() << ") = ";
200 } else {
201 if (results.size() == 1) {
202 out() << results[0] << " = ";
203 }
204 }
205
206 if (instruction.intrinsic->ExternalName() == "%RawDownCast") {
207 if (parameter_types.size() != 1) {
208 ReportError("%RawDownCast must take a single parameter");
209 }
210 const Type* original_type = parameter_types[0];
211 bool is_subtype =
212 return_type->IsSubtypeOf(original_type) ||
213 (original_type == TypeOracle::GetUninitializedHeapObjectType() &&
214 return_type->IsSubtypeOf(TypeOracle::GetHeapObjectType()));
215 if (!is_subtype) {
216 ReportError("%RawDownCast error: ", *return_type, " is not a subtype of ",
217 *original_type);
218 }
219 if (!original_type->StructSupertype() &&
220 return_type->GetGeneratedTNodeTypeName() !=
221 original_type->GetGeneratedTNodeTypeName()) {
222 if (return_type->IsSubtypeOf(TypeOracle::GetTaggedType())) {
223 out() << "TORQUE_CAST";
224 } else {
225 out() << "ca_.UncheckedCast<"
226 << return_type->GetGeneratedTNodeTypeName() << ">";
227 }
228 }
229 } else if (instruction.intrinsic->ExternalName() == "%GetClassMapConstant") {
230 if (!parameter_types.empty()) {
231 ReportError("%GetClassMapConstant must not take parameters");
232 }
233 if (instruction.specialization_types.size() != 1) {
234 ReportError(
235 "%GetClassMapConstant must take a single class as specialization "
236 "parameter");
237 }
238 const ClassType* class_type =
239 ClassType::DynamicCast(instruction.specialization_types[0]);
240 if (!class_type) {
241 ReportError("%GetClassMapConstant must take a class type parameter");
242 }
243 // If the class isn't actually used as the parameter to a TNode,
244 // then we can't rely on the class existing in C++ or being of the same
245 // type (e.g. it could be a template), so don't use the template CSA
246 // machinery for accessing the class' map.
247 std::string class_name =
248 class_type->name() != class_type->GetGeneratedTNodeTypeName()
249 ? std::string("void")
250 : class_type->name();
251
252 out() << std::string("CodeStubAssembler(state_).GetClassMapConstant<") +
253 class_name + ">";
254 } else if (instruction.intrinsic->ExternalName() == "%FromConstexpr") {
255 if (parameter_types.size() != 1 || !parameter_types[0]->IsConstexpr()) {
256 ReportError(
257 "%FromConstexpr must take a single parameter with constexpr "
258 "type");
259 }
260 if (return_type->IsConstexpr()) {
261 ReportError("%FromConstexpr must return a non-constexpr type");
262 }
263 if (return_type->IsSubtypeOf(TypeOracle::GetSmiType())) {
264 out() << "ca_.SmiConstant";
265 } else if (return_type->IsSubtypeOf(TypeOracle::GetNumberType())) {
266 out() << "ca_.NumberConstant";
267 } else if (return_type->IsSubtypeOf(TypeOracle::GetStringType())) {
268 out() << "ca_.StringConstant";
269 } else if (return_type->IsSubtypeOf(TypeOracle::GetObjectType())) {
270 ReportError(
271 "%FromConstexpr cannot cast to subclass of HeapObject unless it's a "
272 "String or Number");
273 } else if (return_type->IsSubtypeOf(TypeOracle::GetIntPtrType())) {
274 out() << "ca_.IntPtrConstant";
275 } else if (return_type->IsSubtypeOf(TypeOracle::GetUIntPtrType())) {
276 out() << "ca_.UintPtrConstant";
277 } else if (return_type->IsSubtypeOf(TypeOracle::GetInt32Type())) {
278 out() << "ca_.Int32Constant";
279 } else if (return_type->IsSubtypeOf(TypeOracle::GetUint8Type())) {
280 out() << "TNode<Uint8T>::UncheckedCast(ca_.Uint32Constant";
281 } else if (return_type->IsSubtypeOf(TypeOracle::GetUint32Type())) {
282 out() << "ca_.Uint32Constant";
283 } else if (return_type->IsSubtypeOf(TypeOracle::GetInt64Type())) {
284 out() << "ca_.Int64Constant";
285 } else if (return_type->IsSubtypeOf(TypeOracle::GetUint64Type())) {
286 out() << "ca_.Uint64Constant";
287 } else if (return_type->IsSubtypeOf(TypeOracle::GetBoolType())) {
288 out() << "ca_.BoolConstant";
289 } else {
290 std::stringstream s;
291 s << "%FromConstexpr does not support return type " << *return_type;
292 ReportError(s.str());
293 }
294 // Wrap the raw constexpr value in a static_cast to ensure that
295 // enums get properly casted to their backing integral value.
296 out() << "(CastToUnderlyingTypeIfEnum";
297 } else {
298 ReportError("no built in intrinsic with name " +
299 instruction.intrinsic->ExternalName());
300 }
301
302 out() << "(";
303 PrintCommaSeparatedList(out(), args);
304 if (instruction.intrinsic->ExternalName() == "%FromConstexpr") {
305 out() << ")";
306 if (return_type->IsSubtypeOf(TypeOracle::GetUint8Type())) {
307 out() << ")";
308 }
309 }
310 if (return_type->StructSupertype()) {
311 out() << ").Flatten();\n";
312 } else {
313 out() << ");\n";
314 }
315}
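// Sketches of the emitted intrinsic calls (variable names and operands are
// hypothetical):
//
//   // %FromConstexpr<Smi>(16)
//   tmp0 = ca_.SmiConstant(CastToUnderlyingTypeIfEnum(16));
//
//   // %RawDownCast to a tagged type with a different generated TNode type
//   tmp1 = TORQUE_CAST(tmp0);
//
//   // %GetClassMapConstant<SomeClass>()
//   tmp2 = CodeStubAssembler(state_).GetClassMapConstant<SomeClass>();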
316
317void CSAGenerator::EmitInstruction(const CallCsaMacroInstruction& instruction,
318 Stack<std::string>* stack) {
319 TypeVector parameter_types =
320 instruction.macro->signature().parameter_types.types;
321 std::vector<std::string> args = ProcessArgumentsCommon(
322 parameter_types, instruction.constexpr_arguments, stack);
323
324 Stack<std::string> pre_call_stack = *stack;
325 const Type* return_type = instruction.macro->signature().return_type;
326 std::vector<std::string> results;
327
328 const auto lowered = LowerType(return_type);
329 for (std::size_t i = 0; i < lowered.size(); ++i) {
330 results.push_back(DefinitionToVariable(instruction.GetValueDefinition(i)));
331 stack->Push(results.back());
332 decls() << " TNode<" << lowered[i]->GetGeneratedTNodeTypeName() << "> "
333 << stack->Top() << ";\n";
334 }
335
336 std::string catch_name =
337 PreCallableExceptionPreparation(instruction.catch_block);
338 out() << " ";
339 bool needs_flattening = return_type->StructSupertype().has_value();
340 if (needs_flattening) {
341 out() << "std::tie(";
342 PrintCommaSeparatedList(out(), results);
343 out() << ") = ";
344 } else {
345 if (results.size() == 1) {
346 out() << results[0] << " = ";
347 } else {
348 DCHECK_EQ(0, results.size());
349 }
350 }
351 if (ExternMacro* extern_macro = ExternMacro::DynamicCast(instruction.macro)) {
352 out() << extern_macro->external_assembler_name() << "(state_).";
353 } else {
354 args.insert(args.begin(), "state_");
355 }
356 out() << instruction.macro->ExternalName() << "(";
357 PrintCommaSeparatedList(out(), args);
358 if (needs_flattening) {
359 out() << ").Flatten();\n";
360 } else {
361 out() << ");\n";
362 }
363 PostCallableExceptionPreparation(catch_name, return_type,
364 instruction.catch_block, &pre_call_stack,
365 instruction.GetExceptionObjectDefinition());
366}
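// Sketch of the two call shapes this emits (macro and variable names are
// hypothetical). An extern macro is dispatched on its external assembler,
//
//   tmp1 = CodeStubAssembler(state_).SomeExternMacro(tmp0);
//
// while a Torque-defined macro is a generated free function taking state_
// as its first argument:
//
//   tmp1 = SomeTorqueMacro_0(state_, tmp0);
//
// Struct-valued results are additionally unpacked via
// std::tie(...) = ...Flatten().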
367
368void CSAGenerator::EmitInstruction(
369 const CallCsaMacroAndBranchInstruction& instruction,
370 Stack<std::string>* stack) {
371 TypeVector parameter_types =
372 instruction.macro->signature().parameter_types.types;
373 std::vector<std::string> args = ProcessArgumentsCommon(
374 parameter_types, instruction.constexpr_arguments, stack);
375
376 Stack<std::string> pre_call_stack = *stack;
377 std::vector<std::string> results;
378 const Type* return_type = instruction.macro->signature().return_type;
379
380 if (return_type != TypeOracle::GetNeverType()) {
381 const auto lowered = LowerType(return_type);
382 for (std::size_t i = 0; i < lowered.size(); ++i) {
383 results.push_back(
384 DefinitionToVariable(instruction.GetValueDefinition(i)));
385 decls() << " TNode<" << lowered[i]->GetGeneratedTNodeTypeName() << "> "
386 << results.back() << ";\n";
387 }
388 }
389
390 std::vector<std::string> label_names;
391 std::vector<std::vector<std::string>> var_names;
392 const LabelDeclarationVector& labels = instruction.macro->signature().labels;
393 DCHECK_EQ(labels.size(), instruction.label_blocks.size());
394 for (size_t i = 0; i < labels.size(); ++i) {
395 TypeVector label_parameters = labels[i].types;
396 label_names.push_back(FreshLabelName());
397 var_names.push_back({});
398 for (size_t j = 0; j < label_parameters.size(); ++j) {
399 var_names[i].push_back(FreshNodeName());
400 const auto def = instruction.GetLabelValueDefinition(i, j);
401 SetDefinitionVariable(def, var_names[i].back() + ".value()");
402 decls() << " compiler::TypedCodeAssemblerVariable<"
403 << label_parameters[j]->GetGeneratedTNodeTypeName() << "> "
404 << var_names[i][j] << "(&ca_);\n";
405 }
406 out() << " compiler::CodeAssemblerLabel " << label_names[i]
407 << "(&ca_);\n";
408 }
409
410 std::string catch_name =
411 PreCallableExceptionPreparation(instruction.catch_block);
412 out() << " ";
413 if (results.size() == 1) {
414 out() << results[0] << " = ";
415 } else if (results.size() > 1) {
416 out() << "std::tie(";
417 PrintCommaSeparatedList(out(), results);
418 out() << ") = ";
419 }
420 if (ExternMacro* extern_macro = ExternMacro::DynamicCast(instruction.macro)) {
421 out() << extern_macro->external_assembler_name() << "(state_).";
422 } else {
423 args.insert(args.begin(), "state_");
424 }
425 out() << instruction.macro->ExternalName() << "(";
426 PrintCommaSeparatedList(out(), args);
427 bool first = args.empty();
428 for (size_t i = 0; i < label_names.size(); ++i) {
429 if (!first) out() << ", ";
430 out() << "&" << label_names[i];
431 first = false;
432 for (size_t j = 0; j < var_names[i].size(); ++j) {
433 out() << ", &" << var_names[i][j];
434 }
435 }
436 if (return_type->StructSupertype()) {
437 out() << ").Flatten();\n";
438 } else {
439 out() << ");\n";
440 }
441
442 PostCallableExceptionPreparation(catch_name, return_type,
443 instruction.catch_block, &pre_call_stack,
444 instruction.GetExceptionObjectDefinition());
445
446 if (instruction.return_continuation) {
447 out() << " ca_.Goto(&" << BlockName(*instruction.return_continuation);
448 DCHECK_EQ(stack->Size() + results.size(),
449 (*instruction.return_continuation)->InputDefinitions().Size());
450
451 const auto& input_definitions =
452 (*instruction.return_continuation)->InputDefinitions();
453 for (BottomOffset i = {0}; i < input_definitions.AboveTop(); ++i) {
454 if (input_definitions.Peek(i).IsPhiFromBlock(
455 *instruction.return_continuation)) {
456 out() << ", "
457 << (i < stack->AboveTop() ? stack->Peek(i) : results[i.offset]);
458 }
459 }
460 out() << ");\n";
461 }
462 for (size_t l = 0; l < label_names.size(); ++l) {
463 out() << " if (" << label_names[l] << ".is_used()) {\n";
464 out() << " ca_.Bind(&" << label_names[l] << ");\n";
465 out() << " ca_.Goto(&" << BlockName(instruction.label_blocks[l]);
466 DCHECK_EQ(stack->Size() + var_names[l].size(),
467 instruction.label_blocks[l]->InputDefinitions().Size());
468
469 const auto& label_definitions =
470 instruction.label_blocks[l]->InputDefinitions();
471
472 BottomOffset i = {0};
473 for (; i < stack->AboveTop(); ++i) {
474 if (label_definitions.Peek(i).IsPhiFromBlock(
475 instruction.label_blocks[l])) {
476 out() << ", " << stack->Peek(i);
477 }
478 }
479 for (std::size_t k = 0; k < var_names[l].size(); ++k, ++i) {
480 if (label_definitions.Peek(i).IsPhiFromBlock(
481 instruction.label_blocks[l])) {
482 out() << ", " << var_names[l][k] << ".value()";
483 }
484 }
485 out() << ");\n";
486 out() << " }\n";
487 }
488}
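// Sketch of the label plumbing for a macro with one label carrying one value
// (all names hypothetical):
//
//   compiler::TypedCodeAssemblerVariable<Object> tmp3(&ca_);
//   compiler::CodeAssemblerLabel label0(&ca_);
//   tmp2 = SomeMacro_0(state_, tmp1, &label0, &tmp3);
//   ca_.Goto(&block5, ...);  // only if there is a return continuation
//   if (label0.is_used()) {
//     ca_.Bind(&label0);
//     ca_.Goto(&block7, ..., tmp3.value());
//   }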
489
490void CSAGenerator::EmitInstruction(const MakeLazyNodeInstruction& instruction,
491 Stack<std::string>* stack) {
492 TypeVector parameter_types =
493 instruction.macro->signature().parameter_types.types;
494 std::vector<std::string> args = ProcessArgumentsCommon(
495 parameter_types, instruction.constexpr_arguments, stack);
496
497 std::string result_name =
498 DefinitionToVariable(instruction.GetValueDefinition());
499
500 stack->Push(result_name);
501
502 decls() << " " << instruction.result_type->GetGeneratedTypeName() << " "
503 << result_name << ";\n";
504
505 // We assume here that the CodeAssemblerState will outlive any usage of
506 // the generated std::function that binds it. Likewise, copies of TNode values
507 // are only valid during generation of the current builtin.
508 out() << " " << result_name << " = [=] () { return ";
509 bool first = true;
510 if (const ExternMacro* extern_macro =
511 ExternMacro::DynamicCast(instruction.macro)) {
512 out() << extern_macro->external_assembler_name() << "(state_)."
513 << extern_macro->ExternalName() << "(";
514 } else {
515 out() << instruction.macro->ExternalName() << "(state_";
516 first = false;
517 }
518 if (!args.empty()) {
519 if (!first) out() << ", ";
520 PrintCommaSeparatedList(out(), args);
521 }
522 out() << "); };\n";
523}
524
525void CSAGenerator::EmitInstruction(const CallBuiltinInstruction& instruction,
526 Stack<std::string>* stack) {
527 std::vector<std::string> arguments = stack->PopMany(instruction.argc);
528 std::vector<const Type*> result_types =
529 LowerType(instruction.builtin->signature().return_type);
530 if (instruction.is_tailcall) {
531 if (instruction.builtin->IsJavaScript()) {
532 out() << " CodeStubAssembler(state_).TailCallJSBuiltin(Builtin::k"
533 << instruction.builtin->ExternalName();
534 } else {
535 out() << " CodeStubAssembler(state_).TailCallBuiltin(Builtin::k"
536 << instruction.builtin->ExternalName();
537 }
538 if (!instruction.builtin->signature().HasContextParameter()) {
539 // Add dummy context parameter to satisfy the TailCallBuiltin signature.
540 out() << ", TNode<Object>()";
541 }
542 for (const std::string& argument : arguments) {
543 out() << ", " << argument;
544 }
545 out() << ");\n";
546 } else {
547 std::vector<std::string> result_names(result_types.size());
548 for (size_t i = 0; i < result_types.size(); ++i) {
549 result_names[i] = DefinitionToVariable(instruction.GetValueDefinition(i));
550 decls() << " TNode<" << result_types[i]->GetGeneratedTNodeTypeName()
551 << "> " << result_names[i] << ";\n";
552 }
553
554 std::string lhs_name;
555 std::string lhs_type;
556 switch (result_types.size()) {
557 case 0:
558 // If a builtin call is annotated to never return, it has 0 return
559 // types (defining true void builtins is not allowed).
560 break;
561 case 1:
562 lhs_name = result_names[0];
563 lhs_type = result_types[0]->GetGeneratedTNodeTypeName();
564 break;
565 case 2:
566 // If a builtin returns two values, the return type is represented as a
567 // TNode containing a pair. We need a temporary place to store that
568 // result so we can unpack it into separate TNodes.
569 lhs_name = result_names[0] + "_and_" + result_names[1];
570 lhs_type = "PairT<" + result_types[0]->GetGeneratedTNodeTypeName() +
571 ", " + result_types[1]->GetGeneratedTNodeTypeName() + ">";
572 decls() << " TNode<" << lhs_type << "> " << lhs_name << ";\n";
573 break;
574 default:
575 ReportError(
576 "Torque can only call builtins that return one or two values, not ",
577 result_types.size());
578 }
579
580 std::string catch_name =
581 PreCallableExceptionPreparation(instruction.catch_block);
582 Stack<std::string> pre_call_stack = *stack;
583
584 for (const std::string& name : result_names) {
585 stack->Push(name);
586 }
587 // Currently we don't support calling javascript builtins directly. If ever
588 // needed, supporting that should be as easy as generating a call to
589 // CodeStubAssembler::CallJSBuiltin here though.
590 DCHECK(!instruction.builtin->IsJavaScript());
591 if (result_types.empty()) {
592 out() << "ca_.CallBuiltinVoid(Builtin::k"
593 << instruction.builtin->ExternalName();
594 } else {
595 out() << " " << lhs_name << " = ";
596 out() << "ca_.CallBuiltin<" << lhs_type << ">(Builtin::k"
597 << instruction.builtin->ExternalName();
598 }
599 if (!instruction.builtin->signature().HasContextParameter()) {
600 // Add dummy context parameter to satisfy the CallBuiltin signature.
601 out() << ", TNode<Object>()";
602 }
603 for (const std::string& argument : arguments) {
604 out() << ", " << argument;
605 }
606 out() << ");\n";
607
608 if (result_types.size() > 1) {
609 for (size_t i = 0; i < result_types.size(); ++i) {
610 out() << " " << result_names[i] << " = ca_.Projection<" << i << ">("
611 << lhs_name << ");\n";
612 }
613 }
614
615 PostCallableExceptionPreparation(
616 catch_name,
617 result_types.empty() ? TypeOracle::GetVoidType() : result_types[0],
618 instruction.catch_block, &pre_call_stack,
619 instruction.GetExceptionObjectDefinition());
620 }
621}
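// Sketch of a non-tail builtin call returning two values (builtin, context
// and variable names hypothetical): the pair is materialized first and then
// split with Projection:
//
//   TNode<PairT<Object, IntPtrT>> tmp0_and_tmp1;
//   tmp0_and_tmp1 = ca_.CallBuiltin<PairT<Object, IntPtrT>>(
//       Builtin::kSomeBuiltin, context, arg);
//   tmp0 = ca_.Projection<0>(tmp0_and_tmp1);
//   tmp1 = ca_.Projection<1>(tmp0_and_tmp1);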
622
623void CSAGenerator::EmitInstruction(
624 const CallBuiltinPointerInstruction& instruction,
625 Stack<std::string>* stack) {
626 std::vector<std::string> arguments = stack->PopMany(instruction.argc);
627 std::string function = stack->Pop();
628 std::vector<const Type*> result_types =
629 LowerType(instruction.type->return_type());
630 if (result_types.size() != 1) {
631 ReportError("builtins must have exactly one result");
632 }
633 if (instruction.is_tailcall) {
634 ReportError("tail-calls to builtin pointers are not supported");
635 }
636
637 DCHECK_EQ(1, instruction.GetValueDefinitionCount());
638 stack->Push(DefinitionToVariable(instruction.GetValueDefinition(0)));
639 std::string generated_type = result_types[0]->GetGeneratedTNodeTypeName();
640 decls() << " TNode<" << generated_type << "> " << stack->Top() << ";\n";
641 out() << stack->Top() << " = ";
642 if (generated_type != "Object") out() << "TORQUE_CAST(";
643 out() << "CodeStubAssembler(state_).CallBuiltinPointer(Builtins::"
644 "CallInterfaceDescriptorFor("
645 "ExampleBuiltinForTorqueFunctionPointerType("
646 << instruction.type->function_pointer_type_id() << ")), " << function;
647 if (!instruction.type->HasContextParameter()) {
648 // Add dummy context parameter to satisfy the CallBuiltinPointer signature.
649 out() << ", TNode<Object>()";
650 }
651 for (const std::string& argument : arguments) {
652 out() << ", " << argument;
653 }
654 out() << ")";
655 if (generated_type != "Object") out() << ")";
656 out() << ";\n";
657}
658
659std::string CSAGenerator::PreCallableExceptionPreparation(
660 std::optional<Block*> catch_block) {
661 std::string catch_name;
662 if (catch_block) {
663 catch_name = FreshCatchName();
664 out() << " compiler::CodeAssemblerExceptionHandlerLabel " << catch_name
665 << "__label(&ca_, compiler::CodeAssemblerLabel::kDeferred);\n";
666 out() << " { compiler::ScopedExceptionHandler s(&ca_, &" << catch_name
667 << "__label);\n";
668 }
669 return catch_name;
670}
671
672void CSAGenerator::PostCallableExceptionPreparation(
673 const std::string& catch_name, const Type* return_type,
674 std::optional<Block*> catch_block, Stack<std::string>* stack,
675 const std::optional<DefinitionLocation>& exception_object_definition) {
676 if (catch_block) {
677 DCHECK(exception_object_definition);
678 std::string block_name = BlockName(*catch_block);
679 out() << " }\n";
680 out() << " if (" << catch_name << "__label.is_used()) {\n";
681 out() << " compiler::CodeAssemblerLabel " << catch_name
682 << "_skip(&ca_);\n";
683 if (!return_type->IsNever()) {
684 out() << " ca_.Goto(&" << catch_name << "_skip);\n";
685 }
686 decls() << " TNode<JSAny> "
687 << DefinitionToVariable(*exception_object_definition) << ";\n";
688 out() << " ca_.Bind(&" << catch_name << "__label, &"
689 << DefinitionToVariable(*exception_object_definition) << ");\n";
690 out() << " ca_.Goto(&" << block_name;
691
692 DCHECK_EQ(stack->Size() + 1, (*catch_block)->InputDefinitions().Size());
693 const auto& input_definitions = (*catch_block)->InputDefinitions();
694 for (BottomOffset i = {0}; i < input_definitions.AboveTop(); ++i) {
695 if (input_definitions.Peek(i).IsPhiFromBlock(*catch_block)) {
696 if (i < stack->AboveTop()) {
697 out() << ", " << stack->Peek(i);
698 } else {
699 DCHECK_EQ(i, stack->AboveTop());
700 out() << ", " << DefinitionToVariable(*exception_object_definition);
701 }
702 }
703 }
704 out() << ");\n";
705
706 if (!return_type->IsNever()) {
707 out() << " ca_.Bind(&" << catch_name << "_skip);\n";
708 }
709 out() << " }\n";
710 }
711}
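// Taken together, the Pre/Post pair wraps a call roughly like this (catch
// label and block names hypothetical):
//
//   compiler::CodeAssemblerExceptionHandlerLabel catch0__label(
//       &ca_, compiler::CodeAssemblerLabel::kDeferred);
//   { compiler::ScopedExceptionHandler s(&ca_, &catch0__label);
//     ... the guarded call ...
//   }
//   if (catch0__label.is_used()) {
//     compiler::CodeAssemblerLabel catch0_skip(&ca_);
//     ca_.Goto(&catch0_skip);  // omitted when the callee never returns
//     ca_.Bind(&catch0__label, &exception_object);
//     ca_.Goto(&catch_block, ..., exception_object);
//     ca_.Bind(&catch0_skip);  // likewise omitted for "never" callees
//   }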
712
713void CSAGenerator::EmitInstruction(const CallRuntimeInstruction& instruction,
714 Stack<std::string>* stack) {
715 std::vector<std::string> arguments = stack->PopMany(instruction.argc);
716 const Type* return_type =
717 instruction.runtime_function->signature().return_type;
718 std::vector<const Type*> result_types;
719 if (return_type != TypeOracle::GetNeverType()) {
720 result_types = LowerType(return_type);
721 }
722 if (result_types.size() > 1) {
723 ReportError("runtime function must have at most one result");
724 }
725 if (instruction.is_tailcall) {
726 out() << " CodeStubAssembler(state_).TailCallRuntime(Runtime::k"
727 << instruction.runtime_function->ExternalName() << ", ";
728 PrintCommaSeparatedList(out(), arguments);
729 out() << ");\n";
730 } else {
731 std::string result_name;
732 if (result_types.size() == 1) {
733 result_name = DefinitionToVariable(instruction.GetValueDefinition(0));
734 decls() << " TNode<" << result_types[0]->GetGeneratedTNodeTypeName()
735 << "> " << result_name << ";\n";
736 }
737 std::string catch_name =
738 PreCallableExceptionPreparation(instruction.catch_block);
739 Stack<std::string> pre_call_stack = *stack;
740 if (result_types.size() == 1) {
741 std::string generated_type = result_types[0]->GetGeneratedTNodeTypeName();
742 stack->Push(result_name);
743 out() << " " << result_name << " = ";
744 if (generated_type != "Object") out() << "TORQUE_CAST(";
745 out() << "CodeStubAssembler(state_).CallRuntime(Runtime::k"
746 << instruction.runtime_function->ExternalName() << ", ";
747 PrintCommaSeparatedList(out(), arguments);
748 out() << ")";
749 if (generated_type != "Object") out() << ")";
750 out() << "; \n";
751 } else {
752 DCHECK_EQ(0, result_types.size());
753 out() << " CodeStubAssembler(state_).CallRuntime(Runtime::k"
754 << instruction.runtime_function->ExternalName() << ", ";
755 PrintCommaSeparatedList(out(), arguments);
756 out() << ");\n";
757 if (return_type == TypeOracle::GetNeverType()) {
758 out() << " CodeStubAssembler(state_).Unreachable();\n";
759 } else {
760 DCHECK(return_type == TypeOracle::GetVoidType());
761 }
762 }
763 PostCallableExceptionPreparation(
764 catch_name, return_type, instruction.catch_block, &pre_call_stack,
765 instruction.GetExceptionObjectDefinition());
766 }
767}
768
769void CSAGenerator::EmitInstruction(const BranchInstruction& instruction,
770 Stack<std::string>* stack) {
771 out() << " ca_.Branch(" << stack->Pop() << ", &"
772 << BlockName(instruction.if_true) << ", std::vector<compiler::Node*>{";
773
774 const auto& true_definitions = instruction.if_true->InputDefinitions();
775 DCHECK_EQ(stack->Size(), true_definitions.Size());
776 bool first = true;
777 for (BottomOffset i = {0}; i < stack->AboveTop(); ++i) {
778 if (true_definitions.Peek(i).IsPhiFromBlock(instruction.if_true)) {
779 if (!first) out() << ", ";
780 out() << stack->Peek(i);
781 first = false;
782 }
783 }
784
785 out() << "}, &" << BlockName(instruction.if_false)
786 << ", std::vector<compiler::Node*>{";
787
788 const auto& false_definitions = instruction.if_false->InputDefinitions();
789 DCHECK_EQ(stack->Size(), false_definitions.Size());
790 first = true;
791 for (BottomOffset i = {0}; i < stack->AboveTop(); ++i) {
792 if (false_definitions.Peek(i).IsPhiFromBlock(instruction.if_false)) {
793 if (!first) out() << ", ";
794 out() << stack->Peek(i);
795 first = false;
796 }
797 }
798
799 out() << "});\n";
800}
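// Example of the emitted branch (names hypothetical); each successor receives
// only the stack values that are phis of that block:
//
//   ca_.Branch(tmp0, &block1, std::vector<compiler::Node*>{tmp1, tmp2},
//              &block2, std::vector<compiler::Node*>{tmp1, tmp2});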
801
802void CSAGenerator::EmitInstruction(
803 const ConstexprBranchInstruction& instruction, Stack<std::string>* stack) {
804 out() << " if ((" << instruction.condition << ")) {\n";
805 out() << " ca_.Goto(&" << BlockName(instruction.if_true);
806
807 const auto& true_definitions = instruction.if_true->InputDefinitions();
808 DCHECK_EQ(stack->Size(), true_definitions.Size());
809 for (BottomOffset i = {0}; i < stack->AboveTop(); ++i) {
810 if (true_definitions.Peek(i).IsPhiFromBlock(instruction.if_true)) {
811 out() << ", " << stack->Peek(i);
812 }
813 }
814
815 out() << ");\n";
816 out() << " } else {\n";
817 out() << " ca_.Goto(&" << BlockName(instruction.if_false);
818
819 const auto& false_definitions = instruction.if_false->InputDefinitions();
820 DCHECK_EQ(stack->Size(), false_definitions.Size());
821 for (BottomOffset i = {0}; i < stack->AboveTop(); ++i) {
822 if (false_definitions.Peek(i).IsPhiFromBlock(instruction.if_false)) {
823 out() << ", " << stack->Peek(i);
824 }
825 }
826
827 out() << ");\n";
828 out() << " }\n";
829}
830
831void CSAGenerator::EmitInstruction(const GotoInstruction& instruction,
832 Stack<std::string>* stack) {
833 out() << " ca_.Goto(&" << BlockName(instruction.destination);
834 const auto& destination_definitions =
835 instruction.destination->InputDefinitions();
836 DCHECK_EQ(stack->Size(), destination_definitions.Size());
837 for (BottomOffset i = {0}; i < stack->AboveTop(); ++i) {
838 if (destination_definitions.Peek(i).IsPhiFromBlock(
839 instruction.destination)) {
840 out() << ", " << stack->Peek(i);
841 }
842 }
843 out() << ");\n";
844}
845
846void CSAGenerator::EmitInstruction(const GotoExternalInstruction& instruction,
847 Stack<std::string>* stack) {
848 for (auto it = instruction.variable_names.rbegin();
849 it != instruction.variable_names.rend(); ++it) {
850 out() << " *" << *it << " = " << stack->Pop() << ";\n";
851 }
852 out() << " ca_.Goto(" << instruction.destination << ");\n";
853}
854
855void CSAGenerator::EmitInstruction(const ReturnInstruction& instruction,
856 Stack<std::string>* stack) {
857 if (*linkage_ == Builtin::kVarArgsJavaScript) {
858 out() << " " << ARGUMENTS_VARIABLE_STRING << ".PopAndReturn(";
859 } else {
860 out() << " CodeStubAssembler(state_).Return(";
861 }
862 std::vector<std::string> values = stack->PopMany(instruction.count);
863 PrintCommaSeparatedList(out(), values);
864 out() << ");\n";
865}
866
867void CSAGenerator::EmitInstruction(const PrintErrorInstruction& instruction,
868 Stack<std::string>* stack) {
869 out() << " CodeStubAssembler(state_).PrintErr("
870 << StringLiteralQuote(instruction.message) << ");\n";
871}
872
873void CSAGenerator::EmitInstruction(const AbortInstruction& instruction,
874 Stack<std::string>* stack) {
875 switch (instruction.kind) {
876 case AbortInstruction::Kind::kUnreachable:
877 DCHECK(instruction.message.empty());
878 out() << " CodeStubAssembler(state_).Unreachable();\n";
879 break;
880 case AbortInstruction::Kind::kDebugBreak:
881 DCHECK(instruction.message.empty());
882 out() << " CodeStubAssembler(state_).DebugBreak();\n";
883 break;
884 case AbortInstruction::Kind::kAssertionFailure: {
885 std::string file = StringLiteralQuote(
886 SourceFileMap::PathFromV8Root(instruction.pos.source));
887 out() << " {\n";
888 out() << " auto pos_stack = ca_.GetMacroSourcePositionStack();\n";
889 out() << " pos_stack.push_back({" << file << ", "
890 << instruction.pos.start.line + 1 << "});\n";
891 out() << " CodeStubAssembler(state_).FailAssert("
892 << StringLiteralQuote(instruction.message) << ", pos_stack);\n";
893 out() << " }\n";
894 break;
895 }
896 }
897}
898
899void CSAGenerator::EmitInstruction(const UnsafeCastInstruction& instruction,
900 Stack<std::string>* stack) {
901 const std::string str =
902 "ca_.UncheckedCast<" +
903 instruction.destination_type->GetGeneratedTNodeTypeName() + ">(" +
904 stack->Top() + ")";
905 stack->Poke(stack->AboveTop() - 1, str);
906 SetDefinitionVariable(instruction.GetValueDefinition(), str);
907}
908
909void CSAGenerator::EmitInstruction(const LoadReferenceInstruction& instruction,
910 Stack<std::string>* stack) {
911 // We should never load or store builtin pointers because the heap may be
912 // corrupted.
913 CHECK(!instruction.type->IsBuiltinPointerType());
914
915 std::string result_name =
916 DefinitionToVariable(instruction.GetValueDefinition());
917
918 std::string offset = stack->Pop();
919 std::string object = stack->Pop();
920 stack->Push(result_name);
921
922 decls() << " " << instruction.type->GetGeneratedTypeName() << " "
923 << result_name << ";\n";
924 out() << " " << result_name
925 << " = CodeStubAssembler(state_).LoadReference<"
926 << instruction.type->GetGeneratedTNodeTypeName()
927 << ">(CodeStubAssembler::Reference{" << object << ", " << offset
928 << "});\n";
929}
930
931void CSAGenerator::EmitInstruction(const StoreReferenceInstruction& instruction,
932 Stack<std::string>* stack) {
933 // We should never load or store builtin pointers because the heap may be
934 // corrupted.
935 CHECK(!instruction.type->IsBuiltinPointerType());
936
937 std::string value = stack->Pop();
938 std::string offset = stack->Pop();
939 std::string object = stack->Pop();
940
941 out() << " CodeStubAssembler(state_).StoreReference<"
942 << instruction.type->GetGeneratedTNodeTypeName()
943 << ">(CodeStubAssembler::"
944 "Reference{"
945 << object << ", " << offset << "}, " << value << ");\n";
946}
947
948namespace {
949std::string GetBitFieldSpecialization(const Type* container,
950 const BitField& field) {
951 auto smi_tagged_type =
952 Type::MatchUnaryGeneric(container, TypeOracle::GetSmiTaggedGeneric());
953 std::string container_type = smi_tagged_type
954 ? "uintptr_t"
955 : container->GetConstexprGeneratedTypeName();
956 int offset = smi_tagged_type
957 ? field.offset + kSmiTagSize
958 : field.offset;
959 std::stringstream stream;
960 stream << "base::BitField<"
961 << field.name_and_type.type->GetConstexprGeneratedTypeName() << ", "
962 << offset << ", " << field.num_bits << ", " << container_type << ">";
963 return stream.str();
964}
965} // namespace
966
967void CSAGenerator::EmitInstruction(const LoadBitFieldInstruction& instruction,
968 Stack<std::string>* stack) {
969 std::string result_name =
970 DefinitionToVariable(instruction.GetValueDefinition());
971
972 std::string bit_field_struct = stack->Pop();
973 stack->Push(result_name);
974
975 const Type* struct_type = instruction.bit_field_struct_type;
976 const Type* field_type = instruction.bit_field.name_and_type.type;
977 auto smi_tagged_type =
978 Type::MatchUnaryGeneric(struct_type, TypeOracle::GetSmiTaggedGeneric());
979 bool struct_is_pointer_size =
980 IsPointerSizeIntegralType(struct_type) || smi_tagged_type;
981 DCHECK_IMPLIES(!struct_is_pointer_size, Is32BitIntegralType(struct_type));
982 bool field_is_pointer_size = IsPointerSizeIntegralType(field_type);
983 DCHECK_IMPLIES(!field_is_pointer_size, Is32BitIntegralType(field_type));
984 std::string struct_word_type = struct_is_pointer_size ? "WordT" : "Word32T";
985 std::string decoder =
986 struct_is_pointer_size
987 ? (field_is_pointer_size ? "DecodeWord" : "DecodeWord32FromWord")
988 : (field_is_pointer_size ? "DecodeWordFromWord32" : "DecodeWord32");
989
990 decls() << " " << field_type->GetGeneratedTypeName() << " " << result_name
991 << ";\n";
992
993 if (smi_tagged_type) {
994 // If the container is a SMI, then UncheckedCast is insufficient and we must
995 // use a bit cast.
996 bit_field_struct =
997 "ca_.BitcastTaggedToWordForTagAndSmiBits(" + bit_field_struct + ")";
998 }
999
1000 out() << " " << result_name << " = ca_.UncheckedCast<"
1001 << field_type->GetGeneratedTNodeTypeName()
1002 << ">(CodeStubAssembler(state_)." << decoder << "<"
1003 << GetBitFieldSpecialization(struct_type, instruction.bit_field)
1004 << ">(ca_.UncheckedCast<" << struct_word_type << ">("
1005 << bit_field_struct << ")));\n";
1006}
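// Sketch of a decode from a SmiTagged bit-field struct (field layout and
// names hypothetical): the Smi is bitcast to a word first, and the BitField
// offset is shifted by the Smi tag via GetBitFieldSpecialization():
//
//   tmp1 = ca_.UncheckedCast<Uint32T>(
//       CodeStubAssembler(state_).DecodeWord32FromWord<
//           base::BitField<uint32_t, 3, 4, uintptr_t>>(
//           ca_.UncheckedCast<WordT>(
//               ca_.BitcastTaggedToWordForTagAndSmiBits(tmp0))));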
1007
1008void CSAGenerator::EmitInstruction(const StoreBitFieldInstruction& instruction,
1009 Stack<std::string>* stack) {
1010 std::string result_name =
1011 DefinitionToVariable(instruction.GetValueDefinition());
1012
1013 std::string value = stack->Pop();
1014 std::string bit_field_struct = stack->Pop();
1015 stack->Push(result_name);
1016
1017 const Type* struct_type = instruction.bit_field_struct_type;
1018 const Type* field_type = instruction.bit_field.name_and_type.type;
1019 auto smi_tagged_type =
1020 Type::MatchUnaryGeneric(struct_type, TypeOracle::GetSmiTaggedGeneric());
1021 bool struct_is_pointer_size =
1022 IsPointerSizeIntegralType(struct_type) || smi_tagged_type;
1023 DCHECK_IMPLIES(!struct_is_pointer_size, Is32BitIntegralType(struct_type));
1024 bool field_is_pointer_size = IsPointerSizeIntegralType(field_type);
1025 DCHECK_IMPLIES(!field_is_pointer_size, Is32BitIntegralType(field_type));
1026 std::string struct_word_type = struct_is_pointer_size ? "WordT" : "Word32T";
1027 std::string field_word_type = field_is_pointer_size ? "UintPtrT" : "Uint32T";
1028 std::string encoder =
1029 struct_is_pointer_size
1030 ? (field_is_pointer_size ? "UpdateWord" : "UpdateWord32InWord")
1031 : (field_is_pointer_size ? "UpdateWordInWord32" : "UpdateWord32");
1032
1033 decls() << " " << struct_type->GetGeneratedTypeName() << " " << result_name
1034 << ";\n";
1035
1036 if (smi_tagged_type) {
1037 // If the container is a SMI, then UncheckedCast is insufficient and we must
1038 // use a bit cast.
1039 bit_field_struct =
1040 "ca_.BitcastTaggedToWordForTagAndSmiBits(" + bit_field_struct + ")";
1041 }
1042
1043 std::string result_expression =
1044 "CodeStubAssembler(state_)." + encoder + "<" +
1045 GetBitFieldSpecialization(struct_type, instruction.bit_field) +
1046 ">(ca_.UncheckedCast<" + struct_word_type + ">(" + bit_field_struct +
1047 "), ca_.UncheckedCast<" + field_word_type + ">(" + value + ")" +
1048 (instruction.starts_as_zero ? ", true" : "") + ")";
1049
1050 if (smi_tagged_type) {
1051 result_expression =
1052 "ca_.BitcastWordToTaggedSigned(" + result_expression + ")";
1053 }
1054
1055 out() << " " << result_name << " = ca_.UncheckedCast<"
1056 << struct_type->GetGeneratedTNodeTypeName() << ">(" << result_expression
1057 << ");\n";
1058}
1059
1060// static
1061void CSAGenerator::EmitCSAValue(VisitResult result,
1062 const Stack<std::string>& values,
1063 std::ostream& out) {
1064 if (!result.IsOnStack()) {
1065 out << result.constexpr_value();
1066 } else if (auto struct_type = result.type()->StructSupertype()) {
1067 out << (*struct_type)->GetGeneratedTypeName() << "{";
1068 bool first = true;
1069 for (auto& field : (*struct_type)->fields()) {
1070 if (!first) {
1071 out << ", ";
1072 }
1073 first = false;
1074 EmitCSAValue(ProjectStructField(result, field.name_and_type.name), values,
1075 out);
1076 }
1077 out << "}";
1078 } else {
1079 DCHECK_EQ(1, result.stack_range().Size());
1080 out << result.type()->GetGeneratedTypeName() << "{"
1081 << values.Peek(result.stack_range().begin()) << "}";
1082 }
1083}
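// Examples of how EmitCSAValue() prints a VisitResult (type and variable
// names hypothetical): a constexpr result prints as its literal text, a
// single stack slot prints as "TNode<Smi>{tmp3}", and a struct prints
// recursively, e.g. "TorqueStructEntry{TNode<Smi>{tmp0}, TNode<Object>{tmp1}}".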
1084
1085} // namespace v8::internal::torque