bytecode-generator.cc
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/bytecode-generator.h"

#include <map>
#include <optional>
#include <unordered_map>
#include <unordered_set>

#include "src/api/api-inl.h"
#include "src/ast/ast.h"
#include "src/ast/scopes.h"
#include "src/common/globals.h"
#include "src/logging/log.h"
#include "src/objects/objects.h"
#include "src/objects/smi.h"
#include "src/parsing/token.h"
#include "src/utils/ostreams.h"

namespace v8 {
namespace internal {
namespace interpreter {

// Scoped class tracking context objects created by the visitor. Represents
// mutations of the context chain within the function body, allowing pushing
// and popping of the current {context_register} during visitation.
class BytecodeGenerator::ContextScope {
 public:
  ContextScope(BytecodeGenerator* generator, Scope* scope,
               Register outer_context_reg = Register())
      : generator_(generator),
        scope_(scope),
        outer_(generator_->execution_context()),
        register_(Register::current_context()),
        depth_(0) {
    DCHECK(scope->NeedsContext() || outer_ == nullptr);
    if (outer_) {
      depth_ = outer_->depth_ + 1;

      // Push the outer context into a new context register.
      if (!outer_context_reg.is_valid()) {
        outer_context_reg = generator_->register_allocator()->NewRegister();
      }
      outer_->set_register(outer_context_reg);
      generator_->builder()->PushContext(outer_context_reg);
    }
    generator_->set_execution_context(this);
  }

  ~ContextScope() {
    if (outer_) {
      DCHECK_EQ(register_.index(), Register::current_context().index());
      generator_->builder()->PopContext(outer_->reg());
      outer_->set_register(register_);
    }
    generator_->set_execution_context(outer_);
  }

  ContextScope(const ContextScope&) = delete;
  ContextScope& operator=(const ContextScope&) = delete;

  // Returns the depth of the given |scope| for the current execution context.
  int ContextChainDepth(Scope* scope) {
    return scope_->ContextChainLength(scope);
  }

  // Returns the execution context at |depth| in the current context chain if
  // it is a function local execution context, otherwise returns nullptr.
  ContextScope* Previous(int depth) {
    if (depth > depth_) {
      return nullptr;
    }

    ContextScope* previous = this;
    for (int i = depth; i > 0; --i) {
      previous = previous->outer_;
    }
    return previous;
  }

  Register reg() const { return register_; }

 private:
  const BytecodeArrayBuilder* builder() const { return generator_->builder(); }

  void set_register(Register reg) { register_ = reg; }

  BytecodeGenerator* generator_;
  Scope* scope_;
  ContextScope* outer_;
  Register register_;
  int depth_;
};
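
// Illustrative usage sketch (not part of the original source): a visitor
// method that materializes a new block context typically pairs the bytecode
// that creates the context with a ContextScope, so the context chain is
// unwound automatically when the scope is destroyed. Assuming `stmt` is a
// Block whose scope needs a context (compare VisitBlock below):
//
//   BuildNewLocalBlockContext(stmt->scope());  // new context in accumulator
//   ContextScope scope(this, stmt->scope());   // emits PushContext on entry
//   VisitStatements(stmt->statements());
//   // ~ContextScope emits PopContext back to the outer context register.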

// Scoped class for tracking control statements entered by the
// visitor.
class BytecodeGenerator::ControlScope {
 public:
  explicit ControlScope(BytecodeGenerator* generator)
      : generator_(generator),
        outer_(generator->execution_control()),
        context_(generator->execution_context()) {
    generator_->set_execution_control(this);
  }
  ~ControlScope() { generator_->set_execution_control(outer()); }
  ControlScope(const ControlScope&) = delete;
  ControlScope& operator=(const ControlScope&) = delete;

  void Break(Statement* stmt) {
    PerformCommand(CMD_BREAK, stmt, kNoSourcePosition);
  }
  void Continue(Statement* stmt) {
    PerformCommand(CMD_CONTINUE, stmt, kNoSourcePosition);
  }
  void ReturnAccumulator(int source_position) {
    PerformCommand(CMD_RETURN, nullptr, source_position);
  }
  void AsyncReturnAccumulator(int source_position) {
    PerformCommand(CMD_ASYNC_RETURN, nullptr, source_position);
  }

  class DeferredCommands;

 protected:
  enum Command {
    CMD_BREAK,
    CMD_CONTINUE,
    CMD_RETURN,
    CMD_ASYNC_RETURN,
    CMD_RETHROW
  };
  static constexpr bool CommandUsesAccumulator(Command command) {
    return command != CMD_BREAK && command != CMD_CONTINUE;
  }

  void PerformCommand(Command command, Statement* statement,
                      int source_position);
  virtual bool Execute(Command command, Statement* statement,
                       int source_position) = 0;

  // Helper to pop the context chain to a depth expected by this control scope.
  // Note that it is the responsibility of each individual {Execute} method to
  // trigger this when commands are handled and control-flow continues locally.
  void PopContextToExpectedDepth();

  BytecodeGenerator* generator() const { return generator_; }
  ControlScope* outer() const { return outer_; }
  ContextScope* context() const { return context_; }

 private:
  BytecodeGenerator* generator_;
  ControlScope* outer_;
  ContextScope* context_;
};

// Helper class for a try-finally control scope. It can record intercepted
// control-flow commands that cause entry into a finally-block, and re-apply
// them after again leaving that block. Special tokens are used to identify
// paths going through the finally-block to dispatch after leaving the block.
class BytecodeGenerator::ControlScope::DeferredCommands final {
 public:
  DeferredCommands(BytecodeGenerator* generator, Register token_register,
                   Register result_register, Register message_register)
      : generator_(generator),
        deferred_(generator->zone()),
        token_register_(token_register),
        result_register_(result_register),
        message_register_(message_register),
        return_token_(-1),
        async_return_token_(-1),
        fallthrough_from_try_block_needed_(false) {
    // There's always a rethrow path.
    // TODO(leszeks): We could decouple deferred_ index and token to allow us
    // to still push this lazily.
    static_assert(
        static_cast<int>(TryFinallyContinuationToken::kRethrowToken) == 0);
    deferred_.push_back(
        {CMD_RETHROW, nullptr,
         static_cast<int>(TryFinallyContinuationToken::kRethrowToken)});
  }

  // One recorded control-flow command.
  struct Entry {
    Command command;       // The command type being applied on this path.
    Statement* statement;  // The target statement for the command or {nullptr}.
    int token;             // A token identifying this particular path.
  };

  // Records a control-flow command while entering the finally-block. This also
  // generates a new dispatch token that identifies one particular path. This
  // expects the result to be in the accumulator.
  void RecordCommand(Command command, Statement* statement) {
    int token = GetTokenForCommand(command, statement);

    DCHECK_LT(token, deferred_.size());
    DCHECK_EQ(deferred_[token].command, command);
    DCHECK_EQ(deferred_[token].statement, statement);
    DCHECK_EQ(deferred_[token].token, token);

    if (CommandUsesAccumulator(command)) {
      builder()->StoreAccumulatorInRegister(result_register_);
    }
    builder()->LoadLiteral(Smi::FromInt(token));
    builder()->StoreAccumulatorInRegister(token_register_);
    if (!CommandUsesAccumulator(command)) {
      // If we're not saving the accumulator in the result register, shove a
      // harmless value there instead so that it is still considered "killed"
      // in the liveness analysis. Normally we would LdaUndefined first, but
      // the Smi token value is just as good, and by reusing it we save a
      // bytecode.
      builder()->StoreAccumulatorInRegister(result_register_);
    }
    if (command == CMD_RETHROW) {
      // Clear message object as we enter the catch block. It will be restored
      // if we rethrow.
      builder()->LoadTheHole().SetPendingMessage().StoreAccumulatorInRegister(
          message_register_);
    }
  }

  // Records the dispatch token to be used to identify the re-throw path when
  // the finally-block has been entered through the exception handler. This
  // expects the exception to be in the accumulator.
  void RecordHandlerReThrowPath() {
    // The accumulator contains the exception object.
    RecordCommand(CMD_RETHROW, nullptr);
  }

  // Records the dispatch token to be used to identify the implicit
  // fall-through path at the end of a try-block into the corresponding
  // finally-block.
  void RecordFallThroughPath() {
    fallthrough_from_try_block_needed_ = true;
    builder()->LoadLiteral(Smi::FromInt(
        static_cast<int>(TryFinallyContinuationToken::kFallthroughToken)));
    builder()->StoreAccumulatorInRegister(token_register_);
    // Since we're not saving the accumulator in the result register, shove a
    // harmless value there instead so that it is still considered "killed" in
    // the liveness analysis. Normally we would LdaUndefined first, but the Smi
    // token value is just as good, and by reusing it we save a bytecode.
    builder()->StoreAccumulatorInRegister(result_register_);
  }

  void ApplyDeferredCommand(const Entry& entry) {
    if (entry.command == CMD_RETHROW) {
      // Pending message object is restored on exit.
      builder()
          ->LoadAccumulatorWithRegister(message_register_)
          .SetPendingMessage();
    }

    if (CommandUsesAccumulator(entry.command)) {
      builder()->LoadAccumulatorWithRegister(result_register_);
    }
    execution_control()->PerformCommand(entry.command, entry.statement,
                                        kNoSourcePosition);
  }

  // Applies all recorded control-flow commands after the finally-block again.
  // This generates a dynamic dispatch on the token from the entry point.
  void ApplyDeferredCommands() {
    if (deferred_.empty()) return;

    BytecodeLabel fall_through_from_try_block;

    if (deferred_.size() == 1) {
      // For a single entry, just jump to the fallthrough if we don't match the
      // entry token.
      const Entry& entry = deferred_[0];

      if (fallthrough_from_try_block_needed_) {
        builder()
            ->LoadLiteral(Smi::FromInt(entry.token))
            .CompareReference(token_register_)
            .JumpIfFalse(ToBooleanMode::kAlreadyBoolean,
                         &fall_through_from_try_block);
      }

      ApplyDeferredCommand(entry);
    } else {
      // For multiple entries, build a jump table and switch on the token,
      // jumping to the fallthrough if none of them match.
      //
      // If fallthrough from the try block is not needed, generate a jump table
      // with one fewer entry and reuse the fallthrough path for the final
      // entry.
      const int jump_table_base_value =
          fallthrough_from_try_block_needed_ ? 0 : 1;
      const int jump_table_size =
          static_cast<int>(deferred_.size() - jump_table_base_value);

      if (jump_table_size == 1) {
        DCHECK_EQ(2, deferred_.size());
        BytecodeLabel fall_through_to_final_entry;
        const Entry& first_entry = deferred_[0];
        const Entry& final_entry = deferred_[1];
        builder()
            ->LoadLiteral(Smi::FromInt(first_entry.token))
            .CompareReference(token_register_)
            .JumpIfFalse(ToBooleanMode::kAlreadyBoolean,
                         &fall_through_to_final_entry);
        ApplyDeferredCommand(first_entry);
        builder()->Bind(&fall_through_to_final_entry);
        ApplyDeferredCommand(final_entry);
      } else {
        BytecodeJumpTable* jump_table = builder()->AllocateJumpTable(
            jump_table_size, jump_table_base_value);
        builder()
            ->LoadAccumulatorWithRegister(token_register_)
            .SwitchOnSmiNoFeedback(jump_table);

        const Entry& first_entry = deferred_.front();
        if (fallthrough_from_try_block_needed_) {
          builder()->Jump(&fall_through_from_try_block);
          builder()->Bind(jump_table, first_entry.token);
        }
        ApplyDeferredCommand(first_entry);

        for (const Entry& entry : base::IterateWithoutFirst(deferred_)) {
          builder()->Bind(jump_table, entry.token);
          ApplyDeferredCommand(entry);
        }
      }
    }

    if (fallthrough_from_try_block_needed_) {
      builder()->Bind(&fall_through_from_try_block);
    }
  }

  BytecodeArrayBuilder* builder() { return generator_->builder(); }
  ControlScope* execution_control() { return generator_->execution_control(); }

 private:
  int GetTokenForCommand(Command command, Statement* statement) {
    switch (command) {
      case CMD_RETURN:
        return GetReturnToken();
      case CMD_ASYNC_RETURN:
        return GetAsyncReturnToken();
      case CMD_RETHROW:
        return static_cast<int>(TryFinallyContinuationToken::kRethrowToken);
      default:
        // TODO(leszeks): We could also search for entries with the same
        // command and statement.
        return GetNewTokenForCommand(command, statement);
    }
  }

  int GetReturnToken() {
    if (return_token_ == -1) {
      return_token_ = GetNewTokenForCommand(CMD_RETURN, nullptr);
    }
    return return_token_;
  }

  int GetAsyncReturnToken() {
    if (async_return_token_ == -1) {
      async_return_token_ = GetNewTokenForCommand(CMD_ASYNC_RETURN, nullptr);
    }
    return async_return_token_;
  }

  int GetNewTokenForCommand(Command command, Statement* statement) {
    int token = static_cast<int>(deferred_.size());
    deferred_.push_back({command, statement, token});
    return token;
  }

  BytecodeGenerator* generator_;
  ZoneVector<Entry> deferred_;
  Register token_register_;
  Register result_register_;
  Register message_register_;

  // Tokens for commands that don't need a statement.
  int return_token_;
  int async_return_token_;

  // Whether a fallthrough is possible.
  bool fallthrough_from_try_block_needed_;
};
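
// Illustrative example (not from the original source): for a snippet like
//
//   try { if (c) return f(); } finally { cleanup(); }
//
// the `return` inside the try-block is intercepted and recorded via
// RecordCommand(CMD_RETURN, ...), which saves the return value and a Smi
// dispatch token before control enters the finally-block. After the
// finally-block body, ApplyDeferredCommands() switches on the saved token and
// re-performs the pending command (here, the return), alongside the rethrow
// and fall-through paths described above.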

// Scoped class for dealing with control flow reaching the function level.
class BytecodeGenerator::ControlScopeForTopLevel final
    : public BytecodeGenerator::ControlScope {
 public:
  explicit ControlScopeForTopLevel(BytecodeGenerator* generator)
      : ControlScope(generator) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    switch (command) {
      case CMD_BREAK:  // We should never see break/continue in top-level.
      case CMD_CONTINUE:
        UNREACHABLE();
      case CMD_RETURN:
        // No need to pop contexts, execution leaves the method body.
        generator()->BuildReturn(source_position);
        return true;
      case CMD_ASYNC_RETURN:
        // No need to pop contexts, execution leaves the method body.
        generator()->BuildAsyncReturn(source_position);
        return true;
      case CMD_RETHROW:
        // No need to pop contexts, execution leaves the method body.
        generator()->BuildReThrow();
        return true;
    }
    return false;
  }
};

// Scoped class for enabling break inside blocks and switch blocks.
class BytecodeGenerator::ControlScopeForBreakable final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForBreakable(BytecodeGenerator* generator,
                           BreakableStatement* statement,
                           BreakableControlFlowBuilder* control_builder)
      : ControlScope(generator),
        statement_(statement),
        control_builder_(control_builder) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    if (statement != statement_) return false;
    switch (command) {
      case CMD_BREAK:
        PopContextToExpectedDepth();
        control_builder_->Break();
        return true;
      case CMD_CONTINUE:
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
      case CMD_RETHROW:
        break;
    }
    return false;
  }

 private:
  Statement* statement_;
  BreakableControlFlowBuilder* control_builder_;
};

// Scoped class for enabling 'break' and 'continue' in iteration
// constructs, e.g. do...while, while..., for...
class BytecodeGenerator::ControlScopeForIteration final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForIteration(BytecodeGenerator* generator,
                           IterationStatement* statement,
                           LoopBuilder* loop_builder)
      : ControlScope(generator),
        statement_(statement),
        loop_builder_(loop_builder) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    if (statement != statement_) return false;
    switch (command) {
      case CMD_BREAK:
        PopContextToExpectedDepth();
        loop_builder_->Break();
        return true;
      case CMD_CONTINUE:
        PopContextToExpectedDepth();
        loop_builder_->Continue();
        return true;
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
      case CMD_RETHROW:
        break;
    }
    return false;
  }

 private:
  Statement* statement_;
  LoopBuilder* loop_builder_;
};
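
// Illustrative example (not from the original source): in
//
//   outer: for (;;) { for (;;) { break outer; } }
//
// the inner `break outer` is routed through
// execution_control()->PerformCommand(CMD_BREAK, <outer loop>, ...). The
// inner ControlScopeForIteration declines the command (statement mismatch),
// so it propagates to the outer scope, which pops the context chain to its
// expected depth and emits the jump via its LoopBuilder.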

// Scoped class for enabling 'throw' in try-catch constructs.
class BytecodeGenerator::ControlScopeForTryCatch final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForTryCatch(BytecodeGenerator* generator,
                          TryCatchBuilder* try_catch_builder)
      : ControlScope(generator) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    switch (command) {
      case CMD_BREAK:
      case CMD_CONTINUE:
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
        break;
      case CMD_RETHROW:
        // No need to pop contexts, execution re-enters the method body via the
        // stack unwinding mechanism which itself restores contexts correctly.
        generator()->BuildReThrow();
        return true;
    }
    return false;
  }
};

// Scoped class for enabling control flow through try-finally constructs.
class BytecodeGenerator::ControlScopeForTryFinally final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForTryFinally(BytecodeGenerator* generator,
                            TryFinallyBuilder* try_finally_builder,
                            DeferredCommands* commands)
      : ControlScope(generator),
        try_finally_builder_(try_finally_builder),
        commands_(commands) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    switch (command) {
      case CMD_BREAK:
      case CMD_CONTINUE:
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
      case CMD_RETHROW:
        PopContextToExpectedDepth();
        // We don't record the source_position here since we don't generate
        // return bytecode right here; it will be generated later, as part of
        // the finally block. Each return bytecode generated in the finally
        // block gets its own source position, taken from the corresponding
        // return statement, or from the end of the function if no return
        // statement is present.
        commands_->RecordCommand(command, statement);
        try_finally_builder_->LeaveTry();
        return true;
    }
    return false;
  }

 private:
  TryFinallyBuilder* try_finally_builder_;
  DeferredCommands* commands_;
};

// Scoped class for collecting 'return' statements in a derived constructor.
// Derived constructors can only return undefined or objects, and this check
// must occur right before return (e.g., after `finally` blocks execute).
class BytecodeGenerator::ControlScopeForDerivedConstructor final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForDerivedConstructor(BytecodeGenerator* generator,
                                    Register result_register,
                                    BytecodeLabels* check_return_value_labels)
      : ControlScope(generator),
        result_register_(result_register),
        check_return_value_labels_(check_return_value_labels) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    // Constructors are never async.
    DCHECK_NE(CMD_ASYNC_RETURN, command);
    if (command == CMD_RETURN) {
      PopContextToExpectedDepth();
      generator()->builder()->SetStatementPosition(source_position);
      generator()->builder()->StoreAccumulatorInRegister(result_register_);
      generator()->builder()->Jump(check_return_value_labels_->New());
      return true;
    }
    return false;
  }

 private:
  Register result_register_;
  BytecodeLabels* check_return_value_labels_;
};
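
// Illustrative example (not from the original source): in
//
//   class B extends A {
//     constructor() { try { return; } finally { super(); } }
//   }
//
// the early `return` does not emit a return bytecode in place. Instead it
// stores the result and jumps to the shared check_return_value label, so the
// undefined-or-object check (and the substitution of `this` for undefined)
// happens only after the finally-block has run. See
// GenerateDerivedConstructorBody() below for the check itself.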

// Allocate and fetch the coverage indices tracking n-ary logical expressions.
class BytecodeGenerator::NaryCodeCoverageSlots {
 public:
  NaryCodeCoverageSlots(BytecodeGenerator* generator, NaryOperation* expr)
      : generator_(generator) {
    if (generator_->block_coverage_builder_ == nullptr) return;
    for (size_t i = 0; i < expr->subsequent_length(); i++) {
      coverage_slots_.push_back(
          generator_->AllocateNaryBlockCoverageSlotIfEnabled(expr, i));
    }
  }

  int GetSlotFor(size_t subsequent_expr_index) const {
    if (generator_->block_coverage_builder_ == nullptr) {
      return BlockCoverageBuilder::kNoCoverageArraySlot;
    }
    DCHECK(coverage_slots_.size() > subsequent_expr_index);
    return coverage_slots_[subsequent_expr_index];
  }

 private:
  BytecodeGenerator* generator_;
  std::vector<int> coverage_slots_;
};

void BytecodeGenerator::ControlScope::PerformCommand(Command command,
                                                     Statement* statement,
                                                     int source_position) {
  ControlScope* current = this;
  do {
    if (current->Execute(command, statement, source_position)) {
      return;
    }
    current = current->outer();
  } while (current != nullptr);
  UNREACHABLE();
}
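
// Illustrative note (not from the original source): PerformCommand walks the
// control-scope chain from the innermost scope outwards until one Execute()
// implementation handles the command. For example, a `return` inside two
// nested try-finally blocks is recorded by the inner ControlScopeForTryFinally,
// re-applied after its finally-block, recorded again by the outer one, and is
// only materialized as a Return bytecode by ControlScopeForTopLevel.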

void BytecodeGenerator::ControlScope::PopContextToExpectedDepth() {
  // Pop context to the expected depth. Note that this can in fact pop multiple
  // contexts at once because the {PopContext} bytecode takes a saved register.
  if (generator()->execution_context() != context()) {
    generator()->builder()->PopContext(context()->reg());
  }
}

class BytecodeGenerator::RegisterAllocationScope final {
 public:
  explicit RegisterAllocationScope(BytecodeGenerator* generator)
      : generator_(generator),
        outer_next_register_index_(
            generator->register_allocator()->next_register_index()) {}

  ~RegisterAllocationScope() {
    generator_->register_allocator()->ReleaseRegisters(
        outer_next_register_index_);
  }

  RegisterAllocationScope(const RegisterAllocationScope&) = delete;
  RegisterAllocationScope& operator=(const RegisterAllocationScope&) = delete;

  BytecodeGenerator* generator() const { return generator_; }

 private:
  BytecodeGenerator* generator_;
  int outer_next_register_index_;
};

class BytecodeGenerator::AccumulatorPreservingScope final {
 public:
  explicit AccumulatorPreservingScope(BytecodeGenerator* generator,
                                      AccumulatorPreservingMode mode)
      : generator_(generator) {
    if (mode == AccumulatorPreservingMode::kPreserve) {
      saved_accumulator_register_ =
          generator_->register_allocator()->NewRegister();
      generator_->builder()->StoreAccumulatorInRegister(
          saved_accumulator_register_);
    }
  }

  ~AccumulatorPreservingScope() {
    if (saved_accumulator_register_.is_valid()) {
      generator_->builder()->LoadAccumulatorWithRegister(
          saved_accumulator_register_);
    }
  }

  AccumulatorPreservingScope(const AccumulatorPreservingScope&) = delete;
  AccumulatorPreservingScope& operator=(const AccumulatorPreservingScope&) =
      delete;

 private:
  BytecodeGenerator* generator_;
  Register saved_accumulator_register_;
};

// Scoped base class for determining how the result of an expression will be
// used.
class BytecodeGenerator::ExpressionResultScope {
 public:
  ExpressionResultScope(BytecodeGenerator* generator, Expression::Context kind)
      : outer_(generator->execution_result()),
        allocator_(generator),
        kind_(kind),
        type_hint_(TypeHint::kUnknown) {
    generator->set_execution_result(this);
  }

  ~ExpressionResultScope() {
    allocator_.generator()->set_execution_result(outer_);
  }

  ExpressionResultScope(const ExpressionResultScope&) = delete;
  ExpressionResultScope& operator=(const ExpressionResultScope&) = delete;

  bool IsEffect() const { return kind_ == Expression::kEffect; }
  bool IsValue() const { return kind_ == Expression::kValue; }
  bool IsTest() const { return kind_ == Expression::kTest; }

  TestResultScope* AsTest() {
    DCHECK(IsTest());
    return reinterpret_cast<TestResultScope*>(this);
  }

  // Specify expression always returns a Boolean result value.
  void SetResultIsBoolean() {
    DCHECK_EQ(type_hint_, TypeHint::kUnknown);
    type_hint_ = TypeHint::kBoolean;
  }

  void SetResultIsString() {
    DCHECK_EQ(type_hint_, TypeHint::kUnknown);
    type_hint_ = TypeHint::kString;
  }

  void SetResultIsInternalizedString() {
    DCHECK_EQ(type_hint_, TypeHint::kUnknown);
    type_hint_ = TypeHint::kInternalizedString;
  }

  TypeHint type_hint() const { return type_hint_; }

 private:
  ExpressionResultScope* outer_;
  RegisterAllocationScope allocator_;
  Expression::Context kind_;
  TypeHint type_hint_;
};

// Scoped class used when the result of the current expression is not
// expected to produce a result.
class BytecodeGenerator::EffectResultScope final
    : public ExpressionResultScope {
 public:
  explicit EffectResultScope(BytecodeGenerator* generator)
      : ExpressionResultScope(generator, Expression::kEffect) {}
};

// Scoped class used when the result of the current expression to be
// evaluated should go into the interpreter's accumulator.
class BytecodeGenerator::ValueResultScope final
    : public ExpressionResultScope {
 public:
  explicit ValueResultScope(BytecodeGenerator* generator)
      : ExpressionResultScope(generator, Expression::kValue) {}
};

// Scoped class used when the result of the current expression to be
// evaluated is only tested with jumps to two branches.
class BytecodeGenerator::TestResultScope final
    : public ExpressionResultScope {
 public:
  TestResultScope(BytecodeGenerator* generator, BytecodeLabels* then_labels,
                  BytecodeLabels* else_labels, TestFallthrough fallthrough)
      : ExpressionResultScope(generator, Expression::kTest),
        result_consumed_by_test_(false),
        fallthrough_(fallthrough),
        then_labels_(then_labels),
        else_labels_(else_labels) {}

  TestResultScope(const TestResultScope&) = delete;
  TestResultScope& operator=(const TestResultScope&) = delete;

  // Used when code special cases for TestResultScope and consumes any
  // possible value by testing and jumping to a then/else label.
  void SetResultConsumedByTest() { result_consumed_by_test_ = true; }
  bool result_consumed_by_test() { return result_consumed_by_test_; }

  // Inverts the control flow of the operation, swapping the then and else
  // labels and the fallthrough.
  void InvertControlFlow() {
    std::swap(then_labels_, else_labels_);
    fallthrough_ = inverted_fallthrough();
  }

  BytecodeLabel* NewThenLabel() { return then_labels_->New(); }
  BytecodeLabel* NewElseLabel() { return else_labels_->New(); }

  BytecodeLabels* then_labels() const { return then_labels_; }
  BytecodeLabels* else_labels() const { return else_labels_; }

  void set_then_labels(BytecodeLabels* then_labels) {
    then_labels_ = then_labels;
  }
  void set_else_labels(BytecodeLabels* else_labels) {
    else_labels_ = else_labels;
  }

  TestFallthrough fallthrough() const { return fallthrough_; }
  TestFallthrough inverted_fallthrough() {
    switch (fallthrough_) {
      case TestFallthrough::kThen:
        return TestFallthrough::kElse;
      case TestFallthrough::kElse:
        return TestFallthrough::kThen;
      default:
        return TestFallthrough::kNone;
    }
  }
  void set_fallthrough(TestFallthrough fallthrough) {
    fallthrough_ = fallthrough;
  }

 private:
  bool result_consumed_by_test_;
  TestFallthrough fallthrough_;
  BytecodeLabels* then_labels_;
  BytecodeLabels* else_labels_;
};

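// Illustrative usage sketch (not part of the original source): when visiting
// the condition of an `if`, the generator can wrap the visit in a
// TestResultScope so that a comparison jumps directly to the then/else labels
// instead of materializing a boolean in the accumulator:
//
//   BytecodeLabels then_labels(zone()), else_labels(zone());
//   TestResultScope test(this, &then_labels, &else_labels,
//                        TestFallthrough::kThen);
//   // Visit the condition; a comparison may call SetResultConsumedByTest()
//   // and emit JumpIfTrue/JumpIfFalse against the scope's labels itself.
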
// Used to build a list of toplevel declaration data.
class BytecodeGenerator::TopLevelDeclarationsBuilder final : public ZoneObject {
 public:
  template <typename IsolateT>
  Handle<FixedArray> AllocateDeclarations(UnoptimizedCompilationInfo* info,
                                          BytecodeGenerator* generator,
                                          Handle<Script> script,
                                          IsolateT* isolate) {
    DCHECK(has_constant_pool_entry_);

    Handle<FixedArray> data =
        isolate->factory()->NewFixedArray(entry_slots_, AllocationType::kOld);

    int array_index = 0;
    if (info->scope()->is_module_scope()) {
      for (Declaration* decl : *info->scope()->declarations()) {
        Variable* var = decl->var();
        if (!var->is_used()) continue;
        if (var->location() != VariableLocation::MODULE) continue;
#ifdef DEBUG
        int start = array_index;
#endif
        if (decl->IsFunctionDeclaration()) {
          FunctionLiteral* f = static_cast<FunctionDeclaration*>(decl)->fun();
          DirectHandle<SharedFunctionInfo> sfi(
              Compiler::GetSharedFunctionInfo(f, script, isolate));
          // Return a null handle if any initial values can't be created.
          // Caller will set stack overflow.
          if (sfi.is_null()) return Handle<FixedArray>();
          data->set(array_index++, *sfi);
          int literal_index = generator->GetCachedCreateClosureSlot(f);
          data->set(array_index++, Smi::FromInt(literal_index));
          DCHECK(var->IsExport());
          data->set(array_index++, Smi::FromInt(var->index()));
          DCHECK_EQ(start + kModuleFunctionDeclarationSize, array_index);
        } else if (var->IsExport() && var->binding_needs_init()) {
          data->set(array_index++, Smi::FromInt(var->index()));
          DCHECK_EQ(start + kModuleVariableDeclarationSize, array_index);
        }
      }
    } else {
      for (Declaration* decl : *info->scope()->declarations()) {
        Variable* var = decl->var();
        if (!var->is_used()) continue;
        if (var->location() != VariableLocation::UNALLOCATED) continue;
#ifdef DEBUG
        int start = array_index;
#endif
        if (decl->IsVariableDeclaration()) {
          data->set(array_index++, *var->raw_name()->string());
          DCHECK_EQ(start + kGlobalVariableDeclarationSize, array_index);
        } else {
          FunctionLiteral* f = static_cast<FunctionDeclaration*>(decl)->fun();
          DirectHandle<SharedFunctionInfo> sfi(
              Compiler::GetSharedFunctionInfo(f, script, isolate));
          // Return a null handle if any initial values can't be created.
          // Caller will set stack overflow.
          if (sfi.is_null()) return Handle<FixedArray>();
          data->set(array_index++, *sfi);
          int literal_index = generator->GetCachedCreateClosureSlot(f);
          data->set(array_index++, Smi::FromInt(literal_index));
          DCHECK_EQ(start + kGlobalFunctionDeclarationSize, array_index);
        }
      }
    }
    DCHECK_EQ(array_index, data->length());
    return data;
  }

  size_t constant_pool_entry() {
    DCHECK(has_constant_pool_entry_);
    return constant_pool_entry_;
  }

  void set_constant_pool_entry(size_t constant_pool_entry) {
    DCHECK(has_top_level_declaration());
    DCHECK(!has_constant_pool_entry_);
    constant_pool_entry_ = constant_pool_entry;
    has_constant_pool_entry_ = true;
  }

  void record_global_variable_declaration() {
    entry_slots_ += kGlobalVariableDeclarationSize;
  }
  void record_global_function_declaration() {
    entry_slots_ += kGlobalFunctionDeclarationSize;
  }
  void record_module_variable_declaration() {
    entry_slots_ += kModuleVariableDeclarationSize;
  }
  void record_module_function_declaration() {
    entry_slots_ += kModuleFunctionDeclarationSize;
  }
  bool has_top_level_declaration() { return entry_slots_ > 0; }
  bool processed() { return processed_; }
  void mark_processed() { processed_ = true; }

 private:
  const int kGlobalVariableDeclarationSize = 1;
  const int kGlobalFunctionDeclarationSize = 2;
  const int kModuleVariableDeclarationSize = 1;
  const int kModuleFunctionDeclarationSize = 3;

  size_t constant_pool_entry_ = 0;
  int entry_slots_ = 0;
  bool has_constant_pool_entry_ = false;
  bool processed_ = false;
};
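
// Illustrative note (not from the original source): the declaration data is a
// flat FixedArray whose per-entry layout follows the k*DeclarationSize
// constants above. A global variable contributes one slot (its name); a
// global function two slots (SharedFunctionInfo, closure literal index); a
// module variable one slot (its cell index); and a module function three
// slots (SharedFunctionInfo, closure literal index, cell index).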

class BytecodeGenerator::CurrentScope final {
 public:
  CurrentScope(BytecodeGenerator* generator, Scope* scope)
      : generator_(generator), outer_scope_(generator->current_scope()) {
    if (scope != nullptr) {
      DCHECK_EQ(outer_scope_, scope->outer_scope());
      generator_->set_current_scope(scope);
    }
  }
  ~CurrentScope() {
    if (outer_scope_ != generator_->current_scope()) {
      generator_->set_current_scope(outer_scope_);
    }
  }
  CurrentScope(const CurrentScope&) = delete;
  CurrentScope& operator=(const CurrentScope&) = delete;

 private:
  BytecodeGenerator* generator_;
  Scope* outer_scope_;
};

class BytecodeGenerator::MultipleEntryBlockContextScope {
 public:
  MultipleEntryBlockContextScope(BytecodeGenerator* generator, Scope* scope)
      : generator_(generator), scope_(scope), is_in_scope_(false) {
    if (scope) {
      inner_context_ = generator->register_allocator()->NewRegister();
      outer_context_ = generator->register_allocator()->NewRegister();
      generator->BuildNewLocalBlockContext(scope_);
      generator->builder()->StoreAccumulatorInRegister(inner_context_);
    }
  }

  void SetEnteredIf(bool condition) {
    RegisterAllocationScope register_scope(generator_);
    if (condition && scope_ != nullptr && !is_in_scope_) {
      EnterScope();
    } else if (!condition && is_in_scope_) {
      ExitScope();
    }
  }

  ~MultipleEntryBlockContextScope() { DCHECK(!is_in_scope_); }

  MultipleEntryBlockContextScope(const MultipleEntryBlockContextScope&) =
      delete;
  MultipleEntryBlockContextScope& operator=(
      const MultipleEntryBlockContextScope&) = delete;

 private:
  void EnterScope() {
    DCHECK(inner_context_.is_valid());
    DCHECK(outer_context_.is_valid());
    DCHECK(!is_in_scope_);
    generator_->builder()->LoadAccumulatorWithRegister(inner_context_);
    current_scope_.emplace(generator_, scope_);
    context_scope_.emplace(generator_, scope_, outer_context_);
    is_in_scope_ = true;
  }

  void ExitScope() {
    DCHECK(inner_context_.is_valid());
    DCHECK(outer_context_.is_valid());
    DCHECK(is_in_scope_);
    context_scope_ = std::nullopt;
    current_scope_ = std::nullopt;
    is_in_scope_ = false;
  }

  BytecodeGenerator* generator_;
  Scope* scope_;
  bool is_in_scope_;
  Register inner_context_;
  Register outer_context_;
  std::optional<CurrentScope> current_scope_;
  std::optional<ContextScope> context_scope_;
};

class BytecodeGenerator::FeedbackSlotCache : public ZoneObject {
 public:
  enum class SlotKind {
    kStoreGlobalSloppy,
    kStoreGlobalStrict,
    kSetNamedStrict,
    kSetNamedSloppy,
    kLoadProperty,
    kLoadSuperProperty,
    kLoadGlobalNotInsideTypeof,
    kLoadGlobalInsideTypeof,
    kClosureFeedbackCell
  };

  explicit FeedbackSlotCache(Zone* zone) : map_(zone) {}

  void Put(SlotKind slot_kind, Variable* variable, int slot_index) {
    PutImpl(slot_kind, 0, variable, slot_index);
  }
  void Put(SlotKind slot_kind, AstNode* node, int slot_index) {
    PutImpl(slot_kind, 0, node, slot_index);
  }
  void Put(SlotKind slot_kind, int variable_index, const AstRawString* name,
           int slot_index) {
    PutImpl(slot_kind, variable_index, name, slot_index);
  }
  void Put(SlotKind slot_kind, const AstRawString* name, int slot_index) {
    PutImpl(slot_kind, 0, name, slot_index);
  }

  int Get(SlotKind slot_kind, Variable* variable) const {
    return GetImpl(slot_kind, 0, variable);
  }
  int Get(SlotKind slot_kind, AstNode* node) const {
    return GetImpl(slot_kind, 0, node);
  }
  int Get(SlotKind slot_kind, int variable_index,
          const AstRawString* name) const {
    return GetImpl(slot_kind, variable_index, name);
  }
  int Get(SlotKind slot_kind, const AstRawString* name) const {
    return GetImpl(slot_kind, 0, name);
  }

 private:
  using Key = std::tuple<SlotKind, int, const void*>;

  void PutImpl(SlotKind slot_kind, int index, const void* node,
               int slot_index) {
    Key key = std::make_tuple(slot_kind, index, node);
    auto entry = std::make_pair(key, slot_index);
    map_.insert(entry);
  }

  int GetImpl(SlotKind slot_kind, int index, const void* node) const {
    Key key = std::make_tuple(slot_kind, index, node);
    auto iter = map_.find(key);
    if (iter != map_.end()) {
      return iter->second;
    }
    return -1;
  }

  ZoneMap<Key, int> map_;
};
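
// Illustrative usage sketch (not part of the original source): the cache lets
// repeated accesses to the same target share one feedback slot. Assuming a
// feedback_slot_cache() accessor on the generator, a caller consults the
// cache first and only allocates on a miss:
//
//   int slot_index = feedback_slot_cache()->Get(kind, variable);
//   if (slot_index == -1) {  // Get() returns -1 on a cache miss.
//     slot_index = /* allocate a new slot in the feedback vector spec */;
//     feedback_slot_cache()->Put(kind, variable, slot_index);
//   }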

// Scoped class to help elide hole checks within a conditionally executed basic
// block. Each conditionally executed basic block must have a scope to emit
// hole checks correctly.
//
// The duration of the scope must correspond to a basic block. Numbered
// Variables (see Variable::HoleCheckBitmap) are remembered in the bitmap when
// the first hole check is emitted. Subsequent hole checks are elided.
//
// On scope exit, the hole check state at construction time is restored.
class BytecodeGenerator::HoleCheckElisionScope {
 public:
  explicit HoleCheckElisionScope(BytecodeGenerator* bytecode_generator)
      : HoleCheckElisionScope(&bytecode_generator->hole_check_bitmap_) {}

  ~HoleCheckElisionScope() { *bitmap_ = prev_bitmap_value_; }

 protected:
  explicit HoleCheckElisionScope(Variable::HoleCheckBitmap* bitmap)
      : bitmap_(bitmap), prev_bitmap_value_(*bitmap) {}

  Variable::HoleCheckBitmap* bitmap_;
  Variable::HoleCheckBitmap prev_bitmap_value_;
};

// Scoped class to help elide hole checks within control flow that branches
// and merges.
//
// Each such control flow construct (e.g., if-else, ternary expressions) must
// have a scope to emit hole checks correctly. Additionally, each branch must
// have a Branch.
//
// The Merge or MergeIf method must be called so that variables that have been
// hole-checked along every branch are marked as no longer needing a hole
// check.
//
// Example:
//
//   HoleCheckElisionMergeScope merge_elider(this);
//   {
//      HoleCheckElisionMergeScope::Branch branch_elider(merge_elider);
//      Visit(then_branch);
//   }
//   {
//      HoleCheckElisionMergeScope::Branch branch_elider(merge_elider);
//      Visit(else_branch);
//   }
//   merge_elider.Merge();
//
// Conversely, it is incorrect to use this class for control flow constructs
// that do not merge (e.g., if without else). HoleCheckElisionScope should be
// used for those cases.
class BytecodeGenerator::HoleCheckElisionMergeScope final {
 public:
  explicit HoleCheckElisionMergeScope(BytecodeGenerator* bytecode_generator)
      : bitmap_(&bytecode_generator->hole_check_bitmap_) {}

  ~HoleCheckElisionMergeScope() {
    // Did you forget to call Merge or MergeIf?
    DCHECK(merge_called_);
  }

  void Merge() {
    DCHECK_NE(UINT64_MAX, merge_value_);
    *bitmap_ = merge_value_;
#ifdef DEBUG
    merge_called_ = true;
#endif
  }

  void MergeIf(bool cond) {
    if (cond) Merge();
#ifdef DEBUG
    merge_called_ = true;
#endif
  }

  class Branch final : public HoleCheckElisionScope {
   public:
    explicit Branch(HoleCheckElisionMergeScope& merge_into)
        : HoleCheckElisionScope(merge_into.bitmap_),
          merge_into_bitmap_(&merge_into.merge_value_) {}

    ~Branch() { *merge_into_bitmap_ &= *bitmap_; }

   private:
    Variable::HoleCheckBitmap* merge_into_bitmap_;
  };

 private:
  Variable::HoleCheckBitmap* bitmap_;
  Variable::HoleCheckBitmap merge_value_ = UINT64_MAX;

#ifdef DEBUG
  bool merge_called_ = false;
#endif
};

class BytecodeGenerator::IteratorRecord final {
 public:
  IteratorRecord(Register object_register, Register next_register,
                 IteratorType type = IteratorType::kNormal)
      : type_(type), object_(object_register), next_(next_register) {
    DCHECK(object_.is_valid() && next_.is_valid());
  }

  inline IteratorType type() const { return type_; }
  inline Register object() const { return object_; }
  inline Register next() const { return next_; }

 private:
  IteratorType type_;
  Register object_;
  Register next_;
};

class BytecodeGenerator::OptionalChainNullLabelScope final {
 public:
  explicit OptionalChainNullLabelScope(BytecodeGenerator* bytecode_generator)
      : bytecode_generator_(bytecode_generator),
        labels_(bytecode_generator->zone()) {
    prev_ = bytecode_generator_->optional_chaining_null_labels_;
    bytecode_generator_->optional_chaining_null_labels_ = &labels_;
  }

  ~OptionalChainNullLabelScope() {
    bytecode_generator_->optional_chaining_null_labels_ = prev_;
  }

  BytecodeLabels* labels() { return &labels_; }

 private:
  BytecodeGenerator* bytecode_generator_;
  BytecodeLabels labels_;
  BytecodeLabels* prev_;
};

// LoopScope delimits the scope of {loop}, from its header to its final jump.
// It should be constructed iff a (conceptual) back edge should be produced. In
// the case of creating a LoopBuilder but never emitting the loop, it is valid
// to skip the creation of LoopScope.
class BytecodeGenerator::LoopScope final {
 public:
  explicit LoopScope(BytecodeGenerator* bytecode_generator, LoopBuilder* loop)
      : bytecode_generator_(bytecode_generator),
        parent_loop_scope_(bytecode_generator_->current_loop_scope()),
        loop_builder_(loop) {
    loop_builder_->LoopHeader();
    bytecode_generator_->set_current_loop_scope(this);
    bytecode_generator_->loop_depth_++;
  }

  ~LoopScope() {
    bytecode_generator_->loop_depth_--;
    bytecode_generator_->set_current_loop_scope(parent_loop_scope_);
    DCHECK_GE(bytecode_generator_->loop_depth_, 0);
    loop_builder_->JumpToHeader(
        bytecode_generator_->loop_depth_,
        parent_loop_scope_ ? parent_loop_scope_->loop_builder_ : nullptr);
  }

 private:
  BytecodeGenerator* bytecode_generator_;
  LoopScope* parent_loop_scope_;
  LoopBuilder* loop_builder_;
};
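
// Illustrative usage sketch (not part of the original source): a while-loop
// visitor would pair a LoopBuilder with a LoopScope so the back edge and
// loop-depth bookkeeping are emitted automatically:
//
//   LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt, ...);
//   {
//     LoopScope loop_scope(this, &loop_builder);  // emits the loop header
//     // ... visit condition and body, using loop_builder for break/continue
//   }  // ~LoopScope emits JumpToHeader (the back edge)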

class BytecodeGenerator::ForInScope final {
 public:
  explicit ForInScope(BytecodeGenerator* bytecode_generator,
                      ForInStatement* stmt, Register enum_index,
                      Register cache_type)
      : bytecode_generator_(bytecode_generator),
        parent_for_in_scope_(bytecode_generator_->current_for_in_scope()),
        each_var_(nullptr),
        enum_index_(enum_index),
        cache_type_(cache_type) {
    if (v8_flags.enable_enumerated_keyed_access_bytecode) {
      Expression* each = stmt->each();
      if (each->IsVariableProxy()) {
        Variable* each_var = each->AsVariableProxy()->var();
        if (each_var->IsStackLocal()) {
          each_var_ = each_var;
          bytecode_generator_->SetVariableInRegister(
              each_var_,
              bytecode_generator_->builder()->Local(each_var_->index()));
        }
      }
      bytecode_generator_->set_current_for_in_scope(this);
    }
  }

  ~ForInScope() {
    if (v8_flags.enable_enumerated_keyed_access_bytecode) {
      bytecode_generator_->set_current_for_in_scope(parent_for_in_scope_);
    }
  }

  // Get corresponding {ForInScope} for a given {each} variable.
  ForInScope* GetScopeForEachVariable(Variable* each) {
    DCHECK(v8_flags.enable_enumerated_keyed_access_bytecode);
    ForInScope* scope = this;
    do {
      if (each == scope->each_var_) break;
      scope = scope->parent_for_in_scope_;
    } while (scope != nullptr);
    return scope;
  }

  Register enum_index() { return enum_index_; }
  Register cache_type() { return cache_type_; }

 private:
  BytecodeGenerator* bytecode_generator_;
  ForInScope* parent_for_in_scope_;
  Variable* each_var_;
  Register enum_index_;
  Register cache_type_;
};

class BytecodeGenerator::DisposablesStackScope final {
 public:
  explicit DisposablesStackScope(BytecodeGenerator* bytecode_generator)
      : bytecode_generator_(bytecode_generator),
        prev_disposables_stack_(
            bytecode_generator_->current_disposables_stack_) {
    bytecode_generator_->set_current_disposables_stack(
        bytecode_generator->register_allocator()->NewRegister());
    bytecode_generator->builder()->CallRuntime(
        Runtime::kInitializeDisposableStack);
    bytecode_generator->builder()->StoreAccumulatorInRegister(
        bytecode_generator_->current_disposables_stack());
  }

  ~DisposablesStackScope() {
    bytecode_generator_->set_current_disposables_stack(
        prev_disposables_stack_);
  }

 private:
  BytecodeGenerator* bytecode_generator_;
  Register prev_disposables_stack_;
};
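
// Illustrative example (not from the original source): this scope backs
// explicit resource management (guarded by
// v8_flags.js_explicit_resource_management). For
//
//   { using handle = open(); }
//
// the generator allocates a fresh disposables stack for the block via this
// scope; resources registered on it are disposed when the block exits (see
// BuildDisposeScope, used by GenerateBodyStatements and VisitBlock below).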

namespace {

template <typename PropertyT>
struct Accessors : public ZoneObject {
  Accessors() : getter(nullptr), setter(nullptr) {}
  PropertyT* getter;
  PropertyT* setter;
};

// A map from property names to getter/setter pairs allocated in the zone. It
// also provides access to the pairs in the order they were first added, so
// that the generated bytecode is always the same.
template <typename PropertyT>
class AccessorTable
    : public base::TemplateHashMap<Literal, Accessors<PropertyT>,
                                   bool (*)(void*, void*),
                                   ZoneAllocationPolicy> {
 public:
  explicit AccessorTable(Zone* zone)
      : base::TemplateHashMap<Literal, Accessors<PropertyT>,
                              bool (*)(void*, void*), ZoneAllocationPolicy>(
            Literal::Match, ZoneAllocationPolicy(zone)),
        zone_(zone) {}

  Accessors<PropertyT>* LookupOrInsert(Literal* key) {
    auto it = this->find(key, true);
    if (it->second == nullptr) {
      it->second = zone_->New<Accessors<PropertyT>>();
      ordered_accessors_.push_back({key, it->second});
    }
    return it->second;
  }

  const std::vector<std::pair<Literal*, Accessors<PropertyT>*>>&
  ordered_accessors() {
    return ordered_accessors_;
  }

 private:
  std::vector<std::pair<Literal*, Accessors<PropertyT>*>> ordered_accessors_;

  Zone* zone_;
};

}  // namespace

#ifdef DEBUG

static bool IsInEagerLiterals(
    FunctionLiteral* literal,
    const std::vector<FunctionLiteral*>& eager_literals) {
  for (FunctionLiteral* eager_literal : eager_literals) {
    if (literal == eager_literal) return true;
  }
  return false;
}

#endif  // DEBUG

BytecodeGenerator::BytecodeGenerator(
    LocalIsolate* local_isolate, Zone* compile_zone,
    UnoptimizedCompilationInfo* info,
    const AstStringConstants* ast_string_constants,
    std::vector<FunctionLiteral*>* eager_inner_literals, Handle<Script> script)
    : local_isolate_(local_isolate),
      zone_(compile_zone),
      builder_(zone(), info->num_parameters_including_this(),
               info->scope()->num_stack_slots(), info->feedback_vector_spec(),
               info->SourcePositionRecordingMode()),
      info_(info),
      ast_string_constants_(ast_string_constants),
      closure_scope_(info->scope()),
      current_scope_(info->scope()),
      eager_inner_literals_(eager_inner_literals),
      script_(script),
      feedback_slot_cache_(zone()->New<FeedbackSlotCache>(zone())),
      top_level_builder_(zone()->New<TopLevelDeclarationsBuilder>()),
      block_coverage_builder_(nullptr),
      function_literals_(0, zone()),
      native_function_literals_(0, zone()),
      object_literals_(0, zone()),
      array_literals_(0, zone()),
      class_literals_(0, zone()),
      template_objects_(0, zone()),
      vars_in_hole_check_bitmap_(0, zone()),
      eval_calls_(0, zone()),
      execution_control_(nullptr),
      execution_context_(nullptr),
      execution_result_(nullptr),
      incoming_new_target_or_generator_(),
      current_disposables_stack_(),
      optional_chaining_null_labels_(nullptr),
      dummy_feedback_slot_(feedback_spec(), FeedbackSlotKind::kCompareOp),
      generator_jump_table_(nullptr),
      suspend_count_(0),
      loop_depth_(0),
      hole_check_bitmap_(0),
      current_loop_scope_(nullptr),
      current_for_in_scope_(nullptr),
      catch_prediction_(HandlerTable::UNCAUGHT) {
  DCHECK_EQ(closure_scope(), closure_scope()->GetClosureScope());
  if (info->has_source_range_map()) {
    block_coverage_builder_ = zone()->New<BlockCoverageBuilder>(
        zone(), builder(), info->source_range_map());
  }
}

namespace {

template <typename Isolate>
struct NullContextScopeHelper;

template <>
struct NullContextScopeHelper<Isolate> {
  using Type = NullContextScope;
};

template <>
struct NullContextScopeHelper<LocalIsolate> {
  class V8_NODISCARD DummyNullContextScope {
   public:
    explicit DummyNullContextScope(LocalIsolate*) {}
  };
  using Type = DummyNullContextScope;
};

template <typename Isolate>
using NullContextScopeFor = typename NullContextScopeHelper<Isolate>::Type;

}  // namespace

template <typename IsolateT>
Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
    IsolateT* isolate, Handle<Script> script) {
  DCHECK_EQ(ThreadId::Current(), isolate->thread_id());
#ifdef DEBUG
  // Unoptimized compilation should be context-independent. Verify that we
  // don't access the native context by nulling it out during finalization.
  NullContextScopeFor<IsolateT> null_context_scope(isolate);
#endif

  AllocateDeferredConstants(isolate, script);

  if (block_coverage_builder_) {
    Handle<CoverageInfo> coverage_info =
        isolate->factory()->NewCoverageInfo(block_coverage_builder_->slots());
    info()->set_coverage_info(coverage_info);
    if (v8_flags.trace_block_coverage) {
      StdoutStream os;
      coverage_info->CoverageInfoPrint(os, info()->literal()->GetDebugName());
    }
  }

  if (HasStackOverflow()) return Handle<BytecodeArray>();
  Handle<BytecodeArray> bytecode_array = builder()->ToBytecodeArray(isolate);

  if (incoming_new_target_or_generator_.is_valid()) {
    bytecode_array->set_incoming_new_target_or_generator_register(
        incoming_new_target_or_generator_);
  }

  return bytecode_array;
}

template Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
    Isolate* isolate, Handle<Script> script);
template Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
    LocalIsolate* isolate, Handle<Script> script);

template <typename IsolateT>
DirectHandle<TrustedByteArray> BytecodeGenerator::FinalizeSourcePositionTable(
    IsolateT* isolate) {
  DCHECK_EQ(ThreadId::Current(), isolate->thread_id());
#ifdef DEBUG
  // Unoptimized compilation should be context-independent. Verify that we
  // don't access the native context by nulling it out during finalization.
  NullContextScopeFor<IsolateT> null_context_scope(isolate);
#endif

  DirectHandle<TrustedByteArray> source_position_table =
      builder()->ToSourcePositionTable(isolate);

  LOG_CODE_EVENT(isolate,
                 CodeLinePosInfoRecordEvent(
                     info_->bytecode_array()->GetFirstBytecodeAddress(),
                     *source_position_table, JitCodeEvent::BYTE_CODE));

  return source_position_table;
}

template DirectHandle<TrustedByteArray>
BytecodeGenerator::FinalizeSourcePositionTable(Isolate* isolate);
template DirectHandle<TrustedByteArray>
BytecodeGenerator::FinalizeSourcePositionTable(LocalIsolate* isolate);

#ifdef DEBUG
int BytecodeGenerator::CheckBytecodeMatches(Tagged<BytecodeArray> bytecode) {
  return builder()->CheckBytecodeMatches(bytecode);
}
#endif

template <typename IsolateT>
void BytecodeGenerator::AllocateDeferredConstants(IsolateT* isolate,
                                                  Handle<Script> script) {
  if (top_level_builder()->has_top_level_declaration()) {
    // Build global declaration pair array.
    Handle<FixedArray> declarations =
        top_level_builder()->AllocateDeclarations(info(), this, script,
                                                  isolate);
    if (declarations.is_null()) return SetStackOverflow();
    builder()->SetDeferredConstantPoolEntry(
        top_level_builder()->constant_pool_entry(), declarations);
  }

  // Find or build shared function infos.
  for (std::pair<FunctionLiteral*, size_t> literal : function_literals_) {
    FunctionLiteral* expr = literal.first;
    DirectHandle<SharedFunctionInfo> shared_info =
        Compiler::GetSharedFunctionInfo(expr, script, isolate);
    if (shared_info.is_null()) return SetStackOverflow();
    builder()->SetDeferredConstantPoolEntry(
        literal.second, indirect_handle(shared_info, isolate));
  }

  // Find or build shared function infos for the native function templates.
  for (std::pair<NativeFunctionLiteral*, size_t> literal :
       native_function_literals_) {
    // This should only happen for main-thread compilations.
    DCHECK((std::is_same<Isolate, v8::internal::Isolate>::value));

    NativeFunctionLiteral* expr = literal.first;
    v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);

    // Compute the function template for the native function.
    v8::Local<v8::FunctionTemplate> info =
        expr->extension()->GetNativeFunctionTemplate(
            v8_isolate, Utils::ToLocal(expr->name()));
    DCHECK(!info.IsEmpty());

    Handle<SharedFunctionInfo> shared_info =
        FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(
            isolate, Utils::OpenDirectHandle(*info), expr->name());
    DCHECK(!shared_info.is_null());
    builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
  }

  for (std::pair<Call*, Scope*> call : eval_calls_) {
    script->infos()->set(call.first->eval_scope_info_index(),
                         MakeWeak(*call.second->scope_info()));
  }

  // Build object literal constant properties
  for (std::pair<ObjectLiteralBoilerplateBuilder*, size_t> literal :
       object_literals_) {
    ObjectLiteralBoilerplateBuilder* object_literal_builder = literal.first;
    if (object_literal_builder->properties_count() > 0) {
      // If constant properties is an empty fixed array, we've already added it
      // to the constant pool when visiting the object literal.
      Handle<ObjectBoilerplateDescription> constant_properties =
          object_literal_builder->GetOrBuildBoilerplateDescription(isolate);

      builder()->SetDeferredConstantPoolEntry(literal.second,
                                              constant_properties);
    }
  }

  // Build array literal constant elements
  for (std::pair<ArrayLiteralBoilerplateBuilder*, size_t> literal :
       array_literals_) {
    ArrayLiteralBoilerplateBuilder* array_literal_builder = literal.first;
    Handle<ArrayBoilerplateDescription> constant_elements =
        array_literal_builder->GetOrBuildBoilerplateDescription(isolate);
    builder()->SetDeferredConstantPoolEntry(literal.second, constant_elements);
  }

  // Build class literal boilerplates.
  for (std::pair<ClassLiteral*, size_t> literal : class_literals_) {
    ClassLiteral* class_literal = literal.first;
    Handle<ClassBoilerplate> class_boilerplate =
        ClassBoilerplate::New(isolate, class_literal, AllocationType::kOld);
    builder()->SetDeferredConstantPoolEntry(literal.second, class_boilerplate);
  }

  // Build template literals.
  for (std::pair<GetTemplateObject*, size_t> literal : template_objects_) {
    GetTemplateObject* get_template_object = literal.first;
    Handle<TemplateObjectDescription> description =
        get_template_object->GetOrBuildDescription(isolate);
    builder()->SetDeferredConstantPoolEntry(literal.second, description);
  }
}

template void BytecodeGenerator::AllocateDeferredConstants(
    Isolate* isolate, Handle<Script> script);
template void BytecodeGenerator::AllocateDeferredConstants(
    LocalIsolate* isolate, Handle<Script> script);

namespace {
bool NeedsContextInitialization(DeclarationScope* scope) {
  return scope->NeedsContext() && !scope->is_script_scope() &&
         !scope->is_module_scope();
}
}  // namespace

void BytecodeGenerator::GenerateBytecode(uintptr_t stack_limit) {
  InitializeAstVisitor(stack_limit);
  if (v8_flags.stress_lazy_compilation && local_isolate_->is_main_thread() &&
      !info()->flags().is_eager()) {
    // Trigger stack overflow with 1/stress_lazy_compilation probability.
    // Do this only for the main thread compilations because querying random
    // numbers from background threads will make the random values dependent
    // on the thread scheduling and thus non-deterministic.
    stack_overflow_ = local_isolate_->fuzzer_rng()->NextInt(
                          v8_flags.stress_lazy_compilation) == 0;
  }

  // Initialize the incoming context.
  ContextScope incoming_context(this, closure_scope());

  // Initialize control scope.
  ControlScopeForTopLevel control(this);

  RegisterAllocationScope register_scope(this);

  AllocateTopLevelRegisters();

  builder()->EmitFunctionStartSourcePosition(
      info()->literal()->start_position());

  if (info()->literal()->CanSuspend()) {
    BuildGeneratorPrologue();
  }

  if (NeedsContextInitialization(closure_scope())) {
    // Push a new inner context scope for the function.
    BuildNewLocalActivationContext();
    ContextScope local_function_context(this, closure_scope());
    BuildLocalActivationContextInitialization();
    GenerateBytecodeBody();
  } else {
    GenerateBytecodeBody();
  }

  // Reset variables with hole check bitmap indices for subsequent compilations
  // in the same parsing zone.
  for (Variable* var : vars_in_hole_check_bitmap_) {
    var->ResetHoleCheckBitmapIndex();
  }

  // Check that we are not falling off the end.
  DCHECK(builder()->RemainderOfBlockIsDead());
}

void BytecodeGenerator::GenerateBytecodeBody() {
  GenerateBodyPrologue();

  if (IsBaseConstructor(function_kind())) {
    GenerateBaseConstructorBody();
  } else if (function_kind() == FunctionKind::kDerivedConstructor) {
    GenerateDerivedConstructorBody();
  } else if (IsAsyncFunction(function_kind()) ||
             IsModuleWithTopLevelAwait(function_kind())) {
    if (IsAsyncGeneratorFunction(function_kind())) {
      GenerateAsyncGeneratorFunctionBody();
    } else {
      GenerateAsyncFunctionBody();
    }
  } else {
    GenerateBodyStatements();
  }
}

void BytecodeGenerator::GenerateBodyPrologue() {
  // Build the arguments object if it is used.
  VisitArgumentsObject(closure_scope()->arguments());

  // Build rest arguments array if it is used.
  Variable* rest_parameter = closure_scope()->rest_parameter();
  VisitRestArgumentsArray(rest_parameter);

  // Build assignment to the function name or {.this_function}
  // variables if used.
  VisitThisFunctionVariable(closure_scope()->function_var());
  VisitThisFunctionVariable(closure_scope()->this_function_var());

  // Build assignment to {new.target} variable if it is used.
  VisitNewTargetVariable(closure_scope()->new_target_var());

  // Create a generator object if necessary and initialize the
  // {.generator_object} variable.
  FunctionLiteral* literal = info()->literal();
  if (IsResumableFunction(literal->kind())) {
    BuildGeneratorObjectVariableInitialization();
  }

  // Emit tracing call if requested to do so.
  if (v8_flags.trace) builder()->CallRuntime(Runtime::kTraceEnter);

  // Increment the function-scope block coverage counter.
  BuildIncrementBlockCoverageCounterIfEnabled(literal, SourceRangeKind::kBody);

  // Visit declarations within the function scope.
  if (closure_scope()->is_script_scope()) {
    VisitGlobalDeclarations(closure_scope()->declarations());
  } else if (closure_scope()->is_module_scope()) {
    VisitModuleDeclarations(closure_scope()->declarations());
  } else {
    VisitDeclarations(closure_scope()->declarations());
  }

  // Emit initializing assignments for module namespace imports (if any).
  VisitModuleNamespaceImports();
}

void BytecodeGenerator::GenerateBaseConstructorBody() {
  DCHECK(IsBaseConstructor(function_kind()));

  FunctionLiteral* literal = info()->literal();

  // The derived constructor case is handled in VisitCallSuper.
  if (literal->class_scope_has_private_brand()) {
    ClassScope* scope = info()->scope()->outer_scope()->AsClassScope();
    DCHECK_NOT_NULL(scope->brand());
    BuildPrivateBrandInitialization(builder()->Receiver(), scope->brand());
  }

  if (literal->requires_instance_members_initializer()) {
    BuildInstanceMemberInitialization(Register::function_closure(),
                                      builder()->Receiver());
  }

  GenerateBodyStatements();
}

void BytecodeGenerator::GenerateDerivedConstructorBody() {
  DCHECK_EQ(FunctionKind::kDerivedConstructor, function_kind());

  FunctionLiteral* literal = info()->literal();

  // Per spec, derived constructors can only return undefined or an object;
  // other primitives trigger an exception in ConstructStub.
  //
  // Since the receiver is popped by the callee, derived constructors return
  // <this> if the original return value was undefined.
  //
  // Also per spec, this return value check is done after all user code (e.g.,
  // finally blocks) has executed. For example, the following code does not
  // throw.
  //
  //   class C extends class {} {
  //     constructor() {
  //       try { throw 42; }
  //       catch(e) { return; }
  //       finally { super(); }
  //     }
  //   }
  //   new C();
  //
  // This check is implemented by jumping to the check instead of emitting a
  // return bytecode in-place inside derived constructors.
  //
  // Note that default derived constructors do not need this check as they
  // just forward a super call.

  BytecodeLabels check_return_value(zone());
  Register result = register_allocator()->NewRegister();
  ControlScopeForDerivedConstructor control(this, result, &check_return_value);

  {
    HoleCheckElisionScope elider(this);
    GenerateBodyStatements();
  }

  if (check_return_value.empty()) {
    if (!builder()->RemainderOfBlockIsDead()) {
      BuildThisVariableLoad();
      BuildReturn(literal->return_position());
    }
  } else {
    BytecodeLabels return_this(zone());

    if (!builder()->RemainderOfBlockIsDead()) {
      builder()->Jump(return_this.New());
    }

    check_return_value.Bind(builder());
    builder()->LoadAccumulatorWithRegister(result);
    builder()->JumpIfUndefined(return_this.New());
    BuildReturn(literal->return_position());

    {
      return_this.Bind(builder());
      BuildThisVariableLoad();
      BuildReturn(literal->return_position());
    }
  }
}

void BytecodeGenerator::GenerateAsyncFunctionBody() {
  DCHECK((IsAsyncFunction(function_kind()) &&
          !IsAsyncGeneratorFunction(function_kind())) ||
         IsModuleWithTopLevelAwait(function_kind()));

  // Async functions always return promises. Return values fulfill that
  // promise, while synchronously thrown exceptions reject that promise. This
  // is handled by surrounding the body statements in a try-catch block as
  // follows:
  //
  // try {
  //   <inner_block>
  // } catch (.catch) {
  //   return %_AsyncFunctionReject(.generator_object, .catch);
  // }

  FunctionLiteral* literal = info()->literal();

  HandlerTable::CatchPrediction outer_catch_prediction = catch_prediction();
  // When compiling a REPL script, use UNCAUGHT_ASYNC_AWAIT to preserve the
  // pending message so DevTools can inspect it.
  set_catch_prediction(literal->scope()->is_repl_mode_scope()
                           ? HandlerTable::UNCAUGHT_ASYNC_AWAIT
                           : HandlerTable::ASYNC_AWAIT);

  BuildTryCatch(
      [&]() {
        GenerateBodyStatements();
        set_catch_prediction(outer_catch_prediction);
      },
      [&](Register context) {
        RegisterList args = register_allocator()->NewRegisterList(2);
        builder()
            ->MoveRegister(generator_object(), args[0])
            .StoreAccumulatorInRegister(args[1]);  // exception
        if (!literal->scope()->is_repl_mode_scope()) {
          builder()->LoadTheHole().SetPendingMessage();
        }
        builder()->CallRuntime(Runtime::kInlineAsyncFunctionReject, args);
        // TODO(358404372): Should this return have a statement position?
        // Without one it is not possible to apply a debugger breakpoint.
        BuildReturn(kNoSourcePosition);
      },
      catch_prediction());
}

void BytecodeGenerator::GenerateAsyncGeneratorFunctionBody() {
  DCHECK(IsAsyncGeneratorFunction(function_kind()));
  set_catch_prediction(HandlerTable::ASYNC_AWAIT);

  // For ES2017 Async Generators, we produce:
  //
  // try {
  //   InitialYield;
  //   ...body...;
  // } catch (.catch) {
  //   %AsyncGeneratorReject(generator, .catch);
  // } finally {
  //   %_GeneratorClose(generator);
  // }
  //
  // - InitialYield yields the actual generator object.
  // - Any return statement inside the body will have its argument wrapped
  //   in an iterator result object with a "done" property set to `true`.
  // - If the generator terminates for whatever reason, we must close it.
  //   Hence the finally clause.
  // - BytecodeGenerator performs special handling for ReturnStatements in
  //   async generator functions, resolving the appropriate Promise with a
  //   "done" iterator result object containing a Promise-unwrapped value.

  // In async generator functions, when parameters are not simple,
  // a parameter initialization block will be added as the first block to the
  // AST. Since this block can throw synchronously, it should not be wrapped
  // in the following try-finally. We visit this block outside the try-finally
  // and remove it from the AST.
  int start = 0;
  ZonePtrList<Statement>* statements = info()->literal()->body();
  Statement* stmt = statements->at(0);
  if (stmt->IsBlock()) {
    Block* block = static_cast<Block*>(statements->at(0));
    if (block->is_initialization_block_for_parameters()) {
      VisitBlockDeclarationsAndStatements(block);
      start = 1;
    }
  }

  BuildTryFinally(
      [&]() {
        BuildTryCatch(
            [&]() { GenerateBodyStatements(start); },
            [&](Register context) {
              RegisterAllocationScope register_scope(this);
              RegisterList args = register_allocator()->NewRegisterList(2);
              builder()
                  ->MoveRegister(generator_object(), args[0])
                  .StoreAccumulatorInRegister(args[1])  // exception
                  .LoadTheHole()
                  .SetPendingMessage()
                  .CallRuntime(Runtime::kInlineAsyncGeneratorReject, args);
              execution_control()->ReturnAccumulator(kNoSourcePosition);
            },
            catch_prediction());
      },
      [&](Register body_continuation_token, Register body_continuation_result,
          Register message) {
        RegisterAllocationScope register_scope(this);
        Register arg = register_allocator()->NewRegister();
        builder()
            ->MoveRegister(generator_object(), arg)
            .CallRuntime(Runtime::kInlineGeneratorClose, arg);
      },
      HandlerTable::UNCAUGHT);
}

void BytecodeGenerator::GenerateBodyStatements(int start) {
  GenerateBodyStatementsWithoutImplicitFinalReturn(start);

  // Emit an implicit return instruction in case control flow can fall off the
  // end of the function without an explicit return being present on all
  // paths.
  //
  // ControlScope is used instead of building the Return bytecode directly, as
  // the entire body is wrapped in a try-finally block for async generators.
  if (!builder()->RemainderOfBlockIsDead()) {
    builder()->LoadUndefined();
    const int pos = info()->literal()->return_position();
    if (IsAsyncFunction(function_kind()) ||
        IsModuleWithTopLevelAwait(function_kind())) {
      execution_control()->AsyncReturnAccumulator(pos);
    } else {
      execution_control()->ReturnAccumulator(pos);
    }
  }
}

void BytecodeGenerator::GenerateBodyStatementsWithoutImplicitFinalReturn(
    int start) {
  ZonePtrList<Statement>* body = info()->literal()->body();
  if (v8_flags.js_explicit_resource_management && closure_scope() != nullptr &&
      (closure_scope()->has_using_declaration() ||
       closure_scope()->has_await_using_declaration())) {
    BuildDisposeScope([&]() { VisitStatements(body, start); },
                      closure_scope()->has_await_using_declaration());
  } else {
    VisitStatements(body, start);
  }
}

void BytecodeGenerator::AllocateTopLevelRegisters() {
  if (IsResumableFunction(info()->literal()->kind())) {
    // Either directly use generator_object_var or allocate a new register for
    // the incoming generator object.
    Variable* generator_object_var = closure_scope()->generator_object_var();
    if (generator_object_var->location() == VariableLocation::LOCAL) {
      incoming_new_target_or_generator_ =
          GetRegisterForLocalVariable(generator_object_var);
    } else {
      incoming_new_target_or_generator_ = register_allocator()->NewRegister();
    }
  } else if (closure_scope()->new_target_var()) {
    // Either directly use new_target_var or allocate a new register for
    // the incoming new target object.
    Variable* new_target_var = closure_scope()->new_target_var();
    if (new_target_var->location() == VariableLocation::LOCAL) {
      incoming_new_target_or_generator_ =
          GetRegisterForLocalVariable(new_target_var);
    } else {
      incoming_new_target_or_generator_ = register_allocator()->NewRegister();
    }
  }
}

void BytecodeGenerator::BuildGeneratorPrologue() {
  DCHECK_GT(info()->literal()->suspend_count(), 0);
  generator_jump_table_ =
      builder()->AllocateJumpTable(info()->literal()->suspend_count(), 0);

  // If the generator is not undefined, this is a resume, so perform state
  // dispatch.
  builder()->SwitchOnGeneratorState(generator_object(), generator_jump_table_);

  // Otherwise, fall-through to the ordinary function prologue, after which we
  // will run into the generator object creation and other extra code inserted
  // by the parser.
}
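
// Illustrative note (not from the original source): SwitchOnGeneratorState
// makes resumption cheap. For
//
//   function* g() { yield 1; yield 2; }
//
// each yield gets an entry in generator_jump_table_; when g's bytecode is
// re-entered with a live generator object, the prologue dispatches directly
// to the bytecode after the corresponding suspend point instead of re-running
// the function from the top.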

void BytecodeGenerator::VisitBlock(Block* stmt) {
  // Visit declarations and statements.
  CurrentScope current_scope(this, stmt->scope());
  if (stmt->scope() != nullptr && stmt->scope()->NeedsContext()) {
    BuildNewLocalBlockContext(stmt->scope());
    ContextScope scope(this, stmt->scope());
    VisitBlockMaybeDispose(stmt);
  } else {
    VisitBlockMaybeDispose(stmt);
  }
}

void BytecodeGenerator::VisitBlockMaybeDispose(Block* stmt) {
  if (v8_flags.js_explicit_resource_management && stmt->scope() != nullptr &&
      (stmt->scope()->has_using_declaration() ||
       stmt->scope()->has_await_using_declaration())) {
    BuildDisposeScope([&]() { VisitBlockDeclarationsAndStatements(stmt); },
                      stmt->scope()->has_await_using_declaration());
  } else {
    VisitBlockDeclarationsAndStatements(stmt);
  }
}

void BytecodeGenerator::VisitBlockDeclarationsAndStatements(Block* stmt) {
  BlockBuilder block_builder(builder(), block_coverage_builder_, stmt);
  ControlScopeForBreakable execution_control(this, stmt, &block_builder);
  if (stmt->scope() != nullptr) {
    VisitDeclarations(stmt->scope()->declarations());
  }
  if (V8_UNLIKELY(stmt->is_breakable())) {
    // Loathsome labeled blocks can be the target of break statements, which
    // causes unconditional blocks to act conditionally, and therefore to
    // require their own elision scope.
    //
    // lbl: {
    //   if (cond) break lbl;
    //   x;
    // }
    // x;  <-- Cannot elide TDZ check
    HoleCheckElisionScope elider(this);
    VisitStatements(stmt->statements());
  } else {
    VisitStatements(stmt->statements());
  }
}
2058
2059void BytecodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
2060 Variable* variable = decl->var();
2061 // Unused variables don't need to be visited.
2062 if (!variable->is_used()) return;
2063
2064 switch (variable->location()) {
2065 case VariableLocation::UNALLOCATED:
2066 case VariableLocation::MODULE:
2067 UNREACHABLE();
2068 case VariableLocation::LOCAL:
2069 if (variable->binding_needs_init()) {
2070 Register destination(builder()->Local(variable->index()));
2071 builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
2072 }
2073 break;
2074 case VariableLocation::PARAMETER:
2075 if (variable->binding_needs_init()) {
2076 Register destination(builder()->Parameter(variable->index()));
2077 builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
2078 }
2079 break;
2080 case VariableLocation::REPL_GLOBAL:
2081 // REPL 'let' variables are stored in script contexts. They get initialized
2082 // with the hole the same way as normal context allocated variables.
2083 case VariableLocation::CONTEXT:
2084 if (variable->binding_needs_init()) {
2085 DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
2086 builder()->LoadTheHole().StoreContextSlot(execution_context()->reg(),
2087 variable, 0);
2088 }
2089 break;
2090 case VariableLocation::LOOKUP: {
2091 DCHECK_EQ(VariableMode::kDynamic, variable->mode());
2092 DCHECK(!variable->binding_needs_init());
2093
2094 Register name = register_allocator()->NewRegister();
2095
2096 builder()
2097 ->LoadLiteral(variable->raw_name())
2098 .StoreAccumulatorInRegister(name)
2099 .CallRuntime(Runtime::kDeclareEvalVar, name);
2100 break;
2101 }
2102 }
2103}
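// Illustrative example (editorial sketch, not verbatim V8 source):
// binding_needs_init() corresponds to lexical bindings with a temporal dead
// zone. For
//
//   { f(); let x = 1; function f() { return x; } }
//
// `x` is initialized with the_hole above, so the premature read inside f()
// can be detected by a hole check and thrown as a ReferenceError.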
2104
2105void BytecodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
2106 Variable* variable = decl->var();
2107 DCHECK(variable->mode() == VariableMode::kLet ||
2108 variable->mode() == VariableMode::kVar ||
2109 variable->mode() == VariableMode::kDynamic);
2110 // Unused variables don't need to be visited.
2111 if (!variable->is_used()) return;
2112
2113 switch (variable->location()) {
2114 case VariableLocation::UNALLOCATED:
2115 case VariableLocation::MODULE:
2116 UNREACHABLE();
2117 case VariableLocation::PARAMETER:
2118 case VariableLocation::LOCAL: {
2119 VisitFunctionLiteral(decl->fun());
2120 BuildVariableAssignment(variable, Token::kInit, HoleCheckMode::kElided);
2121 break;
2122 }
2123 case VariableLocation::REPL_GLOBAL:
2124 case VariableLocation::CONTEXT: {
2125 DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
2126 VisitFunctionLiteral(decl->fun());
2127 builder()->StoreContextSlot(execution_context()->reg(), variable, 0);
2128 break;
2129 }
2130 case VariableLocation::LOOKUP: {
2131 RegisterList args = register_allocator()->NewRegisterList(2);
2132 builder()
2133 ->LoadLiteral(variable->raw_name())
2134 .StoreAccumulatorInRegister(args[0]);
2135 VisitFunctionLiteral(decl->fun());
2136 builder()->StoreAccumulatorInRegister(args[1]).CallRuntime(
2137 Runtime::kDeclareEvalFunction, args);
2138 break;
2139 }
2140 }
2141 DCHECK_IMPLIES(
2142 eager_inner_literals_ != nullptr && decl->fun()->ShouldEagerCompile(),
2143 IsInEagerLiterals(decl->fun(), *eager_inner_literals_));
2144}
2145
2146void BytecodeGenerator::VisitModuleNamespaceImports() {
2147 if (!closure_scope()->is_module_scope()) return;
2148
2149 RegisterAllocationScope register_scope(this);
2150 Register module_request = register_allocator()->NewRegister();
2151
2152 SourceTextModuleDescriptor* descriptor =
2153 closure_scope()->AsModuleScope()->module();
2154 for (auto entry : descriptor->namespace_imports()) {
2155 builder()
2156 ->LoadLiteral(Smi::FromInt(entry->module_request))
2157 .StoreAccumulatorInRegister(module_request)
2158 .CallRuntime(Runtime::kGetModuleNamespace, module_request);
2159 Variable* var = closure_scope()->LookupInModule(entry->local_name);
2160 BuildVariableAssignment(var, Token::kInit, HoleCheckMode::kElided);
2161 }
2162}
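// Illustrative example (editorial sketch, not verbatim V8 source): the loop
// above handles namespace imports such as
//
//   import * as ns from "./m.js";
//
// For each one it calls %GetModuleNamespace with the module request index and
// stores the resulting namespace object into the local `ns` binding.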
2163
2164void BytecodeGenerator::BuildDeclareCall(Runtime::FunctionId id) {
2165 if (!top_level_builder()->has_top_level_declaration()) return;
2166 DCHECK(!top_level_builder()->processed());
2167
2168 top_level_builder()->set_constant_pool_entry(
2169 builder()->AllocateDeferredConstantPoolEntry());
2170
2171 // Emit code to declare globals.
2172 RegisterList args = register_allocator()->NewRegisterList(2);
2173 builder()
2174 ->LoadConstantPoolEntry(top_level_builder()->constant_pool_entry())
2175 .StoreAccumulatorInRegister(args[0])
2176 .MoveRegister(Register::function_closure(), args[1])
2177 .CallRuntime(id, args);
2178
2179 top_level_builder()->mark_processed();
2180}
2181
2182void BytecodeGenerator::VisitModuleDeclarations(Declaration::List* decls) {
2183 RegisterAllocationScope register_scope(this);
2184 for (Declaration* decl : *decls) {
2185 Variable* var = decl->var();
2186 if (!var->is_used()) continue;
2187 if (var->location() == VariableLocation::MODULE) {
2188 if (decl->IsFunctionDeclaration()) {
2189 DCHECK(var->IsExport());
2190 FunctionDeclaration* f = static_cast<FunctionDeclaration*>(decl);
2191 AddToEagerLiteralsIfEager(f->fun());
2192 top_level_builder()->record_module_function_declaration();
2193 } else if (var->IsExport() && var->binding_needs_init()) {
2194 DCHECK(decl->IsVariableDeclaration());
2195 top_level_builder()->record_module_variable_declaration();
2196 }
2197 } else {
2198 RegisterAllocationScope inner_register_scope(this);
2199 Visit(decl);
2200 }
2201 }
2202 BuildDeclareCall(Runtime::kDeclareModuleExports);
2203}
2204
2205void BytecodeGenerator::VisitGlobalDeclarations(Declaration::List* decls) {
2206 RegisterAllocationScope register_scope(this);
2207 for (Declaration* decl : *decls) {
2208 Variable* var = decl->var();
2209 DCHECK(var->is_used());
2210 if (var->location() == VariableLocation::UNALLOCATED) {
2211 // var or function.
2212 if (decl->IsFunctionDeclaration()) {
2213 top_level_builder()->record_global_function_declaration();
2214 FunctionDeclaration* f = static_cast<FunctionDeclaration*>(decl);
2215 AddToEagerLiteralsIfEager(f->fun());
2216 } else {
2217 top_level_builder()->record_global_variable_declaration();
2218 }
2219 } else {
2220 // let or const. Handled in NewScriptContext.
2221 DCHECK(decl->IsVariableDeclaration());
2222 DCHECK(IsLexicalVariableMode(var->mode()));
2223 }
2224 }
2225
2226 BuildDeclareCall(Runtime::kDeclareGlobals);
2227}
2228
2229void BytecodeGenerator::VisitDeclarations(Declaration::List* declarations) {
2230 for (Declaration* decl : *declarations) {
2231 RegisterAllocationScope register_scope(this);
2232 Visit(decl);
2233 }
2234}
2235
2236void BytecodeGenerator::VisitStatements(
2237 const ZonePtrList<Statement>* statements, int start) {
2238 for (int i = start; i < statements->length(); i++) {
2239 // Allocate an outer register allocations scope for the statement.
2240 RegisterAllocationScope allocation_scope(this);
2241 Statement* stmt = statements->at(i);
2242 Visit(stmt);
2243 if (builder()->RemainderOfBlockIsDead()) break;
2244 }
2245}
2246
2247void BytecodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
2248 builder()->SetStatementPosition(stmt);
2249 VisitForEffect(stmt->expression());
2250}
2251
2252void BytecodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {}
2253
2254void BytecodeGenerator::VisitIfStatement(IfStatement* stmt) {
2255 ConditionalControlFlowBuilder conditional_builder(
2256 builder(), block_coverage_builder_, stmt);
2257 builder()->SetStatementPosition(stmt);
2258
2259 if (stmt->condition()->ToBooleanIsTrue()) {
2260 // Generate then block unconditionally as always true.
2261 conditional_builder.Then();
2262 Visit(stmt->then_statement());
2263 } else if (stmt->condition()->ToBooleanIsFalse()) {
2264 // Generate else block unconditionally if it exists.
2265 if (stmt->HasElseStatement()) {
2266 conditional_builder.Else();
2267 Visit(stmt->else_statement());
2268 }
2269 } else {
2270 // TODO(oth): If then statement is BreakStatement or
2271 // ContinueStatement we can reduce number of generated
2272 // jump/jump_ifs here. See BasicLoops test.
2273 VisitForTest(stmt->condition(), conditional_builder.then_labels(),
2274 conditional_builder.else_labels(), TestFallthrough::kThen);
2275
2276 HoleCheckElisionMergeScope merge_elider(this);
2277 {
2278 HoleCheckElisionMergeScope::Branch branch(merge_elider);
2279 conditional_builder.Then();
2280 Visit(stmt->then_statement());
2281 }
2282
2283 {
2284 HoleCheckElisionMergeScope::Branch branch(merge_elider);
2285 if (stmt->HasElseStatement()) {
2286 conditional_builder.JumpToEnd();
2287 conditional_builder.Else();
2288 Visit(stmt->else_statement());
2289 }
2290 }
2291
2292 merge_elider.Merge();
2293 }
2294}
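// Illustrative example (editorial sketch, not verbatim V8 source): why the
// two branches are merged rather than elided independently. Using the same
// notation as the try/catch comments further below:
//
//   if (c) { x; } else { x; }
//   x; <-- TDZ check can be elided: both branches performed it
//
//   if (c) { x; }
//   x; <-- Still requires a TDZ check: the else path performed none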
2295
2296void BytecodeGenerator::VisitSloppyBlockFunctionStatement(
2297 SloppyBlockFunctionStatement* stmt) {
2298 Visit(stmt->statement());
2299}
2300
2301void BytecodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
2302 AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
2303 builder()->SetStatementPosition(stmt);
2304 execution_control()->Continue(stmt->target());
2305}
2306
2307void BytecodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
2308 AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
2309 builder()->SetStatementPosition(stmt);
2310 execution_control()->Break(stmt->target());
2311}
2312
2313void BytecodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
2314 AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
2315 builder()->SetStatementPosition(stmt);
2316 VisitForAccumulatorValue(stmt->expression());
2317 int return_position = stmt->end_position();
2318 if (return_position == ReturnStatement::kFunctionLiteralReturnPosition) {
2319 return_position = info()->literal()->return_position();
2320 }
2321 if (stmt->is_async_return()) {
2322 execution_control()->AsyncReturnAccumulator(return_position);
2323 } else {
2324 execution_control()->ReturnAccumulator(return_position);
2325 }
2326}
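// Illustrative example (editorial sketch, not verbatim V8 source):
// is_async_return() distinguishes returns that must resolve a promise first:
//
//   async function f() { return g(); }  // AsyncReturnAccumulator
//   function h() { return g(); }        // ReturnAccumulator
//
// Both route through the ControlScope so that enclosing try-finally blocks
// (and the async-generator wrapper) observe the return as a deferred command.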
2327
2328void BytecodeGenerator::VisitWithStatement(WithStatement* stmt) {
2329 builder()->SetStatementPosition(stmt);
2330 VisitForAccumulatorValue(stmt->expression());
2331 BuildNewLocalWithContext(stmt->scope());
2332 VisitInScope(stmt->statement(), stmt->scope());
2333}
2334
2335namespace {
2336
2337bool IsSmiLiteralSwitchCaseValue(Expression* expr) {
2338 if (expr->IsSmiLiteral() ||
2339 (expr->IsLiteral() && expr->AsLiteral()->IsNumber() &&
2340 expr->AsLiteral()->AsNumber() == 0.0)) {
2341 return true;
2342#ifdef DEBUG
2343 } else if (expr->IsLiteral() && expr->AsLiteral()->IsNumber()) {
2344 DCHECK(!IsSmiDouble(expr->AsLiteral()->AsNumber()));
2345#endif
2346 }
2347 return false;
2348}
2349
2350 // Precondition: IsSmiLiteralSwitchCaseValue() returned true for |expr|.
2351inline int ReduceToSmiSwitchCaseValue(Expression* expr) {
2352 if (V8_LIKELY(expr->IsSmiLiteral())) {
2353 return expr->AsLiteral()->AsSmiLiteral().value();
2354 } else {
2355 // Only the zero case is possible otherwise.
2356 DCHECK(expr->IsLiteral() && expr->AsLiteral()->IsNumber() &&
2357 expr->AsLiteral()->AsNumber() == -0.0);
2358 return 0;
2359 }
2360}
2361
2362// Is the range of Smi's small enough relative to number of cases?
2363inline bool IsSpreadAcceptable(int spread, int ncases) {
2364 return spread < v8_flags.switch_table_spread_threshold * ncases;
2365}
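// Illustrative example (editorial sketch, not verbatim V8 source): a worked
// instance of the spread heuristic. For case labels {1, 2, 3, 4, 5, 100},
// ncases == 6 and spread == 100 - 1 + 1 == 100, so a jump table is used only
// if 100 < switch_table_spread_threshold * 6. With a threshold of, say, 3
// (the actual default is defined with the flag), 100 >= 18 and the dense
// jump table is rejected in favour of compare/jump chains.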
2366
2367struct SwitchInfo {
2368 static const int kDefaultNotFound = -1;
2369
2370 std::map<int, CaseClause*> covered_cases;
2371 int default_case;
2372
2373 SwitchInfo() { default_case = kDefaultNotFound; }
2374
2375 bool DefaultExists() { return default_case != kDefaultNotFound; }
2376 bool CaseExists(int j) {
2377 return covered_cases.find(j) != covered_cases.end();
2378 }
2379 bool CaseExists(Expression* expr) {
2380 return IsSmiLiteralSwitchCaseValue(expr)
2381 ? CaseExists(ReduceToSmiSwitchCaseValue(expr))
2382 : false;
2383 }
2384 CaseClause* GetClause(int j) { return covered_cases[j]; }
2385
2386 bool IsDuplicate(CaseClause* clause) {
2387 return IsSmiLiteralSwitchCaseValue(clause->label()) &&
2388 CaseExists(clause->label()) &&
2389 clause != GetClause(ReduceToSmiSwitchCaseValue(clause->label()));
2390 }
2391 int MinCase() {
2392 return covered_cases.empty() ? INT_MAX : covered_cases.begin()->first;
2393 }
2394 int MaxCase() {
2395 return covered_cases.empty() ? INT_MIN : covered_cases.rbegin()->first;
2396 }
2397 void Print() {
2398 std::cout << "Covered_cases: " << '\n';
2399 for (auto iter = covered_cases.begin(); iter != covered_cases.end();
2400 ++iter) {
2401 std::cout << iter->first << "->" << iter->second << '\n';
2402 }
2403 std::cout << "Default_case: " << default_case << '\n';
2404 }
2405};
2406
2407// Checks whether we should use a jump table to implement a switch operation.
2408bool IsSwitchOptimizable(SwitchStatement* stmt, SwitchInfo* info) {
2409 ZonePtrList<CaseClause>* cases = stmt->cases();
2410
2411 for (int i = 0; i < cases->length(); ++i) {
2412 CaseClause* clause = cases->at(i);
2413 if (clause->is_default()) {
2414 continue;
2415 } else if (!(clause->label()->IsLiteral())) {
2416 // Don't consider Smi cases after a non-literal, because we
2417 // need to evaluate the non-literal.
2418 break;
2419 } else if (IsSmiLiteralSwitchCaseValue(clause->label())) {
2420 int value = ReduceToSmiSwitchCaseValue(clause->label());
2421 info->covered_cases.insert({value, clause});
2422 }
2423 }
2424
2425 // GCC also jump-table optimizes switch statements with 6 cases or more.
2426 if (static_cast<int>(info->covered_cases.size()) >=
2427 v8_flags.switch_table_min_cases) {
2428 // Because the case spread will be used as the size of the jump table,
2429 // we need to check that it does not overflow: cast the min and max
2430 // bounds to int64_t and verify that their difference is less than or
2431 // equal to INT_MAX.
2432 int64_t min = static_cast<int64_t>(info->MinCase());
2433 int64_t max = static_cast<int64_t>(info->MaxCase());
2434 int64_t spread = max - min + 1;
2435
2436 DCHECK_GT(spread, 0);
2437
2438 // Check if casted spread is acceptable and doesn't overflow.
2439 if (spread <= INT_MAX &&
2440 IsSpreadAcceptable(static_cast<int>(spread), cases->length())) {
2441 return true;
2442 }
2443 }
2444 // Invariant: covered_cases contains exactly the cases (and only those)
2445 // that will go in the jump table.
2446 info->covered_cases.clear();
2447 return false;
2448}
2449
2450} // namespace
2451
2452// This adds a jump table optimization for switch statements with Smi cases.
2453// If there are 5+ non-duplicate Smi clauses, and they are sufficiently compact,
2454// we generate a jump table. In the fall-through path, we put the compare-jumps
2455// for the non-Smi cases.
2456
2457// e.g.
2458//
2459// switch(x){
2460// case -0: out = 10;
2461// case 1: out = 11; break;
2462// case 0: out = 12; break;
2463// case 2: out = 13;
2464// case 3: out = 14; break;
2465// case 0.5: out = 15; break;
2466// case 4: out = 16;
2467// case y: out = 17;
2468// case 5: out = 18;
2469// default: out = 19; break;
2470// }
2471
2472// becomes this pseudo-bytecode:
2473
2474// lda x
2475// star r1
2476// test_type number
2477// jump_if_false @fallthrough
2478// ldar r1
2479// test_greater_than_or_equal_to smi_min
2480// jump_if_false @fallthrough
2481// ldar r1
2482// test_less_than_or_equal_to smi_max
2483// jump_if_false @fallthrough
2484// ldar r1
2485// bitwise_or 0
2486// star r2
2487// test_strict_equal r1
2488// jump_if_false @fallthrough
2489// ldar r2
2490// switch_on_smi {1: @case_1, 2: @case_2, 3: @case_3, 4: @case_4}
2491// @fallthrough:
2492// jump_if_strict_equal -0.0 @case_minus_0.0
2493// jump_if_strict_equal 0.5 @case_0.5
2494// jump_if_strict_equal y @case_y
2495// jump_if_strict_equal 5 @case_5
2496// jump @default
2497// @case_minus_0.0:
2498// <out = 10>
2499// @case_1
2500// <out = 11, break>
2501// @case_0:
2502// <out = 12, break>
2503// @case_2:
2504// <out = 13>
2505// @case_3:
2506// <out = 14, break>
2507// @case_0.5:
2508// <out = 15, break>
2509// @case_4:
2510// <out = 16>
2511// @case_y:
2512// <out = 17>
2513// @case_5:
2514// <out = 18>
2515// @default:
2516// <out = 19, break>
2517
2518void BytecodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
2519 // We need this scope because we visit for register values. We have to
2520 // maintain an execution result scope where registers can be allocated.
2521 ZonePtrList<CaseClause>* clauses = stmt->cases();
2522
2523 SwitchInfo info;
2524 BytecodeJumpTable* jump_table = nullptr;
2525 bool use_jump_table = IsSwitchOptimizable(stmt, &info);
2526
2527 // N_comp_cases is number of cases we will generate comparison jumps for.
2528 // Note we ignore duplicate cases, since they are very unlikely.
2529
2530 int n_comp_cases = clauses->length();
2531 if (use_jump_table) {
2532 n_comp_cases -= static_cast<int>(info.covered_cases.size());
2533 jump_table = builder()->AllocateJumpTable(
2534 info.MaxCase() - info.MinCase() + 1, info.MinCase());
2535 }
2536
2537 // Are we still using any if-else bytecodes to evaluate the switch?
2538 bool use_jumps = n_comp_cases != 0;
2539
2540 // Does the comparison for non-jump table jumps need an elision scope?
2541 bool jump_comparison_needs_hole_check_elision_scope = false;
2542
2543 SwitchBuilder switch_builder(builder(), block_coverage_builder_, stmt,
2544 n_comp_cases, jump_table);
2545 ControlScopeForBreakable scope(this, stmt, &switch_builder);
2546 builder()->SetStatementPosition(stmt);
2547
2548 VisitForAccumulatorValue(stmt->tag());
2549
2550 if (use_jump_table) {
2551 // Release temps so that they can be reused in clauses.
2552 RegisterAllocationScope allocation_scope(this);
2553 // This also fills empty slots in jump table.
2554 Register r2 = register_allocator()->NewRegister();
2555
2556 Register r1 = register_allocator()->NewRegister();
2557 builder()->StoreAccumulatorInRegister(r1);
2558
2559 builder()->CompareTypeOf(TestTypeOfFlags::LiteralFlag::kNumber);
2560 switch_builder.JumpToFallThroughIfFalse();
2561 builder()->LoadAccumulatorWithRegister(r1);
2562
2563 // TODO(leszeks): Note these are duplicated range checks with the
2564 // SwitchOnSmi handler for the most part.
2565
2566 builder()
2567 ->LoadLiteral(Smi::FromInt(Smi::kMinValue))
2568 .CompareOperation(
2569 Token::kGreaterThanEq, r1,
2570 feedback_index(feedback_spec()->AddCompareICSlot()));
2571
2572 switch_builder.JumpToFallThroughIfFalse();
2573 builder()->LoadAccumulatorWithRegister(r1);
2574
2575 builder()
2576 ->LoadLiteral(Smi::FromInt(Smi::kMaxValue))
2577 .CompareOperation(
2578 Token::kLessThanEq, r1,
2579 feedback_index(feedback_spec()->AddCompareICSlot()));
2580
2581 switch_builder.JumpToFallThroughIfFalse();
2582 builder()->LoadAccumulatorWithRegister(r1);
2583
2584 builder()->BinaryOperationSmiLiteral(
2585 Token::kBitOr, Smi::FromInt(0),
2586 feedback_index(feedback_spec()->AddBinaryOpICSlot()));
2587
2588 builder()->StoreAccumulatorInRegister(r2);
2589 builder()->CompareOperation(
2590 Token::kEqStrict, r1,
2591 feedback_index(feedback_spec()->AddCompareICSlot()));
2592
2593 switch_builder.JumpToFallThroughIfFalse();
2594 builder()->LoadAccumulatorWithRegister(r2);
2595
2596 switch_builder.EmitJumpTableIfExists(info.MinCase(), info.MaxCase(),
2597 info.covered_cases);
2598
2599 if (use_jumps) {
2600 // When using a jump table, the first jump comparison is conditionally
2601 // executed if the discriminant wasn't matched by anything in the jump
2602 // table, and so needs its own elision scope.
2603 jump_comparison_needs_hole_check_elision_scope = true;
2604 builder()->LoadAccumulatorWithRegister(r1);
2605 }
2606 }
2607
2608 int case_compare_ctr = 0;
2609#ifdef DEBUG
2610 std::unordered_map<int, int> case_ctr_checker;
2611#endif
2612
2613 if (use_jumps) {
2614 Register tag_holder = register_allocator()->NewRegister();
2615 FeedbackSlot slot = clauses->length() > 0
2616 ? feedback_spec()->AddCompareICSlot()
2617 : FeedbackSlot::Invalid();
2618 builder()->StoreAccumulatorInRegister(tag_holder);
2619
2620 {
2621 // The comparisons linearly dominate, so no need to open a new elision
2622 // scope for each one.
2623 std::optional<HoleCheckElisionScope> elider;
2624 for (int i = 0; i < clauses->length(); ++i) {
2625 CaseClause* clause = clauses->at(i);
2626 if (clause->is_default()) {
2627 info.default_case = i;
2628 } else if (!info.CaseExists(clause->label())) {
2629 if (jump_comparison_needs_hole_check_elision_scope && !elider) {
2630 elider.emplace(this);
2631 }
2632
2633 // Perform label comparison as if via '===' with tag.
2634 VisitForAccumulatorValue(clause->label());
2635 builder()->CompareOperation(Token::kEqStrict, tag_holder,
2636 feedback_index(slot));
2637#ifdef DEBUG
2638 case_ctr_checker[i] = case_compare_ctr;
2639#endif
2640 switch_builder.JumpToCaseIfTrue(ToBooleanMode::kAlreadyBoolean,
2641 case_compare_ctr++);
2642 // The second and subsequent non-default comparisons are always
2643 // conditionally executed, and need an elision scope.
2644 jump_comparison_needs_hole_check_elision_scope = true;
2645 }
2646 }
2647 }
2648 register_allocator()->ReleaseRegister(tag_holder);
2649 }
2650
2651 // For fall-throughs after comparisons (or out-of-range/non-Smi's for jump
2652 // tables).
2653 if (info.DefaultExists()) {
2654 switch_builder.JumpToDefault();
2655 } else {
2656 switch_builder.Break();
2657 }
2658
2659 // It is only correct to merge hole check states if there is a default clause,
2660 // as otherwise it's unknown if the switch is exhaustive.
2661 HoleCheckElisionMergeScope merge_elider(this);
2662
2663 case_compare_ctr = 0;
2664 for (int i = 0; i < clauses->length(); ++i) {
2665 CaseClause* clause = clauses->at(i);
2666 if (i != info.default_case) {
2667 if (!info.IsDuplicate(clause)) {
2668 bool use_table = use_jump_table && info.CaseExists(clause->label());
2669 if (!use_table) {
2670// Guarantee that we should generate compare/jump if no table.
2671#ifdef DEBUG
2672 DCHECK(case_ctr_checker[i] == case_compare_ctr);
2673#endif
2674 switch_builder.BindCaseTargetForCompareJump(case_compare_ctr++,
2675 clause);
2676 } else {
2677 // Use jump table if this is not a duplicate label.
2678 switch_builder.BindCaseTargetForJumpTable(
2679 ReduceToSmiSwitchCaseValue(clause->label()), clause);
2680 }
2681 }
2682 } else {
2683 switch_builder.BindDefault(clause);
2684 }
2685 // Regardless, generate code (in case of fall throughs).
2686 HoleCheckElisionMergeScope::Branch branch_elider(merge_elider);
2687 VisitStatements(clause->statements());
2688 }
2689
2690 merge_elider.MergeIf(info.DefaultExists());
2691}
2692
2693template <typename TryBodyFunc, typename CatchBodyFunc>
2695 TryBodyFunc try_body_func, CatchBodyFunc catch_body_func,
2696 HandlerTable::CatchPrediction catch_prediction,
2697 TryCatchStatement* stmt_for_coverage) {
2698 if (builder()->RemainderOfBlockIsDead()) return;
2699
2700 TryCatchBuilder try_control_builder(
2701 builder(),
2702 stmt_for_coverage == nullptr ? nullptr : block_coverage_builder_,
2703 stmt_for_coverage, catch_prediction);
2704
2705 // Preserve the context in a dedicated register, so that it can be restored
2706 // when the handler is entered by the stack-unwinding machinery.
2707 // TODO(ignition): Be smarter about register allocation.
2708 Register context = register_allocator()->NewRegister();
2709 builder()->MoveRegister(Register::current_context(), context);
2710
2711 // Evaluate the try-block inside a control scope. This simulates a handler
2712 // that is intercepting 'throw' control commands.
2713 try_control_builder.BeginTry(context);
2714
2715 HoleCheckElisionMergeScope merge_elider(this);
2716
2717 {
2718 ControlScopeForTryCatch scope(this, &try_control_builder);
2719 // The try-block itself, even though unconditionally executed, can throw
2720 // basically at any point, and so must be treated as conditional from the
2721 // perspective of the hole check elision analysis.
2722 //
2723 // try { x } catch (e) { }
2724 // use(x); <-- Still requires a TDZ check
2725 //
2726 // However, if both the try-block and the catch-block emit a hole check,
2727 // subsequent TDZ checks can be elided.
2728 //
2729 // try { x; } catch (e) { x; }
2730 // use(x); <-- TDZ check can be elided
2731 HoleCheckElisionMergeScope::Branch branch_elider(merge_elider);
2732 try_body_func();
2733 }
2734 try_control_builder.EndTry();
2735
2736 {
2737 HoleCheckElisionMergeScope::Branch branch_elider(merge_elider);
2738 catch_body_func(context);
2739 }
2740
2741 merge_elider.Merge();
2742
2743 try_control_builder.EndCatch();
2744}
2745
2746template <typename TryBodyFunc, typename FinallyBodyFunc>
2748 TryBodyFunc try_body_func, FinallyBodyFunc finally_body_func,
2749 HandlerTable::CatchPrediction catch_prediction,
2750 TryFinallyStatement* stmt_for_coverage) {
2751 if (builder()->RemainderOfBlockIsDead()) return;
2752
2753 // We can't know whether the finally block will override ("catch") an
2754 // exception thrown in the try block, so we just adopt the outer prediction.
2755 TryFinallyBuilder try_control_builder(
2756 builder(),
2757 stmt_for_coverage == nullptr ? nullptr : block_coverage_builder_,
2758 stmt_for_coverage, catch_prediction);
2759
2760 // We keep a record of all paths that enter the finally-block to be able to
2761 // dispatch to the correct continuation point after the statements in the
2762 // finally-block have been evaluated.
2763 //
2764 // The try-finally construct can enter the finally-block in three ways:
2765 // 1. By exiting the try-block normally, falling through at the end.
2766 // 2. By exiting the try-block with a function-local control flow transfer
2767 // (i.e. through break/continue/return statements).
2768 // 3. By exiting the try-block with a thrown exception.
2769 //
2770 // The result register semantics depend on how the block was entered:
2771 // - ReturnStatement: It represents the return value being returned.
2772 // - ThrowStatement: It represents the exception being thrown.
2773 // - BreakStatement/ContinueStatement: Undefined and not used.
2774 // - Falling through into finally-block: Undefined and not used.
2775 Register token = register_allocator()->NewRegister();
2776 Register result = register_allocator()->NewRegister();
2777 Register message = register_allocator()->NewRegister();
2778 builder()->LoadTheHole().SetPendingMessage().StoreAccumulatorInRegister(message);
2779 ControlScope::DeferredCommands commands(this, token, result, message);
2780
2781 // Preserve the context in a dedicated register, so that it can be restored
2782 // when the handler is entered by the stack-unwinding machinery.
2783 // TODO(ignition): Be smarter about register allocation.
2784 Register context = register_allocator()->NewRegister();
2785 builder()->MoveRegister(Register::current_context(), context);
2786
2787 // Evaluate the try-block inside a control scope. This simulates a handler
2788 // that is intercepting all control commands.
2789 try_control_builder.BeginTry(context);
2790 {
2791 ControlScopeForTryFinally scope(this, &try_control_builder, &commands);
2792 // The try-block itself, even though unconditionally executed, can throw
2793 // basically at any point, and so must be treated as conditional from the
2794 // perspective of the hole check elision analysis.
2795 HoleCheckElisionScope elider(this);
2796 try_body_func();
2797 }
2798 try_control_builder.EndTry();
2799
2800 // Record fall-through and exception cases.
2801 if (!builder()->RemainderOfBlockIsDead()) {
2802 commands.RecordFallThroughPath();
2803 }
2804 try_control_builder.LeaveTry();
2805 try_control_builder.BeginHandler();
2806 commands.RecordHandlerReThrowPath();
2807
2808 try_control_builder.BeginFinally();
2809
2810 // Evaluate the finally-block.
2811 finally_body_func(token, result, message);
2812 try_control_builder.EndFinally();
2813
2814 // Dynamic dispatch after the finally-block.
2815 commands.ApplyDeferredCommands();
2816}
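// Illustrative example (editorial sketch, not verbatim V8 source): how the
// deferred commands replay control flow. For
//
//   try { return 42; } finally { cleanup(); }
//
// the try-block records a RETURN command: a token identifying "return" is
// stored in |token| and 42 in |result|. After cleanup() runs,
// ApplyDeferredCommands() switches on |token| and re-issues the return with
// the saved result, re-throws a saved exception, or resumes a break or
// continue, whichever was recorded.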
2817
2818template <typename WrappedFunc>
2819void BytecodeGenerator::BuildDisposeScope(WrappedFunc wrapped_func,
2820 bool has_await_using) {
2821 RegisterAllocationScope allocation_scope(this);
2822 DisposablesStackScope disposables_stack_scope(this);
2823 if (has_await_using) {
2824 set_catch_prediction(info()->scope()->is_repl_mode_scope()
2827 }
2828
2829 BuildTryFinally(
2830 // Try block
2831 [&]() { wrapped_func(); },
2832 // Finally block
2833 [&](Register body_continuation_token, Register body_continuation_result,
2834 Register message) {
2835 if (has_await_using) {
2836 Register result_register = register_allocator()->NewRegister();
2837 Register disposable_stack_register =
2840 disposable_stack_register);
2841 LoopBuilder loop_builder(builder(), nullptr, nullptr,
2842 feedback_spec());
2843 LoopScope loop_scope(this, &loop_builder);
2844
2845 {
2846 RegisterAllocationScope allocation_scope(this);
2848 builder()
2849 ->MoveRegister(disposable_stack_register, args[0])
2850 .MoveRegister(body_continuation_token, args[1])
2851 .MoveRegister(body_continuation_result, args[2])
2852 .MoveRegister(message, args[3])
2856 builder()->CallRuntime(Runtime::kDisposeDisposableStack, args);
2857 }
2858
2859 builder()
2860 ->StoreAccumulatorInRegister(result_register)
2861 .LoadTrue()
2862 .CompareReference(result_register);
2863
2864 loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
2865
2866 builder()->LoadAccumulatorWithRegister(result_register);
2868 [&]() { BuildAwait(); },
2869 [&](Register context) {
2871 builder()
2873 .StoreAccumulatorInRegister(args[1]) // exception
2874 .LoadTheHole()
2877 .CallRuntime(
2878 Runtime::kHandleExceptionsInDisposeDisposableStack,
2879 args);
2880
2882 disposable_stack_register);
2883 },
2885
2886 loop_builder.BindContinueTarget();
2887 } else {
2889 builder()
2891 .MoveRegister(body_continuation_token, args[1])
2892 .MoveRegister(body_continuation_result, args[2])
2893 .MoveRegister(message, args[3])
2894 .LoadLiteral(
2897 builder()->CallRuntime(Runtime::kDisposeDisposableStack, args);
2898 }
2899 },
2901}
2902
2903void BytecodeGenerator::VisitIterationBody(IterationStatement* stmt,
2904 LoopBuilder* loop_builder) {
2905 loop_builder->LoopBody();
2906 ControlScopeForIteration execution_control(this, stmt, loop_builder);
2907 Visit(stmt->body());
2908 loop_builder->BindContinueTarget();
2909}
2910
2911void BytecodeGenerator::VisitIterationBodyInHoleCheckElisionScope(
2912 IterationStatement* stmt, LoopBuilder* loop_builder) {
2913 HoleCheckElisionScope elider(this);
2914 VisitIterationBody(stmt, loop_builder);
2915}
2916
2917void BytecodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
2918 LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt,
2919 feedback_spec());
2920 if (stmt->cond()->ToBooleanIsFalse()) {
2921 // Since we know that the condition is false, we don't create a loop.
2922 // Therefore, we don't create a LoopScope (and thus we don't create a header
2923 // and a JumpToHeader). However, we still need to iterate once through the
2924 // body.
2925 VisitIterationBodyInHoleCheckElisionScope(stmt, &loop_builder);
2926 } else if (stmt->cond()->ToBooleanIsTrue()) {
2927 LoopScope loop_scope(this, &loop_builder);
2928 VisitIterationBodyInHoleCheckElisionScope(stmt, &loop_builder);
2929 } else {
2930 LoopScope loop_scope(this, &loop_builder);
2931 VisitIterationBodyInHoleCheckElisionScope(stmt, &loop_builder);
2932 builder()->SetExpressionAsStatementPosition(stmt->cond());
2933 BytecodeLabels loop_backbranch(zone());
2934 if (!loop_builder.break_labels()->empty()) {
2935 // The test may be conditionally executed if there was a break statement
2936 // inside the loop body, and therefore requires its own elision scope.
2937 HoleCheckElisionScope elider(this);
2938 VisitForTest(stmt->cond(), &loop_backbranch, loop_builder.break_labels(),
2939 TestFallthrough::kThen);
2940 } else {
2941 VisitForTest(stmt->cond(), &loop_backbranch, loop_builder.break_labels(),
2942 TestFallthrough::kThen);
2943 }
2944 loop_backbranch.Bind(builder());
2945 }
2946}
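// Illustrative example (editorial sketch, not verbatim V8 source): the three
// do-while shapes handled above.
//
//   do { body(); } while (false);  // no loop: body emitted once, no header
//   do { body(); } while (true);   // no test: unconditional back-branch
//   do { body(); } while (cond()); // general: test emitted after the body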
2947
2948void BytecodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
2949 LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt,
2950 feedback_spec());
2951
2952 if (stmt->cond()->ToBooleanIsFalse()) {
2953 // If the condition is false there is no need to generate the loop.
2954 return;
2955 }
2956
2957 LoopScope loop_scope(this, &loop_builder);
2958 if (!stmt->cond()->ToBooleanIsTrue()) {
2959 builder()->SetExpressionAsStatementPosition(stmt->cond());
2960 BytecodeLabels loop_body(zone());
2961 VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
2962 TestFallthrough::kThen);
2963 loop_body.Bind(builder());
2964 }
2965 VisitIterationBodyInHoleCheckElisionScope(stmt, &loop_builder);
2966}
2967
2968void BytecodeGenerator::VisitForStatement(ForStatement* stmt) {
2969 if (stmt->init() != nullptr) {
2970 Visit(stmt->init());
2971 }
2972
2973 LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt,
2974 feedback_spec());
2975 if (stmt->cond() && stmt->cond()->ToBooleanIsFalse()) {
2976 // If the condition is known to be false there is no need to generate
2977 // body, next or condition blocks. Init block should be generated.
2978 return;
2979 }
2980
2981 LoopScope loop_scope(this, &loop_builder);
2982 if (stmt->cond() && !stmt->cond()->ToBooleanIsTrue()) {
2983 builder()->SetExpressionAsStatementPosition(stmt->cond());
2984 BytecodeLabels loop_body(zone());
2985 VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
2986 TestFallthrough::kThen);
2987 loop_body.Bind(builder());
2988 }
2989
2990 // C-style for loops' textual order differs from dominator order.
2991 //
2992 // for (INIT; TEST; NEXT) BODY
2993 // REST
2994 //
2995 // has the dominator order of
2996 //
2997 // INIT dominates TEST dominates BODY dominates NEXT
2998 // and
2999 // INIT dominates TEST dominates REST
3000 //
3001 // INIT and TEST are always evaluated and so do not have their own
3002 // HoleCheckElisionScope. BODY, like all iteration bodies, can contain control
3003 // flow like breaks or continues, has its own HoleCheckElisionScope. NEXT is
3004 // therefore conditionally evaluated and also so has its own
3005 // HoleCheckElisionScope.
3006 HoleCheckElisionScope elider(this);
3007 VisitIterationBody(stmt, &loop_builder);
3008 if (stmt->next() != nullptr) {
3009 builder()->SetStatementPosition(stmt->next());
3010 Visit(stmt->next());
3011 }
3012}
3013
3014void BytecodeGenerator::VisitForInStatement(ForInStatement* stmt) {
3015 if (stmt->subject()->IsNullLiteral() ||
3016 stmt->subject()->IsUndefinedLiteral()) {
3017 // ForIn generates lots of code, skip if it wouldn't produce any effects.
3018 return;
3019 }
3020
3021 BytecodeLabel subject_undefined_label;
3022 FeedbackSlot slot = feedback_spec()->AddForInSlot();
3023
3024 // Prepare the state for executing ForIn.
3025 builder()->SetExpressionAsStatementPosition(stmt->subject());
3026 {
3027 CurrentScope current_scope(this, stmt->subject_scope());
3028 VisitForAccumulatorValue(stmt->subject());
3029 }
3030 builder()->JumpIfUndefinedOrNull(&subject_undefined_label);
3031 Register receiver = register_allocator()->NewRegister();
3032 builder()->ToObject(receiver);
3033
3034 // Used as kRegTriple and kRegPair in ForInPrepare and ForInNext.
3035 RegisterList triple = register_allocator()->NewRegisterList(3);
3036 Register cache_length = triple[2];
3037 builder()->ForInEnumerate(receiver);
3038 builder()->ForInPrepare(triple, feedback_index(slot));
3039
3040 // Set up loop counter
3041 Register index = register_allocator()->NewRegister();
3042 builder()->LoadLiteral(Smi::FromInt(0));
3043 builder()->StoreAccumulatorInRegister(index);
3044
3045 // The loop
3046 {
3047 LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt,
3048 feedback_spec());
3049 LoopScope loop_scope(this, &loop_builder);
3050 HoleCheckElisionScope elider(this);
3051 builder()->SetExpressionAsStatementPosition(stmt->each());
3052 loop_builder.BreakIfForInDone(index, cache_length);
3053 builder()->ForInNext(receiver, index, triple.Truncate(2),
3054 feedback_index(slot));
3055 loop_builder.ContinueIfUndefined();
3056
3057 // Assign accumulator value to the 'each' target.
3058 {
3059 EffectResultScope scope(this);
3060 // Make sure to preserve the accumulator across the PrepareAssignmentLhs
3061 // call.
3062 AssignmentLhsData lhs_data = PrepareAssignmentLhs(
3063 stmt->each(), AccumulatorPreservingMode::kPreserve);
3064 builder()->SetExpressionPosition(stmt->each());
3065 BuildAssignment(lhs_data, Token::kAssign, LookupHoistingMode::kNormal);
3066 }
3067
3068 {
3069 Register cache_type = triple[0];
3070 ForInScope scope(this, stmt, index, cache_type);
3071 VisitIterationBody(stmt, &loop_builder);
3072 builder()->ForInStep(index);
3073 }
3074 }
3075 builder()->Bind(&subject_undefined_label);
3076}
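// Illustrative example (editorial sketch, not verbatim V8 source):
//
//   for (const k in null) { ... }        // skipped entirely above
//   for (const k in {a: 1, b: 2}) { ... }
//
// The second form enumerates cached keys: ForInPrepare fills a
// (cache_type, cache_array, cache_length) triple, and each ForInNext yields
// the next key, continuing past keys that became invalid during iteration
// (the ContinueIfUndefined() case).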
3077
3078// Desugar a for-of statement into an application of the iteration protocol.
3079//
3080// for (EACH of SUBJECT) BODY
3081//
3082// becomes
3083//
3084// iterator = %GetIterator(SUBJECT)
3085// try {
3086//
3087// loop {
3088// // Make sure we are considered 'done' if .next(), .done or .value fail.
3089// done = true
3090// value = iterator.next()
3091// if (value.done) break;
3092// value = value.value
3093// done = false
3094//
3095// EACH = value
3096// BODY
3097// }
3098// done = true
3099//
3100// } catch(e) {
3101// iteration_continuation = RETHROW
3102// } finally {
3103// %FinalizeIteration(iterator, done, iteration_continuation)
3104// }
3105void BytecodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
3106 EffectResultScope effect_scope(this);
3107
3108 builder()->SetExpressionAsStatementPosition(stmt->subject());
3109 {
3110 CurrentScope current_scope(this, stmt->subject_scope());
3111 VisitForAccumulatorValue(stmt->subject());
3112 }
3113
3114 // Store the iterator in a dedicated register so that it can be closed on
3115 // exit, and the 'done' value in a dedicated register so that it can be
3116 // changed and accessed independently of the iteration result.
3117 IteratorRecord iterator = BuildGetIteratorRecord(stmt->type());
3118 Register done = register_allocator()->NewRegister();
3119 builder()->LoadFalse();
3120 builder()->StoreAccumulatorInRegister(done);
3121
3122 BuildTryFinally(
3123 // Try block.
3124 [&]() {
3125 LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt,
3126 feedback_spec());
3127 LoopScope loop_scope(this, &loop_builder);
3128
3129 // This doesn't need a HoleCheckElisionScope because BuildTryFinally
3130 // already makes one for try blocks.
3131
3133
3134 {
3135 RegisterAllocationScope allocation_scope(this);
3136 Register next_result = register_allocator()->NewRegister();
3137
3138 // Call the iterator's .next() method. Break from the loop if the
3139 // `done` property is truthy, otherwise load the value from the
3140 // iterator result and continue with the assignment below.
3141 builder()->SetExpressionAsStatementPosition(stmt->each());
3142 BuildIteratorNext(iterator, next_result);
3143 builder()->LoadNamedProperty(
3144 next_result, ast_string_constants()->done_string(),
3145 feedback_index(feedback_spec()->AddLoadICSlot()));
3146 loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
3147
3148 builder()
3149 // value = value.value
3150 ->LoadNamedProperty(
3151 next_result, ast_string_constants()->value_string(),
3152 feedback_index(feedback_spec()->AddLoadICSlot()));
3153 // done = false, before the assignment to each happens, so that done
3154 // is false if the assignment throws.
3155 builder()
3156 ->StoreAccumulatorInRegister(next_result)
3157 .LoadFalse()
3158 .StoreAccumulatorInRegister(done);
3159
3160 // Assign to the 'each' target.
3161 AssignmentLhsData lhs_data = PrepareAssignmentLhs(stmt->each());
3162 builder()->LoadAccumulatorWithRegister(next_result);
3163 BuildAssignment(lhs_data, Token::kAssign,
3164 LookupHoistingMode::kNormal);
3165 }
3166
3167 VisitIterationBody(stmt, &loop_builder);
3168 },
3169 // Finally block.
3170 [&](Register iteration_continuation_token,
3171 Register iteration_continuation_result, Register message) {
3172 // Finish the iteration in the finally block.
3173 BuildFinalizeIteration(iterator, done, iteration_continuation_token);
3174 },
3175 catch_prediction());
3176}
3177
3178void BytecodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
3179 // Update catch prediction tracking. The updated catch_prediction value lasts
3180 // until the end of the try_block in the AST node, and does not apply to the
3181 // catch_block.
3182 HandlerTable::CatchPrediction outer_catch_prediction = catch_prediction();
3183 set_catch_prediction(stmt->GetCatchPrediction(outer_catch_prediction));
3184
3185 BuildTryCatch(
3186 // Try body.
3187 [&]() {
3188 Visit(stmt->try_block());
3189 set_catch_prediction(outer_catch_prediction);
3190 },
3191 // Catch body.
3192 [&](Register context) {
3193 if (stmt->scope()) {
3194 // Create a catch scope that binds the exception.
3195 BuildNewLocalCatchContext(stmt->scope());
3196 builder()->StoreAccumulatorInRegister(context);
3197 }
3198
3199 // If requested, clear message object as we enter the catch block.
3200 if (stmt->ShouldClearException(outer_catch_prediction)) {
3201 builder()->LoadTheHole().SetPendingMessage();
3202 }
3203
3204 // Load the catch context into the accumulator.
3205 builder()->LoadAccumulatorWithRegister(context);
3206
3207 // Evaluate the catch-block.
3208 if (stmt->scope()) {
3209 VisitInScope(stmt->catch_block(), stmt->scope());
3210 } else {
3211 VisitBlock(stmt->catch_block());
3212 }
3213 },
3214 catch_prediction(), stmt);
3215}
3216
3217void BytecodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
3218 BuildTryFinally(
3219 // Try block.
3220 [&]() { Visit(stmt->try_block()); },
3221 // Finally block.
3222 [&](Register body_continuation_token, Register body_continuation_result,
3223 Register message) { Visit(stmt->finally_block()); },
3224 catch_prediction(), stmt);
3225}
3226
3227void BytecodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
3228 builder()->SetStatementPosition(stmt);
3229 builder()->Debugger();
3230}
3231
3232void BytecodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
3233 CHECK_LT(info()->literal()->function_literal_id(),
3234 expr->function_literal_id());
3235 DCHECK_EQ(expr->scope()->outer_scope(), current_scope());
3236 uint8_t flags = CreateClosureFlags::Encode(
3237 expr->pretenure(), closure_scope()->is_function_scope(),
3238 info()->flags().might_always_turbofan());
3239 size_t entry = builder()->AllocateDeferredConstantPoolEntry();
3240 builder()->CreateClosure(entry, GetCachedCreateClosureSlot(expr), flags);
3241 function_literals_.push_back(std::make_pair(expr, entry));
3242 AddToEagerLiteralsIfEager(expr);
3243}
3244
3245void BytecodeGenerator::AddToEagerLiteralsIfEager(FunctionLiteral* literal) {
3246 // Only parallel compile when there's a script (not the case for source
3247 // position collection).
3248 if (!script_.is_null() && literal->should_parallel_compile()) {
3249 // If we should normally be eagerly compiling this function, we must be here
3250 // because of post_parallel_compile_tasks_for_eager_toplevel.
3251 DCHECK_IMPLIES(
3252 literal->ShouldEagerCompile(),
3253 info()->flags().post_parallel_compile_tasks_for_eager_toplevel());
3254 // There exists a lazy compile dispatcher.
3255 DCHECK(info()->dispatcher());
3256 // There exists a cloneable character stream.
3257 DCHECK(info()->character_stream()->can_be_cloned_for_parallel_access());
3258
3260 // If there doesn't already exist a SharedFunctionInfo for this function,
3261 // then create one and enqueue it. Otherwise, we're reparsing (e.g. for the
3262 // debugger, source position collection, call printing, recompile after
3263 // flushing, etc.) and don't want to over-compile.
3266 if (!shared_info->is_compiled()) {
3267 info()->dispatcher()->Enqueue(
3269 info()->character_stream()->Clone());
3270 }
3271 } else if (eager_inner_literals_ && literal->ShouldEagerCompile()) {
3272 DCHECK(!IsInEagerLiterals(literal, *eager_inner_literals_));
3273 DCHECK(!literal->should_parallel_compile());
3274 eager_inner_literals_->push_back(literal);
3275 }
3276}
3277
3278void BytecodeGenerator::BuildClassLiteral(ClassLiteral* expr, Register name) {
3279 size_t class_boilerplate_entry =
3280 builder()->AllocateDeferredConstantPoolEntry();
3281 class_literals_.push_back(std::make_pair(expr, class_boilerplate_entry));
3282
3284 Register class_constructor = register_allocator()->NewRegister();
3285
3286 // Create the class brand symbol and store it on the context during class
3287 // evaluation. This will be stored in the instance later in the constructor.
3288 // We do this early so that invalid access to private methods or accessors
3289 // in computed property keys throw.
3290 if (expr->scope()->brand() != nullptr) {
3291 Register brand = register_allocator()->NewRegister();
3292 const AstRawString* class_name =
3293 expr->scope()->class_variable() != nullptr
3294 ? expr->scope()->class_variable()->raw_name()
3295 : ast_string_constants()->anonymous_string();
3296 builder()
3297 ->LoadLiteral(class_name)
3298 .StoreAccumulatorInRegister(brand)
3299 .CallRuntime(Runtime::kCreatePrivateBrandSymbol, brand);
3301
3302 BuildVariableAssignment(expr->scope()->brand(), Token::kInit,
3303 HoleCheckMode::kElided);
3304 }
3305
3306 AccessorTable<ClassLiteral::Property> private_accessors(zone());
3307 for (int i = 0; i < expr->private_members()->length(); i++) {
3308 ClassLiteral::Property* property = expr->private_members()->at(i);
3309 DCHECK(property->is_private());
3310 switch (property->kind()) {
3311 case ClassLiteral::Property::FIELD: {
3312 // Initialize the private field variables early.
3313 // Create the private name symbols for fields during class
3314 // evaluation and store them on the context. These will be
3315 // used as keys later during instance or static initialization.
3316 RegisterAllocationScope private_name_register_scope(this);
3317 Register private_name = register_allocator()->NewRegister();
3318 VisitForRegisterValue(property->key(), private_name);
3319 builder()
3320 ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
3321 .StoreAccumulatorInRegister(private_name)
3322 .CallRuntime(Runtime::kCreatePrivateNameSymbol, private_name);
3323 DCHECK_NOT_NULL(property->private_name_var());
3324 BuildVariableAssignment(property->private_name_var(), Token::kInit,
3325 HoleCheckMode::kElided);
3326 break;
3327 }
3328 case ClassLiteral::Property::METHOD: {
3329 RegisterAllocationScope register_scope(this);
3330 VisitForAccumulatorValue(property->value());
3331 BuildVariableAssignment(property->private_name_var(), Token::kInit,
3332 HoleCheckMode::kElided);
3333 break;
3334 }
3335 // Collect private accessors into a table to merge the creation of
3336 // those closures later.
3337 case ClassLiteral::Property::GETTER: {
3338 Literal* key = property->key()->AsLiteral();
3339 DCHECK_NULL(private_accessors.LookupOrInsert(key)->getter);
3340 private_accessors.LookupOrInsert(key)->getter = property;
3341 break;
3342 }
3343 case ClassLiteral::Property::SETTER: {
3344 Literal* key = property->key()->AsLiteral();
3345 DCHECK_NULL(private_accessors.LookupOrInsert(key)->setter);
3346 private_accessors.LookupOrInsert(key)->setter = property;
3347 break;
3348 }
3349 case ClassLiteral::Property::AUTO_ACCESSOR: {
3350 Literal* key = property->key()->AsLiteral();
3351 RegisterAllocationScope private_name_register_scope(this);
3352 Register accessor_storage_private_name =
3354 Variable* accessor_storage_private_name_var =
3355 property->auto_accessor_info()
3356 ->accessor_storage_name_proxy()
3357 ->var();
3358 // We reuse the already internalized
3359 // ".accessor-storage-<accessor_number>" strings that were defined in
3360 // the parser instead of the "<name>accessor storage" string from the
3361 // spec. The downside is that these are the property names that will
3362 // show up in DevTools and in error messages.
3363 // Additionally, a property can share a name with the corresponding
3364 // property of their parent class, i.e. for classes defined as
3365 // "class C {accessor x}" and "class D extends C {accessor y}",
3366 // if "d = new D()", then d.x and d.y will share the name
3367 // ".accessor-storage-0", (but a different private symbol).
3368 // TODO(42202709): Get to a resolution on how to handle this naming
3369 // issue before shipping the feature.
3370 builder()
3371 ->LoadLiteral(accessor_storage_private_name_var->raw_name())
3372 .StoreAccumulatorInRegister(accessor_storage_private_name)
3373 .CallRuntime(Runtime::kCreatePrivateNameSymbol,
3374 accessor_storage_private_name);
3375 BuildVariableAssignment(accessor_storage_private_name_var, Token::kInit,
3376 HoleCheckMode::kElided);
3377 auto* accessor_pair = private_accessors.LookupOrInsert(key);
3378 DCHECK_NULL(accessor_pair->getter);
3379 accessor_pair->getter = property;
3380 DCHECK_NULL(accessor_pair->setter);
3381 accessor_pair->setter = property;
3382 break;
3383 }
3384 default:
3385 UNREACHABLE();
3386 }
3387 }
3388
3389 {
3390 RegisterAllocationScope register_scope(this);
3391 RegisterList args = register_allocator()->NewGrowableRegisterList();
3392
3393 Register class_boilerplate = register_allocator()->GrowRegisterList(&args);
3394 Register class_constructor_in_args =
3395 register_allocator()->GrowRegisterList(&args);
3396 Register super_class = register_allocator()->GrowRegisterList(&args);
3397 DCHECK_EQ(ClassBoilerplate::kFirstDynamicArgumentIndex,
3398 args.register_count());
3399
3401 builder()->StoreAccumulatorInRegister(super_class);
3402
3403 VisitFunctionLiteral(expr->constructor());
3404 builder()
3405 ->StoreAccumulatorInRegister(class_constructor)
3406 .MoveRegister(class_constructor, class_constructor_in_args)
3407 .LoadConstantPoolEntry(class_boilerplate_entry)
3408 .StoreAccumulatorInRegister(class_boilerplate);
3409
3410 // Create computed names and method values nodes to store into the literal.
3411 for (int i = 0; i < expr->public_members()->length(); i++) {
3412 ClassLiteral::Property* property = expr->public_members()->at(i);
3413 if (property->is_computed_name()) {
3414 Register key = register_allocator()->GrowRegisterList(&args);
3415
3416 builder()->SetExpressionAsStatementPosition(property->key());
3417 BuildLoadPropertyKey(property, key);
3418 if (property->is_static()) {
3419 // The static prototype property is read only. We handle the non
3420 // computed property name case in the parser. Since this is the only
3421 // case where we need to check for an own read only property we
3422 // special case this so we do not need to do this for every property.
3423
3425 BytecodeLabel done;
3426 builder()
3427 ->LoadLiteral(ast_string_constants()->prototype_string())
3428 .CompareOperation(Token::kEqStrict, key, feedback_index(slot))
3429 .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &done)
3430 .CallRuntime(Runtime::kThrowStaticPrototypeError)
3431 .Bind(&done);
3432 }
3433
3434 if (property->kind() == ClassLiteral::Property::FIELD) {
3435 DCHECK(!property->is_private());
3436 // Initialize field's name variable with the computed name.
3437 DCHECK_NOT_NULL(property->computed_name_var());
3438 builder()->LoadAccumulatorWithRegister(key);
3439 BuildVariableAssignment(property->computed_name_var(), Token::kInit,
3440 HoleCheckMode::kElided);
3441 }
3442 }
3443
3444 DCHECK(!property->is_private());
3445
3446 if (property->kind() == ClassLiteral::Property::FIELD) {
3447 // We don't compute field's value here, but instead do it in the
3448 // initializer function.
3449 continue;
3450 }
3451
3452 if (property->kind() == ClassLiteral::Property::AUTO_ACCESSOR) {
3453 {
3454 RegisterAllocationScope private_name_register_scope(this);
3455 Register name_register = register_allocator()->NewRegister();
3456 Variable* accessor_storage_private_name_var =
3457 property->auto_accessor_info()
3458 ->accessor_storage_name_proxy()
3459 ->var();
3460 builder()
3461 ->LoadLiteral(accessor_storage_private_name_var->raw_name())
3462 .StoreAccumulatorInRegister(name_register)
3463 .CallRuntime(Runtime::kCreatePrivateNameSymbol, name_register);
3464 BuildVariableAssignment(accessor_storage_private_name_var,
3465 Token::kInit, HoleCheckMode::kElided);
3466 }
3467
3470 AutoAccessorInfo* auto_accessor_info = property->auto_accessor_info();
3471 VisitForRegisterValue(auto_accessor_info->generated_getter(), getter);
3472 VisitForRegisterValue(auto_accessor_info->generated_setter(), setter);
3473 continue;
3474 }
3475
3476 Register value = register_allocator()->GrowRegisterList(&args);
3477 VisitForRegisterValue(property->value(), value);
3478 }
3479
3480 builder()->CallRuntime(Runtime::kDefineClass, args);
3481 }
3482
3483 // Assign to the home object variable. Accumulator already contains the
3484 // prototype.
3485 Variable* home_object_variable = expr->home_object();
3486 if (home_object_variable != nullptr) {
3487 DCHECK(home_object_variable->is_used());
3488 DCHECK(home_object_variable->IsContextSlot());
3489 BuildVariableAssignment(home_object_variable, Token::kInit,
3490 HoleCheckMode::kElided);
3491 }
3492 Variable* static_home_object_variable = expr->static_home_object();
3493 if (static_home_object_variable != nullptr) {
3494 DCHECK(static_home_object_variable->is_used());
3495 DCHECK(static_home_object_variable->IsContextSlot());
3496 builder()->LoadAccumulatorWithRegister(class_constructor);
3497 BuildVariableAssignment(static_home_object_variable, Token::kInit,
3498 HoleCheckMode::kElided);
3499 }
3500
3501 // Assign to class variable.
3502 Variable* class_variable = expr->scope()->class_variable();
3503 if (class_variable != nullptr && class_variable->is_used()) {
3504 DCHECK(class_variable->IsStackLocal() || class_variable->IsContextSlot());
3505 builder()->LoadAccumulatorWithRegister(class_constructor);
3506 BuildVariableAssignment(class_variable, Token::kInit,
3507 HoleCheckMode::kElided);
3508 }
3509
3510 // Define private accessors, using only a single call to the runtime for
3511 // each pair of corresponding getters and setters, in the order the first
3512 // component is declared.
3513 for (auto accessors : private_accessors.ordered_accessors()) {
3514 RegisterAllocationScope inner_register_scope(this);
3515 RegisterList accessors_reg = register_allocator()->NewRegisterList(2);
3516 ClassLiteral::Property* getter = accessors.second->getter;
3517 ClassLiteral::Property* setter = accessors.second->setter;
3518 Variable* accessor_pair_var;
3521 AutoAccessorInfo* auto_accessor_info = getter->auto_accessor_info();
3522 VisitForRegisterValue(auto_accessor_info->generated_getter(),
3523 accessors_reg[0]);
3524 VisitForRegisterValue(auto_accessor_info->generated_setter(),
3525 accessors_reg[1]);
3526 accessor_pair_var =
3527 auto_accessor_info->property_private_name_proxy()->var();
3528 } else {
3529 VisitLiteralAccessor(getter, accessors_reg[0]);
3530 VisitLiteralAccessor(setter, accessors_reg[1]);
3531 accessor_pair_var = getter != nullptr ? getter->private_name_var()
3532 : setter->private_name_var();
3533 }
3534 builder()->CallRuntime(Runtime::kCreatePrivateAccessors, accessors_reg);
3535 DCHECK_NOT_NULL(accessor_pair_var);
3536 BuildVariableAssignment(accessor_pair_var, Token::kInit,
3537 HoleCheckMode::kElided);
3538 }
3539
3540 if (expr->instance_members_initializer_function() != nullptr) {
3542
3544 builder()
3545 ->StoreClassFieldsInitializer(class_constructor, feedback_index(slot))
3546 .LoadAccumulatorWithRegister(class_constructor);
3547 }
3548
3549 if (expr->static_initializer() != nullptr) {
3550 // TODO(gsathya): This can be optimized away to be a part of the
3551 // class boilerplate in the future. The name argument can be
3552 // passed to the DefineClass runtime function and have it set
3553 // there.
3554 // TODO(v8:13451): Alternatively, port SetFunctionName to an ic so that we
3555 // can replace the runtime call to a dedicate bytecode here.
3556 if (name.is_valid()) {
3557 RegisterAllocationScope inner_register_scope(this);
3558 RegisterList args = register_allocator()->NewRegisterList(2);
3559 builder()
3560 ->MoveRegister(class_constructor, args[0])
3561 .MoveRegister(name, args[1])
3562 .CallRuntime(Runtime::kSetFunctionName, args);
3563 }
3564
3565 RegisterAllocationScope inner_register_scope(this);
3566 RegisterList args = register_allocator()->NewRegisterList(1);
3567 Register initializer = VisitForRegisterValue(expr->static_initializer());
3568
3569 builder()
3570 ->MoveRegister(class_constructor, args[0])
3571 .CallProperty(initializer, args,
3572 feedback_index(feedback_spec()->AddCallICSlot()));
3573 }
3574 builder()->LoadAccumulatorWithRegister(class_constructor);
3575}
3576
3577void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr) {
3578 VisitClassLiteral(expr, Register::invalid_value());
3579}
3580
3581void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr, Register name) {
3582 CurrentScope current_scope(this, expr->scope());
3583 DCHECK_NOT_NULL(expr->scope());
3584 if (expr->scope()->NeedsContext()) {
3585 // Make sure to associate the source position for the class
3586 // after the block context is created. Otherwise we have a mismatch
3587 // between the scope and the context, where we already are in a
3588 // block context for the class, but not yet in the class scope. Only do
3589 // this if the current source position is inside the class scope though.
3590 // For example:
3591 // * `var x = class {};` will break on `class` which is inside
3592 // the class scope, so we expect the BlockContext to be pushed.
3593 //
3594 // * `new class x {};` will break on `new` which is outside the
3595 // class scope, so we expect the BlockContext to not be pushed yet.
3596 std::optional<BytecodeSourceInfo> source_info =
3597 builder()->MaybePopSourcePosition(expr->scope()->start_position());
3598 BuildNewLocalBlockContext(expr->scope());
3599 ContextScope scope(this, expr->scope());
3600 if (source_info) builder()->PushSourcePosition(*source_info);
3601 BuildClassLiteral(expr, name);
3602 } else {
3603 BuildClassLiteral(expr, name);
3604 }
3605}
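// Illustrative example (editorial sketch, not verbatim V8 source): a class
// that exercises the paths above.
//
//   let C = class {
//     #m() {}              // brand symbol created during class evaluation
//     static [key()] = 1;  // computed key evaluated (and checked against
//                          // "prototype") before any field initializer runs
//   };
//
// The brand is stored on the class context so that invalid private-member
// accesses inside computed keys throw before any instance exists.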
3606
3607void BytecodeGenerator::BuildClassProperty(ClassLiteral::Property* property) {
3608 RegisterAllocationScope register_scope(this);
3609 Register key;
3610
3611 // Private methods are not initialized in BuildClassProperty.
3612 DCHECK_IMPLIES(property->is_private(),
3613 property->kind() == ClassLiteral::Property::FIELD ||
3614 property->is_auto_accessor());
3615 builder()->SetExpressionPosition(property->key());
3616
3617 bool is_literal_store =
3618 property->key()->IsPropertyName() && !property->is_computed_name() &&
3619 !property->is_private() && !property->is_auto_accessor();
3620
3621 if (!is_literal_store) {
3622 key = register_allocator()->NewRegister();
3623 if (property->is_auto_accessor()) {
3624 Variable* var =
3625 property->auto_accessor_info()->accessor_storage_name_proxy()->var();
3626 DCHECK_NOT_NULL(var);
3627 BuildVariableLoad(var, HoleCheckMode::kElided);
3628 builder()->StoreAccumulatorInRegister(key);
3629 } else if (property->is_computed_name()) {
3630 DCHECK_EQ(property->kind(), ClassLiteral::Property::FIELD);
3631 DCHECK(!property->is_private());
3632 Variable* var = property->computed_name_var();
3633 DCHECK_NOT_NULL(var);
3634 // The computed name is already evaluated and stored in a variable at
3635 // class definition time.
3636 BuildVariableLoad(var, HoleCheckMode::kElided);
3637 builder()->StoreAccumulatorInRegister(key);
3638 } else if (property->is_private()) {
3639 Variable* private_name_var = property->private_name_var();
3640 DCHECK_NOT_NULL(private_name_var);
3641 BuildVariableLoad(private_name_var, HoleCheckMode::kElided);
3642 builder()->StoreAccumulatorInRegister(key);
3643 } else {
3644 VisitForRegisterValue(property->key(), key);
3645 }
3646 }
3647
3648 builder()->SetExpressionAsStatementPosition(property->value());
3649
3650 if (is_literal_store) {
3651 VisitForAccumulatorValue(property->value());
3652 FeedbackSlot slot = feedback_spec()->AddDefineNamedOwnICSlot();
3653 builder()->DefineNamedOwnProperty(
3654 builder()->Receiver(),
3655 property->key()->AsLiteral()->AsRawPropertyName(),
3656 feedback_index(slot));
3657 } else {
3659 if (property->NeedsSetFunctionName()) {
3660 // Static class fields require the name property to be set on
3661 // the class, meaning we can't wait until the
3662 // DefineKeyedOwnProperty call later to set the name.
3663 if (property->value()->IsClassLiteral() &&
3664 property->value()->AsClassLiteral()->static_initializer() !=
3665 nullptr) {
3666 VisitClassLiteral(property->value()->AsClassLiteral(), key);
3667 } else {
3668 VisitForAccumulatorValue(property->value());
3670 }
3671 } else {
3672 VisitForAccumulatorValue(property->value());
3673 }
3675 builder()->DefineKeyedOwnProperty(builder()->Receiver(), key, flags,
3676 feedback_index(slot));
3677 }
3678}
3679
3680void BytecodeGenerator::VisitInitializeClassMembersStatement(
3682 for (int i = 0; i < stmt->fields()->length(); i++) {
3683 BuildClassProperty(stmt->fields()->at(i));
3684 }
3685}
3686
3687void BytecodeGenerator::VisitInitializeClassStaticElementsStatement(
3688 InitializeClassStaticElementsStatement* stmt) {
3689 for (int i = 0; i < stmt->elements()->length(); i++) {
3690 ClassLiteral::StaticElement* element = stmt->elements()->at(i);
3691 switch (element->kind()) {
3693 BuildClassProperty(element->property());
3694 break;
3696 VisitBlock(element->static_block());
3697 break;
3698 }
3699 }
3700}
3701
3702void BytecodeGenerator::VisitAutoAccessorGetterBody(
3703 AutoAccessorGetterBody* stmt) {
3704 BuildVariableLoad(stmt->name_proxy()->var(), HoleCheckMode::kElided);
3705 builder()->LoadKeyedProperty(
3706 builder()->Receiver(),
3707 feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
3708 BuildReturn(stmt->position());
3709}
3710
3711void BytecodeGenerator::VisitAutoAccessorSetterBody(
3712 AutoAccessorSetterBody* stmt) {
3713 Register key = register_allocator()->NewRegister();
3714 Register value = builder()->Parameter(0);
3715 FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
3716 BuildVariableLoad(stmt->name_proxy()->var(), HoleCheckMode::kElided);
3717
3718 builder()
3719 ->StoreAccumulatorInRegister(key)
3720 .LoadAccumulatorWithRegister(value)
3721 .SetKeyedProperty(builder()->Receiver(), key, feedback_index(slot),
3722 language_mode());
3723}
3724
3725void BytecodeGenerator::BuildInvalidPropertyAccess(MessageTemplate tmpl,
3726 Property* property) {
3727 RegisterAllocationScope register_scope(this);
3728 const AstRawString* name = property->key()->AsVariableProxy()->raw_name();
3729 RegisterList args = register_allocator()->NewRegisterList(2);
3730 builder()
3731 ->LoadLiteral(Smi::FromEnum(tmpl))
3732 .StoreAccumulatorInRegister(args[0])
3733 .LoadLiteral(name)
3734 .StoreAccumulatorInRegister(args[1])
3735 .CallRuntime(Runtime::kNewTypeError, args)
3736 .Throw();
3737}
3738
3739void BytecodeGenerator::BuildPrivateBrandInitialization(Register receiver,
3740 Variable* brand) {
3741 BuildVariableLoad(brand, HoleCheckMode::kElided);
3742 int depth = execution_context()->ContextChainDepth(brand->scope());
3743 ContextScope* class_context = execution_context()->Previous(depth);
3744 if (class_context) {
3745 Register brand_reg = register_allocator()->NewRegister();
3746 FeedbackSlot slot = feedback_spec()->AddDefineKeyedOwnICSlot();
3747 builder()
3748 ->StoreAccumulatorInRegister(brand_reg)
3749 .LoadAccumulatorWithRegister(class_context->reg())
3750 .DefineKeyedOwnProperty(receiver, brand_reg,
3751 DefineKeyedOwnPropertyFlag::kNoFlags,
3752 feedback_index(slot));
3753 } else {
3754 // We are in the slow case where super() is called from a nested
3755 // arrow function or an eval(), so the class scope context isn't
3756 // tracked in a context register in the stack, and we have to
3757 // walk the context chain from the runtime to find it.
3758 DCHECK_NE(info()->literal()->scope()->outer_scope(), brand->scope());
3759 RegisterList brand_args = register_allocator()->NewRegisterList(4);
3760 builder()
3761 ->StoreAccumulatorInRegister(brand_args[1])
3762 .MoveRegister(receiver, brand_args[0])
3763 .MoveRegister(execution_context()->reg(), brand_args[2])
3764 .LoadLiteral(Smi::FromInt(depth))
3765 .StoreAccumulatorInRegister(brand_args[3])
3766 .CallRuntime(Runtime::kAddPrivateBrand, brand_args);
3767 }
3768}
3769
3770void BytecodeGenerator::BuildInstanceMemberInitialization(Register constructor,
3771 Register instance) {
3772 RegisterList args = register_allocator()->NewRegisterList(1);
3773 Register initializer = register_allocator()->NewRegister();
3774
3775 FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
3776 BytecodeLabel done;
3777
3778 builder()
3779 ->LoadClassFieldsInitializer(constructor, feedback_index(slot))
3780 // TODO(gsathya): This jump can be elided for the base
3781 // constructor and derived constructor. This is only required
3782 // when called from an arrow function.
3783 .JumpIfUndefined(&done)
3784 .StoreAccumulatorInRegister(initializer)
3785 .MoveRegister(instance, args[0])
3786 .CallProperty(initializer, args,
3787 feedback_index(feedback_spec()->AddCallICSlot()))
3788 .Bind(&done);
3789}
3790
3791void BytecodeGenerator::VisitNativeFunctionLiteral(
3792 NativeFunctionLiteral* expr) {
3793 size_t entry = builder()->AllocateDeferredConstantPoolEntry();
3794 // Native functions don't use argument adaption and so have the special
3795 // kDontAdaptArgumentsSentinel as their parameter count.
3796 int index = feedback_spec()->AddCreateClosureParameterCount(
3797 kDontAdaptArgumentsSentinel);
3798 uint8_t flags = CreateClosureFlags::Encode(false, false, false);
3799 builder()->CreateClosure(entry, index, flags);
3800 native_function_literals_.push_back(std::make_pair(expr, entry));
3801}
3802
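// A ConditionalChain is a flattened run of ternaries, e.g.
//   a ? x : b ? y : z
// compiled below as a single sequence of tests and jumps instead of
// recursively nested Conditional visits.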
3803void BytecodeGenerator::VisitConditionalChain(ConditionalChain* expr) {
3804 ConditionalChainControlFlowBuilder conditional_builder(
3805 builder(), block_coverage_builder_, expr,
3806 expr->conditional_chain_length());
3807
3808 HoleCheckElisionMergeScope merge_elider(this);
3809 {
3810 bool should_visit_else_expression = true;
3811 HoleCheckElisionScope elider(this);
3812 for (size_t i = 0; i < expr->conditional_chain_length(); ++i) {
3813 if (expr->condition_at(i)->ToBooleanIsTrue()) {
3814 // Generate then block unconditionally as always true.
3815 should_visit_else_expression = false;
3816 HoleCheckElisionMergeScope::Branch branch(merge_elider);
3817 conditional_builder.ThenAt(i);
3818 VisitForAccumulatorValue(expr->then_expression_at(i));
3819 break;
3820 } else if (expr->condition_at(i)->ToBooleanIsFalse()) {
3821 // Generate else block unconditionally by skipping the then block.
3822 HoleCheckElisionMergeScope::Branch branch(merge_elider);
3823 conditional_builder.ElseAt(i);
3824 } else {
3825 VisitForTest(
3826 expr->condition_at(i), conditional_builder.then_labels_at(i),
3827 conditional_builder.else_labels_at(i), TestFallthrough::kThen);
3828 {
3829 HoleCheckElisionMergeScope::Branch branch(merge_elider);
3830 conditional_builder.ThenAt(i);
3831 VisitForAccumulatorValue(expr->then_expression_at(i));
3832 }
3833 conditional_builder.JumpToEnd();
3834 {
3835 HoleCheckElisionMergeScope::Branch branch(merge_elider);
3836 conditional_builder.ElseAt(i);
3837 }
3838 }
3839 }
3840
3841 if (should_visit_else_expression) {
3842 VisitForAccumulatorValue(expr->else_expression());
3843 }
3844 }
3845 merge_elider.Merge();
3846}
3847
3848void BytecodeGenerator::VisitConditional(Conditional* expr) {
3849 ConditionalControlFlowBuilder conditional_builder(
3850 builder(), block_coverage_builder_, expr);
3851
3852 if (expr->condition()->ToBooleanIsTrue()) {
3853 // Generate then block unconditionally as always true.
3854 conditional_builder.Then();
3855 VisitForAccumulatorValue(expr->then_expression());
3856 } else if (expr->condition()->ToBooleanIsFalse()) {
3857 // Generate else block unconditionally if it exists.
3858 conditional_builder.Else();
3859 VisitForAccumulatorValue(expr->else_expression());
3860 } else {
3861 VisitForTest(expr->condition(), conditional_builder.then_labels(),
3862 conditional_builder.else_labels(), TestFallthrough::kThen);
3863
3864 HoleCheckElisionMergeScope merge_elider(this);
3865 conditional_builder.Then();
3866 {
3867 HoleCheckElisionMergeScope::Branch branch_elider(merge_elider);
3868 VisitForAccumulatorValue(expr->then_expression());
3869 }
3870 conditional_builder.JumpToEnd();
3871
3872 conditional_builder.Else();
3873 {
3874 HoleCheckElisionMergeScope::Branch branch_elider(merge_elider);
3875 VisitForAccumulatorValue(expr->else_expression());
3876 }
3877
3878 merge_elider.Merge();
3879 }
3880}
3881
3882void BytecodeGenerator::VisitLiteral(Literal* expr) {
3883 if (execution_result()->IsEffect()) return;
3884 switch (expr->type()) {
3885 case Literal::kSmi:
3886 builder()->LoadLiteral(expr->AsSmiLiteral());
3887 break;
3888 case Literal::kHeapNumber:
3889 builder()->LoadLiteral(expr->AsNumber());
3890 break;
3891 case Literal::kUndefined:
3892 builder()->LoadUndefined();
3893 break;
3894 case Literal::kBoolean:
3895 builder()->LoadBoolean(expr->ToBooleanIsTrue());
3896 execution_result()->SetResultIsBoolean();
3897 break;
3898 case Literal::kNull:
3899 builder()->LoadNull();
3900 break;
3901 case Literal::kTheHole:
3902 builder()->LoadTheHole();
3903 break;
3904 case Literal::kString:
3905 builder()->LoadLiteral(expr->AsRawString());
3906 execution_result()->SetResultIsInternalizedString();
3907 break;
3908 case Literal::kConsString:
3909 builder()->LoadLiteral(expr->AsConsString());
3910 break;
3911 case Literal::kBigInt:
3912 builder()->LoadLiteral(expr->AsBigInt());
3913 break;
3914 }
3915}
3916
3917void BytecodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
3918 // Materialize a regular expression literal.
3919 builder()->CreateRegExpLiteral(
3920 expr->raw_pattern(), feedback_index(feedback_spec()->AddLiteralSlot()),
3921 expr->flags());
3922}
3923
3924void BytecodeGenerator::BuildCreateObjectLiteral(Register literal,
3925 uint8_t flags, size_t entry) {
3926 // TODO(cbruni): Directly generate runtime call for literals we cannot
3927 // optimize once the CreateShallowObjectLiteral stub is in sync with the TF
3928 // optimizations.
3929 int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
3930 builder()
3931 ->CreateObjectLiteral(entry, literal_index, flags)
3932 .StoreAccumulatorInRegister(literal);
3933}
3934
3935void BytecodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
3936 expr->builder()->InitDepthAndFlags();
3937
3938 // Fast path for the empty object literal which doesn't need an
3939 // AllocationSite.
3940 if (expr->builder()->IsEmptyObjectLiteral()) {
3941 DCHECK(expr->builder()->IsFastCloningSupported());
3942 builder()->CreateEmptyObjectLiteral();
3943 return;
3944 }
3945
3946 Variable* home_object = expr->home_object();
3947 if (home_object != nullptr) {
3948 DCHECK(home_object->is_used());
3949 DCHECK(home_object->IsContextSlot());
3950 }
3951 MultipleEntryBlockContextScope object_literal_context_scope(
3952 this, home_object ? home_object->scope() : nullptr);
3953
3954 // Deep-copy the literal boilerplate.
3955 uint8_t flags = CreateObjectLiteralFlags::Encode(
3956 expr->builder()->ComputeFlags(),
3957 expr->builder()->IsFastCloningSupported());
3958
3959 Register literal = register_allocator()->NewRegister();
3960
3961 // Create literal object.
3962 int property_index = 0;
3963 bool clone_object_spread =
3964 expr->properties()->first()->kind() == ObjectLiteral::Property::SPREAD;
3965 if (clone_object_spread) {
3966 // Avoid the slow path for spreads in the following common cases:
3967 // 1) `let obj = { ...source }`
3968 // 2) `let obj = { ...source, override: 1 }`
3969 // 3) `let obj = { ...source, ...overrides }`
3970 RegisterAllocationScope register_scope(this);
3971 Expression* property = expr->properties()->first()->value();
3972 Register from_value = VisitForRegisterValue(property);
3973 int clone_index = feedback_index(feedback_spec()->AddCloneObjectSlot());
3974 builder()->CloneObject(from_value, flags, clone_index);
3975 builder()->StoreAccumulatorInRegister(literal);
3976 property_index++;
3977 } else {
3978 size_t entry;
3979 // If constant properties is an empty fixed array, use a cached empty fixed
3980 // array to ensure it's only added to the constant pool once.
3981 if (expr->builder()->properties_count() == 0) {
3982 entry = builder()->EmptyObjectBoilerplateDescriptionConstantPoolEntry();
3983 } else {
3984 entry = builder()->AllocateDeferredConstantPoolEntry();
3985 object_literals_.push_back(std::make_pair(expr->builder(), entry));
3986 }
3987 BuildCreateObjectLiteral(literal, flags, entry);
3988 }
3989
3990 // Store computed values into the literal.
3991 AccessorTable<ObjectLiteral::Property> accessor_table(zone());
3992 for (; property_index < expr->properties()->length(); property_index++) {
3993 ObjectLiteral::Property* property = expr->properties()->at(property_index);
3994 if (property->is_computed_name()) break;
3995 if (!clone_object_spread && property->IsCompileTimeValue()) continue;
3996
3997 RegisterAllocationScope inner_register_scope(this);
3998 Literal* key = property->key()->AsLiteral();
3999 switch (property->kind()) {
4000 case ObjectLiteral::Property::SPREAD:
4001 UNREACHABLE();
4002 case ObjectLiteral::Property::CONSTANT:
4003 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
4004 DCHECK(clone_object_spread || !property->value()->IsCompileTimeValue());
4005 [[fallthrough]];
4006 case ObjectLiteral::Property::COMPUTED: {
4007 // It is safe to use [[Put]] here because the boilerplate already
4008 // contains computed properties with an uninitialized value.
4009 Register key_reg;
4010 if (key->IsStringLiteral()) {
4011 DCHECK(key->IsPropertyName());
4012 } else {
4013 key_reg = register_allocator()->NewRegister();
4014 builder()->SetExpressionPosition(property->key());
4015 VisitForRegisterValue(property->key(), key_reg);
4016 }
4017
4018 object_literal_context_scope.SetEnteredIf(
4019 property->value()->IsConciseMethodDefinition());
4020 builder()->SetExpressionPosition(property->value());
4021
4022 if (property->emit_store()) {
4023 VisitForAccumulatorValue(property->value());
4024 if (key->IsStringLiteral()) {
4025 FeedbackSlot slot = feedback_spec()->AddDefineNamedOwnICSlot();
4026 builder()->DefineNamedOwnProperty(literal, key->AsRawPropertyName(),
4027 feedback_index(slot));
4028 } else {
4029 FeedbackSlot slot = feedback_spec()->AddDefineKeyedOwnICSlot();
4030 builder()->DefineKeyedOwnProperty(
4031 literal, key_reg, DefineKeyedOwnPropertyFlag::kNoFlags,
4032 feedback_index(slot));
4033 }
4034 } else {
4035 VisitForEffect(property->value());
4036 }
4037 break;
4038 }
4039 case ObjectLiteral::Property::PROTOTYPE: {
4040 // __proto__:null is handled by CreateObjectLiteral.
4041 if (property->IsNullPrototype()) break;
4042 DCHECK(property->emit_store());
4043 DCHECK(!property->NeedsSetFunctionName());
4044 RegisterList args = register_allocator()->NewRegisterList(2);
4045 builder()->MoveRegister(literal, args[0]);
4046 object_literal_context_scope.SetEnteredIf(false);
4047 builder()->SetExpressionPosition(property->value());
4048 VisitForRegisterValue(property->value(), args[1]);
4049 builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
4050 break;
4051 }
4052 case ObjectLiteral::Property::GETTER:
4053 if (property->emit_store()) {
4054 accessor_table.LookupOrInsert(key)->getter = property;
4055 }
4056 break;
4057 case ObjectLiteral::Property::SETTER:
4058 if (property->emit_store()) {
4059 accessor_table.LookupOrInsert(key)->setter = property;
4060 }
4061 break;
4062 }
4063 }
4064
4065 // Define accessors, using only a single call to the runtime for each pair
4066 // of corresponding getters and setters.
4067 object_literal_context_scope.SetEnteredIf(true);
4068 for (auto accessors : accessor_table.ordered_accessors()) {
4069 RegisterAllocationScope inner_register_scope(this);
4070 RegisterList args = register_allocator()->NewRegisterList(5);
4071 builder()->MoveRegister(literal, args[0]);
4072 VisitForRegisterValue(accessors.first, args[1]);
4073 VisitLiteralAccessor(accessors.second->getter, args[2]);
4074 VisitLiteralAccessor(accessors.second->setter, args[3]);
4075 builder()
4076 ->LoadLiteral(Smi::FromInt(NONE))
4077 .StoreAccumulatorInRegister(args[4])
4078 .CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, args);
4079 }
4080
4081 // Object literals have two parts. The "static" part on the left contains no
4082 // computed property names, and so we can compute its map ahead of time; see
4083 // Runtime_CreateObjectLiteralBoilerplate. The second "dynamic" part starts
4084 // with the first computed property name and continues with all properties to
4085 // its right. All the code from above initializes the static component of the
4086 // object literal, and arranges for the map of the result to reflect the
4087 // static order in which the keys appear. For the dynamic properties, we
4088 // compile them into a series of "SetOwnProperty" runtime calls. This will
4089 // preserve insertion order.
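// For example, in
//   var o = {a: 1, [b]: 2, c: 3};
// the static part is {a: 1} (its shape is baked into the boilerplate map),
// while [b]: 2 and the following c: 3 form the dynamic part emitted as
// per-property runtime calls below.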
4090 for (; property_index < expr->properties()->length(); property_index++) {
4091 ObjectLiteral::Property* property = expr->properties()->at(property_index);
4092 RegisterAllocationScope inner_register_scope(this);
4093
4094 bool should_be_in_object_literal_scope =
4095 (property->value()->IsConciseMethodDefinition() ||
4096 property->value()->IsAccessorFunctionDefinition());
4097
4098 if (property->IsPrototype()) {
4099 // __proto__:null is handled by CreateObjectLiteral.
4100 if (property->IsNullPrototype()) continue;
4101 DCHECK(property->emit_store());
4102 DCHECK(!property->NeedsSetFunctionName());
4103 RegisterList args = register_allocator()->NewRegisterList(2);
4104 builder()->MoveRegister(literal, args[0]);
4105
4106 DCHECK(!should_be_in_object_literal_scope);
4107 object_literal_context_scope.SetEnteredIf(false);
4108 builder()->SetExpressionPosition(property->value());
4109 VisitForRegisterValue(property->value(), args[1]);
4110 builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
4111 continue;
4112 }
4113
4114 switch (property->kind()) {
4115 case ObjectLiteral::Property::CONSTANT:
4116 case ObjectLiteral::Property::COMPUTED:
4117 case ObjectLiteral::Property::MATERIALIZED_LITERAL: {
4118 // Computed property keys don't belong to the object literal scope (even
4119 // if they're syntactically inside it).
4120 if (property->is_computed_name()) {
4121 object_literal_context_scope.SetEnteredIf(false);
4122 }
4123 Register key = register_allocator()->NewRegister();
4124 BuildLoadPropertyKey(property, key);
4125
4126 object_literal_context_scope.SetEnteredIf(
4127 should_be_in_object_literal_scope);
4128 builder()->SetExpressionPosition(property->value());
4129
4130 DefineKeyedOwnPropertyInLiteralFlags data_property_flags =
4131 DefineKeyedOwnPropertyInLiteralFlag::kNoFlags;
4132 if (property->NeedsSetFunctionName()) {
4133 // Static class fields require the name property to be set on
4134 // the class, meaning we can't wait until the
4135 // DefineKeyedOwnPropertyInLiteral call later to set the name.
4136 if (property->value()->IsClassLiteral() &&
4137 property->value()->AsClassLiteral()->static_initializer() !=
4138 nullptr) {
4139 VisitClassLiteral(property->value()->AsClassLiteral(), key);
4140 } else {
4141 data_property_flags |=
4142 DefineKeyedOwnPropertyInLiteralFlag::kSetFunctionName;
4143 VisitForAccumulatorValue(property->value());
4144 }
4145 } else {
4146 VisitForAccumulatorValue(property->value());
4147 }
4148
4149 FeedbackSlot slot =
4150 feedback_spec()->AddDefineKeyedOwnPropertyInLiteralICSlot();
4151 builder()->DefineKeyedOwnPropertyInLiteral(
4152 literal, key, data_property_flags, feedback_index(slot));
4153 break;
4154 }
4155 case ObjectLiteral::Property::GETTER:
4156 case ObjectLiteral::Property::SETTER: {
4157 // Computed property keys don't belong to the object literal scope (even
4158 // if they're syntactically inside it).
4159 if (property->is_computed_name()) {
4160 object_literal_context_scope.SetEnteredIf(false);
4161 }
4162 RegisterList args = register_allocator()->NewRegisterList(4);
4163 builder()->MoveRegister(literal, args[0]);
4164 BuildLoadPropertyKey(property, args[1]);
4165
4166 DCHECK(should_be_in_object_literal_scope);
4167 object_literal_context_scope.SetEnteredIf(true);
4168 builder()->SetExpressionPosition(property->value());
4169 VisitForRegisterValue(property->value(), args[2]);
4170 builder()
4171 ->LoadLiteral(Smi::FromInt(NONE))
4172 .StoreAccumulatorInRegister(args[3]);
4173 Runtime::FunctionId function_id =
4174 property->kind() == ObjectLiteral::Property::GETTER
4175 ? Runtime::kDefineGetterPropertyUnchecked
4176 : Runtime::kDefineSetterPropertyUnchecked;
4177 builder()->CallRuntime(function_id, args);
4178 break;
4179 }
4180 case ObjectLiteral::Property::SPREAD: {
4181 // TODO(olivf, chrome:1204540) This can be slower than the Babel
4182 // translation. Should we compile this to a copying loop in bytecode?
4183 RegisterList args = register_allocator()->NewRegisterList(2);
4184 builder()->MoveRegister(literal, args[0]);
4185 builder()->SetExpressionPosition(property->value());
4186 object_literal_context_scope.SetEnteredIf(false);
4187 VisitForRegisterValue(property->value(), args[1]);
4188 builder()->CallRuntime(Runtime::kInlineCopyDataProperties, args);
4189 break;
4190 }
4191 case ObjectLiteral::Property::PROTOTYPE:
4192 UNREACHABLE(); // Handled specially above.
4193 }
4194 }
4195
4196 if (home_object != nullptr) {
4197 object_literal_context_scope.SetEnteredIf(true);
4198 builder()->LoadAccumulatorWithRegister(literal);
4199 BuildVariableAssignment(home_object, Token::kInit, HoleCheckMode::kElided);
4200 }
4201 // Make sure to exit the scope before materialising the value into the
4202 // accumulator, to prevent the context scope from clobbering it.
4203 object_literal_context_scope.SetEnteredIf(false);
4204 builder()->LoadAccumulatorWithRegister(literal);
4205}
4206
4207// Fill an array with values from an iterator, starting at a given index. It is
4208// guaranteed that the loop will only terminate if the iterator is exhausted, or
4209// if one of iterator.next(), value.done, or value.value fails.
4210//
4211// In pseudocode:
4212//
4213// loop {
4214// value = iterator.next()
4215// if (value.done) break;
4216// value = value.value
4217// array[index++] = value
4218// }
4219void BytecodeGenerator::BuildFillArrayWithIterator(
4220 IteratorRecord iterator, Register array, Register index, Register value,
4221 FeedbackSlot next_value_slot, FeedbackSlot next_done_slot,
4222 FeedbackSlot index_slot, FeedbackSlot element_slot) {
4223 DCHECK(array.is_valid());
4224 DCHECK(index.is_valid());
4225 DCHECK(value.is_valid());
4226
4227 LoopBuilder loop_builder(builder(), nullptr, nullptr, feedback_spec());
4228 LoopScope loop_scope(this, &loop_builder);
4229
4230 // Call the iterator's .next() method. Break from the loop if the `done`
4231 // property is truthy, otherwise load the value from the iterator result and
4232 // append the argument.
4233 BuildIteratorNext(iterator, value);
4234 builder()->LoadNamedProperty(
4235 value, ast_string_constants()->done_string(),
4236 feedback_index(feedback_spec()->AddLoadICSlot()));
4237 loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
4238
4239 loop_builder.LoopBody();
4240 builder()
4241 // value = value.value
4242 ->LoadNamedProperty(value, ast_string_constants()->value_string(),
4243 feedback_index(next_value_slot))
4244 // array[index] = value
4245 .StoreInArrayLiteral(array, index, feedback_index(element_slot))
4246 // index++
4247 .LoadAccumulatorWithRegister(index)
4248 .UnaryOperation(Token::kInc, feedback_index(index_slot))
4249 .StoreAccumulatorInRegister(index);
4250 loop_builder.BindContinueTarget();
4251}
4252
4253void BytecodeGenerator::BuildCreateArrayLiteral(
4254 const ZonePtrList<Expression>* elements, ArrayLiteral* expr) {
4255 RegisterAllocationScope register_scope(this);
4256 // Make this the first register allocated so that it has a chance of aliasing
4257 // the next register allocated after returning from this function.
4258 Register array = register_allocator()->NewRegister();
4259 Register index = register_allocator()->NewRegister();
4260 SharedFeedbackSlot element_slot(feedback_spec(),
4261 FeedbackSlotKind::kStoreInArrayLiteral);
4262 ZonePtrList<Expression>::const_iterator current = elements->begin();
4263 ZonePtrList<Expression>::const_iterator end = elements->end();
4264 bool is_empty = elements->is_empty();
4265
4266 if (!is_empty && (*current)->IsSpread()) {
4267 // If we have a leading spread, use CreateArrayFromIterable to create
4268 // an array from it and then add the remaining components to that array.
4269 VisitForAccumulatorValue(*current);
4270 builder()->SetExpressionPosition((*current)->AsSpread()->expression());
4271 builder()->CreateArrayFromIterable().StoreAccumulatorInRegister(array);
4272
4273 if (++current != end) {
4274 // If there are remaining elements, prepare the index register that is
4275 // used for adding those elements. The next index is the length of the
4276 // newly created array.
4277 auto length = ast_string_constants()->length_string();
4278 int length_load_slot = feedback_index(feedback_spec()->AddLoadICSlot());
4279 builder()
4280 ->LoadNamedProperty(array, length, length_load_slot)
4281 .StoreAccumulatorInRegister(index);
4282 }
4283 } else {
4284 // There are some elements before the first (if any) spread, and we can
4285 // use a boilerplate when creating the initial array from those elements.
4286
4287 // First, allocate a constant pool entry for the boilerplate that will
4288 // be created during finalization, and will contain all the constant
4289 // elements before the first spread. This also handles the empty array case
4290 // and one-shot optimization.
4291
4292 ArrayLiteralBoilerplateBuilder* array_literal_builder = nullptr;
4293 if (expr != nullptr) {
4294 array_literal_builder = expr->builder();
4295 } else {
4296 DCHECK(!elements->is_empty());
4297
4298 // get first_spread_index
4299 int first_spread_index = -1;
4300 for (auto iter = elements->begin(); iter != elements->end(); iter++) {
4301 if ((*iter)->IsSpread()) {
4302 first_spread_index = static_cast<int>(iter - elements->begin());
4303 break;
4304 }
4305 }
4306
4307 array_literal_builder = zone()->New<ArrayLiteralBoilerplateBuilder>(
4308 elements, first_spread_index);
4309 array_literal_builder->InitDepthAndFlags();
4310 }
4311
4312 DCHECK(array_literal_builder != nullptr);
4313 uint8_t flags = CreateArrayLiteralFlags::Encode(
4314 array_literal_builder->IsFastCloningSupported(),
4315 array_literal_builder->ComputeFlags());
4316 if (is_empty) {
4317 // Empty array literal fast-path.
4318 int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
4319 DCHECK(array_literal_builder->IsFastCloningSupported());
4320 builder()->CreateEmptyArrayLiteral(literal_index);
4321 } else {
4322 // Create array literal from boilerplate.
4323 size_t entry = builder()->AllocateDeferredConstantPoolEntry();
4324 array_literals_.push_back(std::make_pair(array_literal_builder, entry));
4325 int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
4326 builder()->CreateArrayLiteral(entry, literal_index, flags);
4327 }
4328 builder()->StoreAccumulatorInRegister(array);
4329
4330 ZonePtrList<Expression>::const_iterator first_spread_or_end =
4331 array_literal_builder->first_spread_index() >= 0
4332 ? current + array_literal_builder->first_spread_index()
4333 : end;
4334
4335 // Insert the missing non-constant elements, up until the first spread
4336 // index, into the initial array (the remaining elements will be inserted
4337 // below).
4338 DCHECK_EQ(current, elements->begin());
4339 int array_index = 0;
4340 for (; current != first_spread_or_end; ++current, array_index++) {
4341 Expression* subexpr = *current;
4342 DCHECK(!subexpr->IsSpread());
4343 // Skip the constants.
4344 if (subexpr->IsCompileTimeValue()) continue;
4345
4346 builder()
4347 ->LoadLiteral(Smi::FromInt(array_index))
4348 .StoreAccumulatorInRegister(index);
4349 VisitForAccumulatorValue(subexpr);
4350 builder()->StoreInArrayLiteral(array, index,
4351 feedback_index(element_slot.Get()));
4352 }
4353
4354 if (current != end) {
4355 // If there are remaining elements, prepare the index register
4356 // to store the next element, which comes from the first spread.
4357 builder()
4358 ->LoadLiteral(Smi::FromInt(array_index))
4359 .StoreAccumulatorInRegister(index);
4360 }
4361 }
4362
4363 // Now build insertions for the remaining elements from current to end.
4364 SharedFeedbackSlot index_slot(feedback_spec(), FeedbackSlotKind::kBinaryOp);
4365 SharedFeedbackSlot length_slot(
4366 feedback_spec(), feedback_spec()->GetStoreICSlot(LanguageMode::kStrict));
4367 for (; current != end; ++current) {
4368 Expression* subexpr = *current;
4369 if (subexpr->IsSpread()) {
4370 RegisterAllocationScope scope(this);
4371 builder()->SetExpressionPosition(subexpr->AsSpread()->expression());
4372 VisitForAccumulatorValue(subexpr->AsSpread()->expression());
4373 builder()->SetExpressionPosition(subexpr->AsSpread()->expression());
4374 IteratorRecord iterator = BuildGetIteratorRecord(IteratorType::kNormal);
4375
4376 Register value = register_allocator()->NewRegister();
4377 FeedbackSlot next_value_load_slot = feedback_spec()->AddLoadICSlot();
4378 FeedbackSlot next_done_load_slot = feedback_spec()->AddLoadICSlot();
4379 FeedbackSlot real_index_slot = index_slot.Get();
4380 FeedbackSlot real_element_slot = element_slot.Get();
4381 BuildFillArrayWithIterator(iterator, array, index, value,
4382 next_value_load_slot, next_done_load_slot,
4383 real_index_slot, real_element_slot);
4384 } else if (!subexpr->IsTheHoleLiteral()) {
4385 // literal[index++] = subexpr
4386 VisitForAccumulatorValue(subexpr);
4387 builder()
4388 ->StoreInArrayLiteral(array, index,
4389 feedback_index(element_slot.Get()))
4390 .LoadAccumulatorWithRegister(index);
4391 // Only increase the index if we are not the last element.
4392 if (current + 1 != end) {
4393 builder()
4394 ->UnaryOperation(Token::kInc, feedback_index(index_slot.Get()))
4395 .StoreAccumulatorInRegister(index);
4396 }
4397 } else {
4398 // literal.length = ++index
4399 // length_slot is only used when there are holes.
4400 auto length = ast_string_constants()->length_string();
4401 builder()
4402 ->LoadAccumulatorWithRegister(index)
4403 .UnaryOperation(Token::kInc, feedback_index(index_slot.Get()))
4404 .StoreAccumulatorInRegister(index)
4405 .SetNamedProperty(array, length, feedback_index(length_slot.Get()),
4406 LanguageMode::kStrict);
4407 }
4408 }
4409
4410 builder()->LoadAccumulatorWithRegister(array);
4411}
4412
4413void BytecodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
4414 expr->builder()->InitDepthAndFlags();
4415 BuildCreateArrayLiteral(expr->values(), expr);
4416}
4417
4418void BytecodeGenerator::VisitVariableProxy(VariableProxy* proxy) {
4419 builder()->SetExpressionPosition(proxy);
4420 BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
4421}
4422
4423bool BytecodeGenerator::IsVariableInRegister(Variable* var, Register reg) {
4424 BytecodeRegisterOptimizer* optimizer = builder()->GetRegisterOptimizer();
4425 if (optimizer) {
4426 return optimizer->IsVariableInRegister(var, reg);
4427 }
4428 return false;
4429}
4430
4431void BytecodeGenerator::SetVariableInRegister(Variable* var, Register reg) {
4432 BytecodeRegisterOptimizer* optimizer = builder()->GetRegisterOptimizer();
4433 if (optimizer) {
4434 optimizer->SetVariableInRegister(var, reg);
4435 }
4436}
4437
4438Variable* BytecodeGenerator::GetPotentialVariableInAccumulator() {
4439 BytecodeRegisterOptimizer* optimizer = builder()->GetRegisterOptimizer();
4440 if (optimizer) {
4441 return optimizer->GetPotentialVariableInAccumulator();
4442 }
4443 return nullptr;
4444}
4445
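// Loads {variable} into the accumulator, dispatching on the variable's
// location (local, parameter, global, context, lookup, module, or REPL
// global) and emitting a TDZ hole check where one is still required.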
4446void BytecodeGenerator::BuildVariableLoad(Variable* variable,
4447 HoleCheckMode hole_check_mode,
4448 TypeofMode typeof_mode) {
4449 switch (variable->location()) {
4450 case VariableLocation::LOCAL: {
4451 Register source(builder()->Local(variable->index()));
4452 // We need to load the variable into the accumulator, even when in a
4453 // VisitForRegisterScope, in order to avoid register aliasing if
4454 // subsequent expressions assign to the same variable.
4455 builder()->LoadAccumulatorWithRegister(source);
4456 if (VariableNeedsHoleCheckInCurrentBlock(variable, hole_check_mode)) {
4457 BuildThrowIfHole(variable);
4458 }
4459 break;
4460 }
4461 case VariableLocation::PARAMETER: {
4462 Register source;
4463 if (variable->IsReceiver()) {
4464 source = builder()->Receiver();
4465 } else {
4466 source = builder()->Parameter(variable->index());
4467 }
4468 // We need to load the variable into the accumulator, even when in a
4469 // VisitForRegisterScope, in order to avoid register aliasing if
4470 // subsequent expressions assign to the same variable.
4471 builder()->LoadAccumulatorWithRegister(source);
4472 if (VariableNeedsHoleCheckInCurrentBlock(variable, hole_check_mode)) {
4473 BuildThrowIfHole(variable);
4474 }
4475 break;
4476 }
4477 case VariableLocation::UNALLOCATED: {
4478 // The global identifier "undefined" is immutable. Everything
4479 // else could be reassigned. For performance, we do a pointer comparison
4480 // rather than checking if the raw_name is really "undefined".
4481 if (variable->raw_name() == ast_string_constants()->undefined_string()) {
4482 builder()->LoadUndefined();
4483 } else {
4484 FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
4485 builder()->LoadGlobal(variable->raw_name(), feedback_index(slot),
4486 typeof_mode);
4487 }
4488 break;
4489 }
4490 case VariableLocation::CONTEXT: {
4491 int depth = execution_context()->ContextChainDepth(variable->scope());
4492 ContextScope* context = execution_context()->Previous(depth);
4493 Register context_reg;
4494 if (context) {
4495 context_reg = context->reg();
4496 depth = 0;
4497 } else {
4498 context_reg = execution_context()->reg();
4499 }
4500
4501 BytecodeArrayBuilder::ContextSlotMutability immutable =
4502 (variable->maybe_assigned() == kNotAssigned)
4503 ? BytecodeArrayBuilder::kImmutableSlot
4504 : BytecodeArrayBuilder::kMutableSlot;
4505 Register acc = Register::virtual_accumulator();
4506 if (immutable == BytecodeArrayBuilder::kImmutableSlot &&
4507 IsVariableInRegister(variable, acc)) {
4508 return;
4509 }
4510
4511 builder()->LoadContextSlot(context_reg, variable, depth, immutable);
4512 if (VariableNeedsHoleCheckInCurrentBlock(variable, hole_check_mode)) {
4513 BuildThrowIfHole(variable);
4514 }
4515 if (immutable == BytecodeArrayBuilder::kImmutableSlot) {
4516 SetVariableInRegister(variable, acc);
4517 }
4518 break;
4519 }
4520 case VariableLocation::LOOKUP: {
4521 switch (variable->mode()) {
4522 case VariableMode::kDynamicLocal: {
4523 Variable* local_variable = variable->local_if_not_shadowed();
4524 int depth =
4525 execution_context()->ContextChainDepth(local_variable->scope());
4526 ContextKind context_kind = (local_variable->scope()->is_script_scope()
4527 ? ContextKind::kScriptContext
4528 : ContextKind::kDefault);
4529 builder()->LoadLookupContextSlot(variable->raw_name(), typeof_mode,
4530 context_kind,
4531 local_variable->index(), depth);
4532 if (VariableNeedsHoleCheckInCurrentBlock(local_variable,
4533 hole_check_mode)) {
4534 BuildThrowIfHole(local_variable);
4535 }
4536 break;
4537 }
4538 case VariableMode::kDynamicGlobal: {
4539 int depth =
4540 current_scope()->ContextChainLengthUntilOutermostSloppyEval();
4541 // TODO(1008414): Add back caching here when bug is fixed properly.
4542 FeedbackSlot slot = feedback_spec()->AddLoadGlobalICSlot(typeof_mode);
4543
4544 builder()->LoadLookupGlobalSlot(variable->raw_name(), typeof_mode,
4545 feedback_index(slot), depth);
4546 break;
4547 }
4548 default: {
4549 // Normally, private names should not be looked up dynamically,
4550 // but we make an exception in debug-evaluate, in that case the
4551 // lookup will be done in %SetPrivateMember() and %GetPrivateMember()
4552 // calls, not here.
4553 DCHECK(!variable->raw_name()->IsPrivateName());
4554 builder()->LoadLookupSlot(variable->raw_name(), typeof_mode);
4555 break;
4556 }
4557 }
4558 break;
4559 }
4560 case VariableLocation::MODULE: {
4561 int depth = execution_context()->ContextChainDepth(variable->scope());
4562 builder()->LoadModuleVariable(variable->index(), depth);
4563 if (VariableNeedsHoleCheckInCurrentBlock(variable, hole_check_mode)) {
4564 BuildThrowIfHole(variable);
4565 }
4566 break;
4567 }
4568 case VariableLocation::REPL_GLOBAL: {
4569 DCHECK(variable->IsReplGlobal());
4570 FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
4571 builder()->LoadGlobal(variable->raw_name(), feedback_index(slot),
4572 typeof_mode);
4573 break;
4574 }
4575 }
4576}
4577
4578void BytecodeGenerator::BuildVariableLoadForAccumulatorValue(
4579 Variable* variable, HoleCheckMode hole_check_mode, TypeofMode typeof_mode) {
4580 ValueResultScope accumulator_result(this);
4581 BuildVariableLoad(variable, hole_check_mode, typeof_mode);
4582}
4583
4584void BytecodeGenerator::BuildReturn(int source_position) {
4585 if (v8_flags.trace) {
4586 RegisterAllocationScope register_scope(this);
4587 Register result = register_allocator()->NewRegister();
4588 // Runtime returns {result} value, preserving accumulator.
4589 builder()->StoreAccumulatorInRegister(result).CallRuntime(
4590 Runtime::kTraceExit, result);
4591 }
4592 builder()->SetStatementPosition(source_position);
4593 builder()->Return();
4594}
4595
4596void BytecodeGenerator::BuildAsyncReturn(int source_position) {
4597 RegisterAllocationScope register_scope(this);
4598
4599 if (IsAsyncGeneratorFunction(info()->literal()->kind())) {
4600 RegisterList args = register_allocator()->NewRegisterList(3);
4601 builder()
4602 ->MoveRegister(generator_object(), args[0]) // generator
4603 .StoreAccumulatorInRegister(args[1]) // value
4604 .LoadTrue()
4605 .StoreAccumulatorInRegister(args[2]) // done
4606 .CallRuntime(Runtime::kInlineAsyncGeneratorResolve, args);
4607 } else {
4608 DCHECK(IsAsyncFunction(info()->literal()->kind()) ||
4609 IsModuleWithTopLevelAwait(info()->literal()->kind()));
4610 RegisterList args = register_allocator()->NewRegisterList(2);
4611 builder()
4612 ->MoveRegister(generator_object(), args[0]) // generator
4613 .StoreAccumulatorInRegister(args[1]) // value
4614 .CallRuntime(Runtime::kInlineAsyncFunctionResolve, args);
4615 }
4616
4617 BuildReturn(source_position);
4618}
4619
4620void BytecodeGenerator::BuildReThrow() { builder()->ReThrow(); }
4621
4622void BytecodeGenerator::RememberHoleCheckInCurrentBlock(Variable* variable) {
4623 if (!v8_flags.ignition_elide_redundant_tdz_checks) return;
4624
4625 // The first N-1 variables that need hole checks may be cached in a bitmap to
4626 // elide subsequent hole checks in the same basic block, where N is
4627 // Variable::kHoleCheckBitmapBits.
4628 //
4629 // This numbering is done during bytecode generation instead of scope analysis
4630 // for 2 reasons:
4631 //
4632 // 1. There may be multiple eagerly compiled inner functions during a single
4633 // run of scope analysis, so a global numbering will result in fewer variables
4634 // with cacheable hole checks.
4635 //
4636 // 2. Compiler::CollectSourcePositions reparses functions and checks that the
4637 // recompiled bytecode is identical. Therefore the numbering must be kept
4638 // identical regardless of whether a function is eagerly compiled as part of
4639 // an outer compilation or recompiled during source position collection. The
4640 // simplest way to guarantee identical numbering is to scope it to the
4641 // compilation instead of scope analysis.
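 // For example, when the same let-declared variable is read twice within one
 // basic block, only the first read needs to emit a hole check; the bitmap
 // records that the check has happened, so the second check is elided.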
4642 variable->RememberHoleCheckInBitmap(hole_check_bitmap_,
4643 vars_in_hole_check_bitmap_);
4644}
4645
4646void BytecodeGenerator::BuildThrowIfHole(Variable* variable) {
4647 if (variable->is_this()) {
4648 DCHECK(variable->mode() == VariableMode::kConst);
4649 builder()->ThrowSuperNotCalledIfHole();
4650 } else {
4651 builder()->ThrowReferenceErrorIfHole(variable->raw_name());
4652 }
4653 RememberHoleCheckInCurrentBlock(variable);
4654}
4655
4656bool BytecodeGenerator::VariableNeedsHoleCheckInCurrentBlock(
4657 Variable* variable, HoleCheckMode hole_check_mode) {
4658 return hole_check_mode == HoleCheckMode::kRequired &&
4659 !variable->HasRememberedHoleCheck(hole_check_bitmap_);
4660}
4661
4662bool BytecodeGenerator::VariableNeedsHoleCheckInCurrentBlockForAssignment(
4663 Variable* variable, Token::Value op, HoleCheckMode hole_check_mode) {
4664 return VariableNeedsHoleCheckInCurrentBlock(variable, hole_check_mode) ||
4665 (variable->is_this() && variable->mode() == VariableMode::kConst &&
4666 op == Token::kInit);
4667}
4668
4669void BytecodeGenerator::BuildHoleCheckForVariableAssignment(Variable* variable,
4670 Token::Value op) {
4671 DCHECK(!IsPrivateMethodOrAccessorVariableMode(variable->mode()));
4672 DCHECK(VariableNeedsHoleCheckInCurrentBlockForAssignment(
4673 variable, op, HoleCheckMode::kRequired));
4674 if (variable->is_this()) {
4675 DCHECK(variable->mode() == VariableMode::kConst && op == Token::kInit);
4676 // Perform an initialization check for 'this'. 'this' variable is the
4677 // only variable able to trigger bind operations outside the TDZ
4678 // via 'super' calls.
4679 //
4680 // Do not remember the hole check because this bytecode throws if 'this' is
4681 // *not* the hole, i.e. the opposite of the TDZ hole check.
4682 builder()->ThrowSuperAlreadyCalledIfNotHole();
4683 } else {
4684 // Perform an initialization check for let/const declared variables.
4685 // E.g. let x = (x = 20); is not allowed.
4686 DCHECK(IsLexicalVariableMode(variable->mode()));
4687 BuildThrowIfHole(variable);
4688 }
4689}
4690
4691void BytecodeGenerator::BuildVariableAssignment(
4692 Variable* variable, Token::Value op, HoleCheckMode hole_check_mode,
4693 LookupHoistingMode lookup_hoisting_mode) {
4694 VariableMode mode = variable->mode();
4695 RegisterAllocationScope assignment_register_scope(this);
4696 switch (variable->location()) {
4697 case VariableLocation::PARAMETER:
4698 case VariableLocation::LOCAL: {
4699 Register destination;
4700 if (VariableLocation::PARAMETER == variable->location()) {
4701 if (variable->IsReceiver()) {
4702 destination = builder()->Receiver();
4703 } else {
4704 destination = builder()->Parameter(variable->index());
4705 }
4706 } else {
4707 destination = builder()->Local(variable->index());
4708 }
4709
4710 if (VariableNeedsHoleCheckInCurrentBlockForAssignment(variable, op,
4711 hole_check_mode)) {
4712 // Load destination to check for hole.
4713 Register value_temp = register_allocator()->NewRegister();
4714 builder()
4715 ->StoreAccumulatorInRegister(value_temp)
4716 .LoadAccumulatorWithRegister(destination);
4717 BuildHoleCheckForVariableAssignment(variable, op);
4718 builder()->LoadAccumulatorWithRegister(value_temp);
4719 }
4720
4721 if ((mode != VariableMode::kConst && mode != VariableMode::kUsing &&
4722 mode != VariableMode::kAwaitUsing) ||
4723 op == Token::kInit) {
4724 if (op == Token::kInit) {
4725 if (variable->HasHoleCheckUseInSameClosureScope()) {
4726 // After initializing a variable it won't be the hole anymore, so
4727 // elide subsequent checks.
4728 RememberHoleCheckInCurrentBlock(variable);
4729 }
4730 if (mode == VariableMode::kUsing) {
4731 RegisterList args = register_allocator()->NewRegisterList(2);
4732 builder()
4733 ->MoveRegister(current_disposables_stack_, args[0])
4734 .StoreAccumulatorInRegister(args[1])
4735 .CallRuntime(Runtime::kAddDisposableValue, args);
4736 } else if (mode == VariableMode::kAwaitUsing) {
4737 RegisterList args = register_allocator()->NewRegisterList(2);
4738 builder()
4739 ->MoveRegister(current_disposables_stack_, args[0])
4740 .StoreAccumulatorInRegister(args[1])
4741 .CallRuntime(Runtime::kAddAsyncDisposableValue, args);
4742 }
4743 }
4744 builder()->StoreAccumulatorInRegister(destination);
4745 } else if (variable->throw_on_const_assignment(language_mode()) &&
4746 mode == VariableMode::kConst) {
4747 builder()->CallRuntime(Runtime::kThrowConstAssignError);
4748 } else if (variable->throw_on_const_assignment(language_mode()) &&
4749 mode == VariableMode::kUsing) {
4750 builder()->CallRuntime(Runtime::kThrowUsingAssignError);
4751 }
4752 break;
4753 }
4754 case VariableLocation::UNALLOCATED: {
4755 BuildStoreGlobal(variable);
4756 break;
4757 }
4758 case VariableLocation::CONTEXT: {
4759 int depth = execution_context()->ContextChainDepth(variable->scope());
4760 ContextScope* context = execution_context()->Previous(depth);
4761 Register context_reg;
4762
4763 if (context) {
4764 context_reg = context->reg();
4765 depth = 0;
4766 } else {
4767 context_reg = execution_context()->reg();
4768 }
4769
4770 if (VariableNeedsHoleCheckInCurrentBlockForAssignment(variable, op,
4771 hole_check_mode)) {
4772 // Load destination to check for hole.
4773 Register value_temp = register_allocator()->NewRegister();
4774 builder()
4775 ->StoreAccumulatorInRegister(value_temp)
4776 .LoadContextSlot(context_reg, variable, depth,
4777 BytecodeArrayBuilder::kMutableSlot);
4778
4779 BuildHoleCheckForVariableAssignment(variable, op);
4780 builder()->LoadAccumulatorWithRegister(value_temp);
4781 }
4782
4783 if (mode != VariableMode::kConst || op == Token::kInit) {
4784 if (op == Token::kInit &&
4785 variable->HasHoleCheckUseInSameClosureScope()) {
4786 // After initializing a variable it won't be the hole anymore, so
4787 // elide subsequent checks.
4788 RememberHoleCheckInCurrentBlock(variable);
4789 }
4790 builder()->StoreContextSlot(context_reg, variable, depth);
4791 } else if (variable->throw_on_const_assignment(language_mode())) {
4792 builder()->CallRuntime(Runtime::kThrowConstAssignError);
4793 }
4794 break;
4795 }
4796 case VariableLocation::LOOKUP: {
4797 builder()->StoreLookupSlot(variable->raw_name(), language_mode(),
4798 lookup_hoisting_mode);
4799 break;
4800 }
4801 case VariableLocation::MODULE: {
4802 DCHECK(IsDeclaredVariableMode(mode));
4803
4804 if (mode == VariableMode::kConst && op != Token::kInit) {
4805 builder()->CallRuntime(Runtime::kThrowConstAssignError);
4806 break;
4807 }
4808
4809 // If we don't throw above, we know that we're dealing with an
4810 // export because imports are const and we do not generate initializing
4811 // assignments for them.
4812 DCHECK(variable->IsExport());
4813
4814 int depth = execution_context()->ContextChainDepth(variable->scope());
4815 if (VariableNeedsHoleCheckInCurrentBlockForAssignment(variable, op,
4816 hole_check_mode)) {
4817 Register value_temp = register_allocator()->NewRegister();
4818 builder()
4819 ->StoreAccumulatorInRegister(value_temp)
4820 .LoadModuleVariable(variable->index(), depth);
4821 BuildHoleCheckForVariableAssignment(variable, op);
4822 builder()->LoadAccumulatorWithRegister(value_temp);
4823 }
4824 builder()->StoreModuleVariable(variable->index(), depth);
4825 break;
4826 }
4827 case VariableLocation::REPL_GLOBAL: {
4828 // A let or const declaration like 'let x = 7' is effectively translated
4829 // to:
4830 // <top of the script>:
4831 // ScriptContext.x = TheHole;
4832 // ...
4833 // <where the actual 'let' is>:
4834 // ScriptContextTable.x = 7; // no hole check
4835 //
4836 // The ScriptContext slot for 'x' that we store to here is not
4837 // necessarily the ScriptContext of this script, but rather the
4838 // first ScriptContext that has a slot for name 'x'.
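 // So, roughly, re-entering `let x = 7` in a REPL session re-initializes
 // the slot in whichever ScriptContext first declared 'x' rather than
 // creating a second, shadowing binding.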
4839 DCHECK(variable->IsReplGlobal());
4840 if (op == Token::kInit) {
4841 RegisterList store_args = register_allocator()->NewRegisterList(2);
4842 builder()
4843 ->StoreAccumulatorInRegister(store_args[1])
4844 .LoadLiteral(variable->raw_name())
4845 .StoreAccumulatorInRegister(store_args[0]);
4846 builder()->CallRuntime(
4847 Runtime::kStoreGlobalNoHoleCheckForReplLetOrConst, store_args);
4848 } else {
4849 if (mode == VariableMode::kConst) {
4850 builder()->CallRuntime(Runtime::kThrowConstAssignError);
4851 } else {
4852 BuildStoreGlobal(variable);
4853 }
4854 }
4855 break;
4856 }
4857 }
4858}
4859
4860void BytecodeGenerator::BuildLoadNamedProperty(const Expression* object_expr,
4861 Register object,
4862 const AstRawString* name) {
4863 FeedbackSlot slot = GetCachedLoadICSlot(object_expr, name);
4864 builder()->LoadNamedProperty(object, name, feedback_index(slot));
4865}
4866
4867void BytecodeGenerator::BuildSetNamedProperty(const Expression* object_expr,
4868 Register object,
4869 const AstRawString* name) {
4870 Register value;
4871 if (!execution_result()->IsEffect()) {
4872 value = register_allocator()->NewRegister();
4873 builder()->StoreAccumulatorInRegister(value);
4874 }
4875
4876 FeedbackSlot slot = GetCachedStoreICSlot(object_expr, name);
4877 builder()->SetNamedProperty(object, name, feedback_index(slot),
4878 language_mode());
4879
4880 if (!execution_result()->IsEffect()) {
4881 builder()->LoadAccumulatorWithRegister(value);
4882 }
4883}
4884
4885void BytecodeGenerator::BuildStoreGlobal(Variable* variable) {
4886 Register value;
4887 if (!execution_result()->IsEffect()) {
4888 value = register_allocator()->NewRegister();
4889 builder()->StoreAccumulatorInRegister(value);
4890 }
4891
4892 FeedbackSlot slot = GetCachedStoreGlobalICSlot(language_mode(), variable);
4893 builder()->StoreGlobal(variable->raw_name(), feedback_index(slot));
4894
4895 if (!execution_result()->IsEffect()) {
4896 builder()->LoadAccumulatorWithRegister(value);
4897 }
4898}
4899
4900void BytecodeGenerator::BuildLoadKeyedProperty(Register object,
4901 FeedbackSlot slot) {
4902 if (v8_flags.enable_enumerated_keyed_access_bytecode &&
4903 current_for_in_scope() != nullptr) {
4904 Variable* key = GetPotentialVariableInAccumulator();
4905 if (key != nullptr) {
4906 ForInScope* scope = current_for_in_scope()->GetForInScope(key);
4907 if (scope != nullptr) {
4908 Register enum_index = scope->enum_index();
4909 Register cache_type = scope->cache_type();
4910 builder()->LoadEnumeratedKeyedProperty(object, enum_index, cache_type,
4911 feedback_index(slot));
4912 return;
4913 }
4914 }
4915 }
4916 builder()->LoadKeyedProperty(object, feedback_index(slot));
4917}
4918
4919// static
4920BytecodeGenerator::AssignmentLhsData
4921BytecodeGenerator::AssignmentLhsData::NonProperty(Expression* expr) {
4922 return AssignmentLhsData(NON_PROPERTY, expr, RegisterList(), Register(),
4923 Register(), nullptr, nullptr);
4924}
4925// static
4926BytecodeGenerator::AssignmentLhsData
4927BytecodeGenerator::AssignmentLhsData::NamedProperty(Expression* object_expr,
4928 Register object,
4929 const AstRawString* name) {
4930 return AssignmentLhsData(NAMED_PROPERTY, nullptr, RegisterList(), object,
4931 Register(), object_expr, name);
4932}
4933// static
4934BytecodeGenerator::AssignmentLhsData
4935BytecodeGenerator::AssignmentLhsData::KeyedProperty(Register object,
4936 Register key) {
4937 return AssignmentLhsData(KEYED_PROPERTY, nullptr, RegisterList(), object, key,
4938 nullptr, nullptr);
4939}
4940// static
4941BytecodeGenerator::AssignmentLhsData
4942BytecodeGenerator::AssignmentLhsData::NamedSuperProperty(
4943 RegisterList super_property_args) {
4944 return AssignmentLhsData(NAMED_SUPER_PROPERTY, nullptr, super_property_args,
4945 Register(), Register(), nullptr, nullptr);
4946}
4947// static
4948BytecodeGenerator::AssignmentLhsData
4949BytecodeGenerator::AssignmentLhsData::PrivateMethodOrAccessor(
4950 AssignType type, Property* property, Register object, Register key) {
4951 return AssignmentLhsData(type, property, RegisterList(), object, key, nullptr,
4952 nullptr);
4953}
4954// static
4955BytecodeGenerator::AssignmentLhsData
4956BytecodeGenerator::AssignmentLhsData::PrivateDebugEvaluate(AssignType type,
4957 Property* property,
4958 Register object) {
4959 return AssignmentLhsData(type, property, RegisterList(), object, Register(),
4960 nullptr, nullptr);
4961}
4962// static
4963BytecodeGenerator::AssignmentLhsData
4964BytecodeGenerator::AssignmentLhsData::KeyedSuperProperty(
4965 RegisterList super_property_args) {
4966 return AssignmentLhsData(KEYED_SUPER_PROPERTY, nullptr, super_property_args,
4967 Register(), Register(), nullptr, nullptr);
4968}
4969
4970BytecodeGenerator::AssignmentLhsData BytecodeGenerator::PrepareAssignmentLhs(
4971 Expression* lhs, AccumulatorPreservingMode accumulator_preserving_mode) {
4972 // Left-hand side can only be a property, a global or a variable slot.
4973 Property* property = lhs->AsProperty();
4974 AssignType assign_type = Property::GetAssignType(property);
4975
4976 // Evaluate LHS expression.
4977 switch (assign_type) {
4978 case NON_PROPERTY:
4979 return AssignmentLhsData::NonProperty(lhs);
4980 case NAMED_PROPERTY: {
4981 AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
4982 Register object = VisitForRegisterValue(property->obj());
4983 const AstRawString* name =
4984 property->key()->AsLiteral()->AsRawPropertyName();
4985 return AssignmentLhsData::NamedProperty(property->obj(), object, name);
4986 }
4987 case KEYED_PROPERTY: {
4988 AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
4989 Register object = VisitForRegisterValue(property->obj());
4990 Register key = VisitForRegisterValue(property->key());
4991 return AssignmentLhsData::KeyedProperty(object, key);
4992 }
4993 case PRIVATE_METHOD:
4994 case PRIVATE_GETTER_ONLY:
4995 case PRIVATE_SETTER_ONLY:
4996 case PRIVATE_GETTER_AND_SETTER: {
4997 DCHECK(!property->IsSuperAccess());
4998 AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
4999 Register object = VisitForRegisterValue(property->obj());
5000 Register key = VisitForRegisterValue(property->key());
5001 return AssignmentLhsData::PrivateMethodOrAccessor(assign_type, property,
5002 object, key);
5003 }
5004 case PRIVATE_DEBUG_DYNAMIC: {
5005 AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
5006 Register object = VisitForRegisterValue(property->obj());
5007 // Do not visit the key here, instead we will look them up at run time.
5008 return AssignmentLhsData::PrivateDebugEvaluate(assign_type, property,
5009 object);
5010 }
5011 case NAMED_SUPER_PROPERTY: {
5012 AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
5013 RegisterList super_property_args =
5014 register_allocator()->NewRegisterList(4);
5015 BuildThisVariableLoad();
5016 builder()->StoreAccumulatorInRegister(super_property_args[0]);
5017 BuildVariableLoad(
5018 property->obj()->AsSuperPropertyReference()->home_object()->var(),
5019 HoleCheckMode::kElided);
5020 builder()->StoreAccumulatorInRegister(super_property_args[1]);
5021 builder()
5022 ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
5023 .StoreAccumulatorInRegister(super_property_args[2]);
5024 return AssignmentLhsData::NamedSuperProperty(super_property_args);
5025 }
5026 case KEYED_SUPER_PROPERTY: {
5027 AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
5028 RegisterList super_property_args =
5029 register_allocator()->NewRegisterList(4);
5030 BuildThisVariableLoad();
5031 builder()->StoreAccumulatorInRegister(super_property_args[0]);
5032 BuildVariableLoad(
5033 property->obj()->AsSuperPropertyReference()->home_object()->var(),
5034 HoleCheckMode::kElided);
5035 builder()->StoreAccumulatorInRegister(super_property_args[1]);
5036 VisitForRegisterValue(property->key(), super_property_args[2]);
5037 return AssignmentLhsData::KeyedSuperProperty(super_property_args);
5038 }
5039 }
5040 UNREACHABLE();
5041}
5042
5043// Build the iteration finalizer called in the finally block of an iteration
5044// protocol execution. This closes the iterator if needed, and suppresses any
5045// exception it throws if necessary, including the exception when the return
5046// method is not callable.
5047//
5048// In pseudo-code, this builds:
5049//
5050// if (!done) {
5051// try {
5052// let method = iterator.return
5053// if (method !== null && method !== undefined) {
5054// let return_val = method.call(iterator)
5055// if (!%IsObject(return_val)) throw TypeError
5056// }
5057// } catch (e) {
5058// if (iteration_continuation != RETHROW)
5059// rethrow e
5060// }
5061// }
5062//
5063// For async iterators, iterator.close() becomes await iterator.close().
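// For example, `for (const x of it) { break; }` reaches this finalizer with
// done still false, so it.return() is invoked to close the iterator before
// leaving the loop.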
5064void BytecodeGenerator::BuildFinalizeIteration(
5065 IteratorRecord iterator, Register done,
5066 Register iteration_continuation_token) {
5067 RegisterAllocationScope register_scope(this);
5068 BytecodeLabels iterator_is_done(zone());
5069
5070 // if (!done) {
5071 builder()->LoadAccumulatorWithRegister(done).JumpIfTrue(
5072 ToBooleanMode::kConvertToBoolean, iterator_is_done.New());
5073
5074 {
5075 RegisterAllocationScope inner_register_scope(this);
5076 BuildTryCatch(
5077 // try {
5078 // let method = iterator.return
5079 // if (method !== null && method !== undefined) {
5080 // let return_val = method.call(iterator)
5081 // if (!%IsObject(return_val)) throw TypeError
5082 // }
5083 // }
5084 [&]() {
5085 Register method = register_allocator()->NewRegister();
5086 builder()
5087 ->LoadNamedProperty(
5088 iterator.object(), ast_string_constants()->return_string(),
5089 feedback_index(feedback_spec()->AddLoadICSlot()))
5090 .JumpIfUndefinedOrNull(iterator_is_done.New())
5091 .StoreAccumulatorInRegister(method);
5092
5093 RegisterList args(iterator.object());
5094 builder()->CallProperty(
5095 method, args, feedback_index(feedback_spec()->AddCallICSlot()));
5096 if (iterator.type() == IteratorType::kAsync) {
5097 BuildAwait();
5098 }
5099 builder()->JumpIfJSReceiver(iterator_is_done.New());
5100 {
5101 // Throw this exception inside the try block so that it is
5102 // suppressed by the iteration continuation if necessary.
5103 RegisterAllocationScope register_scope(this);
5104 Register return_result = register_allocator()->NewRegister();
5105 builder()
5106 ->StoreAccumulatorInRegister(return_result)
5107 .CallRuntime(Runtime::kThrowIteratorResultNotAnObject,
5108 return_result);
5109 }
5110 },
5111
5112 // catch (e) {
5113 // if (iteration_continuation != RETHROW)
5114 // rethrow e
5115 // }
5116 [&](Register context) {
5117 // Reuse context register to store the exception.
5118 Register close_exception = context;
5119 builder()->StoreAccumulatorInRegister(close_exception);
5120
5121 BytecodeLabel suppress_close_exception;
5122 builder()
5125 .CompareReference(iteration_continuation_token)
5126 .JumpIfTrue(ToBooleanMode::kAlreadyBoolean,
5127 &suppress_close_exception)
5128 .LoadAccumulatorWithRegister(close_exception)
5129 .ReThrow()
5130 .Bind(&suppress_close_exception);
5131 },
5132 HandlerTable::UNCAUGHT);
5133 }
5134
5135 iterator_is_done.Bind(builder());
5136}
5137
5138// Get the default value of a destructuring target. Will mutate the
5139// destructuring target expression if there is a default value.
5140//
5141// For
5142// a = b
5143// in
5144// let {a = b} = c
5145// returns b and mutates the input into a.
5146Expression* BytecodeGenerator::GetDestructuringDefaultValue(
5147 Expression** target) {
5148 Expression* default_value = nullptr;
5149 if ((*target)->IsAssignment()) {
5150 Assignment* default_init = (*target)->AsAssignment();
5151 DCHECK_EQ(default_init->op(), Token::kAssign);
5152 default_value = default_init->value();
5153 *target = default_init->target();
5154 DCHECK((*target)->IsValidReferenceExpression() || (*target)->IsPattern());
5155 }
5156 return default_value;
5157}
5158
5159// Convert a destructuring assignment to an array literal into a sequence of
5160// iterator accesses into the value being assigned (in the accumulator).
5161//
5162// [a().x, ...b] = accumulator
5163//
5164// becomes
5165//
5166// iterator = %GetIterator(accumulator)
5167// try {
5168//
5169// // Individual assignments read off the value from iterator.next(). This gets
5170// // repeated per destructuring element.
5171// if (!done) {
5172// // Make sure we are considered 'done' if .next(), .done or .value fail.
5173// done = true
5174// var next_result = iterator.next()
5175// var tmp_done = next_result.done
5176// if (!tmp_done) {
5177// value = next_result.value
5178// done = false
5179// }
5180// }
5181// if (done)
5182// value = undefined
5183// a().x = value
5184//
5185// // A spread receives the remaining items in the iterator.
5186// var array = []
5187// var index = 0
5188// %FillArrayWithIterator(iterator, array, index, done)
5189// done = true
5190// b = array
5191//
5192// } catch(e) {
5193// iteration_continuation = RETHROW
5194// } finally {
5195// %FinalizeIteration(iterator, done, iteration_continuation)
5196// }
5197void BytecodeGenerator::BuildDestructuringArrayAssignment(
5198 ArrayLiteral* pattern, Token::Value op,
5199 LookupHoistingMode lookup_hoisting_mode) {
5200 RegisterAllocationScope scope(this);
5201
5202 Register value = register_allocator()->NewRegister();
5203 builder()->StoreAccumulatorInRegister(value);
5204
5205 // Store the iterator in a dedicated register so that it can be closed on
5206 // exit, and the 'done' value in a dedicated register so that it can be
5207 // changed and accessed independently of the iteration result.
5208 IteratorRecord iterator = BuildGetIteratorRecord(IteratorType::kNormal);
5209 Register done = register_allocator()->NewRegister();
5210 builder()->LoadFalse();
5211 builder()->StoreAccumulatorInRegister(done);
5212
5213 BuildTryFinally(
5214 // Try block.
5215 [&]() {
5216 Register next_result = register_allocator()->NewRegister();
5217 FeedbackSlot next_value_load_slot = feedback_spec()->AddLoadICSlot();
5218 FeedbackSlot next_done_load_slot = feedback_spec()->AddLoadICSlot();
5219
5220 Spread* spread = nullptr;
5221 for (Expression* target : *pattern->values()) {
5222 if (target->IsSpread()) {
5223 spread = target->AsSpread();
5224 break;
5225 }
5226
5227 Expression* default_value = GetDestructuringDefaultValue(&target);
5228 builder()->SetExpressionPosition(target);
5229
5230 AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
5231
5232 // if (!done) {
5233 // // Make sure we are considered done if .next(), .done or .value
5234 // // fail.
5235 // done = true
5236 // var next_result = iterator.next()
5237 // var tmp_done = next_result.done
5238 // if (!tmp_done) {
5239 // value = next_result.value
5240 // done = false
5241 // }
5242 // }
5243 // if (done)
5244 // value = undefined
5245 BytecodeLabels is_done(zone());
5246
5247 builder()->LoadAccumulatorWithRegister(done);
5248 builder()->JumpIfTrue(ToBooleanMode::kConvertToBoolean,
5249 is_done.New());
5250
5251 builder()->LoadTrue().StoreAccumulatorInRegister(done);
5252 BuildIteratorNext(iterator, next_result);
5253 builder()
5254 ->LoadNamedProperty(next_result,
5255 ast_string_constants()->done_string(),
5256 feedback_index(next_done_load_slot))
5257 .JumpIfTrue(ToBooleanMode::kConvertToBoolean, is_done.New());
5258
5259 // Only do the assignment if this is not a hole (i.e. 'elided').
5260 if (!target->IsTheHoleLiteral()) {
5261 builder()
5262 ->LoadNamedProperty(next_result,
5263 ast_string_constants()->value_string(),
5264 feedback_index(next_value_load_slot))
5265 .StoreAccumulatorInRegister(next_result)
5266 .LoadFalse()
5267 .StoreAccumulatorInRegister(done)
5268 .LoadAccumulatorWithRegister(next_result);
5269
5270 // [<pattern> = <init>] = <value>
5271 // becomes (roughly)
5272 // temp = <value>.next();
5273 // <pattern> = temp === undefined ? <init> : temp;
5274 BytecodeLabel do_assignment;
5275 if (default_value) {
5276 builder()->JumpIfNotUndefined(&do_assignment);
5277 // Since done == true => temp == undefined, jump directly to using
5278 // the default value for that case.
5279 is_done.Bind(builder());
5280 VisitForAccumulatorValue(default_value);
5281 } else {
5282 builder()->Jump(&do_assignment);
5283 is_done.Bind(builder());
5284 builder()->LoadUndefined();
5285 }
5286 builder()->Bind(&do_assignment);
5287
5288 BuildAssignment(lhs_data, op, lookup_hoisting_mode);
5289 } else {
5291 DCHECK_EQ(lhs_data.assign_type(), NON_PROPERTY);
5292 is_done.Bind(builder());
5293 }
5294 }
5295
5296 if (spread) {
5297 RegisterAllocationScope scope(this);
5298 BytecodeLabel is_done;
5299
5300 // A spread is turned into a loop over the remainder of the iterator.
5301 Expression* target = spread->expression();
5302 builder()->SetExpressionPosition(spread);
5303
5304 AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
5305
5306 // var array = [];
5307 Register array = register_allocator()->NewRegister();
5308 builder()->CreateEmptyArrayLiteral(
5309 feedback_index(feedback_spec()->AddLiteralSlot()));
5310 builder()->StoreAccumulatorInRegister(array);
5311
5312 // If done, jump to assigning empty array
5313 builder()->LoadAccumulatorWithRegister(done);
5314 builder()->JumpIfTrue(ToBooleanMode::kConvertToBoolean, &is_done);
5315
5316 // var index = 0;
5317 Register index = register_allocator()->NewRegister();
5318 builder()->LoadLiteral(Smi::zero());
5319 builder()->StoreAccumulatorInRegister(index);
5320
5321 // Set done to true, since it's guaranteed to be true by the time the
5322 // array fill completes.
5323 builder()->LoadTrue().StoreAccumulatorInRegister(done);
5324
5325 // Fill the array with the iterator.
5326 FeedbackSlot element_slot =
5327 feedback_spec()->AddStoreInArrayLiteralICSlot();
5328 FeedbackSlot index_slot = feedback_spec()->AddBinaryOpICSlot();
5329 BuildFillArrayWithIterator(iterator, array, index, next_result,
5330 next_value_load_slot, next_done_load_slot,
5331 index_slot, element_slot);
5332
5333 builder()->Bind(&is_done);
5334 // Assign the array to the LHS.
5335 builder()->LoadAccumulatorWithRegister(array);
5336 BuildAssignment(lhs_data, op, lookup_hoisting_mode);
5337 }
5338 },
5339 // Finally block.
5340 [&](Register iteration_continuation_token,
5341 Register iteration_continuation_result, Register message) {
5342 // Finish the iteration in the finally block.
5343 BuildFinalizeIteration(iterator, done, iteration_continuation_token);
5344 },
5345 HandlerTable::UNCAUGHT);
5346
5347 if (!execution_result()->IsEffect()) {
5348 builder()->LoadAccumulatorWithRegister(value);
5349 }
5350}
5351
5352// Convert a destructuring assignment to an object literal into a sequence of
5353// property accesses into the value being assigned (in the accumulator).
5354//
5355// { y, [x++]: a(), ...b.c } = value
5356//
5357// becomes
5358//
5359// var rest_runtime_callargs = new Array(3);
5360// rest_runtime_callargs[0] = value;
5361//
5362// rest_runtime_callargs[1] = "y";
5363// y = value.y;
5364//
5365// var temp1 = %ToName(x++);
5366// rest_runtime_callargs[2] = temp1;
5367// a() = value[temp1];
5368//
5369// b.c =
5370// %CopyDataPropertiesWithExcludedPropertiesOnStack.call(rest_runtime_callargs);
5371void BytecodeGenerator::BuildDestructuringObjectAssignment(
5372 ObjectLiteral* pattern, Token::Value op,
5373 LookupHoistingMode lookup_hoisting_mode) {
5374 RegisterAllocationScope register_scope(this);
5375
5376 // Store the assignment value in a register.
5377 Register value;
5378 RegisterList rest_runtime_callargs;
5379 if (pattern->builder()->has_rest_property()) {
5380 rest_runtime_callargs =
5381 register_allocator()->NewRegisterList(pattern->properties()->length());
5382 value = rest_runtime_callargs[0];
5383 } else {
5384 value = register_allocator()->NewRegister();
5385 }
5386 builder()->StoreAccumulatorInRegister(value);
5387
5388 // if (value === null || value === undefined)
5389 // throw new TypeError(kNonCoercible);
5390 //
5391 // Since the first property access on null/undefined will also trigger a
5392 // TypeError, we can elide this check. The exception is when there are no
5393 // properties and no rest property (this is an empty literal), or when the
5394 // first property is a computed name and accessing it can have side effects.
5395 //
5396 // TODO(leszeks): Also eliminate this check if the value is known to be
5397 // non-null (e.g. an object literal).
5398 if (pattern->properties()->is_empty() ||
5399 (pattern->properties()->at(0)->is_computed_name() &&
5400 pattern->properties()->at(0)->kind() != ObjectLiteralProperty::SPREAD)) {
5401 BytecodeLabel is_null_or_undefined, not_null_or_undefined;
5402 builder()
5403 ->JumpIfUndefinedOrNull(&is_null_or_undefined)
5404 .Jump(&not_null_or_undefined);
5405
5406 {
5407 builder()->Bind(&is_null_or_undefined);
5409 builder()->CallRuntime(Runtime::kThrowPatternAssignmentNonCoercible,
5410 value);
5411 }
5412 builder()->Bind(&not_null_or_undefined);
5413 }
5414
5415 int i = 0;
5416 for (ObjectLiteralProperty* pattern_property : *pattern->properties()) {
5417 RegisterAllocationScope inner_register_scope(this);
5418
5419 // The key of the pattern becomes the key into the RHS value, and the value
5420 // of the pattern becomes the target of the assignment.
5421 //
5422 // e.g. { a: b } = o becomes b = o.a
5423 Expression* pattern_key = pattern_property->key();
5424 Expression* target = pattern_property->value();
5425 Expression* default_value = GetDestructuringDefaultValue(&target);
5426 builder()->SetExpressionPosition(target);
5427
5428 // Calculate this property's key into the assignment RHS value, additionally
5429 // storing the key for rest_runtime_callargs if needed.
5430 //
5431 // The RHS is accessed using the key either by LoadNamedProperty (if
5432 // value_name is valid) or by LoadKeyedProperty (otherwise).
5433 const AstRawString* value_name = nullptr;
5434 Register value_key;
5435
5436 if (pattern_property->kind() != ObjectLiteralProperty::Kind::SPREAD) {
5437 if (pattern_key->IsPropertyName()) {
5438 value_name = pattern_key->AsLiteral()->AsRawPropertyName();
5439 }
5440 if (pattern->builder()->has_rest_property() || !value_name) {
5441 if (pattern->builder()->has_rest_property()) {
5442 value_key = rest_runtime_callargs[i + 1];
5443 } else {
5444 value_key = register_allocator()->NewRegister();
5445 }
5446 if (pattern_property->is_computed_name()) {
5447 // { [a()]: b().x } = c
5448 // becomes
5449 // var tmp = a()
5450 // b().x = c[tmp]
5451 DCHECK(!pattern_key->IsPropertyName() ||
5452 !pattern_key->IsNumberLiteral());
5453 VisitForAccumulatorValue(pattern_key);
5454 builder()->ToName().StoreAccumulatorInRegister(value_key);
5455 } else {
5456 // We only need the key for non-computed properties when it is numeric
5457 // or is being saved for the rest_runtime_callargs.
5458 DCHECK(pattern_key->IsNumberLiteral() ||
5459 (pattern->builder()->has_rest_property() &&
5460 pattern_key->IsPropertyName()));
5461 VisitForRegisterValue(pattern_key, value_key);
5462 }
5463 }
5464 }
5465
5466 AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
5467
5468 // Get the value from the RHS.
5469 if (pattern_property->kind() == ObjectLiteralProperty::Kind::SPREAD) {
5470 DCHECK_EQ(i, pattern->properties()->length() - 1);
5471 DCHECK(!value_key.is_valid());
5472 DCHECK_NULL(value_name);
5473 builder()->CallRuntime(
5474 Runtime::kInlineCopyDataPropertiesWithExcludedPropertiesOnStack,
5475 rest_runtime_callargs);
5476 } else if (value_name) {
5477 builder()->LoadNamedProperty(
5478 value, value_name, feedback_index(feedback_spec()->AddLoadICSlot()));
5479 } else {
5480 DCHECK(value_key.is_valid());
5481 builder()->LoadAccumulatorWithRegister(value_key).LoadKeyedProperty(
5482 value, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
5483 }
5484
5485 // {<pattern> = <init>} = <value>
5486 // becomes
5487 // temp = <value>;
5488 // <pattern> = temp === undefined ? <init> : temp;
5489 if (default_value) {
5490 BytecodeLabel value_not_undefined;
5491 builder()->JumpIfNotUndefined(&value_not_undefined);
5492 VisitForAccumulatorValue(default_value);
5493 builder()->Bind(&value_not_undefined);
5494 }
5495
5496 BuildAssignment(lhs_data, op, lookup_hoisting_mode);
5497
5498 i++;
5499 }
5500
5501 if (!execution_result()->IsEffect()) {
5502 builder()->LoadAccumulatorWithRegister(value);
5503 }
5504}
5505
5506void BytecodeGenerator::BuildAssignment(
5507 const AssignmentLhsData& lhs_data, Token::Value op,
5508 LookupHoistingMode lookup_hoisting_mode) {
5509 // Assign the value to the LHS.
5510 switch (lhs_data.assign_type()) {
5511 case NON_PROPERTY: {
5512 if (ObjectLiteral* pattern_as_object =
5513 lhs_data.expr()->AsObjectLiteral()) {
5514 // Split object literals into destructuring.
5515 BuildDestructuringObjectAssignment(pattern_as_object, op,
5516 lookup_hoisting_mode);
5517 } else if (ArrayLiteral* pattern_as_array =
5518 lhs_data.expr()->AsArrayLiteral()) {
5519 // Split object literals into destructuring.
5520 BuildDestructuringArrayAssignment(pattern_as_array, op,
5521 lookup_hoisting_mode);
5522 } else {
5523 DCHECK(lhs_data.expr()->IsVariableProxy());
5524 VariableProxy* proxy = lhs_data.expr()->AsVariableProxy();
5525 BuildVariableAssignment(proxy->var(), op, proxy->hole_check_mode(),
5526 lookup_hoisting_mode);
5527 }
5528 break;
5529 }
5530 case NAMED_PROPERTY: {
5531 BuildSetNamedProperty(lhs_data.object_expr(), lhs_data.object(),
5532 lhs_data.name());
5533 break;
5534 }
5535 case KEYED_PROPERTY: {
5536 FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
5537 Register value;
5538 if (!execution_result()->IsEffect()) {
5539 value = register_allocator()->NewRegister();
5540 builder()->StoreAccumulatorInRegister(value);
5541 }
5542 builder()->SetKeyedProperty(lhs_data.object(), lhs_data.key(),
5543 feedback_index(slot), language_mode());
5544 if (!execution_result()->IsEffect()) {
5545 builder()->LoadAccumulatorWithRegister(value);
5546 }
5547 break;
5548 }
5549 case NAMED_SUPER_PROPERTY: {
5550 builder()
5551 ->StoreAccumulatorInRegister(lhs_data.super_property_args()[3])
5552 .CallRuntime(Runtime::kStoreToSuper, lhs_data.super_property_args());
5553 break;
5554 }
5555 case KEYED_SUPER_PROPERTY: {
5556 builder()
5557 ->StoreAccumulatorInRegister(lhs_data.super_property_args()[3])
5558 .CallRuntime(Runtime::kStoreKeyedToSuper,
5559 lhs_data.super_property_args());
5560 break;
5561 }
5562 case PRIVATE_METHOD: {
5563 Property* property = lhs_data.expr()->AsProperty();
5564 BuildPrivateBrandCheck(property, lhs_data.object());
5565 BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateMethodWrite,
5566 lhs_data.expr()->AsProperty());
5567 break;
5568 }
5569 case PRIVATE_GETTER_ONLY: {
5570 Property* property = lhs_data.expr()->AsProperty();
5571 BuildPrivateBrandCheck(property, lhs_data.object());
5572 BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateSetterAccess,
5573 lhs_data.expr()->AsProperty());
5574 break;
5575 }
5576 case PRIVATE_SETTER_ONLY:
5577 case PRIVATE_GETTER_AND_SETTER: {
5578 Register value = register_allocator()->NewRegister();
5579 builder()->StoreAccumulatorInRegister(value);
5580 Property* property = lhs_data.expr()->AsProperty();
5581 BuildPrivateBrandCheck(property, lhs_data.object());
5582 BuildPrivateSetterAccess(lhs_data.object(), lhs_data.key(), value);
5583 if (!execution_result()->IsEffect()) {
5584 builder()->LoadAccumulatorWithRegister(value);
5585 }
5586 break;
5587 }
5588 case PRIVATE_DEBUG_DYNAMIC: {
5589 Register value = register_allocator()->NewRegister();
5590 builder()->StoreAccumulatorInRegister(value);
5591 Property* property = lhs_data.expr()->AsProperty();
5592 BuildPrivateDebugDynamicSet(property, lhs_data.object(), value);
5593 if (!execution_result()->IsEffect()) {
5594 builder()->LoadAccumulatorWithRegister(value);
5595 }
5596 break;
5597 }
5598 }
5599}
5600
5601void BytecodeGenerator::VisitAssignment(Assignment* expr) {
5602 AssignmentLhsData lhs_data = PrepareAssignmentLhs(expr->target());
5603
5604 VisitForAccumulatorValue(expr->value());
5605
5606 builder()->SetExpressionPosition(expr);
5607 BuildAssignment(lhs_data, expr->op(), expr->lookup_hoisting_mode());
5608}
5609
5610void BytecodeGenerator::VisitCompoundAssignment(CompoundAssignment* expr) {
5611 AssignmentLhsData lhs_data = PrepareAssignmentLhs(expr->target());
5612
5613 // Evaluate the value and potentially handle compound assignments by loading
5614 // the left-hand side value and performing a binary operation.
5615 switch (lhs_data.assign_type()) {
5616 case NON_PROPERTY: {
5617 VariableProxy* proxy = expr->target()->AsVariableProxy();
5618 BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
5619 break;
5620 }
5621 case NAMED_PROPERTY: {
5622 BuildLoadNamedProperty(lhs_data.object_expr(), lhs_data.object(),
5623 lhs_data.name());
5624 break;
5625 }
5626 case KEYED_PROPERTY: {
5627 FeedbackSlot slot = feedback_spec()->AddKeyedLoadICSlot();
5628 builder()->LoadAccumulatorWithRegister(lhs_data.key());
5629 BuildLoadKeyedProperty(lhs_data.object(), slot);
5630 break;
5631 }
5632 case NAMED_SUPER_PROPERTY: {
5633 builder()->CallRuntime(Runtime::kLoadFromSuper,
5634 lhs_data.super_property_args().Truncate(3));
5635 break;
5636 }
5637 case KEYED_SUPER_PROPERTY: {
5638 builder()->CallRuntime(Runtime::kLoadKeyedFromSuper,
5639 lhs_data.super_property_args().Truncate(3));
5640 break;
5641 }
5642 // BuildAssignment() will throw an error about the private method being
5643 // read-only.
5644 case PRIVATE_METHOD: {
5645 Property* property = lhs_data.expr()->AsProperty();
5646 BuildPrivateBrandCheck(property, lhs_data.object());
5647 builder()->LoadAccumulatorWithRegister(lhs_data.key());
5648 break;
5649 }
5650 // For read-only properties, BuildAssignment() will throw an error about
5651 // the missing setter.
5652 case PRIVATE_GETTER_ONLY:
5653 case PRIVATE_GETTER_AND_SETTER: {
5654 Property* property = lhs_data.expr()->AsProperty();
5655 BuildPrivateBrandCheck(property, lhs_data.object());
5656 BuildPrivateGetterAccess(lhs_data.object(), lhs_data.key());
5657 break;
5658 }
5659 case PRIVATE_SETTER_ONLY: {
5660 // The property access is invalid, but if the brand check fails too, we
5661 // need to return the error from the brand check.
5662 Property* property = lhs_data.expr()->AsProperty();
5663 BuildPrivateBrandCheck(property, lhs_data.object());
5664 BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateGetterAccess,
5665 lhs_data.expr()->AsProperty());
5666 break;
5667 }
5668 case PRIVATE_DEBUG_DYNAMIC: {
5669 Property* property = lhs_data.expr()->AsProperty();
5670 BuildPrivateDebugDynamicGet(property, lhs_data.object());
5671 break;
5672 }
5673 }
5674
5675 BinaryOperation* binop = expr->binary_operation();
5676 FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
5677 BytecodeLabel short_circuit;
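 // Logical compound assignments short-circuit. E.g. `a ??= b` behaves like
 // `a ?? (a = b)`: when `a` is neither undefined nor null, both the RHS
 // evaluation and the assignment below are skipped entirely.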
5678 if (binop->op() == Token::kNullish) {
5679 BytecodeLabel nullish;
5680 builder()
5681 ->JumpIfUndefinedOrNull(&nullish)
5682 .Jump(&short_circuit)
5683 .Bind(&nullish);
5685 } else if (binop->op() == Token::kOr) {
5686 builder()->JumpIfTrue(ToBooleanMode::kConvertToBoolean, &short_circuit);
5688 } else if (binop->op() == Token::kAnd) {
5689 builder()->JumpIfFalse(ToBooleanMode::kConvertToBoolean, &short_circuit);
5691 } else if (expr->value()->IsSmiLiteral()) {
5692 builder()->BinaryOperationSmiLiteral(
5693 binop->op(), expr->value()->AsLiteral()->AsSmiLiteral(),
5694 feedback_index(slot));
5695 } else {
5696 Register old_value = register_allocator()->NewRegister();
5697 builder()->StoreAccumulatorInRegister(old_value);
5698 VisitForAccumulatorValue(expr->value());
5699 builder()->BinaryOperation(binop->op(), old_value, feedback_index(slot));
5700 }
5702
5703 BuildAssignment(lhs_data, expr->op(), expr->lookup_hoisting_mode());
5704 builder()->Bind(&short_circuit);
5705}
5706
5707// Suspends the generator to resume at the next suspend_id, with output stored
5708// in the accumulator. When the generator is resumed, the sent value is loaded
5709// in the accumulator.
5711 // Because we eliminate jump targets in dead code, we also eliminate resumes
5712 // when the suspend is not emitted; otherwise the call to Bind below would
5713 // start a new basic block and the code would be considered alive.
5714 if (builder()->RemainderOfBlockIsDead()) {
5715 return;
5716 }
5717 const int suspend_id = suspend_count_++;
5718
5720
5721 // Save context, registers, and state. This bytecode then returns the value
5722 // in the accumulator.
5725
5726 // Upon resume, we continue here.
5727 builder()->Bind(generator_jump_table_, suspend_id);
5728
5729 // Clobbers all registers and sets the accumulator to the
5730 // [[input_or_debug_pos]] slot of the generator object.
5732}
5733
5734void BytecodeGenerator::VisitYield(Yield* expr) {
5735 builder()->SetExpressionPosition(expr);
5736 VisitForAccumulatorValue(expr->expression());
5737
5738 bool is_async = IsAsyncGeneratorFunction(function_kind());
5739 // If this is not the first yield
5740 if (suspend_count_ > 0) {
5741 if (is_async) {
5742 // AsyncGenerator yields (with the exception of the initial yield)
5743 // delegate work to the AsyncGeneratorYieldWithAwait stub, which Awaits
5744 // the operand and on success, wraps the value in an IteratorResult.
5745 //
5746 // In the spec the Await is a separate operation, but they are combined
5747 // here to reduce bytecode size.
5748 RegisterAllocationScope register_scope(this);
5749 RegisterList args = register_allocator()->NewRegisterList(2);
5750 builder()
5751 ->MoveRegister(generator_object(), args[0]) // generator
5752 .StoreAccumulatorInRegister(args[1]) // value
5753 .CallRuntime(Runtime::kInlineAsyncGeneratorYieldWithAwait, args);
5754 } else {
5755 // Generator yields (with the exception of the initial yield) wrap the
5756 // value into IteratorResult.
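 // E.g. for `yield v` the caller's .next() observes the object
 // { value: v, done: false } produced here.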
5757 RegisterAllocationScope register_scope(this);
5758 RegisterList args = register_allocator()->NewRegisterList(2);
5759 builder()
5760 ->StoreAccumulatorInRegister(args[0]) // value
5761 .LoadFalse()
5762 .StoreAccumulatorInRegister(args[1]) // done
5763 .CallRuntime(Runtime::kInlineCreateIterResultObject, args);
5764 }
5765 }
5766
5767 BuildSuspendPoint(expr->position());
5768 // At this point, the generator has been resumed, with the received value in
5769 // the accumulator.
5770
5771 // TODO(caitp): remove once yield* desugaring for async generators is handled
5772 // in BytecodeGenerator.
5773 if (expr->on_abrupt_resume() == Yield::kNoControl) {
5774 DCHECK(is_async);
5775 return;
5776 }
5777
5780 Runtime::kInlineGeneratorGetResumeMode, generator_object());
5781
5782 // Now dispatch on resume mode.
5785 BytecodeJumpTable* jump_table =
5787
5788 builder()->SwitchOnSmiNoFeedback(jump_table);
5789
5790 if (is_async) {
5791 // Resume with rethrow (switch fallthrough).
5792 // This case is only necessary in async generators.
5795 builder()->ReThrow();
5796
5797 // Add label for kThrow (next case).
5798 builder()->Bind(jump_table, JSGeneratorObject::kThrow);
5799 }
5800
5801 {
5802 // Resume with throw (switch fallthrough in sync case).
5803 // TODO(leszeks): Add a debug-only check that the accumulator is
5804 // JSGeneratorObject::kThrow.
5807 builder()->Throw();
5808 }
5809
5810 {
5811 // Resume with return.
5812 builder()->Bind(jump_table, JSGeneratorObject::kReturn);
5814 if (is_async) {
5816 } else {
5818 }
5819 }
5820
5821 {
5822 // Resume with next.
5823 builder()->Bind(jump_table, JSGeneratorObject::kNext);
5827 }
5828}
5829
5830// Desugaring of (yield* iterable)
5831//
5832// do {
5833// const kNext = 0;
5834// const kReturn = 1;
5835// const kThrow = 2;
5836//
5837// let output; // uninitialized
5838//
5839// let iteratorRecord = GetIterator(iterable);
5840// let iterator = iteratorRecord.[[Iterator]];
5841// let next = iteratorRecord.[[NextMethod]];
5842// let input = undefined;
5843// let resumeMode = kNext;
5844//
5845// while (true) {
5846// // From the generator to the iterator:
5847// // Forward input according to resumeMode and obtain output.
5848// switch (resumeMode) {
5849// case kNext:
5850// output = next.[[Call]](iterator, « »);;
5851// break;
5852// case kReturn:
5853// let iteratorReturn = iterator.return;
5854// if (IS_NULL_OR_UNDEFINED(iteratorReturn)) {
5855// if (IS_ASYNC_GENERATOR) input = await input;
5856// return input;
5857// }
5858// output = iteratorReturn.[[Call]](iterator, «input»);
5859// break;
5860// case kThrow:
5861// let iteratorThrow = iterator.throw;
5862// if (IS_NULL_OR_UNDEFINED(iteratorThrow)) {
5863// let iteratorReturn = iterator.return;
5864// if (!IS_NULL_OR_UNDEFINED(iteratorReturn)) {
5865// output = iteratorReturn.[[Call]](iterator, « »);
5866// if (IS_ASYNC_GENERATOR) output = await output;
5867// if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
5868// }
5869// throw MakeTypeError(kThrowMethodMissing);
5870// }
5871// output = iteratorThrow.[[Call]](iterator, «input»);
5872// break;
5873// }
5874//
5875// if (IS_ASYNC_GENERATOR) output = await output;
5876// if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
5877// if (output.done) break;
5878//
5879// // From the generator to its user:
5880// // Forward output, receive new input, and determine resume mode.
5881// if (IS_ASYNC_GENERATOR) {
5882// // Resolve the promise for the current AsyncGeneratorRequest.
5883// %_AsyncGeneratorResolve(output.value, /* done = */ false)
5884// }
5885// input = Suspend(output);
5886// resumeMode = %GeneratorGetResumeMode();
5887// }
5888//
5889// if (resumeMode === kReturn) {
5890// return output.value;
5891// }
5892// output.value
5893// }
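// For example, `function* g() { yield* [1, 2]; }` drives the array iterator
// through the kNext arm of the switch above until output.done is true.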
5894void BytecodeGenerator::VisitYieldStar(YieldStar* expr) {
5895 Register output = register_allocator()->NewRegister();
5896 Register resume_mode = register_allocator()->NewRegister();
5897 IteratorType iterator_type = IsAsyncGeneratorFunction(function_kind())
5898 ? IteratorType::kAsync
5899 : IteratorType::kNormal;
5900
5901 {
5902 RegisterAllocationScope register_scope(this);
5903 RegisterList iterator_and_input = register_allocator()->NewRegisterList(2);
5904 VisitForAccumulatorValue(expr->expression());
5905 IteratorRecord iterator = BuildGetIteratorRecord(
5906 register_allocator()->NewRegister() /* next method */,
5907 iterator_and_input[0], iterator_type);
5908
5909 Register input = iterator_and_input[1];
5911 builder()
5912 ->LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
5913 .StoreAccumulatorInRegister(resume_mode);
5914
5915 {
5916 // This loop builder does not construct counters as the loop is not
5917 // visible to the user, and we therefore neither pass the block coverage
5918 // builder nor the expression.
5919 //
5920 // In addition to the normal suspend for yield*, a yield* in an async
5921 // generator has 2 additional suspends:
5922 // - One for awaiting the iterator result of closing the generator when
5923 // resumed with a "throw" completion, and a throw method is not
5924 // present on the delegated iterator
5925 // - One for awaiting the iterator result yielded by the delegated
5926 // iterator
5927
5928 LoopBuilder loop_builder(builder(), nullptr, nullptr, feedback_spec());
5929 LoopScope loop_scope(this, &loop_builder);
5930
5931 {
5932 BytecodeLabels after_switch(zone());
5933 BytecodeJumpTable* switch_jump_table =
5934 builder()->AllocateJumpTable(2, 1);
5935
5936 builder()
5937 ->LoadAccumulatorWithRegister(resume_mode)
5938 .SwitchOnSmiNoFeedback(switch_jump_table);
5939
5940 // Fallthrough to default case.
5941 // TODO(ignition): Add debug code to check that {resume_mode} really is
5942 // {JSGeneratorObject::kNext} in this case.
5943 static_assert(JSGeneratorObject::kNext == 0);
5944 {
5945 FeedbackSlot slot = feedback_spec()->AddCallICSlot();
5946 builder()->CallProperty(iterator.next(), iterator_and_input,
5947 feedback_index(slot));
5948 builder()->Jump(after_switch.New());
5949 }
5950
5951 static_assert(JSGeneratorObject::kReturn == 1);
5952 builder()->Bind(switch_jump_table, JSGeneratorObject::kReturn);
5953 {
5954 const AstRawString* return_string =
5955 ast_string_constants()->return_string();
5956 BytecodeLabels no_return_method(zone());
5957
5958 BuildCallIteratorMethod(iterator.object(), return_string,
5959 iterator_and_input, after_switch.New(),
5960 &no_return_method);
5961 no_return_method.Bind(builder());
5962 builder()->LoadAccumulatorWithRegister(input);
5963 if (iterator_type == IteratorType::kAsync) {
5964 // Await input.
5965 BuildAwait(expr->position());
5967 } else {
5969 }
5970 }
5971
5972 static_assert(JSGeneratorObject::kThrow == 2);
5973 builder()->Bind(switch_jump_table, JSGeneratorObject::kThrow);
5974 {
5975 const AstRawString* throw_string =
5976 ast_string_constants()->throw_string();
5977 BytecodeLabels no_throw_method(zone());
5978 BuildCallIteratorMethod(iterator.object(), throw_string,
5979 iterator_and_input, after_switch.New(),
5980 &no_throw_method);
5981
5982 // If there is no "throw" method, perform IteratorClose, and finally
5983 // throw a TypeError.
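 // This matches the spec: e.g. calling g.throw(err) while g is suspended in
 // `yield*` first closes the delegate iterator and only then surfaces the
 // TypeError for the missing throw method.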
5984 no_throw_method.Bind(builder());
5985 BuildIteratorClose(iterator, expr);
5986 builder()->CallRuntime(Runtime::kThrowThrowMethodMissing);
5987 }
5988
5989 after_switch.Bind(builder());
5990 }
5991
5992 if (iterator_type == IteratorType::kAsync) {
5993 // Await the result of the method invocation.
5994 BuildAwait(expr->position());
5995 }
5996
5997 // Check that output is an object.
5998 BytecodeLabel check_if_done;
5999 builder()
6000 ->StoreAccumulatorInRegister(output)
6001 .JumpIfJSReceiver(&check_if_done)
6002 .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, output);
6003
6004 builder()->Bind(&check_if_done);
6005 // Break once output.done is true.
6006 builder()->LoadNamedProperty(
6007 output, ast_string_constants()->done_string(),
6008 feedback_index(feedback_spec()->AddLoadICSlot()));
6009
6010 loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
6011
6012 // Suspend the current generator.
6013 if (iterator_type == IteratorType::kNormal) {
6014 builder()->LoadAccumulatorWithRegister(output);
6015 } else {
6016 RegisterAllocationScope inner_register_scope(this);
6017 DCHECK_EQ(iterator_type, IteratorType::kAsync);
6018 // If generatorKind is async, perform
6019 // AsyncGeneratorResolve(output.value, /* done = */ false), which will
6020 // resolve the current AsyncGeneratorRequest's promise with
6021 // output.value.
6022 builder()->LoadNamedProperty(
6023 output, ast_string_constants()->value_string(),
6024 feedback_index(feedback_spec()->AddLoadICSlot()));
6025
6026 RegisterList args = register_allocator()->NewRegisterList(3);
6027 builder()
6028 ->MoveRegister(generator_object(), args[0]) // generator
6029 .StoreAccumulatorInRegister(args[1]) // value
6030 .LoadFalse()
6031 .StoreAccumulatorInRegister(args[2]) // done
6032 .CallRuntime(Runtime::kInlineAsyncGeneratorResolve, args);
6033 }
6034
6035 BuildSuspendPoint(expr->position());
6036 builder()->StoreAccumulatorInRegister(input);
6037 builder()
6038 ->CallRuntime(Runtime::kInlineGeneratorGetResumeMode,
6039 generator_object())
6040 .StoreAccumulatorInRegister(resume_mode);
6041
6042 loop_builder.BindContinueTarget();
6043 }
6044 }
6045
6046 // Decide if we trigger a return or if the yield* expression should just
6047 // produce a value.
6048 BytecodeLabel completion_is_output_value;
6049 Register output_value = register_allocator()->NewRegister();
6050 builder()
6051 ->LoadNamedProperty(output, ast_string_constants()->value_string(),
6052 feedback_index(feedback_spec()->AddLoadICSlot()))
6053 .StoreAccumulatorInRegister(output_value)
6054 .LoadLiteral(Smi::FromInt(JSGeneratorObject::kReturn))
6055 .CompareReference(resume_mode)
6056 .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &completion_is_output_value)
6057 .LoadAccumulatorWithRegister(output_value);
6058 if (iterator_type == IteratorType::kAsync) {
6059 execution_control()->AsyncReturnAccumulator(expr->position());
6060 } else {
6061 execution_control()->ReturnAccumulator(expr->position());
6062 }
6063
6064 builder()->Bind(&completion_is_output_value);
6067 builder()->LoadAccumulatorWithRegister(output_value);
6068}
6069
6070void BytecodeGenerator::BuildAwait(int position) {
6071 // Rather than HandlerTable::UNCAUGHT, async functions use
6072 // HandlerTable::ASYNC_AWAIT to communicate that top-level exceptions are
6073 // transformed into promise rejections. This is necessary to prevent emitting
6074 // multiple debug events for the same uncaught exception. There is no point
6075 // in the body of an async function where catch prediction is
6076 // HandlerTable::UNCAUGHT.
6077 DCHECK(catch_prediction() != HandlerTable::UNCAUGHT ||
6078 info()->scope()->is_repl_mode_scope());
6079
6080 {
6081 // Await(operand) and suspend.
6082 RegisterAllocationScope register_scope(this);
6083
6084 Runtime::FunctionId await_intrinsic_id;
6085 if (IsAsyncGeneratorFunction(function_kind())) {
6086 await_intrinsic_id = Runtime::kInlineAsyncGeneratorAwait;
6087 } else {
6088 await_intrinsic_id = Runtime::kInlineAsyncFunctionAwait;
6089 }
6090 RegisterList args = register_allocator()->NewRegisterList(2);
6091 builder()
6092 ->MoveRegister(generator_object(), args[0])
6093 .StoreAccumulatorInRegister(args[1])
6094 .CallRuntime(await_intrinsic_id, args);
6095 }
6096
6097 BuildSuspendPoint(position);
6098
6099 Register input = register_allocator()->NewRegister();
6100 Register resume_mode = register_allocator()->NewRegister();
6101
6102 // Now dispatch on resume mode.
6103 BytecodeLabel resume_next;
6104 builder()
6105 ->StoreAccumulatorInRegister(input)
6106 .CallRuntime(Runtime::kInlineGeneratorGetResumeMode, generator_object())
6107 .StoreAccumulatorInRegister(resume_mode)
6108 .LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
6109 .CompareReference(resume_mode)
6110 .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &resume_next);
6111
6112 // Resume with "throw" completion (rethrow the received value).
6113 // TODO(leszeks): Add a debug-only check that the accumulator is
6114 // JSGeneratorObject::kThrow.
6115 builder()->ReThrow();
6116
6117 // Resume with next.
6118 builder()->Bind(&resume_next);
6119 builder()->LoadAccumulatorWithRegister(input);
6120}
6121
6122void BytecodeGenerator::VisitAwait(Await* expr) {
6123 builder()->SetExpressionPosition(expr);
6124 VisitForAccumulatorValue(expr->expression());
6125 BuildAwait(expr->position());
6128}
6129
6130void BytecodeGenerator::VisitThrow(Throw* expr) {
6131 AllocateBlockCoverageSlotIfEnabled(expr, SourceRangeKind::kContinuation);
6132 VisitForAccumulatorValue(expr->exception());
6133 builder()->SetExpressionPosition(expr);
6134 builder()->Throw();
6135}
6136
6137void BytecodeGenerator::VisitPropertyLoad(Register obj, Property* property) {
6138 if (property->is_optional_chain_link()) {
6140 int right_range =
6145 }
6146
6147 AssignType property_kind = Property::GetAssignType(property);
6148
6149 switch (property_kind) {
6150 case NON_PROPERTY:
6151 UNREACHABLE();
6152 case NAMED_PROPERTY: {
6153 builder()->SetExpressionPosition(property);
6154 const AstRawString* name =
6155 property->key()->AsLiteral()->AsRawPropertyName();
6156 BuildLoadNamedProperty(property->obj(), obj, name);
6157 break;
6158 }
6159 case KEYED_PROPERTY: {
6160 VisitForAccumulatorValue(property->key());
6161 builder()->SetExpressionPosition(property);
6162 BuildLoadKeyedProperty(obj, feedback_spec()->AddKeyedLoadICSlot());
6163 break;
6164 }
6165 case NAMED_SUPER_PROPERTY:
6166 VisitNamedSuperPropertyLoad(property, Register::invalid_value());
6167 break;
6168 case KEYED_SUPER_PROPERTY:
6169 VisitKeyedSuperPropertyLoad(property, Register::invalid_value());
6170 break;
6171 case PRIVATE_SETTER_ONLY: {
6172 BuildPrivateBrandCheck(property, obj);
6173 BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateGetterAccess,
6174 property);
6175 break;
6176 }
6177 case PRIVATE_GETTER_AND_SETTER: {
6179 Register key = VisitForRegisterValue(property->key());
6180 BuildPrivateBrandCheck(property, obj);
6181 BuildPrivateGetterAccess(obj, key);
6182 break;
6183 }
6184 case PRIVATE_METHOD: {
6185 BuildPrivateBrandCheck(property, obj);
6186 // In the case of private methods, property->key() is the function to be
6187 // loaded (stored in a context slot), so load this directly.
6188 VisitForAccumulatorValue(property->key());
6189 break;
6190 }
6191 case PRIVATE_DEBUG_DYNAMIC: {
6192 BuildPrivateDebugDynamicGet(property, obj);
6193 break;
6194 }
6195 }
6196}
6197
6199 Register obj) {
6200 RegisterAllocationScope scope(this);
6201 RegisterList args = register_allocator()->NewRegisterList(2);
6202
6203 Variable* private_name = property->key()->AsVariableProxy()->var();
6204 builder()
6205 ->MoveRegister(obj, args[0])
6206 .LoadLiteral(private_name->raw_name())
6207 .StoreAccumulatorInRegister(args[1])
6208 .CallRuntime(Runtime::kGetPrivateMember, args);
6209}
6210
6212 Register obj,
6213 Register value) {
6214 RegisterAllocationScope scope(this);
6215 RegisterList args = register_allocator()->NewRegisterList(3);
6216
6217 Variable* private_name = property->key()->AsVariableProxy()->var();
6218 builder()
6219 ->MoveRegister(obj, args[0])
6220 .LoadLiteral(private_name->raw_name())
6221 .StoreAccumulatorInRegister(args[1])
6222 .MoveRegister(value, args[2])
6223 .CallRuntime(Runtime::kSetPrivateMember, args);
6224}
6225
6226void BytecodeGenerator::BuildPrivateGetterAccess(Register object,
6227 Register accessor_pair) {
6228 RegisterAllocationScope scope(this);
6229 Register accessor = register_allocator()->NewRegister();
6230 RegisterList args = register_allocator()->NewRegisterList(1);
6231
6232 builder()
6233 ->CallRuntime(Runtime::kLoadPrivateGetter, accessor_pair)
6234 .StoreAccumulatorInRegister(accessor)
6235 .MoveRegister(object, args[0])
6236 .CallProperty(accessor, args,
6237 feedback_index(feedback_spec()->AddCallICSlot()));
6238}
6239
6240void BytecodeGenerator::BuildPrivateSetterAccess(Register object,
6241 Register accessor_pair,
6242 Register value) {
6243 RegisterAllocationScope scope(this);
6244 Register accessor = register_allocator()->NewRegister();
6245 RegisterList args = register_allocator()->NewRegisterList(2);
6246
6247 builder()
6248 ->CallRuntime(Runtime::kLoadPrivateSetter, accessor_pair)
6249 .StoreAccumulatorInRegister(accessor)
6250 .MoveRegister(object, args[0])
6251 .MoveRegister(value, args[1])
6252 .CallProperty(accessor, args,
6253 feedback_index(feedback_spec()->AddCallICSlot()));
6254}
6255
6256void BytecodeGenerator::BuildPrivateMethodIn(Variable* private_name,
6257 Expression* object_expression) {
6258 DCHECK(IsPrivateMethodOrAccessorVariableMode(private_name->mode()));
6259 ClassScope* scope = private_name->scope()->AsClassScope();
6260 if (private_name->is_static()) {
6261 // For static private methods, "#privatemethod in ..." only returns true for
6262 // the class constructor.
6263 if (scope->class_variable() == nullptr) {
6264 // Can only happen via the debugger. See comment in
6265 // BuildPrivateBrandCheck.
6266 RegisterAllocationScope register_scope(this);
6267 RegisterList args = register_allocator()->NewRegisterList(2);
6268 builder()
6269 ->LoadLiteral(Smi::FromEnum(
6270 MessageTemplate::
6271 kInvalidUnusedPrivateStaticMethodAccessedByDebugger))
6272 .StoreAccumulatorInRegister(args[0])
6273 .LoadLiteral(private_name->raw_name())
6274 .StoreAccumulatorInRegister(args[1])
6275 .CallRuntime(Runtime::kNewError, args)
6276 .Throw();
6277 } else {
6278 VisitForAccumulatorValue(object_expression);
6279 Register object = register_allocator()->NewRegister();
6280 builder()->StoreAccumulatorInRegister(object);
6281
6282 BytecodeLabel is_object;
6283 builder()->JumpIfJSReceiver(&is_object);
6284
6285 RegisterList args = register_allocator()->NewRegisterList(3);
6286 builder()
6287 ->StoreAccumulatorInRegister(args[2])
6288 .LoadLiteral(Smi::FromEnum(MessageTemplate::kInvalidInOperatorUse))
6289 .StoreAccumulatorInRegister(args[0])
6290 .LoadLiteral(private_name->raw_name())
6291 .StoreAccumulatorInRegister(args[1])
6292 .CallRuntime(Runtime::kNewTypeError, args)
6293 .Throw();
6294
6295 builder()->Bind(&is_object);
6296 BuildVariableLoad(scope->class_variable(),
6297 HoleCheckMode::kElided);
6298 builder()->CompareReference(object);
6299 }
6300 } else {
6301 DCHECK_NOT_NULL(scope->brand());
6302 BuildVariableLoad(scope->brand(), HoleCheckMode::kElided);
6303 Register brand = register_allocator()->NewRegister();
6304 builder()->StoreAccumulatorInRegister(brand);
6305
6306 VisitForAccumulatorValue(object_expression);
6307 builder()->SetExpressionPosition(object_expression);
6308
6309 FeedbackSlot slot = feedback_spec()->AddKeyedHasICSlot();
6310 builder()->CompareOperation(Token::kIn, brand, feedback_index(slot));
6311 execution_result()->SetResultIsBoolean();
6312 }
6313}
6314
6316 Register object) {
6317 Variable* private_name = property->key()->AsVariableProxy()->var();
6318 DCHECK(IsPrivateMethodOrAccessorVariableMode(private_name->mode()));
6319 ClassScope* scope = private_name->scope()->AsClassScope();
6320 builder()->SetExpressionPosition(property);
6321 if (private_name->is_static()) {
6322 // For static private methods, the only valid receiver is the class.
6323 // Load the class constructor.
6324 if (scope->class_variable() == nullptr) {
6325 // If the static private method has not been used in source
6326 // code (either explicitly or through the presence of eval), but is
6327 // accessed by the debugger at runtime, the reference to the class
6328 // variable is not available since it was not context-allocated.
6329 // Therefore we can't build a brand check, and instead throw a
6330 // ReferenceError as if the method had been optimized away.
6331 // TODO(joyee): get a reference to the class constructor through
6332 // something other than scope->class_variable() in this scenario.
6333 RegisterAllocationScope register_scope(this);
6334 RegisterList args = register_allocator()->NewRegisterList(2);
6335 builder()
6336 ->LoadLiteral(Smi::FromEnum(
6337 MessageTemplate::
6338 kInvalidUnusedPrivateStaticMethodAccessedByDebugger))
6339 .StoreAccumulatorInRegister(args[0])
6340 .LoadLiteral(private_name->raw_name())
6341 .StoreAccumulatorInRegister(args[1])
6342 .CallRuntime(Runtime::kNewError, args)
6343 .Throw();
6344 } else {
6345 BuildVariableLoad(scope->class_variable(), HoleCheckMode::kElided);
6346 builder()->CompareReference(object);
6347 BytecodeLabel return_check;
6348 builder()->JumpIfTrue(
6349 ToBooleanMode::kAlreadyBoolean, &return_check);
6350 const AstRawString* name = scope->class_variable()->raw_name();
6351 RegisterAllocationScope register_scope(this);
6352 RegisterList args = register_allocator()->NewRegisterList(2);
6353 builder()
6354 ->LoadLiteral(
6355 Smi::FromEnum(MessageTemplate::kInvalidPrivateBrandStatic))
6356 .StoreAccumulatorInRegister(args[0])
6357 .LoadLiteral(name)
6358 .StoreAccumulatorInRegister(args[1])
6359 .CallRuntime(Runtime::kNewTypeError, args)
6360 .Throw();
6361 builder()->Bind(&return_check);
6362 }
6363 } else {
6364 DCHECK_NOT_NULL(scope->brand());
6365 BuildVariableLoad(scope->brand(), HoleCheckMode::kElided);
6366 builder()->LoadKeyedProperty(
6367 object, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
6368 }
6369}
6370
6371void BytecodeGenerator::VisitPropertyLoadForRegister(Register obj,
6372 Property* expr,
6373 Register destination) {
6374 ValueResultScope value_execution_result(this);
6375 VisitPropertyLoad(obj, expr);
6376 builder()->StoreAccumulatorInRegister(destination);
6377}
6378
6379void BytecodeGenerator::VisitNamedSuperPropertyLoad(Property* property,
6380 Register opt_receiver_out) {
6381 RegisterAllocationScope register_scope(this);
6382 if (v8_flags.super_ic) {
6387 property->obj()->AsSuperPropertyReference()->home_object()->var(),
6388 HoleCheckMode::kElided);
6389 builder()->SetExpressionPosition(property);
6390 auto name = property->key()->AsLiteral()->AsRawPropertyName();
6393 if (opt_receiver_out.is_valid()) {
6394 builder()->MoveRegister(receiver, opt_receiver_out);
6395 }
6396 } else {
6401 property->obj()->AsSuperPropertyReference()->home_object()->var(),
6402 HoleCheckMode::kElided);
6403 builder()->StoreAccumulatorInRegister(args[1]);
6404 builder()->SetExpressionPosition(property);
6405 builder()
6406 ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
6408 .CallRuntime(Runtime::kLoadFromSuper, args);
6409
6410 if (opt_receiver_out.is_valid()) {
6411 builder()->MoveRegister(args[0], opt_receiver_out);
6412 }
6413 }
6414}
6415
6416void BytecodeGenerator::VisitKeyedSuperPropertyLoad(Property* property,
6417 Register opt_receiver_out) {
6418 RegisterAllocationScope register_scope(this);
6423 property->obj()->AsSuperPropertyReference()->home_object()->var(),
6424 HoleCheckMode::kElided);
6425 builder()->StoreAccumulatorInRegister(args[1]);
6426 VisitForRegisterValue(property->key(), args[2]);
6427
6428 builder()->SetExpressionPosition(property);
6429 builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, args);
6430
6431 if (opt_receiver_out.is_valid()) {
6432 builder()->MoveRegister(args[0], opt_receiver_out);
6433 }
6434}
6435
6436template <typename ExpressionFunc>
6437void BytecodeGenerator::BuildOptionalChain(ExpressionFunc expression_func) {
6438 BytecodeLabel done;
6439 OptionalChainNullLabelScope label_scope(this);
6440 // Use the same scope for the entire optional chain, as links earlier in the
6441 // chain dominate later links, linearly.
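 // E.g. in `a?.b?.c`, once `a?.b` has executed, any hole check performed
 // for `a` dominates the later links, so repeating it can be elided.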
6442 HoleCheckElisionScope elider(this);
6443 expression_func();
6444 builder()->Jump(&done);
6445 label_scope.labels()->Bind(builder());
6446 builder()->LoadUndefined();
6447 builder()->Bind(&done);
6448}
6449
6450void BytecodeGenerator::VisitOptionalChain(OptionalChain* expr) {
6451 BuildOptionalChain([&]() { VisitForAccumulatorValue(expr->expression()); });
6452}
6453
6454void BytecodeGenerator::VisitProperty(Property* expr) {
6455 AssignType property_kind = Property::GetAssignType(expr);
6456 if (property_kind != NAMED_SUPER_PROPERTY &&
6457 property_kind != KEYED_SUPER_PROPERTY) {
6458 Register obj = VisitForRegisterValue(expr->obj());
6459 VisitPropertyLoad(obj, expr);
6460 } else {
6461 VisitPropertyLoad(Register::invalid_value(), expr);
6462 }
6463}
6464
6465void BytecodeGenerator::VisitArguments(const ZonePtrList<Expression>* args,
6466 RegisterList* arg_regs) {
6467 // Visit arguments.
6468 builder()->UpdateMaxArguments(static_cast<uint16_t>(args->length()));
6469 for (int i = 0; i < static_cast<int>(args->length()); i++) {
6470 VisitAndPushIntoRegisterList(args->at(i), arg_regs);
6471 }
6472}
6473
6474void BytecodeGenerator::VisitCall(Call* expr) {
6475 Expression* callee_expr = expr->expression();
6476 Call::CallType call_type = expr->GetCallType();
6477
6478 if (call_type == Call::SUPER_CALL) {
6479 return VisitCallSuper(expr);
6480 }
6481
6482 // We compile the call differently depending on the presence of spreads and
6483 // their positions.
6484 //
6485 // If there is only one spread and it is the final argument, there is a
6486 // special CallWithSpread bytecode.
6487 //
6488 // If there is a non-final spread, we rewrite calls like
6489 // callee(1, ...x, 2)
6490 // to
6491 // %reflect_apply(callee, receiver, [1, ...x, 2])
6492 const Call::SpreadPosition spread_position = expr->spread_position();
6493
6494 // Grow the args list as we visit receiver / arguments to avoid allocating all
6495 // the registers up-front. Otherwise these registers are unavailable during
6496 // receiver / argument visiting and we can end up with memory leaks due to
6497 // registers keeping objects alive.
6498 RegisterList args = register_allocator()->NewGrowableRegisterList();
6499
6500 // The callee is the first register in args for ease of calling %reflect_apply
6501 // if we have a non-final spread. For all other cases it is popped from args
6502 // before emitting the call below.
6503 Register callee = register_allocator()->GrowRegisterList(&args);
6504
6505 bool implicit_undefined_receiver = false;
6506
6507 // TODO(petermarshall): We have a lot of call bytecodes that are very similar,
6508 // see if we can reduce the number by adding a separate argument which
6509 // specifies the call type (e.g., property, spread, tailcall, etc.).
6510
6511 // Prepare the callee and the receiver for the function call. This depends on
6512 // the semantics of the underlying call type.
6513 switch (call_type) {
6514 case Call::NAMED_PROPERTY_CALL:
6515 case Call::KEYED_PROPERTY_CALL:
6516 case Call::PRIVATE_CALL: {
6517 Property* property = callee_expr->AsProperty();
6518 VisitAndPushIntoRegisterList(property->obj(), &args);
6519 VisitPropertyLoadForRegister(args.last_register(), property, callee);
6520 break;
6521 }
6522 case Call::GLOBAL_CALL: {
6523 // Receiver is undefined for global calls.
6524 if (spread_position == Call::kNoSpread) {
6525 implicit_undefined_receiver = true;
6526 } else {
6527 // TODO(leszeks): There's no special bytecode for tail calls or spread
6528 // calls with an undefined receiver, so just push undefined ourselves.
6529 BuildPushUndefinedIntoRegisterList(&args);
6530 }
6531 // Load callee as a global variable.
6532 VariableProxy* proxy = callee_expr->AsVariableProxy();
6533 BuildVariableLoadForAccumulatorValue(proxy->var(),
6534 proxy->hole_check_mode());
6535 builder()->StoreAccumulatorInRegister(callee);
6536 break;
6537 }
6538 case Call::WITH_CALL: {
6539 Register receiver = register_allocator()->GrowRegisterList(&args);
6540 DCHECK(callee_expr->AsVariableProxy()->var()->IsLookupSlot());
6541 {
6542 RegisterAllocationScope inner_register_scope(this);
6543 Register name = register_allocator()->NewRegister();
6544
6545 // Call %LoadLookupSlotForCall to get the callee and receiver.
6546 RegisterList result_pair = register_allocator()->NewRegisterList(2);
6547 Variable* variable = callee_expr->AsVariableProxy()->var();
6548 builder()
6549 ->LoadLiteral(variable->raw_name())
6550 .StoreAccumulatorInRegister(name)
6551 .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, name,
6552 result_pair)
6553 .MoveRegister(result_pair[0], callee)
6554 .MoveRegister(result_pair[1], receiver);
6555 }
6556 break;
6557 }
6558 case Call::OTHER_CALL: {
6559 // Receiver is undefined for other calls.
6560 if (spread_position == Call::kNoSpread) {
6561 implicit_undefined_receiver = true;
6562 } else {
6563 // TODO(leszeks): There's no special bytecode for tail calls or spread
6564 // calls with an undefined receiver, so just push undefined ourselves.
6565 BuildPushUndefinedIntoRegisterList(&args);
6566 }
6567 VisitForRegisterValue(callee_expr, callee);
6568 break;
6569 }
6570 case Call::NAMED_SUPER_PROPERTY_CALL: {
6571 Register receiver = register_allocator()->GrowRegisterList(&args);
6572 Property* property = callee_expr->AsProperty();
6573 VisitNamedSuperPropertyLoad(property, receiver);
6574 builder()->StoreAccumulatorInRegister(callee);
6575 break;
6576 }
6577 case Call::KEYED_SUPER_PROPERTY_CALL: {
6578 Register receiver = register_allocator()->GrowRegisterList(&args);
6579 Property* property = callee_expr->AsProperty();
6580 VisitKeyedSuperPropertyLoad(property, receiver);
6581 builder()->StoreAccumulatorInRegister(callee);
6582 break;
6583 }
6587 OptionalChain* chain = callee_expr->AsOptionalChain();
6588 Property* property = chain->expression()->AsProperty();
6589 BuildOptionalChain([&]() {
6590 VisitAndPushIntoRegisterList(property->obj(), &args);
6591 VisitPropertyLoad(args.last_register(), property);
6592 });
6593 builder()->StoreAccumulatorInRegister(callee);
6594 break;
6595 }
6596 case Call::SUPER_CALL:
6597 UNREACHABLE();
6598 }
6599
6600 if (expr->is_optional_chain_link()) {
6602 int right_range =
6603 AllocateBlockCoverageSlotIfEnabled(expr, SourceRangeKind::kRight);
6607 }
6608
6609 int receiver_arg_count = -1;
6610 if (spread_position == Call::kHasNonFinalSpread) {
6611 // If we're building %reflect_apply, build the array literal and put it in
6612 // the 3rd argument.
6613 DCHECK(!implicit_undefined_receiver);
6614 DCHECK_EQ(args.register_count(), 2);
6615 BuildCreateArrayLiteral(expr->arguments(), nullptr);
6616 builder()->StoreAccumulatorInRegister(
6617 register_allocator()->GrowRegisterList(&args));
6618 } else {
6619 // If we're not building %reflect_apply and don't need to build an array
6620 // literal, pop the callee and evaluate all arguments to the function call
6621 // and store in sequential args registers.
6622 args = args.PopLeft();
6623 VisitArguments(expr->arguments(), &args);
6624 receiver_arg_count = implicit_undefined_receiver ? 0 : 1;
6625 CHECK_EQ(receiver_arg_count + expr->arguments()->length(),
6626 args.register_count());
6627 }
6628
6629 // Resolve callee for a potential direct eval call. This block will mutate the
6630 // callee value.
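 // E.g. a call written as `eval(src)` cannot be resolved statically; the
 // runtime decides whether it is a direct eval, and the callee register is
 // overwritten with the function that should actually be invoked.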
6631 if (expr->is_possibly_eval() && expr->arguments()->length() > 0) {
6632 RegisterAllocationScope inner_register_scope(this);
6633 RegisterList runtime_call_args = register_allocator()->NewRegisterList(6);
6634 // Set up arguments for ResolvePossiblyDirectEval by copying the callee,
6635 // source string, and function closure, and loading the language mode and
6636 // position.
6637
6638 // Move the first arg.
6639 if (spread_position == Call::kHasNonFinalSpread) {
6640 int feedback_slot_index =
6641 feedback_index(feedback_spec()->AddKeyedLoadICSlot());
6642 Register args_array = args[2];
6643 builder()
6645 .LoadKeyedProperty(args_array, feedback_slot_index)
6646 .StoreAccumulatorInRegister(runtime_call_args[1]);
6647 } else {
6648 // FIXME(v8:5690): Support final spreads for eval.
6649 DCHECK_GE(receiver_arg_count, 0);
6650 builder()->MoveRegister(args[receiver_arg_count], runtime_call_args[1]);
6651 }
6652 Scope* scope_with_context = current_scope();
6653 if (!scope_with_context->NeedsContext()) {
6654 scope_with_context = scope_with_context->GetOuterScopeWithContext();
6655 }
6656 if (scope_with_context) {
6657 eval_calls_.emplace_back(expr, scope_with_context);
6658 }
6659 builder()
6660 ->MoveRegister(callee, runtime_call_args[0])
6661 .MoveRegister(Register::function_closure(), runtime_call_args[2])
6663 .StoreAccumulatorInRegister(runtime_call_args[3])
6665 .StoreAccumulatorInRegister(runtime_call_args[4])
6667 .StoreAccumulatorInRegister(runtime_call_args[5]);
6668
6669 // Call ResolvePossiblyDirectEval and modify the callee.
6670 builder()
6671 ->CallRuntime(Runtime::kResolvePossiblyDirectEval, runtime_call_args)
6672 .StoreAccumulatorInRegister(callee);
6673 }
6674
6675 builder()->SetExpressionPosition(expr);
6676
6677 if (spread_position == Call::kHasFinalSpread) {
6678 DCHECK(!implicit_undefined_receiver);
6679 builder()->CallWithSpread(callee, args,
6680 feedback_index(feedback_spec()->AddCallICSlot()));
6681 } else if (spread_position == Call::kHasNonFinalSpread) {
6682 builder()->CallJSRuntime(Context::REFLECT_APPLY_INDEX, args);
6683 } else if (call_type == Call::NAMED_PROPERTY_CALL ||
6684 call_type == Call::KEYED_PROPERTY_CALL) {
6685 DCHECK(!implicit_undefined_receiver);
6686 builder()->CallProperty(callee, args,
6687 feedback_index(feedback_spec()->AddCallICSlot()));
6688 } else if (implicit_undefined_receiver) {
6689 builder()->CallUndefinedReceiver(
6690 callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
6691 } else {
6692 builder()->CallAnyReceiver(
6693 callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
6694 }
6695}
6696
6697void BytecodeGenerator::VisitCallSuper(Call* expr) {
6698 RegisterAllocationScope register_scope(this);
6699 SuperCallReference* super = expr->expression()->AsSuperCallReference();
6700 const ZonePtrList<Expression>* args = expr->arguments();
6701
6702 // We compile the super call differently depending on the presence of spreads
6703 // and their positions.
6704 //
6705 // If there is only one spread and it is the final argument, there is a
6706 // special ConstructWithSpread bytecode.
6707 //
6708 // If there is a non-final spread, we rewrite something like
6709 // super(1, ...x, 2)
6710 // to
6711 // %reflect_construct(constructor, [1, ...x, 2], new_target)
6712 //
6713 // That is, we implement (non-last-arg) spreads in super calls via our
6714 // mechanism for spreads in array literals.
6715 const Call::SpreadPosition spread_position = expr->spread_position();
6716
6717 // Prepare the constructor for the super call.
6718 Register this_function = VisitForRegisterValue(super->this_function_var());
6719 // This register will initially hold the constructor, then afterward it will
6720 // hold the instance -- the lifetimes of the two don't need to overlap, and
6721 // this way FindNonDefaultConstructorOrConstruct can choose to write either
6722 // the instance or the constructor into the same register.
6723 Register constructor_then_instance = register_allocator()->NewRegister();
6724
6725 BytecodeLabel super_ctor_call_done;
6726
6727 if (spread_position == Call::kHasNonFinalSpread) {
6728 RegisterAllocationScope inner_register_scope(this);
6729 RegisterList construct_args(constructor_then_instance);
6730 const Register& constructor = constructor_then_instance;
6731
6732 // Generate the array containing all arguments.
6733 BuildCreateArrayLiteral(args, nullptr);
6734 Register args_array =
6735 register_allocator()->GrowRegisterList(&construct_args);
6736 builder()->StoreAccumulatorInRegister(args_array);
6737
6739 register_allocator()->GrowRegisterList(&construct_args);
6740 VisitForRegisterValue(super->new_target_var(), new_target);
6741
6742 BuildGetAndCheckSuperConstructor(this_function, new_target, constructor,
6743 &super_ctor_call_done);
6744
6745 // Now pass that array to %reflect_construct.
6746 builder()->CallJSRuntime(Context::REFLECT_CONSTRUCT_INDEX, construct_args);
6747 } else {
6748 RegisterAllocationScope inner_register_scope(this);
6749 RegisterList args_regs = register_allocator()->NewGrowableRegisterList();
6750 VisitArguments(args, &args_regs);
6751
6752 // The new target is loaded into the new_target register from the
6753 // {new.target} variable.
6754 Register new_target = register_allocator()->NewRegister();
6755 VisitForRegisterValue(super->new_target_var(), new_target);
6756
6757 const Register& constructor = constructor_then_instance;
6758 BuildGetAndCheckSuperConstructor(this_function, new_target, constructor,
6759 &super_ctor_call_done);
6760
6761 builder()->LoadAccumulatorWithRegister(new_target);
6762 builder()->SetExpressionPosition(expr);
6763
6764 int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
6765
6766 if (spread_position == Call::kHasFinalSpread) {
6767 builder()->ConstructWithSpread(constructor, args_regs,
6768 feedback_slot_index);
6769 } else {
6770 DCHECK_EQ(spread_position, Call::kNoSpread);
6771 // Call construct.
6772 // TODO(turbofan): For now we do gather feedback on super constructor
6773 // calls, utilizing the existing machinery to inline the actual call
6774 // target and the JSCreate for the implicit receiver allocation. This
6775 // is not an ideal solution for super constructor calls, but it gets
6776 // the job done for now. In the long run we might want to revisit this
6777 // and come up with a better way.
6778 builder()->Construct(constructor, args_regs, feedback_slot_index);
6779 }
6780 }
6781
6782 // From here onwards, constructor_then_instance will hold the instance.
6783 const Register& instance = constructor_then_instance;
6784 builder()->StoreAccumulatorInRegister(instance);
6785 builder()->Bind(&super_ctor_call_done);
6786
6787 BuildInstanceInitializationAfterSuperCall(this_function, instance);
6788 builder()->LoadAccumulatorWithRegister(instance);
6789}
6790
6791void BytecodeGenerator::BuildInstanceInitializationAfterSuperCall(
6792 Register this_function, Register instance) {
6793 // Explicit calls to the super constructor using super() perform an
6794 // implicit binding assignment to the 'this' variable.
6795 //
6796 // Default constructors don't have to do the assignment because
6797 // 'this' isn't accessed in default constructors.
6798 if (!IsDefaultConstructor(info()->literal()->kind())) {
6799 Variable* var = info()->scope()->GetReceiverScope()->receiver();
6800 builder()->LoadAccumulatorWithRegister(instance);
6801 BuildVariableAssignment(var, Token::kInit, HoleCheckMode::kRequired);
6802 }
6803
6804 // The constructor scope always needs ScopeInfo, so we are certain that
6805 // the first constructor scope found in the outer scope chain is the
6806 // scope we are looking for in this super() call.
6807 // Note that this doesn't necessarily mean that the constructor needs
6808 // a context, if it doesn't this would get handled specially in
6809 // BuildPrivateBrandInitialization().
6810 DeclarationScope* constructor_scope = info()->scope()->GetConstructorScope();
6811
6812 // We can rely on the class_scope_has_private_brand bit to tell if the
6813 // constructor needs private brand initialization, and if that's
6814 // the case we are certain that its outer class scope requires a context to
6815 // keep the brand variable, so we can just get the brand variable
6816 // from the outer scope.
6817 if (constructor_scope->class_scope_has_private_brand()) {
6818 DCHECK(constructor_scope->outer_scope()->is_class_scope());
6819 ClassScope* class_scope = constructor_scope->outer_scope()->AsClassScope();
6820 DCHECK_NOT_NULL(class_scope->brand());
6821 Variable* brand = class_scope->brand();
6822 BuildPrivateBrandInitialization(instance, brand);
6823 }
6824
6825 // The derived constructor always has the correct bit set, so we
6826 // don't emit code to load and call the initializer when it is not
6827 // required.
6828 //
6829 // For the arrow function or eval case, we always emit code to load
6830 // and call the initializer.
6831 //
6832 // TODO(gsathya): In the future, we could tag nested arrow functions
6833 // or eval with the correct bit so that we do the load conditionally
6834 // if required.
6837 BuildInstanceMemberInitialization(this_function, instance);
6838 }
6839}
6840
6842 Register this_function, Register new_target, Register constructor,
6843 BytecodeLabel* super_ctor_call_done) {
6844 bool omit_super_ctor = v8_flags.omit_default_ctors &&
6845 IsDerivedConstructor(info()->literal()->kind());
6846
6847 if (omit_super_ctor) {
6848 BuildSuperCallOptimization(this_function, new_target, constructor,
6849 super_ctor_call_done);
6850 } else {
6851 builder()
6852 ->LoadAccumulatorWithRegister(this_function)
6853 .GetSuperConstructor(constructor);
6854 }
6855
6856 // Check if the constructor is in fact a constructor.
6857 builder()->ThrowIfNotSuperConstructor(constructor);
6858}
6859
6861 Register this_function, Register new_target,
6862 Register constructor_then_instance, BytecodeLabel* super_ctor_call_done) {
6863 DCHECK(v8_flags.omit_default_ctors);
6864 RegisterList output = register_allocator()->NewRegisterList(2);
6865 builder()->FindNonDefaultConstructorOrConstruct(this_function, new_target,
6866 output);
6867 builder()->MoveRegister(output[1], constructor_then_instance);
6868 builder()->LoadAccumulatorWithRegister(output[0]).JumpIfTrue(
6869 ToBooleanMode::kAlreadyBoolean, super_ctor_call_done);
6870}
6871
6872void BytecodeGenerator::VisitCallNew(CallNew* expr) {
6873 RegisterList args = register_allocator()->NewGrowableRegisterList();
6874
6875 // Load the constructor. It's in the first register in args for ease of
6876 // calling %reflect_construct if we have a non-final spread. For all other
6877 // cases it is popped before emitting the construct below.
6878 VisitAndPushIntoRegisterList(expr->expression(), &args);
6879
6880 // We compile the new differently depending on the presence of spreads and
6881 // their positions.
6882 //
6883 // If there is only one spread and it is the final argument, there is a
6884 // special ConstructWithSpread bytecode.
6885 //
6886 // If there is a non-final spread, we rewrite calls like
6887 // new ctor(1, ...x, 2)
6888 // to
6889 // %reflect_construct(ctor, [1, ...x, 2])
6890 const CallNew::SpreadPosition spread_position = expr->spread_position();
6891
6892 if (spread_position == CallNew::kHasNonFinalSpread) {
6893 BuildCreateArrayLiteral(expr->arguments(), nullptr);
6895 builder()
6896 ->StoreAccumulatorInRegister(
6897 register_allocator()->GrowRegisterList(&args))
6898 .CallJSRuntime(Context::REFLECT_CONSTRUCT_INDEX, args);
6899 return;
6900 }
6901
6902 Register constructor = args.first_register();
6903 args = args.PopLeft();
6904 VisitArguments(expr->arguments(), &args);
6905
6906 // The accumulator holds the new target, which is the same as the
6907 // constructor for CallNew.
6909 builder()->LoadAccumulatorWithRegister(constructor);
6910
6911 int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
6912 if (spread_position == CallNew::kHasFinalSpread) {
6913 builder()->ConstructWithSpread(constructor, args, feedback_slot_index);
6914 } else {
6915 DCHECK_EQ(spread_position, CallNew::kNoSpread);
6916 builder()->Construct(constructor, args, feedback_slot_index);
6917 }
6918}
6919
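// Handles the synthetic forwarding super call of a default derived
// constructor, e.g. the implicit `super(...args)` in
// `class B extends A {}`, where all incoming arguments are forwarded
// unchanged to the super constructor.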
6920void BytecodeGenerator::VisitSuperCallForwardArgs(SuperCallForwardArgs* expr) {
6921 RegisterAllocationScope register_scope(this);
6922
6923 SuperCallReference* super = expr->expression();
6924 Register this_function = VisitForRegisterValue(super->this_function_var());
6925 Register new_target = VisitForRegisterValue(super->new_target_var());
6926
6927 // This register initially holds the constructor, then the instance.
6928 Register constructor_then_instance = register_allocator()->NewRegister();
6929
6930 BytecodeLabel super_ctor_call_done;
6931
6932 {
6933 const Register& constructor = constructor_then_instance;
6934 BuildGetAndCheckSuperConstructor(this_function, new_target, constructor,
6935 &super_ctor_call_done);
6936
6939 int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
6940
6941 builder()->ConstructForwardAllArgs(constructor, feedback_slot_index);
6942 }
6943
6944 // From here onwards, constructor_then_instance holds the instance.
6945 const Register& instance = constructor_then_instance;
6946 builder()->StoreAccumulatorInRegister(instance);
6947 builder()->Bind(&super_ctor_call_done);
6948
6949 BuildInstanceInitializationAfterSuperCall(this_function, instance);
6950 builder()->LoadAccumulatorWithRegister(instance);
6951}
6952
6953void BytecodeGenerator::VisitCallRuntime(CallRuntime* expr) {
6954 // Evaluate all arguments to the runtime call.
6955 RegisterList args = register_allocator()->NewGrowableRegisterList();
6956 VisitArguments(expr->arguments(), &args);
6957 Runtime::FunctionId function_id = expr->function()->function_id;
6958 builder()->CallRuntime(function_id, args);
6959}
6960
6965
6966void BytecodeGenerator::VisitForTypeOfValue(Expression* expr) {
6967 if (expr->IsVariableProxy()) {
6968 // Typeof does not throw a reference error on global variables, hence we
6969 // perform a non-contextual load in case the operand is a variable proxy.
6970 VariableProxy* proxy = expr->AsVariableProxy();
6971 BuildVariableLoadForAccumulatorValue(proxy->var(), proxy->hole_check_mode(),
6972 TypeofMode::kInside);
6973 } else {
6974 VisitForAccumulatorValue(expr);
6975 }
6976}
6977
6983
6984void BytecodeGenerator::VisitNot(UnaryOperation* expr) {
6985 if (execution_result()->IsEffect()) {
6986 VisitForEffect(expr->expression());
6987 } else if (execution_result()->IsTest()) {
6988 // No actual logical negation happening, we just swap the control flow, by
6989 // swapping the target labels and the fallthrough branch, and visit in the
6990 // same test result context.
6991 TestResultScope* test_result = execution_result()->AsTest();
6992 test_result->InvertControlFlow();
6993 VisitInSameTestExecutionScope(expr->expression());
6994 } else {
6995 UnaryOperation* unary_op = expr->expression()->AsUnaryOperation();
6996 if (unary_op && unary_op->op() == Token::kNot) {
6997 // Shortcut repeated nots, to capture the `!!foo` pattern for converting
6998 // expressions to booleans.
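 // E.g. `!!foo` needs only a single ToBoolean conversion of foo; the two
 // negations cancel each other out.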
6999 TypeHint type_hint = VisitForAccumulatorValue(unary_op->expression());
7000 builder()->ToBoolean(ToBooleanModeFromTypeHint(type_hint));
7001 } else {
7002 TypeHint type_hint = VisitForAccumulatorValue(expr->expression());
7003 builder()->LogicalNot(ToBooleanModeFromTypeHint(type_hint));
7004 }
7005 // Always returns a boolean value.
7006 execution_result()->SetResultIsBoolean();
7007 }
7008}
7009
7010void BytecodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
7011 switch (expr->op()) {
7012 case Token::kNot:
7013 VisitNot(expr);
7014 break;
7015 case Token::kTypeOf:
7016 VisitTypeOf(expr);
7017 break;
7018 case Token::kVoid:
7019 VisitVoid(expr);
7020 break;
7021 case Token::kDelete:
7022 VisitDelete(expr);
7023 break;
7024 case Token::kAdd:
7025 case Token::kSub:
7026 case Token::kBitNot:
7027 VisitForAccumulatorValue(expr->expression());
7028 builder()->SetExpressionPosition(expr);
7029 builder()->UnaryOperation(
7030 expr->op(), feedback_index(feedback_spec()->AddBinaryOpICSlot()));
7031 break;
7032 default:
7033 UNREACHABLE();
7034 }
7035}
7036
7037void BytecodeGenerator::VisitDelete(UnaryOperation* unary) {
7038 Expression* expr = unary->expression();
7039 if (expr->IsProperty()) {
7040 // Delete of an object property is allowed both in sloppy
7041 // and strict modes.
7042 Property* property = expr->AsProperty();
7043 DCHECK(!property->IsPrivateReference());
7044 if (property->IsSuperAccess()) {
7045 // Delete of super access is not allowed.
7046 VisitForEffect(property->key());
7047 builder()->CallRuntime(Runtime::kThrowUnsupportedSuperError);
7048 } else {
7049 Register object = VisitForRegisterValue(property->obj());
7050 VisitForAccumulatorValue(property->key());
7051 builder()->Delete(object, language_mode());
7052 }
7053 } else if (expr->IsOptionalChain()) {
7054 Expression* expr_inner = expr->AsOptionalChain()->expression();
7055 if (expr_inner->IsProperty()) {
7056 Property* property = expr_inner->AsProperty();
7057 DCHECK(!property->IsPrivateReference());
7058 BytecodeLabel done;
7059 OptionalChainNullLabelScope label_scope(this);
7060 VisitForAccumulatorValue(property->obj());
7061 if (property->is_optional_chain_link()) {
7062 int right_range = AllocateBlockCoverageSlotIfEnabled(
7063 property, SourceRangeKind::kRight);
7064 builder()->JumpIfUndefinedOrNull(label_scope.labels()->New());
7065 BuildIncrementBlockCoverageCounterIfEnabled(right_range);
7066 }
7067 Register object = register_allocator()->NewRegister();
7068 builder()->StoreAccumulatorInRegister(object);
7069 if (property->is_optional_chain_link()) {
7070 VisitInHoleCheckElisionScopeForAccumulatorValue(property->key());
7071 } else {
7072 VisitForAccumulatorValue(property->key());
7073 }
7074 builder()->Delete(object, language_mode());
7075 builder()->Jump(&done);
7076 label_scope.labels()->Bind(builder());
7077 builder()->LoadTrue();
7078 builder()->Bind(&done);
7079 } else {
7080 VisitForEffect(expr);
7081 builder()->LoadTrue();
7082 }
7083 } else if (expr->IsVariableProxy() &&
7084 !expr->AsVariableProxy()->is_new_target()) {
7085 // Delete of an unqualified identifier is allowed in sloppy mode but is
7086 // not allowed in strict mode.
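 // E.g. in sloppy mode `delete someGlobal` removes the property from the
 // global object and yields true, while deleting a local variable is a
 // no-op that yields false.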
7088 Variable* variable = expr->AsVariableProxy()->var();
7089 switch (variable->location()) {
7090 case VariableLocation::PARAMETER:
7091 case VariableLocation::LOCAL:
7092 case VariableLocation::CONTEXT:
7093 case VariableLocation::REPL_GLOBAL: {
7094 // Deleting local var/let/const, context variables, and arguments
7095 // does not have any effect.
7096 builder()->LoadFalse();
7097 break;
7098 }
7099 case VariableLocation::UNALLOCATED: {
7100 // TODO(adamk): Falling through to the runtime results in correct
7101 // behavior, but does unnecessary context-walking (since scope
7102 // analysis has already proven that the variable doesn't exist in
7103 // any non-global scope). Consider adding a DeleteGlobal bytecode
7104 // that knows how to deal with ScriptContexts as well as global
7105 // object properties.
7106 case VariableLocation::LOOKUP: {
7107 Register name_reg = register_allocator()->NewRegister();
7108 builder()
7109 ->LoadLiteral(variable->raw_name())
7110 .StoreAccumulatorInRegister(name_reg)
7111 .CallRuntime(Runtime::kDeleteLookupSlot, name_reg);
7112 break;
7113 }
7114 case VariableLocation::MODULE:
7115 // Modules are always in strict mode and unqualified identifiers are not
7116 // allowed in strict mode.
7117 UNREACHABLE();
7118 }
7119 } else {
7120 // Delete of an unresolvable reference, new.target, and this returns true.
7121 VisitForEffect(expr);
7122 builder()->LoadTrue();
7123 }
7124}
7125
7126void BytecodeGenerator::VisitCountOperation(CountOperation* expr) {
7127 DCHECK(expr->expression()->IsValidReferenceExpression());
7128
7129 // Left-hand side can only be a property, a global or a variable slot.
7130 Property* property = expr->expression()->AsProperty();
7131 AssignType assign_type = Property::GetAssignType(property);
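 // E.g. `x++` is NON_PROPERTY, `obj.a--` is NAMED_PROPERTY, `++obj[k]` is
 // KEYED_PROPERTY, and `obj.#f++` dispatches to one of the PRIVATE_* cases.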
7132
7133 bool is_postfix = expr->is_postfix() && !execution_result()->IsEffect();
7134
7135 // Evaluate LHS expression and get old value.
7136 Register object, key, old_value;
7137 RegisterList super_property_args;
7138 const AstRawString* name;
7139 switch (assign_type) {
7140 case NON_PROPERTY: {
7141 VariableProxy* proxy = expr->expression()->AsVariableProxy();
7142 BuildVariableLoadForAccumulatorValue(proxy->var(),
7143 proxy->hole_check_mode());
7144 break;
7145 }
7146 case NAMED_PROPERTY: {
7147 object = VisitForRegisterValue(property->obj());
7148 name = property->key()->AsLiteral()->AsRawPropertyName();
7149 builder()->LoadNamedProperty(
7150 object, name,
7151 feedback_index(GetCachedLoadICSlot(property->obj(), name)));
7152 break;
7153 }
7154 case KEYED_PROPERTY: {
7155 object = VisitForRegisterValue(property->obj());
7156 // Use VisitForAccumulatorValue here since we need the key in the
7157 // accumulator for the LoadKeyedProperty below.
7158 key = register_allocator()->NewRegister();
7159 VisitForAccumulatorValue(property->key());
7160 builder()->StoreAccumulatorInRegister(key).LoadKeyedProperty(
7161 object, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
7162 break;
7163 }
7164 case NAMED_SUPER_PROPERTY: {
7165 super_property_args = register_allocator()->NewRegisterList(4);
7166 RegisterList load_super_args = super_property_args.Truncate(3);
7167 BuildThisVariableLoad();
7168 builder()->StoreAccumulatorInRegister(load_super_args[0]);
7169 BuildVariableLoad(
7170 property->obj()->AsSuperPropertyReference()->home_object()->var(),
7171 HoleCheckMode::kElided);
7172 builder()->StoreAccumulatorInRegister(load_super_args[1]);
7173 builder()
7174 ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
7175 .StoreAccumulatorInRegister(load_super_args[2])
7176 .CallRuntime(Runtime::kLoadFromSuper, load_super_args);
7177 break;
7178 }
7179 case KEYED_SUPER_PROPERTY: {
7180 super_property_args = register_allocator()->NewRegisterList(4);
7181 RegisterList load_super_args = super_property_args.Truncate(3);
7182 BuildThisVariableLoad();
7183 builder()->StoreAccumulatorInRegister(load_super_args[0]);
7184 BuildVariableLoad(
7185 property->obj()->AsSuperPropertyReference()->home_object()->var(),
7186 HoleCheckMode::kElided);
7187 builder()->StoreAccumulatorInRegister(load_super_args[1]);
7188 VisitForRegisterValue(property->key(), load_super_args[2]);
7189 builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, load_super_args);
7190 break;
7191 }
7192 case PRIVATE_METHOD: {
7193 object = VisitForRegisterValue(property->obj());
7194 BuildPrivateBrandCheck(property, object);
7195 BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateMethodWrite,
7196 property);
7197 return;
7198 }
7199 case PRIVATE_GETTER_ONLY: {
7200 object = VisitForRegisterValue(property->obj());
7201 BuildPrivateBrandCheck(property, object);
7202 BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateSetterAccess,
7203 property);
7204 return;
7205 }
7206 case PRIVATE_SETTER_ONLY: {
7207 object = VisitForRegisterValue(property->obj());
7208 BuildPrivateBrandCheck(property, object);
7209 BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateGetterAccess,
7210 property);
7211 return;
7212 }
7213 case PRIVATE_GETTER_AND_SETTER: {
7214 object = VisitForRegisterValue(property->obj());
7215 key = VisitForRegisterValue(property->key());
7216 BuildPrivateBrandCheck(property, object);
7217 BuildPrivateGetterAccess(object, key);
7218 break;
7219 }
7220 case PRIVATE_DEBUG_DYNAMIC: {
7221 object = VisitForRegisterValue(property->obj());
7222 BuildPrivateDebugDynamicGet(property, object);
7223 break;
7224 }
7225 }
7226
7227 // Save result for postfix expressions.
7228 FeedbackSlot count_slot = feedback_spec()->AddBinaryOpICSlot();
7229 if (is_postfix) {
7230 old_value = register_allocator()->NewRegister();
7231 // Convert old value into a number before saving it.
7232 // TODO(ignition): Think about adding proper PostInc/PostDec bytecodes
7233 // instead of this ToNumeric + Inc/Dec dance.
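 // E.g. for `y = x++`, x is first converted with ToNumeric and that numeric
 // old value is saved, so y receives it rather than the incremented value.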
7234 builder()
7235 ->ToNumeric(feedback_index(count_slot))
7236 .StoreAccumulatorInRegister(old_value);
7237 }
7238
7239 // Perform +1/-1 operation.
7240 builder()->UnaryOperation(expr->op(), feedback_index(count_slot));
7241
7242 // Store the value.
7243 builder()->SetExpressionPosition(expr);
7244 switch (assign_type) {
7245 case NON_PROPERTY: {
7246 VariableProxy* proxy = expr->expression()->AsVariableProxy();
7247 BuildVariableAssignment(proxy->var(), expr->op(),
7248 proxy->hole_check_mode());
7249 break;
7250 }
7251 case NAMED_PROPERTY: {
7252 FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
7253 Register value;
7254 if (!execution_result()->IsEffect()) {
7255 value = register_allocator()->NewRegister();
7256 builder()->StoreAccumulatorInRegister(value);
7257 }
7258 builder()->SetNamedProperty(object, name, feedback_index(slot),
7259 language_mode());
7260 if (!execution_result()->IsEffect()) {
7261 builder()->LoadAccumulatorWithRegister(value);
7262 }
7263 break;
7264 }
7265 case KEYED_PROPERTY: {
7266 FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
7267 Register value;
7268 if (!execution_result()->IsEffect()) {
7269 value = register_allocator()->NewRegister();
7270 builder()->StoreAccumulatorInRegister(value);
7271 }
7272 builder()->SetKeyedProperty(object, key, feedback_index(slot),
7273 language_mode());
7274 if (!execution_result()->IsEffect()) {
7275 builder()->LoadAccumulatorWithRegister(value);
7276 }
7277 break;
7278 }
7279 case NAMED_SUPER_PROPERTY: {
7280 builder()
7281 ->StoreAccumulatorInRegister(super_property_args[3])
7282 .CallRuntime(Runtime::kStoreToSuper, super_property_args);
7283 break;
7284 }
7285 case KEYED_SUPER_PROPERTY: {
7286 builder()
7287 ->StoreAccumulatorInRegister(super_property_args[3])
7288 .CallRuntime(Runtime::kStoreKeyedToSuper, super_property_args);
7289 break;
7290 }
7291 case PRIVATE_SETTER_ONLY:
7292 case PRIVATE_GETTER_ONLY:
7293 case PRIVATE_METHOD: {
7294 UNREACHABLE();
7295 }
7296 case PRIVATE_GETTER_AND_SETTER: {
7297 Register value = register_allocator()->NewRegister();
7298 builder()->StoreAccumulatorInRegister(value);
7299 BuildPrivateSetterAccess(object, key, value);
7300 if (!execution_result()->IsEffect()) {
7301 builder()->LoadAccumulatorWithRegister(value);
7302 }
7303 break;
7304 }
7305 case PRIVATE_DEBUG_DYNAMIC: {
7306 Register value = register_allocator()->NewRegister();
7307 builder()->StoreAccumulatorInRegister(value);
7308 BuildPrivateDebugDynamicSet(property, object, value);
7309 break;
7310 }
7311 }
7312
7313 // Restore old value for postfix expressions.
7314 if (is_postfix) {
7315 builder()->LoadAccumulatorWithRegister(old_value);
7316 }
7317}
7318
7319void BytecodeGenerator::VisitBinaryOperation(BinaryOperation* binop) {
7320 switch (binop->op()) {
7321 case Token::kComma:
7322 VisitCommaExpression(binop);
7323 break;
7324 case Token::kOr:
7325 VisitLogicalOrExpression(binop);
7326 break;
7327 case Token::kAnd:
7328 VisitLogicalAndExpression(binop);
7329 break;
7330 case Token::kNullish:
7331 VisitNullishExpression(binop);
7332 break;
7333 default:
7334 VisitArithmeticExpression(binop);
7335 break;
7336 }
7337}
7338
7339void BytecodeGenerator::VisitNaryOperation(NaryOperation* expr) {
7340 switch (expr->op()) {
7341 case Token::kComma:
7342 VisitNaryCommaExpression(expr);
7343 break;
7344 case Token::kOr:
7345 VisitNaryLogicalOrExpression(expr);
7346 break;
7347 case Token::kAnd:
7348 VisitNaryLogicalAndExpression(expr);
7349 break;
7350 case Token::kNullish:
7351 VisitNaryNullishExpression(expr);
7352 break;
7353 default:
7354 VisitNaryArithmeticExpression(expr);
7355 break;
7356 }
7357}
7358
7359void BytecodeGenerator::BuildLiteralCompareNil(
7360 Token::Value op, BytecodeArrayBuilder::NilValue nil) {
7361 if (execution_result()->IsTest()) {
7362 TestResultScope* test_result = execution_result()->AsTest();
7363 switch (test_result->fallthrough()) {
7364 case TestFallthrough::kThen:
7365 builder()->JumpIfNotNil(test_result->NewElseLabel(), op, nil);
7366 break;
7367 case TestFallthrough::kElse:
7368 builder()->JumpIfNil(test_result->NewThenLabel(), op, nil);
7369 break;
7370 case TestFallthrough::kNone:
7371 builder()
7372 ->JumpIfNil(test_result->NewThenLabel(), op, nil)
7373 .Jump(test_result->NewElseLabel());
7374 }
7375 test_result->SetResultConsumedByTest();
7376 } else {
7377 builder()->CompareNil(op, nil);
7378 }
7379}
7380
7388
7389bool BytecodeGenerator::IsLocalVariableWithInternalizedStringHint(
7390 Expression* expr) {
7391 VariableProxy* proxy = expr->AsVariableProxy();
7392 return proxy != nullptr && proxy->is_resolved() &&
7393 proxy->var()->IsStackLocal() &&
7394 GetTypeHintForLocalVariable(proxy->var()) ==
7395 TypeHint::kInternalizedString;
7396}
7397
7398static bool IsTypeof(Expression* expr) {
7399 UnaryOperation* maybe_unary = expr->AsUnaryOperation();
7400 return maybe_unary != nullptr && maybe_unary->op() == Token::kTypeOf;
7401}
7402
7403static bool IsCharU(const AstRawString* str) {
7404 return str->length() == 1 && str->FirstCharacter() == 'u';
7405}
7406
7408 Expression** sub_expr,
7409 TestTypeOfFlags::LiteralFlag* flag,
7410 const AstStringConstants* ast_constants) {
7411 if (IsTypeof(expr->left()) && expr->right()->IsStringLiteral()) {
7412 Literal* right_lit = expr->right()->AsLiteral();
7413
7414 if (Token::IsEqualityOp(expr->op())) {
7415 // typeof(x) === 'string'
7416 *flag = TestTypeOfFlags::GetFlagForLiteral(ast_constants, right_lit);
7417 } else if (expr->op() == Token::kGreaterThan &&
7418 IsCharU(right_lit->AsRawString())) {
7419 // typeof(x) > 'u'
7420 // A minifier may convert `typeof(x) === 'undefined'` to this form,
7421 // since 'undefined' is the only typeof result greater than 'u'.
7422 // See the test OnlyUndefinedGreaterThanU in bytecodes-unittest.cc.
7423 *flag = TestTypeOfFlags::LiteralFlag::kUndefined;
7424 } else {
7425 return false;
7426 }
7427
7428 *sub_expr = expr->left()->AsUnaryOperation()->expression();
7429 return true;
7430 }
7431
7432 if (IsTypeof(expr->right()) && expr->left()->IsStringLiteral()) {
7433 Literal* left_lit = expr->left()->AsLiteral();
7434
7435 if (Token::IsEqualityOp(expr->op())) {
7436 // 'string' === typeof(x)
7437 *flag = TestTypeOfFlags::GetFlagForLiteral(ast_constants, left_lit);
7438 } else if (expr->op() == Token::kLessThan &&
7439 IsCharU(left_lit->AsRawString())) {
7440 // 'u' < typeof(x)
7441 *flag = TestTypeOfFlags::LiteralFlag::kUndefined;
7442 } else {
7443 return false;
7444 }
7445
7446 *sub_expr = expr->right()->AsUnaryOperation()->expression();
7447 return true;
7448 }
7449
7450 return false;
7451}
7452
7453void BytecodeGenerator::VisitCompareOperation(CompareOperation* expr) {
7454 Expression* sub_expr;
7455 Literal* literal;
7456 TestTypeOfFlags::LiteralFlag flag;
7457 if (IsLiteralCompareTypeof(expr, &sub_expr, &flag, ast_string_constants())) {
7458 // Emit a fast literal comparison for expressions of the form:
7459 // typeof(x) === 'string'.
7460 VisitForTypeOfValue(sub_expr);
7461 builder()->SetExpressionPosition(expr);
7462 if (flag == TestTypeOfFlags::LiteralFlag::kOther) {
7463 builder()->LoadFalse();
7464 } else {
7465 builder()->CompareTypeOf(flag);
7466 }
7467 } else if (expr->IsLiteralStrictCompareBoolean(&sub_expr, &literal)) {
7468 DCHECK(expr->op() == Token::kEqStrict);
7469 VisitForAccumulatorValue(sub_expr);
7470 builder()->SetExpressionPosition(expr);
7471 BuildLiteralStrictCompareBoolean(literal);
7472 } else if (expr->IsLiteralCompareUndefined(&sub_expr)) {
7473 VisitForAccumulatorValue(sub_expr);
7474 builder()->SetExpressionPosition(expr);
7475 BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kUndefinedValue);
7476 } else if (expr->IsLiteralCompareNull(&sub_expr)) {
7477 VisitForAccumulatorValue(sub_expr);
7478 builder()->SetExpressionPosition(expr);
7479 BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kNullValue);
7480 } else if (expr->IsLiteralCompareEqualVariable(&sub_expr, &literal) &&
7481 IsLocalVariableWithInternalizedStringHint(sub_expr)) {
7482 builder()->LoadLiteral(literal->AsRawString());
7483 builder()->CompareReference(
7484 GetRegisterForLocalVariable(sub_expr->AsVariableProxy()->var()));
7485 } else {
7486 if (expr->op() == Token::kIn && expr->left()->IsPrivateName()) {
7487 Variable* var = expr->left()->AsVariableProxy()->var();
7488 if (IsPrivateMethodOrAccessorVariableMode(var->mode())) {
7489 BuildPrivateMethodIn(var, expr->right());
7490 return;
7491 }
7492 // For private fields, the code below does the right thing.
7493 }
7494
7495 Register lhs = VisitForRegisterValue(expr->left());
7496 VisitForAccumulatorValue(expr->right());
7497 builder()->SetExpressionPosition(expr);
7498 FeedbackSlot slot;
7499 if (expr->op() == Token::kIn) {
7500 slot = feedback_spec()->AddKeyedHasICSlot();
7501 } else if (expr->op() == Token::kInstanceOf) {
7502 slot = feedback_spec()->AddInstanceOfSlot();
7503 } else {
7504 slot = feedback_spec()->AddCompareICSlot();
7505 }
7506 builder()->CompareOperation(expr->op(), lhs, feedback_index(slot));
7507 }
7508 // Always returns a boolean value.
7509 execution_result()->SetResultIsBoolean();
7510 }
7511
7512 void BytecodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
7513 FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
7514 Expression* subexpr;
7515 Tagged<Smi> literal;
7516 if (expr->IsSmiLiteralOperation(&subexpr, &literal)) {
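// Fast path for a binary operation with a Smi literal operand, e.g. `x + 1`:
// the literal is encoded directly in the bytecode instead of a register.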
7517 TypeHint type_hint = VisitForAccumulatorValue(subexpr);
7518 builder()->SetExpressionPosition(expr);
7519 builder()->BinaryOperationSmiLiteral(expr->op(), literal,
7520 feedback_index(slot));
7521 if (expr->op() == Token::kAdd && IsStringTypeHint(type_hint)) {
7522 execution_result()->SetResultIsString();
7523 }
7524 } else {
7525 TypeHint lhs_type = VisitForAccumulatorValue(expr->left());
7526 Register lhs = register_allocator()->NewRegister();
7527 builder()->StoreAccumulatorInRegister(lhs);
7528 TypeHint rhs_type = VisitForAccumulatorValue(expr->right());
7529 if (expr->op() == Token::kAdd &&
7530 (IsStringTypeHint(lhs_type) || IsStringTypeHint(rhs_type))) {
7531 execution_result()->SetResultIsString();
7532 }
7533
7534 builder()->SetExpressionPosition(expr);
7535 builder()->BinaryOperation(expr->op(), lhs, feedback_index(slot));
7536 }
7537}
7538
7539 void BytecodeGenerator::VisitNaryArithmeticExpression(NaryOperation* expr) {
7540 // TODO(leszeks): Add support for lhs smi in commutative ops.
7541 TypeHint type_hint = VisitForAccumulatorValue(expr->first());
7542
7543 for (size_t i = 0; i < expr->subsequent_length(); ++i) {
7544 RegisterAllocationScope register_scope(this);
7545 if (expr->subsequent(i)->IsSmiLiteral()) {
7546 builder()->SetExpressionPosition(expr->subsequent_op_position(i));
7547 builder()->BinaryOperationSmiLiteral(
7548 expr->op(), expr->subsequent(i)->AsLiteral()->AsSmiLiteral(),
7549 feedback_index(feedback_spec()->AddBinaryOpICSlot()));
7550 } else {
7551 Register lhs = register_allocator()->NewRegister();
7552 builder()->StoreAccumulatorInRegister(lhs);
7553 TypeHint rhs_hint = VisitForAccumulatorValue(expr->subsequent(i));
7554 if (IsStringTypeHint(rhs_hint)) type_hint = TypeHint::kString;
7555 builder()->SetExpressionPosition(expr->subsequent_op_position(i));
7556 builder()->BinaryOperation(
7557 expr->op(), lhs,
7558 feedback_index(feedback_spec()->AddBinaryOpICSlot()));
7559 }
7560 }
7561
7562 if (IsStringTypeHint(type_hint) && expr->op() == Token::kAdd) {
7563 // If any operand of an ADD is a String, a String is produced.
7564 execution_result()->SetResultIsString();
7565 }
7566}
7567
7568// Note: the actual spreading is performed by the surrounding expression's
7569// visitor.
7570void BytecodeGenerator::VisitSpread(Spread* expr) { Visit(expr->expression()); }
7571
7572void BytecodeGenerator::VisitEmptyParentheses(EmptyParentheses* expr) {
7573 UNREACHABLE();
7574}
7575
7576void BytecodeGenerator::VisitImportCallExpression(ImportCallExpression* expr) {
7577 const int register_count = expr->import_options() ? 4 : 3;
7578 // args is a list of [ function_closure, specifier, phase, import_options ].
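// E.g. `import("./m.js")` passes three registers, while
// `import("./m.js", { with: { type: "json" } })` also passes the options.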
7579 RegisterList args = register_allocator()->NewRegisterList(register_count);
7580
7581 builder()->MoveRegister(Register::function_closure(), args[0]);
7582 VisitForRegisterValue(expr->specifier(), args[1]);
7583 builder()
7584 ->LoadLiteral(Smi::FromInt(static_cast<int>(expr->phase())))
7585 .StoreAccumulatorInRegister(args[2]);
7586
7587 if (expr->import_options()) {
7588 VisitForRegisterValue(expr->import_options(), args[3]);
7589 }
7590
7591 builder()->CallRuntime(Runtime::kDynamicImportCall, args);
7592}
7593
7594 void BytecodeGenerator::BuildGetIterator(IteratorType hint) {
7595 if (hint == IteratorType::kAsync) {
7596 RegisterAllocationScope scope(this);
7597
7598 Register obj = register_allocator()->NewRegister();
7599 Register method = register_allocator()->NewRegister();
7600
7601 // Set method to GetMethod(obj, @@asyncIterator)
7602 builder()->StoreAccumulatorInRegister(obj).LoadAsyncIteratorProperty(
7603 obj, feedback_index(feedback_spec()->AddLoadICSlot()));
7604
7605 BytecodeLabel async_iterator_undefined, done;
7606 builder()->JumpIfUndefinedOrNull(&async_iterator_undefined);
7607
7608 // Let iterator be Call(method, obj)
7609 builder()->StoreAccumulatorInRegister(method).CallProperty(
7610 method, RegisterList(obj),
7611 feedback_index(feedback_spec()->AddCallICSlot()));
7612
7613 // If Type(iterator) is not Object, throw a TypeError exception.
7614 builder()->JumpIfJSReceiver(&done);
7615 builder()->CallRuntime(Runtime::kThrowSymbolAsyncIteratorInvalid);
7616
7617 builder()->Bind(&async_iterator_undefined);
7618 // If method is undefined,
7619 // Let syncMethod be GetMethod(obj, @@iterator)
7620 builder()
7621 ->LoadIteratorProperty(obj,
7622 feedback_index(feedback_spec()->AddLoadICSlot()))
7623 .StoreAccumulatorInRegister(method);
7624
7625 // Let syncIterator be Call(syncMethod, obj)
7626 builder()->CallProperty(method, RegisterList(obj),
7627 feedback_index(feedback_spec()->AddCallICSlot()));
7628
7629 // Return CreateAsyncFromSyncIterator(syncIterator)
7630 // alias `method` register as it's no longer used
7631 Register sync_iter = method;
7632 builder()->StoreAccumulatorInRegister(sync_iter).CallRuntime(
7633 Runtime::kInlineCreateAsyncFromSyncIterator, sync_iter);
7634
7635 builder()->Bind(&done);
7636 } else {
7637 {
7638 RegisterAllocationScope scope(this);
7639
7640 Register obj = register_allocator()->NewRegister();
7641 int load_feedback_index =
7642 feedback_index(feedback_spec()->AddLoadICSlot());
7643 int call_feedback_index =
7644 feedback_index(feedback_spec()->AddCallICSlot());
7645
7646 // Let method be GetMethod(obj, @@iterator) and
7647 // iterator be Call(method, obj). If iterator is
7648 // not JSReceiver, then throw TypeError.
7649 builder()->StoreAccumulatorInRegister(obj).GetIterator(
7650 obj, load_feedback_index, call_feedback_index);
7651 }
7652 }
7653}
7654
7655// Returns an IteratorRecord which is valid for the lifetime of the current
7656// register_allocation_scope.
7657 IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
7658 Register next, Register object, IteratorType hint) {
7659 DCHECK(next.is_valid() && object.is_valid());
7660 BuildGetIterator(hint);
7661
7662 builder()
7663 ->StoreAccumulatorInRegister(object)
7664 .LoadNamedProperty(object, ast_string_constants()->next_string(),
7665 feedback_index(feedback_spec()->AddLoadICSlot()))
7666 .StoreAccumulatorInRegister(next);
7667 return IteratorRecord(object, next, hint);
7668}
7669
7669
7670 IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(IteratorType hint) {
7671 Register object = register_allocator()->NewRegister();
7672 Register next = register_allocator()->NewRegister();
7673 return BuildGetIteratorRecord(next, object, hint);
7674 }
7676
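// Emits the iteration-protocol step `iterator.next()`: the result is awaited
// for async iterators and must be an object, otherwise a TypeError is thrown.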
7677 void BytecodeGenerator::BuildIteratorNext(const IteratorRecord& iterator,
7678 Register next_result) {
7679 DCHECK(next_result.is_valid());
7680 builder()->CallProperty(iterator.next(), RegisterList(iterator.object()),
7681 feedback_index(feedback_spec()->AddCallICSlot()));
7682
7683 if (iterator.type() == IteratorType::kAsync) {
7684 BuildAwait();
7685 }
7686
7687 BytecodeLabel is_object;
7688 builder()
7689 ->StoreAccumulatorInRegister(next_result)
7690 .JumpIfJSReceiver(&is_object)
7691 .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, next_result)
7692 .Bind(&is_object);
7693}
7694
7695 void BytecodeGenerator::BuildCallIteratorMethod(Register iterator,
7696 const AstRawString* method_name,
7697 RegisterList receiver_and_args,
7698 BytecodeLabel* if_called,
7699 BytecodeLabels* if_notcalled) {
7700 RegisterAllocationScope register_scope(this);
7701
7702 Register method = register_allocator()->NewRegister();
7703 FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
7704 builder()
7705 ->LoadNamedProperty(iterator, method_name, feedback_index(slot))
7706 .JumpIfUndefinedOrNull(if_notcalled->New())
7707 .StoreAccumulatorInRegister(method)
7708 .CallProperty(method, receiver_and_args,
7709 feedback_index(feedback_spec()->AddCallICSlot()))
7710 .Jump(if_called);
7711}
7712
7713 void BytecodeGenerator::BuildIteratorClose(const IteratorRecord& iterator,
7714 Expression* expr) {
7715 RegisterAllocationScope register_scope(this);
7716 BytecodeLabels done(zone());
7717 BytecodeLabel if_called;
7718 RegisterList args = RegisterList(iterator.object());
7719 BuildCallIteratorMethod(iterator.object(),
7720 ast_string_constants()->return_string(), args,
7721 &if_called, &done);
7722 builder()->Bind(&if_called);
7723
7724 if (iterator.type() == IteratorType::kAsync) {
7725 DCHECK_NOT_NULL(expr);
7726 BuildAwait(expr->position());
7727 }
7728
7729 builder()->JumpIfJSReceiver(done.New());
7730 {
7731 RegisterAllocationScope inner_register_scope(this);
7732 Register return_result = register_allocator()->NewRegister();
7733 builder()
7734 ->StoreAccumulatorInRegister(return_result)
7735 .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, return_result);
7736 }
7737
7738 done.Bind(builder());
7739}
7740
7741void BytecodeGenerator::VisitGetTemplateObject(GetTemplateObject* expr) {
7743 size_t entry = builder()->AllocateDeferredConstantPoolEntry();
7744 template_objects_.push_back(std::make_pair(expr, entry));
7745 FeedbackSlot literal_slot = feedback_spec()->AddLiteralSlot();
7746 builder()->GetTemplateObject(entry, feedback_index(literal_slot));
7747}
7748
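// Lowers a template literal such as `a${x}b` into a chain of string
// additions, converting each substitution to a string unless it is already
// known to be one and skipping empty string parts.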
7749void BytecodeGenerator::VisitTemplateLiteral(TemplateLiteral* expr) {
7750 const ZonePtrList<const AstRawString>& parts = *expr->string_parts();
7751 const ZonePtrList<Expression>& substitutions = *expr->substitutions();
7752 // Template strings with no substitutions are turned into StringLiterals.
7753 DCHECK_GT(substitutions.length(), 0);
7754 DCHECK_EQ(parts.length(), substitutions.length() + 1);
7755
7756 // Generate string concatenation
7757 // TODO(caitp): Don't generate feedback slot if it's not used --- introduce
7758 // a simple, concise, reusable mechanism to lazily create reusable slots.
7759 FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
7760 Register last_part = register_allocator()->NewRegister();
7761 bool last_part_valid = false;
7762
7764 for (int i = 0; i < substitutions.length(); ++i) {
7765 if (i != 0) {
7766 builder()->StoreAccumulatorInRegister(last_part);
7767 last_part_valid = true;
7768 }
7769
7770 if (!parts[i]->IsEmpty()) {
7771 builder()->LoadLiteral(parts[i]);
7772 if (last_part_valid) {
7773 builder()->BinaryOperation(Token::kAdd, last_part,
7774 feedback_index(slot));
7775 }
7776 builder()->StoreAccumulatorInRegister(last_part);
7777 last_part_valid = true;
7778 }
7779
7780 TypeHint type_hint = VisitForAccumulatorValue(substitutions[i]);
7781 if (!IsStringTypeHint(type_hint)) {
7782 builder()->ToString();
7783 }
7784 if (last_part_valid) {
7785 builder()->BinaryOperation(Token::kAdd, last_part, feedback_index(slot));
7786 }
7787 last_part_valid = false;
7788 }
7789
7790 if (!parts.last()->IsEmpty()) {
7791 builder()->StoreAccumulatorInRegister(last_part);
7792 builder()->LoadLiteral(parts.last());
7793 builder()->BinaryOperation(Token::kAdd, last_part, feedback_index(slot));
7794 }
7795}
7796
7797 void BytecodeGenerator::BuildThisVariableLoad() {
7798 DeclarationScope* receiver_scope = closure_scope()->GetReceiverScope();
7799 Variable* var = receiver_scope->receiver();
7800 // TODO(littledan): implement 'this' hole check elimination.
7801 HoleCheckMode hole_check_mode =
7802 IsDerivedConstructor(receiver_scope->function_kind())
7803 ? HoleCheckMode::kRequired
7804 : HoleCheckMode::kElided;
7805 BuildVariableLoad(var, hole_check_mode);
7806}
7807
7808void BytecodeGenerator::VisitThisExpression(ThisExpression* expr) {
7809 BuildThisVariableLoad();
7810 }
7811
7812void BytecodeGenerator::VisitSuperCallReference(SuperCallReference* expr) {
7813 // Handled by VisitCall().
7814 UNREACHABLE();
7815}
7816
7817void BytecodeGenerator::VisitSuperPropertyReference(
7818 SuperPropertyReference* expr) {
7819 // Handled by VisitAssignment(), VisitCall(), VisitDelete() and
7820 // VisitPropertyLoad().
7821 UNREACHABLE();
7822}
7823
7829
7830 void BytecodeGenerator::VisitNaryCommaExpression(NaryOperation* expr) {
7831 DCHECK_GT(expr->subsequent_length(), 0);
7832
7833 VisitForEffect(expr->first());
7834 for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
7836 VisitForEffect(expr->subsequent(i));
7837 }
7839 expr->subsequent(expr->subsequent_length() - 1));
7840 Visit(expr->subsequent(expr->subsequent_length() - 1));
7841}
7842
7843 void BytecodeGenerator::VisitLogicalTestSubExpression(
7844 Token::Value token, Expression* expr, BytecodeLabels* then_labels,
7845 BytecodeLabels* else_labels, int coverage_slot) {
7846 DCHECK(token == Token::kOr || token == Token::kAnd ||
7847 token == Token::kNullish);
7848
7849 BytecodeLabels test_next(zone());
7850 if (token == Token::kOr) {
7851 VisitForTest(expr, then_labels, &test_next, TestFallthrough::kElse);
7852 } else if (token == Token::kAnd) {
7853 VisitForTest(expr, &test_next, else_labels, TestFallthrough::kThen);
7854 } else {
7855 DCHECK_EQ(Token::kNullish, token);
7856 VisitForNullishTest(expr, then_labels, &test_next, else_labels);
7857 }
7858 test_next.Bind(builder());
7859
7860 BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
7861 }
7862
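// Emits `left op right` in a test context, e.g. the condition of
// `if (a || b)`: each operand jumps directly to the then/else labels, so no
// boolean result is ever materialized in the accumulator.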
7863 void BytecodeGenerator::VisitLogicalTest(Token::Value token, Expression* left,
7864 Expression* right,
7865 int right_coverage_slot) {
7866 DCHECK(token == Token::kOr || token == Token::kAnd ||
7867 token == Token::kNullish);
7868 TestResultScope* test_result = execution_result()->AsTest();
7869 BytecodeLabels* then_labels = test_result->then_labels();
7870 BytecodeLabels* else_labels = test_result->else_labels();
7871 TestFallthrough fallthrough = test_result->fallthrough();
7872
7873 VisitLogicalTestSubExpression(token, left, then_labels, else_labels,
7874 right_coverage_slot);
7875 // The last test has the same then, else and fallthrough as the parent test.
7876 HoleCheckElisionScope elider(this);
7877 VisitForTest(right, then_labels, else_labels, fallthrough);
7878}
7879
7880 void BytecodeGenerator::VisitNaryLogicalTest(
7881 Token::Value token, NaryOperation* expr,
7882 const NaryCodeCoverageSlots* coverage_slots) {
7883 DCHECK(token == Token::kOr || token == Token::kAnd ||
7884 token == Token::kNullish);
7885 DCHECK_GT(expr->subsequent_length(), 0);
7886
7887 TestResultScope* test_result = execution_result()->AsTest();
7888 BytecodeLabels* then_labels = test_result->then_labels();
7889 BytecodeLabels* else_labels = test_result->else_labels();
7890 TestFallthrough fallthrough = test_result->fallthrough();
7891
7892 VisitLogicalTestSubExpression(token, expr->first(), then_labels, else_labels,
7893 coverage_slots->GetSlotFor(0));
7894 HoleCheckElisionScope elider(this);
7895 for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
7896 VisitLogicalTestSubExpression(token, expr->subsequent(i), then_labels,
7897 else_labels,
7898 coverage_slots->GetSlotFor(i + 1));
7899 }
7900 // The last test has the same then, else and fallthrough as the parent test.
7901 VisitForTest(expr->subsequent(expr->subsequent_length() - 1), then_labels,
7902 else_labels, fallthrough);
7903}
7904
7905 bool BytecodeGenerator::VisitLogicalOrSubExpression(Expression* expr,
7906 BytecodeLabels* end_labels,
7907 int coverage_slot) {
7908 if (expr->ToBooleanIsTrue()) {
7909 VisitForAccumulatorValue(expr);
7910 end_labels->Bind(builder());
7911 return true;
7912 } else if (!expr->ToBooleanIsFalse()) {
7913 TypeHint type_hint = VisitForAccumulatorValue(expr);
7914 builder()->JumpIfTrue(ToBooleanModeFromTypeHint(type_hint),
7915 end_labels->New());
7916 }
7917
7918 BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
7919
7920 return false;
7921}
7922
7923 bool BytecodeGenerator::VisitLogicalAndSubExpression(Expression* expr,
7924 BytecodeLabels* end_labels,
7925 int coverage_slot) {
7926 if (expr->ToBooleanIsFalse()) {
7927 VisitForAccumulatorValue(expr);
7928 end_labels->Bind(builder());
7929 return true;
7930 } else if (!expr->ToBooleanIsTrue()) {
7931 TypeHint type_hint = VisitForAccumulatorValue(expr);
7932 builder()->JumpIfFalse(ToBooleanModeFromTypeHint(type_hint),
7933 end_labels->New());
7934 }
7935
7936 BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
7937
7938 return false;
7939}
7940
7941 bool BytecodeGenerator::VisitNullishSubExpression(Expression* expr,
7942 BytecodeLabels* end_labels,
7943 int coverage_slot) {
7944 if (expr->IsLiteralButNotNullOrUndefined()) {
7945 VisitForAccumulatorValue(expr);
7946 end_labels->Bind(builder());
7947 return true;
7948 } else if (!expr->IsNullOrUndefinedLiteral()) {
7949 VisitForAccumulatorValue(expr);
7950 BytecodeLabel is_null_or_undefined;
7951 builder()
7952 ->JumpIfUndefinedOrNull(&is_null_or_undefined)
7953 .Jump(end_labels->New());
7954 builder()->Bind(&is_null_or_undefined);
7955 }
7956
7957 BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
7958
7959 return false;
7960}
7961
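// E.g. `a || b` outside a test context: a statically truthy `a` is the
// result; otherwise `a` is evaluated and a JumpIfTrue skips evaluating `b`.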
7962 void BytecodeGenerator::VisitLogicalOrExpression(BinaryOperation* binop) {
7963 Expression* left = binop->left();
7964 Expression* right = binop->right();
7965
7966 int right_coverage_slot =
7967 AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
7968
7969 if (execution_result()->IsTest()) {
7970 TestResultScope* test_result = execution_result()->AsTest();
7971 if (left->ToBooleanIsTrue()) {
7972 builder()->Jump(test_result->NewThenLabel());
7973 } else if (left->ToBooleanIsFalse() && right->ToBooleanIsFalse()) {
7974 BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
7975 builder()->Jump(test_result->NewElseLabel());
7976 } else {
7977 VisitLogicalTest(Token::kOr, left, right, right_coverage_slot);
7978 }
7979 test_result->SetResultConsumedByTest();
7980 } else {
7981 BytecodeLabels end_labels(zone());
7982 if (VisitLogicalOrSubExpression(left, &end_labels, right_coverage_slot)) {
7983 return;
7984 }
7985 VisitForAccumulatorValue(right);
7986 end_labels.Bind(builder());
7987 }
7988}
7989
7990 void BytecodeGenerator::VisitNaryLogicalOrExpression(NaryOperation* expr) {
7991 Expression* first = expr->first();
7992 DCHECK_GT(expr->subsequent_length(), 0);
7993
7994 NaryCodeCoverageSlots coverage_slots(this, expr);
7995
7996 if (execution_result()->IsTest()) {
7997 TestResultScope* test_result = execution_result()->AsTest();
7998 if (first->ToBooleanIsTrue()) {
7999 builder()->Jump(test_result->NewThenLabel());
8000 } else {
8001 VisitNaryLogicalTest(Token::kOr, expr, &coverage_slots);
8002 }
8003 test_result->SetResultConsumedByTest();
8004 } else {
8005 BytecodeLabels end_labels(zone());
8006 if (VisitLogicalOrSubExpression(first, &end_labels,
8007 coverage_slots.GetSlotFor(0))) {
8008 return;
8009 }
8010
8011 HoleCheckElisionScope elider(this);
8012 for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
8013 if (VisitLogicalOrSubExpression(expr->subsequent(i), &end_labels,
8014 coverage_slots.GetSlotFor(i + 1))) {
8015 return;
8016 }
8017 }
8018 // We have to visit the last value even if it's true, because we need its
8019 // actual value.
8020 VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
8021 end_labels.Bind(builder());
8022 }
8023}
8024
8025 void BytecodeGenerator::VisitLogicalAndExpression(BinaryOperation* binop) {
8026 Expression* left = binop->left();
8027 Expression* right = binop->right();
8028
8029 int right_coverage_slot =
8030 AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
8031
8032 if (execution_result()->IsTest()) {
8033 TestResultScope* test_result = execution_result()->AsTest();
8034 if (left->ToBooleanIsFalse()) {
8035 builder()->Jump(test_result->NewElseLabel());
8036 } else if (left->ToBooleanIsTrue() && right->ToBooleanIsTrue()) {
8037 BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
8038 builder()->Jump(test_result->NewThenLabel());
8039 } else {
8040 VisitLogicalTest(Token::kAnd, left, right, right_coverage_slot);
8041 }
8042 test_result->SetResultConsumedByTest();
8043 } else {
8044 BytecodeLabels end_labels(zone());
8045 if (VisitLogicalAndSubExpression(left, &end_labels, right_coverage_slot)) {
8046 return;
8047 }
8048 VisitForAccumulatorValue(right);
8049 end_labels.Bind(builder());
8050 }
8051}
8052
8053 void BytecodeGenerator::VisitNaryLogicalAndExpression(NaryOperation* expr) {
8054 Expression* first = expr->first();
8055 DCHECK_GT(expr->subsequent_length(), 0);
8056
8057 NaryCodeCoverageSlots coverage_slots(this, expr);
8058
8059 if (execution_result()->IsTest()) {
8060 TestResultScope* test_result = execution_result()->AsTest();
8061 if (first->ToBooleanIsFalse()) {
8062 builder()->Jump(test_result->NewElseLabel());
8063 } else {
8064 VisitNaryLogicalTest(Token::kAnd, expr, &coverage_slots);
8065 }
8066 test_result->SetResultConsumedByTest();
8067 } else {
8068 BytecodeLabels end_labels(zone());
8069 if (VisitLogicalAndSubExpression(first, &end_labels,
8070 coverage_slots.GetSlotFor(0))) {
8071 return;
8072 }
8073 HoleCheckElisionScope elider(this);
8074 for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
8075 if (VisitLogicalAndSubExpression(expr->subsequent(i), &end_labels,
8076 coverage_slots.GetSlotFor(i + 1))) {
8077 return;
8078 }
8079 }
8080 // We have to visit the last value even if it's false, because we need its
8081 // actual value.
8082 VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
8083 end_labels.Bind(builder());
8084 }
8085}
8086
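// `a ?? b` evaluates `b` only when `a` is null or undefined, so `0 ?? b`
// yields 0 while `0 || b` would yield `b`.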
8087 void BytecodeGenerator::VisitNullishExpression(BinaryOperation* binop) {
8088 Expression* left = binop->left();
8089 Expression* right = binop->right();
8090
8091 int right_coverage_slot =
8092 AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
8093
8094 if (execution_result()->IsTest()) {
8095 TestResultScope* test_result = execution_result()->AsTest();
8096 if (left->IsLiteralButNotNullOrUndefined() && left->ToBooleanIsTrue()) {
8097 builder()->Jump(test_result->NewThenLabel());
8098 } else if (left->IsNullOrUndefinedLiteral() &&
8099 right->IsNullOrUndefinedLiteral()) {
8100 BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
8101 builder()->Jump(test_result->NewElseLabel());
8102 } else {
8103 VisitLogicalTest(Token::kNullish, left, right, right_coverage_slot);
8104 }
8105 test_result->SetResultConsumedByTest();
8106 } else {
8107 BytecodeLabels end_labels(zone());
8108 if (VisitNullishSubExpression(left, &end_labels, right_coverage_slot)) {
8109 return;
8110 }
8111 VisitForAccumulatorValue(right);
8112 end_labels.Bind(builder());
8113 }
8114}
8115
8116 void BytecodeGenerator::VisitNaryNullishExpression(NaryOperation* expr) {
8117 Expression* first = expr->first();
8118 DCHECK_GT(expr->subsequent_length(), 0);
8119
8120 NaryCodeCoverageSlots coverage_slots(this, expr);
8121
8122 if (execution_result()->IsTest()) {
8123 TestResultScope* test_result = execution_result()->AsTest();
8124 if (first->IsLiteralButNotNullOrUndefined() && first->ToBooleanIsTrue()) {
8125 builder()->Jump(test_result->NewThenLabel());
8126 } else {
8127 VisitNaryLogicalTest(Token::kNullish, expr, &coverage_slots);
8128 }
8129 test_result->SetResultConsumedByTest();
8130 } else {
8131 BytecodeLabels end_labels(zone());
8132 if (VisitNullishSubExpression(first, &end_labels,
8133 coverage_slots.GetSlotFor(0))) {
8134 return;
8135 }
8136 HoleCheckElisionScope elider(this);
8137 for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
8138 if (VisitNullishSubExpression(expr->subsequent(i), &end_labels,
8139 coverage_slots.GetSlotFor(i + 1))) {
8140 return;
8141 }
8142 }
8143 // We have to visit the last value even if it's nullish, because we need its
8144 // actual value.
8145 VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
8146 end_labels.Bind(builder());
8147 }
8148}
8149
8150 void BytecodeGenerator::BuildNewLocalActivationContext() {
8151 ValueResultScope value_execution_result(this);
8152 Scope* scope = closure_scope();
8153 DCHECK_EQ(current_scope(), closure_scope());
8154
8155 // Create the appropriate context.
8156 DCHECK(scope->is_function_scope() || scope->is_eval_scope());
8157 int slot_count = scope->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
8158 if (slot_count <= ConstructorBuiltins::MaximumFunctionContextSlots()) {
8159 switch (scope->scope_type()) {
8160 case EVAL_SCOPE:
8161 builder()->CreateEvalContext(scope, slot_count);
8162 break;
8163 case FUNCTION_SCOPE:
8164 builder()->CreateFunctionContext(scope, slot_count);
8165 break;
8166 default:
8167 UNREACHABLE();
8168 }
8169 } else {
8170 Register arg = register_allocator()->NewRegister();
8171 builder()->LoadLiteral(scope).StoreAccumulatorInRegister(arg).CallRuntime(
8172 Runtime::kNewFunctionContext, arg);
8173 register_allocator()->ReleaseRegister(arg);
8174 }
8175}
8176
8177 void BytecodeGenerator::BuildLocalActivationContextInitialization() {
8178 DeclarationScope* scope = closure_scope();
8179
8180 if (scope->has_this_declaration() && scope->receiver()->IsContextSlot()) {
8181 Variable* variable = scope->receiver();
8182 Register receiver(builder()->Receiver());
8183 // Context variable (at bottom of the context chain).
8184 DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
8185 builder()->LoadAccumulatorWithRegister(receiver).StoreContextSlot(
8186 execution_context()->reg(), variable, 0);
8187 }
8188
8189 // Copy parameters into context if necessary.
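// A parameter is context-allocated when a closure captures it, e.g. `x` in
// `function f(x) { return () => x; }`; its value is copied from the
// parameter register into the function context here.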
8190 int num_parameters = scope->num_parameters();
8191 for (int i = 0; i < num_parameters; i++) {
8192 Variable* variable = scope->parameter(i);
8193 if (!variable->IsContextSlot()) continue;
8194
8195 Register parameter(builder()->Parameter(i));
8196 // Context variable (at bottom of the context chain).
8197 DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
8198 builder()->LoadAccumulatorWithRegister(parameter).StoreContextSlot(
8199 execution_context()->reg(), variable, 0);
8200 }
8201}
8202
8203 void BytecodeGenerator::BuildNewLocalBlockContext(Scope* scope) {
8204 ValueResultScope value_execution_result(this);
8205 DCHECK(scope->is_block_scope());
8206
8207 builder()->CreateBlockContext(scope);
8208}
8209
8210 void BytecodeGenerator::BuildNewLocalWithContext(Scope* scope) {
8211 ValueResultScope value_execution_result(this);
8212
8213 Register extension_object = register_allocator()->NewRegister();
8214
8215 builder()->ToObject(extension_object);
8216 builder()->CreateWithContext(extension_object, scope);
8217
8218 register_allocator()->ReleaseRegister(extension_object);
8219}
8220
8221 void BytecodeGenerator::BuildNewLocalCatchContext(Scope* scope) {
8222 ValueResultScope value_execution_result(this);
8223 DCHECK(scope->catch_variable()->IsContextSlot());
8224
8225 Register exception = register_allocator()->NewRegister();
8226 builder()->StoreAccumulatorInRegister(exception);
8227 builder()->CreateCatchContext(exception, scope);
8228 register_allocator()->ReleaseRegister(exception);
8229}
8230
8231 void BytecodeGenerator::VisitLiteralAccessor(LiteralProperty* property,
8232 Register value_out) {
8233 if (property == nullptr) {
8234 builder()->LoadNull().StoreAccumulatorInRegister(value_out);
8235 } else {
8236 VisitForRegisterValue(property->value(), value_out);
8237 }
8238}
8239
8240 void BytecodeGenerator::VisitArgumentsObject(Variable* variable) {
8241 if (variable == nullptr) return;
8242
8243 DCHECK(variable->IsContextSlot() || variable->IsStackAllocated());
8244
8245 // Allocate and initialize a new arguments object and assign to the
8246 // {arguments} variable.
8247 builder()->CreateArguments(closure_scope()->GetArgumentsType());
8248 BuildVariableAssignment(variable, Token::kAssign, HoleCheckMode::kElided);
8249}
8250
8251 void BytecodeGenerator::VisitRestArgumentsArray(Variable* rest) {
8252 if (rest == nullptr) return;
8253
8254 // Allocate and initialize a new rest parameter and assign to the {rest}
8255 // variable.
8256 builder()->CreateArguments(CreateArgumentsType::kRestParameter);
8257 DCHECK(rest->IsContextSlot() || rest->IsStackAllocated());
8258 BuildVariableAssignment(rest, Token::kAssign, HoleCheckMode::kElided);
8259}
8260
8261 void BytecodeGenerator::VisitThisFunctionVariable(Variable* variable) {
8262 if (variable == nullptr) return;
8263
8264 // Store the closure we were called with in the given variable.
8265 builder()->LoadAccumulatorWithRegister(Register::function_closure());
8266 BuildVariableAssignment(variable, Token::kInit, HoleCheckMode::kElided);
8267}
8268
8269 void BytecodeGenerator::VisitNewTargetVariable(Variable* variable) {
8270 if (variable == nullptr) return;
8271
8272 // The generator resume trampoline abuses the new.target register
8273 // to pass in the generator object. In ordinary calls, new.target is always
8274 // undefined because generator functions are non-constructible, so don't
8275 // assign anything to the new.target variable.
8276 if (IsResumableFunction(info()->literal()->kind())) return;
8277
8278 if (variable->location() == VariableLocation::LOCAL) {
8279 // The new.target register was already assigned by entry trampoline.
8280 DCHECK_EQ(incoming_new_target_or_generator_.index(),
8281 GetRegisterForLocalVariable(variable).index());
8282 return;
8283 }
8284
8285 // Store the new target we were called with in the given variable.
8286 builder()->LoadAccumulatorWithRegister(incoming_new_target_or_generator_);
8287 BuildVariableAssignment(variable, Token::kInit, HoleCheckMode::kElided);
8288}
8289
8290 void BytecodeGenerator::BuildGeneratorObjectVariableInitialization() {
8291 DCHECK(IsResumableFunction(info()->literal()->kind()));
8292
8293 Variable* generator_object_var = closure_scope()->generator_object_var();
8294 RegisterAllocationScope register_scope(this);
8295 RegisterList args = register_allocator()->NewRegisterList(2);
8296 Runtime::FunctionId function_id =
8297 ((IsAsyncFunction(info()->literal()->kind()) &&
8298 !IsAsyncGeneratorFunction(info()->literal()->kind())) ||
8299 IsModuleWithTopLevelAwait(info()->literal()->kind()))
8300 ? Runtime::kInlineAsyncFunctionEnter
8301 : Runtime::kInlineCreateJSGeneratorObject;
8302 builder()
8303 ->MoveRegister(Register::function_closure(), args[0])
8304 .MoveRegister(builder()->Receiver(), args[1])
8305 .CallRuntime(function_id, args)
8306 .StoreAccumulatorInRegister(generator_object());
8307
8308 if (generator_object_var->location() == VariableLocation::LOCAL) {
8309 // The generator object register is already set to the variable's local
8310 // register.
8311 DCHECK_EQ(generator_object().index(),
8312 GetRegisterForLocalVariable(generator_object_var).index());
8313 } else {
8314 BuildVariableAssignment(generator_object_var, Token::kInit,
8315 HoleCheckMode::kElided);
8316 }
8317}
8318
8319 void BytecodeGenerator::BuildPushUndefinedIntoRegisterList(
8320 RegisterList* reg_list) {
8321 Register reg = register_allocator()->GrowRegisterList(reg_list);
8322 builder()->LoadUndefined().StoreAccumulatorInRegister(reg);
8323 }
8324
8325 void BytecodeGenerator::BuildLoadPropertyKey(LiteralProperty* property,
8326 Register out_reg) {
8327 if (property->key()->IsStringLiteral()) {
8328 builder()
8329 ->LoadLiteral(property->key()->AsLiteral()->AsRawString())
8330 .StoreAccumulatorInRegister(out_reg);
8331 } else {
8332 VisitForAccumulatorValue(property->key());
8333 builder()->ToName().StoreAccumulatorInRegister(out_reg);
8334 }
8335}
8336
8343
8351
8360
8366
8367 void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
8368 int coverage_array_slot) {
8369 if (block_coverage_builder_ != nullptr) {
8370 block_coverage_builder_->IncrementBlockCounter(coverage_array_slot);
8371 }
8372}
8373
8374// Visits the expression |expr| and places the result in the accumulator.
8375 BytecodeGenerator::TypeHint BytecodeGenerator::VisitForAccumulatorValue(
8376 Expression* expr) {
8377 ValueResultScope accumulator_scope(this);
8378 Visit(expr);
8379 // Record the type hint for the result of current expression in accumulator.
8380 const TypeHint type_hint = accumulator_scope.type_hint();
8381 BytecodeRegisterOptimizer* optimizer = builder()->GetRegisterOptimizer();
8382 if (optimizer && type_hint != TypeHint::kUnknown) {
8383 optimizer->SetTypeHintForAccumulator(type_hint);
8384 }
8385 return type_hint;
8386}
8387
8388 void BytecodeGenerator::VisitForAccumulatorValueOrTheHole(Expression* expr) {
8389 if (expr == nullptr) {
8390 builder()->LoadTheHole();
8391 } else {
8392 VisitForAccumulatorValue(expr);
8393 }
8394}
8395
8396// Visits the expression |expr| and discards the result.
8397 void BytecodeGenerator::VisitForEffect(Expression* expr) {
8398 EffectResultScope effect_scope(this);
8399 Visit(expr);
8400}
8401
8402// Visits the expression |expr| and returns the register containing
8403// the expression result.
8404 Register BytecodeGenerator::VisitForRegisterValue(Expression* expr) {
8405 VisitForAccumulatorValue(expr);
8406 Register result = register_allocator()->NewRegister();
8407 builder()->StoreAccumulatorInRegister(result);
8408 return result;
8409 }
8410
8411// Visits the expression |expr| and stores the expression result in
8412// |destination|.
8413 void BytecodeGenerator::VisitForRegisterValue(Expression* expr,
8414 Register destination) {
8415 ValueResultScope register_scope(this);
8416 Visit(expr);
8417 builder()->StoreAccumulatorInRegister(destination);
8418 }
8419
8420// Visits the expression |expr| and pushes the result into a new register
8421// added to the end of |reg_list|.
8422 void BytecodeGenerator::VisitAndPushIntoRegisterList(Expression* expr,
8423 RegisterList* reg_list) {
8424 {
8425 ValueResultScope register_scope(this);
8426 Visit(expr);
8427 }
8428 // Grow the register list after visiting the expression to avoid reserving
8429 // the register across the expression evaluation, which could cause memory
8430 // leaks for deep expressions due to dead objects being kept alive by pointers
8431 // in registers.
8432 Register destination = register_allocator()->GrowRegisterList(reg_list);
8433 builder()->StoreAccumulatorInRegister(destination);
8434 }
8435
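// The fallthrough encodes which branch target immediately follows the test
// in the bytecode: only the non-fallthrough side needs an explicit jump,
// and with no fallthrough both sides jump.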
8436 void BytecodeGenerator::BuildTest(ToBooleanMode mode,
8437 BytecodeLabels* then_labels,
8438 BytecodeLabels* else_labels,
8439 TestFallthrough fallthrough) {
8440 switch (fallthrough) {
8441 case TestFallthrough::kThen:
8442 builder()->JumpIfFalse(mode, else_labels->New());
8443 break;
8444 case TestFallthrough::kElse:
8445 builder()->JumpIfTrue(mode, then_labels->New());
8446 break;
8447 case TestFallthrough::kNone:
8448 builder()->JumpIfTrue(mode, then_labels->New());
8449 builder()->Jump(else_labels->New());
8450 break;
8451 }
8452}
8453
8454 // Visits the expression |expr| for testing its boolean value and jumping to
8455 // the |then| or |else| labels depending on its value and short-circuit
8456 void BytecodeGenerator::VisitForTest(Expression* expr,
8457 BytecodeLabels* then_labels,
8458 BytecodeLabels* else_labels,
8459 TestFallthrough fallthrough) {
8460 bool result_consumed;
8461 TypeHint type_hint;
8462 {
8463 // To make sure that all temporary registers are returned before generating
8464 // jumps below, we ensure that the result scope is deleted before doing so.
8465 // Dead registers might be materialized otherwise.
8466 TestResultScope test_result(this, then_labels, else_labels, fallthrough);
8467 Visit(expr);
8468 result_consumed = test_result.result_consumed_by_test();
8469 type_hint = test_result.type_hint();
8470 // Labels and fallthrough might have been mutated, so update based on
8471 // TestResultScope.
8472 then_labels = test_result.then_labels();
8473 else_labels = test_result.else_labels();
8474 fallthrough = test_result.fallthrough();
8475 }
8476 if (!result_consumed) {
8477 BuildTest(ToBooleanModeFromTypeHint(type_hint), then_labels, else_labels,
8478 fallthrough);
8479 }
8480}
8481
8482 // Visits the expression |expr| for testing its nullish value and jumping to
8483 // the |then| or |else| labels depending on its value and short-circuit
8484 void BytecodeGenerator::VisitForNullishTest(Expression* expr,
8485 BytecodeLabels* then_labels,
8486 BytecodeLabels* test_next_labels,
8487 BytecodeLabels* else_labels) {
8488 // Nullish short circuits on undefined or null, otherwise we fall back to
8489 // BuildTest with no fallthrough.
8490 // TODO(joshualitt): We should do this in a TestResultScope.
8491 TypeHint type_hint = VisitForAccumulatorValue(expr);
8492 ToBooleanMode mode = ToBooleanModeFromTypeHint(type_hint);
8493
8494 // Skip the nullish shortcircuit if we already have a boolean.
8495 if (mode != ToBooleanMode::kAlreadyBoolean) {
8496 builder()->JumpIfUndefinedOrNull(test_next_labels->New());
8497 }
8498 BuildTest(mode, then_labels, else_labels, TestFallthrough::kNone);
8499}
8500
8501 void BytecodeGenerator::VisitInSameTestExecutionScope(Expression* expr) {
8502 DCHECK(execution_result()->IsTest());
8503 {
8504 RegisterAllocationScope reg_scope(this);
8505 Visit(expr);
8506 }
8507 if (!execution_result()->AsTest()->result_consumed_by_test()) {
8508 TestResultScope* result_scope = execution_result()->AsTest();
8509 BuildTest(ToBooleanModeFromTypeHint(result_scope->type_hint()),
8510 result_scope->then_labels(), result_scope->else_labels(),
8511 result_scope->fallthrough());
8512 result_scope->SetResultConsumedByTest();
8513 }
8514}
8515
8516 void BytecodeGenerator::VisitInScope(Statement* stmt, Scope* scope) {
8517 DCHECK(scope->declarations()->is_empty());
8518 CurrentScope current_scope(this, scope);
8519 ContextScope context_scope(this, scope);
8520 Visit(stmt);
8521}
8522
8523template <typename T>
8524 void BytecodeGenerator::VisitInHoleCheckElisionScope(T* node) {
8525 HoleCheckElisionScope elider(this);
8526 Visit(node);
8527}
8528
8529 BytecodeGenerator::TypeHint
8530 BytecodeGenerator::VisitInHoleCheckElisionScopeForAccumulatorValue(
8531 Expression* expr) {
8532 HoleCheckElisionScope elider(this);
8533 return VisitForAccumulatorValue(expr);
8534 }
8535
8536 Register BytecodeGenerator::GetRegisterForLocalVariable(Variable* variable) {
8537 DCHECK_EQ(VariableLocation::LOCAL, variable->location());
8538 return builder()->Local(variable->index());
8539}
8540
8541 BytecodeGenerator::TypeHint BytecodeGenerator::GetTypeHintForLocalVariable(
8542 Variable* variable) {
8543 BytecodeRegisterOptimizer* optimizer = builder()->GetRegisterOptimizer();
8544 if (optimizer) {
8545 Register reg = GetRegisterForLocalVariable(variable);
8546 return optimizer->GetTypeHint(reg);
8547 }
8548 return TypeHint::kAny;
8549}
8550
8554
8558
8564
8570
8574
8575 int BytecodeGenerator::feedback_index(FeedbackSlot slot) const {
8576 DCHECK(!slot.IsInvalid());
8577 return FeedbackVector::GetIndex(slot);
8578}
8579
8580 FeedbackSlot BytecodeGenerator::GetCachedLoadGlobalICSlot(
8581 TypeofMode typeof_mode, Variable* variable) {
8582 FeedbackSlotCache::SlotKind slot_kind =
8583 typeof_mode == TypeofMode::kInside
8584 ? FeedbackSlotCache::SlotKind::kLoadGlobalInsideTypeof
8585 : FeedbackSlotCache::SlotKind::kLoadGlobalNotInsideTypeof;
8586 FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, variable));
8587 if (!slot.IsInvalid()) {
8588 return slot;
8589 }
8590 slot = feedback_spec()->AddLoadGlobalICSlot(typeof_mode);
8591 feedback_slot_cache()->Put(slot_kind, variable, feedback_index(slot));
8592 return slot;
8593}
8594
8595 FeedbackSlot BytecodeGenerator::GetCachedStoreGlobalICSlot(
8596 LanguageMode language_mode, Variable* variable) {
8597 FeedbackSlotCache::SlotKind slot_kind =
8598 is_strict(language_mode)
8599 ? FeedbackSlotCache::SlotKind::kStoreGlobalStrict
8600 : FeedbackSlotCache::SlotKind::kStoreGlobalSloppy;
8601 FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, variable));
8602 if (!slot.IsInvalid()) {
8603 return slot;
8604 }
8605 slot = feedback_spec()->AddStoreGlobalICSlot(language_mode);
8606 feedback_slot_cache()->Put(slot_kind, variable, feedback_index(slot));
8607 return slot;
8608 }
8609
8610 FeedbackSlot BytecodeGenerator::GetCachedLoadICSlot(const Expression* expr,
8611 const AstRawString* name) {
8612 DCHECK(!expr->IsSuperPropertyReference());
8613 if (!v8_flags.ignition_share_named_property_feedback) {
8614 return feedback_spec()->AddLoadICSlot();
8615 }
8616 FeedbackSlotCache::SlotKind slot_kind =
8617 FeedbackSlotCache::SlotKind::kLoadProperty;
8618 if (!expr->IsVariableProxy()) {
8619 return feedback_spec()->AddLoadICSlot();
8620 }
8621 const VariableProxy* proxy = expr->AsVariableProxy();
8622 FeedbackSlot slot(
8623 feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name));
8624 if (!slot.IsInvalid()) {
8625 return slot;
8626 }
8627 slot = feedback_spec()->AddLoadICSlot();
8628 feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name,
8629 feedback_index(slot));
8630 return slot;
8631}
8632
8633 FeedbackSlot BytecodeGenerator::GetCachedLoadSuperICSlot(
8634 const AstRawString* name) {
8635 if (!v8_flags.ignition_share_named_property_feedback) {
8636 return feedback_spec()->AddLoadICSlot();
8637 }
8638 FeedbackSlotCache::SlotKind slot_kind =
8639 FeedbackSlotCache::SlotKind::kLoadSuperProperty;
8640
8641 FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, name));
8642 if (!slot.IsInvalid()) {
8643 return slot;
8644 }
8645 slot = feedback_spec()->AddLoadICSlot();
8646 feedback_slot_cache()->Put(slot_kind, name, feedback_index(slot));
8647 return slot;
8648}
8649
8651 const AstRawString* name) {
8652 if (!v8_flags.ignition_share_named_property_feedback) {
8653 return feedback_spec()->AddStoreICSlot(language_mode());
8654 }
8655 FeedbackSlotCache::SlotKind slot_kind =
8656 is_strict(language_mode()) ? FeedbackSlotCache::SlotKind::kSetNamedStrict
8657 : FeedbackSlotCache::SlotKind::kSetNamedSloppy;
8658 if (!expr->IsVariableProxy()) {
8659 return feedback_spec()->AddStoreICSlot(language_mode());
8660 }
8661 const VariableProxy* proxy = expr->AsVariableProxy();
8662 FeedbackSlot slot(
8663 feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name));
8664 if (!slot.IsInvalid()) {
8665 return slot;
8666 }
8667 slot = feedback_spec()->AddStoreICSlot(language_mode());
8668 feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name,
8669 feedback_index(slot));
8670 return slot;
8671}
8672
8673 int BytecodeGenerator::GetCachedCreateClosureSlot(FunctionLiteral* literal) {
8674 FeedbackSlotCache::SlotKind slot_kind =
8675 FeedbackSlotCache::SlotKind::kClosureFeedbackCell;
8676 int index = feedback_slot_cache()->Get(slot_kind, literal);
8677 if (index != -1) {
8678 return index;
8679 }
8680 index = feedback_spec()->AddCreateClosureParameterCount(
8681 JSParameterCount(literal->parameter_count()));
8682 feedback_slot_cache()->Put(slot_kind, literal, index);
8683 return index;
8684}
8685
8685
8686 FeedbackSlot BytecodeGenerator::GetDummyCompareICSlot() {
8687 return dummy_feedback_slot_.Get();
8688 }
8689
8690} // namespace interpreter
8691} // namespace internal
8692} // namespace v8
void VisitKeyedSuperPropertyLoad(Property *property, Register opt_receiver_out)
void VisitIterationBodyInHoleCheckElisionScope(IterationStatement *stmt, LoopBuilder *loop_builder)
void BuildGetAndCheckSuperConstructor(Register this_function, Register new_target, Register constructor, BytecodeLabel *super_ctor_call_done)
void BuildLoadPropertyKey(LiteralProperty *property, Register out_reg)
FeedbackSlot GetCachedLoadICSlot(const Expression *expr, const AstRawString *name)
int AllocateBlockCoverageSlotIfEnabled(AstNode *node, SourceRangeKind kind)
void VisitArithmeticExpression(BinaryOperation *binop)
void VisitForTest(Expression *expr, BytecodeLabels *then_labels, BytecodeLabels *else_labels, TestFallthrough fallthrough)
void BuildAwait(int position=kNoSourcePosition)
FeedbackSlot GetCachedLoadSuperICSlot(const AstRawString *name)
void BuildVariableLoad(Variable *variable, HoleCheckMode hole_check_mode, TypeofMode typeof_mode=TypeofMode::kNotInside)
void BuildPrivateBrandCheck(Property *property, Register object)
int GetCachedCreateClosureSlot(FunctionLiteral *literal)
void BuildIteratorClose(const IteratorRecord &iterator, Expression *expr=nullptr)
void BuildAssignment(const AssignmentLhsData &data, Token::Value op, LookupHoistingMode lookup_hoisting_mode)
TopLevelDeclarationsBuilder * top_level_builder()
void VisitClassLiteral(ClassLiteral *expr, Register name)
void AddToEagerLiteralsIfEager(FunctionLiteral *literal)
void BuildSetNamedProperty(const Expression *object_expr, Register object, const AstRawString *name)
FeedbackSlot GetCachedLoadGlobalICSlot(TypeofMode typeof_mode, Variable *variable)
void BuildVariableAssignment(Variable *variable, Token::Value op, HoleCheckMode hole_check_mode, LookupHoistingMode lookup_hoisting_mode=LookupHoistingMode::kNormal)
void BuildPrivateDebugDynamicSet(Property *property, Register obj, Register value)
void BuildIncrementBlockCoverageCounterIfEnabled(AstNode *node, SourceRangeKind kind)
void BuildInstanceMemberInitialization(Register constructor, Register instance)
void VisitAndPushIntoRegisterList(Expression *expr, RegisterList *reg_list)
void BuildOptionalChain(ExpressionFunc expression_func)
void BuildInstanceInitializationAfterSuperCall(Register this_function, Register instance)
void BuildPushUndefinedIntoRegisterList(RegisterList *reg_list)
HandlerTable::CatchPrediction catch_prediction() const
BytecodeGenerator(LocalIsolate *local_isolate, Zone *zone, UnoptimizedCompilationInfo *info, const AstStringConstants *ast_string_constants, std::vector< FunctionLiteral * > *eager_inner_literals, Handle< Script > script)
bool IsVariableInRegister(Variable *var, Register reg)
ExpressionResultScope * execution_result() const
void BuildSuperCallOptimization(Register this_function, Register new_target, Register constructor_then_instance, BytecodeLabel *super_ctor_call_done)
bool VisitNullishSubExpression(Expression *expr, BytecodeLabels *end_labels, int coverage_slot)
Expression * GetDestructuringDefaultValue(Expression **target)
bool VariableNeedsHoleCheckInCurrentBlockForAssignment(Variable *variable, Token::Value op, HoleCheckMode hole_check_mode)
static constexpr ToBooleanMode ToBooleanModeFromTypeHint(TypeHint type_hint)
std::vector< FunctionLiteral * > * eager_inner_literals_
bool VariableNeedsHoleCheckInCurrentBlock(Variable *variable, HoleCheckMode hole_check_mode)
void BuildPrivateMethodIn(Variable *private_name, Expression *object_expression)
void BuildIteratorNext(const IteratorRecord &iterator, Register next_result)
int AllocateNaryBlockCoverageSlotIfEnabled(NaryOperation *node, size_t index)
void BuildDestructuringObjectAssignment(ObjectLiteral *pattern, Token::Value op, LookupHoistingMode lookup_hoisting_mode)
void VisitPropertyLoadForRegister(Register obj, Property *expr, Register destination)
void BuildPrivateGetterAccess(Register obj, Register access_pair)
void BuildClassLiteral(ClassLiteral *expr, Register name)
FeedbackSlot GetCachedStoreGlobalICSlot(LanguageMode language_mode, Variable *variable)
void BuildPrivateDebugDynamicGet(Property *property, Register obj)
void VisitModuleDeclarations(Declaration::List *declarations)
BytecodeRegisterAllocator * register_allocator()
void VisitNamedSuperPropertyLoad(Property *property, Register opt_receiver_out)
ZoneVector< std::pair< GetTemplateObject *, size_t > > template_objects_
ZoneVector< std::pair< NativeFunctionLiteral *, size_t > > native_function_literals_
void VisitLogicalAndExpression(BinaryOperation *binop)
void VisitDeclarations(Declaration::List *declarations)
AssignmentLhsData PrepareAssignmentLhs(Expression *lhs, AccumulatorPreservingMode accumulator_preserving_mode=AccumulatorPreservingMode::kNone)
void VisitInScope(Statement *stmt, Scope *scope)
void BuildFinalizeIteration(IteratorRecord iterator, Register done, Register iteration_continuation_token)
void VisitStatements(const ZonePtrList< Statement > *statements, int start=0)
void BuildLoadKeyedProperty(Register object, FeedbackSlot slot)
TypeHint VisitInHoleCheckElisionScopeForAccumulatorValue(Expression *expr)
ZoneVector< std::pair< ArrayLiteralBoilerplateBuilder *, size_t > > array_literals_
void BuildInvalidPropertyAccess(MessageTemplate tmpl, Property *property)
Handle< BytecodeArray > FinalizeBytecode(IsolateT *isolate, Handle< Script > script)
void AllocateDeferredConstants(IsolateT *isolate, Handle< Script > script)
void BuildPrivateSetterAccess(Register obj, Register access_pair, Register value)
bool VisitLogicalAndSubExpression(Expression *expr, BytecodeLabels *end_labels, int coverage_slot)
void VisitNaryLogicalTest(Token::Value token, NaryOperation *expr, const NaryCodeCoverageSlots *coverage_slots)
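The VariableNeedsHoleCheckInCurrentBlock predicates above, together with VisitInHoleCheckElisionScopeForAccumulatorValue and VisitIterationBodyInHoleCheckElisionScope, support TDZ hole-check elision: once a let/const binding has been checked in the current basic block, later accesses in that block can skip the check. A sketch of the bookkeeping under that assumption (simplified to a per-block set; the real tracking is more involved):

#include <unordered_set>

class HoleCheckEliderSketch {
 public:
  // Returns true if a hole check must still be emitted for this access,
  // and records the variable so later accesses in the block are elided.
  bool NeedsHoleCheck(const void* variable) {
    return checked_in_block_.insert(variable).second;
  }

  // Entering a new basic block (e.g. a loop body that can be re-entered
  // with different state) invalidates everything proven so far.
  void StartNewBlock() { checked_in_block_.clear(); }

 private:
  std::unordered_set<const void*> checked_in_block_;
};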
Supporting interpreter types referenced above (bytecode flag encoders, RegisterList, Register):
void Bind(BytecodeArrayBuilder *builder)
static uint8_t Encode(bool use_fast_shallow_clone, int runtime_flags)
static uint8_t Encode(bool pretenure, bool is_function_scope, bool might_always_turbofan)
static uint8_t Encode(int runtime_flags, bool fast_clone_supported)
const RegisterList Truncate(int new_count)
static constexpr Register virtual_accumulator()
static constexpr Register current_context()
static constexpr Register invalid_value()
static constexpr Register function_closure()
static LiteralFlag GetFlagForLiteral(const AstStringConstants *ast_constants, Literal *literal)
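The Encode overloads above pack a handful of booleans and small counters into a single uint8_t bytecode operand, while Register exposes named constants for special operands such as the current context and the function closure. A hedged sketch of such a flag encoder; the field layout below is invented for illustration and is not V8's actual bit assignment:

#include <cstdint>

// Invented layout: bit 0 = fast clone supported, bits 1+ = runtime flags.
struct LiteralFlagsSketch {
  static constexpr uint8_t kFastCloneBit = 1 << 0;
  static constexpr int kRuntimeFlagsShift = 1;

  static uint8_t Encode(int runtime_flags, bool fast_clone_supported) {
    uint8_t bits = static_cast<uint8_t>(runtime_flags << kRuntimeFlagsShift);
    if (fast_clone_supported) bits |= kFastCloneBit;
    return bits;
  }

  static bool DecodeFastClone(uint8_t bits) { return bits & kFastCloneBit; }
  static int DecodeRuntimeFlags(uint8_t bits) {
    return bits >> kRuntimeFlagsShift;
  }
};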
static bool IsTypeof(Expression *expr)
static bool IsLiteralCompareTypeof(CompareOperation *expr, Expression **sub_expr, TestTypeOfFlags::LiteralFlag *flag, const AstStringConstants *ast_constants)
static bool IsCharU(const AstRawString *str)
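The three file-local helpers above let the generator pattern-match comparisons such as typeof x === "undefined" so it can emit a dedicated typeof-test bytecode instead of materializing the typeof string and comparing it. A simplified recognizer over a toy AST; the node types and field names are invented for the sketch:

#include <string>

// Toy expression node: either a typeof application or a string literal.
struct ExprSketch {
  bool is_typeof = false;         // typeof <operand>
  const ExprSketch* operand = nullptr;
  bool is_string_literal = false;
  std::string string_value;
};

// Mirrors the shape of IsLiteralCompareTypeof: succeeds when one side is
// `typeof <expr>` and the other a string literal, returning both parts.
bool MatchLiteralCompareTypeof(const ExprSketch* left, const ExprSketch* right,
                               const ExprSketch** sub_expr,
                               std::string* literal) {
  if (left->is_typeof && right->is_string_literal) {
    *sub_expr = left->operand;
    *literal = right->string_value;
    return true;
  }
  if (right->is_typeof && left->is_string_literal) {
    *sub_expr = right->operand;
    *literal = left->string_value;
    return true;
  }
  return false;
}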