v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
code-stub-assembler.cc
Go to the documentation of this file.
1// Copyright 2016 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
7#include <stdio.h>
8
9#include <functional>
10#include <optional>
11
12#include "include/v8-internal.h"
13#include "src/base/macros.h"
16#include "src/codegen/tnode.h"
17#include "src/common/globals.h"
22#include "src/heap/heap-inl.h" // For MutablePageMetadata. TODO(jkummerow): Drop.
27#include "src/objects/cell.h"
35#include "src/objects/oddball.h"
40#include "src/roots/roots.h"
41#include "third_party/v8/codegen/fp16-inl.h"
42
43namespace v8 {
44namespace internal {
45
47
// CSA_DCHECK_BRANCH(csa, gen, ...) emits a debug-only check driven by a
// branch generator {gen}; the stringified generator expression plus the
// call site's file/line are forwarded to Dcheck for the failure message.
// In release builds it expands to a no-op so no code is generated.
#ifdef DEBUG
#define CSA_DCHECK_BRANCH(csa, gen, ...) \
  (csa)->Dcheck(gen, #gen, __FILE__, __LINE__, CSA_DCHECK_ARGS(__VA_ARGS__))
#else
// Release build: swallow all arguments, generate nothing.
#define CSA_DCHECK_BRANCH(csa, ...) ((void)0)
#endif
54
55namespace {
56
57Builtin BigIntComparisonBuiltinOf(Operation const& op) {
58 switch (op) {
59 case Operation::kLessThan:
60 return Builtin::kBigIntLessThan;
61 case Operation::kGreaterThan:
62 return Builtin::kBigIntGreaterThan;
63 case Operation::kLessThanOrEqual:
64 return Builtin::kBigIntLessThanOrEqual;
65 case Operation::kGreaterThanOrEqual:
66 return Builtin::kBigIntGreaterThanOrEqual;
67 default:
69 }
70}
71
72} // namespace
73
75 : compiler::CodeAssembler(state),
76 TorqueGeneratedExportedMacrosAssembler(state) {
77 if (v8_flags.csa_trap_on_node != nullptr) {
78 HandleBreakOnNode();
79 }
80}
81
83 // v8_flags.csa_trap_on_node should be in a form "STUB,NODE" where STUB is a
84 // string specifying the name of a stub and NODE is number specifying node id.
85 const char* name = state()->name();
86 size_t name_length = strlen(name);
87 if (strncmp(v8_flags.csa_trap_on_node, name, name_length) != 0) {
88 // Different name.
89 return;
90 }
91 size_t option_length = strlen(v8_flags.csa_trap_on_node);
92 if (option_length < name_length + 2 ||
93 v8_flags.csa_trap_on_node[name_length] != ',') {
94 // Option is too short.
95 return;
96 }
97 const char* start = &v8_flags.csa_trap_on_node[name_length + 1];
98 char* end;
99 int node_id = static_cast<int>(strtol(start, &end, 10));
100 if (start == end) {
101 // Bad node id.
102 return;
103 }
104 BreakOnNode(node_id);
105}
106
108 const char* message, const char* file, int line,
109 std::initializer_list<ExtraNode> extra_nodes,
110 const SourceLocation& loc) {
111#if defined(DEBUG)
112 if (v8_flags.debug_code) {
113 Check(branch, message, file, line, extra_nodes, loc);
114 }
115#endif
116}
117
119 const char* message, const char* file, int line,
120 std::initializer_list<ExtraNode> extra_nodes,
121 const SourceLocation& loc) {
122#if defined(DEBUG)
123 if (v8_flags.debug_code) {
124 Check(condition_body, message, file, line, extra_nodes, loc);
125 }
126#endif
127}
128
130 const char* message, const char* file, int line,
131 std::initializer_list<ExtraNode> extra_nodes,
132 const SourceLocation& loc) {
133#if defined(DEBUG)
134 if (v8_flags.debug_code) {
135 Check(condition_node, message, file, line, extra_nodes, loc);
136 }
137#endif
138}
139
141 const char* message, const char* file, int line,
142 std::initializer_list<ExtraNode> extra_nodes,
143 const SourceLocation& loc) {
144 Label ok(this);
145 Label not_ok(this, Label::kDeferred);
146 if (message != nullptr) {
147 Comment({"[ Assert: ", loc}, message);
148 } else {
149 Comment({"[ Assert: ", loc});
150 }
151 branch(&ok, &not_ok);
152
153 BIND(&not_ok);
154 std::vector<FileAndLine> file_and_line;
155 if (file != nullptr) {
156 file_and_line.push_back({file, line});
157 }
158 FailAssert(message, file_and_line, extra_nodes, loc);
159
160 BIND(&ok);
161 Comment({"] Assert", SourceLocation()});
162}
163
165 const char* message, const char* file, int line,
166 std::initializer_list<ExtraNode> extra_nodes,
167 const SourceLocation& loc) {
168 BranchGenerator branch = [=, this](Label* ok, Label* not_ok) {
169 TNode<BoolT> condition = condition_body();
170 Branch(condition, ok, not_ok);
171 };
172
173 Check(branch, message, file, line, extra_nodes, loc);
174}
175
177 const char* message, const char* file, int line,
178 std::initializer_list<ExtraNode> extra_nodes,
179 const SourceLocation& loc) {
180 BranchGenerator branch = [=, this](Label* ok, Label* not_ok) {
181 Branch(condition_node, ok, not_ok);
182 };
183
184 Check(branch, message, file, line, extra_nodes, loc);
185}
186
188 TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot_id) {
189 Comment("increment call count");
190 TNode<Smi> call_count =
191 CAST(LoadFeedbackVectorSlot(feedback_vector, slot_id, kTaggedSize));
192 // The lowest {FeedbackNexus::CallCountField::kShift} bits of the call
193 // count are used as flags. To increment the call count by 1 we hence
194 // have to increment by 1 << {FeedbackNexus::CallCountField::kShift}.
195 TNode<Smi> new_count = SmiAdd(
197 // Count is Smi, so we don't need a write barrier.
198 StoreFeedbackVectorSlot(feedback_vector, slot_id, new_count,
200}
201
203 Label ok(this), not_ok(this, Label::kDeferred);
204 Branch(condition, &ok, &not_ok);
205 BIND(&not_ok);
206 Unreachable();
207 BIND(&ok);
208}
209
211 const char* message, const std::vector<FileAndLine>& files_and_lines,
212 std::initializer_list<ExtraNode> extra_nodes, const SourceLocation& loc) {
213 DCHECK_NOT_NULL(message);
215 std::stringstream stream;
216 for (auto it = files_and_lines.rbegin(); it != files_and_lines.rend(); ++it) {
217 if (it->first != nullptr) {
218 stream << " [" << it->first << ":" << it->second << "]";
219#ifndef DEBUG
220 // To limit the size of these strings in release builds, we include only
221 // the innermost macro's file name and line number.
222 break;
223#endif
224 }
225 }
226 std::string files_and_lines_text = stream.str();
227 if (!files_and_lines_text.empty()) {
228 SNPrintF(chars, "%s%s", message, files_and_lines_text.c_str());
229 message = chars.begin();
230 }
231 TNode<String> message_node = StringConstant(message);
232
233#ifdef DEBUG
234 // Only print the extra nodes in debug builds.
235 for (auto& node : extra_nodes) {
236 CallRuntime(Runtime::kPrintWithNameForAssert, SmiConstant(0),
237 StringConstant(node.second), node.first);
238 }
239#endif
240
241 AbortCSADcheck(message_node);
242 Unreachable();
243}
244
246 int true_value,
247 int false_value) {
249 Int32Constant(false_value));
250}
251
253 int true_value,
254 int false_value) {
256 IntPtrConstant(false_value));
257}
258
263
270
274
277 ExternalReference::array_buffer_max_allocation_address(isolate()));
278 return Load<UintPtrT>(address);
279}
280
// HEAP_CONSTANT_ACCESSOR generates a CodeStubAssembler::<name>Constant()
// accessor that loads the root at RootIndex::k<rootIndexName>. The result
// type is derived from the return type of the matching Heap getter
// (Heap::<rootAccessorName>()), with the Tagged<> wrapper stripped.
#define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name)          \
  TNode<RemoveTagged<decltype(std::declval<Heap>().rootAccessorName())>::type> \
  CodeStubAssembler::name##Constant() {                                        \
    return UncheckedCast<RemoveTagged<                                         \
        decltype(std::declval<Heap>().rootAccessorName())>::type>(             \
        LoadRoot(RootIndex::k##rootIndexName));                                \
  }
289#undef HEAP_CONSTANT_ACCESSOR
290
// Second variant of HEAP_CONSTANT_ACCESSOR: identical to the one above
// except that the result type is derived from the corresponding
// ReadOnlyRoots getter (ReadOnlyRoots::<rootAccessorName>()) instead of a
// Heap getter.
#define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name) \
  TNode<RemoveTagged<                                                 \
      decltype(std::declval<ReadOnlyRoots>().rootAccessorName())>::type> \
  CodeStubAssembler::name##Constant() {                               \
    return UncheckedCast<RemoveTagged<                                \
        decltype(std::declval<ReadOnlyRoots>().rootAccessorName())>::type>( \
        LoadRoot(RootIndex::k##rootIndexName));                       \
  }
300#undef HEAP_CONSTANT_ACCESSOR
301
// HEAP_CONSTANT_TEST generates Is<name>/IsNot<name> predicates that compare
// a tagged value against the corresponding root constant by tagged-pointer
// (in)equality.
#define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name)    \
  TNode<BoolT> CodeStubAssembler::Is##name(TNode<Object> value) {    \
    return TaggedEqual(value, name##Constant());                     \
  }                                                                  \
  TNode<BoolT> CodeStubAssembler::IsNot##name(TNode<Object> value) { \
    return TaggedNotEqual(value, name##Constant());                  \
  }
310#undef HEAP_CONSTANT_TEST
311
313#if defined(BINT_IS_SMI)
314 return SmiConstant(value);
315#elif defined(BINT_IS_INTPTR)
316 return IntPtrConstant(value);
317#else
318#error Unknown architecture.
319#endif
320}
321
322template <>
324 return SmiConstant(value);
325}
326
327template <>
328TNode<IntPtrT> CodeStubAssembler::IntPtrOrSmiConstant<IntPtrT>(int value) {
329 return IntPtrConstant(value);
330}
331
332template <>
333TNode<UintPtrT> CodeStubAssembler::IntPtrOrSmiConstant<UintPtrT>(int value) {
334 return Unsigned(IntPtrConstant(value));
335}
336
337template <>
338TNode<RawPtrT> CodeStubAssembler::IntPtrOrSmiConstant<RawPtrT>(int value) {
340}
341
343 TNode<Smi> maybe_constant, int* value) {
344 Tagged<Smi> smi_constant;
345 if (TryToSmiConstant(maybe_constant, &smi_constant)) {
346 *value = Smi::ToInt(smi_constant);
347 return true;
348 }
349 return false;
350}
351
353 TNode<IntPtrT> maybe_constant, int* value) {
354 int32_t int32_constant;
355 if (TryToInt32Constant(maybe_constant, &int32_constant)) {
356 *value = int32_constant;
357 return true;
358 }
359 return false;
360}
361
363 TNode<IntPtrT> value) {
364 Comment("IntPtrRoundUpToPowerOfTwo32");
365 CSA_DCHECK(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
366 value = Signed(IntPtrSub(value, IntPtrConstant(1)));
367 for (int i = 1; i <= 16; i *= 2) {
368 value = Signed(WordOr(value, WordShr(value, IntPtrConstant(i))));
369 }
370 return Signed(IntPtrAdd(value, IntPtrConstant(1)));
371}
372
374 intptr_t constant;
375 if (TryToIntPtrConstant(value, &constant)) {
376 return BoolConstant(base::bits::IsPowerOfTwo(constant));
377 }
378 // value && !(value & (value - 1))
379 return IntPtrEqual(Select<IntPtrT>(
380 IntPtrEqual(value, IntPtrConstant(0)),
381 [=, this] { return IntPtrConstant(1); },
382 [=, this] {
383 return WordAnd(value,
384 IntPtrSub(value, IntPtrConstant(1)));
385 }),
386 IntPtrConstant(0));
387}
388
391 double max_relative_error) {
393 Label done(this);
394
395 GotoIf(Float64Equal(x, y), &done);
396 GotoIf(Float64LessThan(Float64Div(Float64Abs(Float64Sub(x, y)),
397 Float64Max(Float64Abs(x), Float64Abs(y))),
398 Float64Constant(max_relative_error)),
399 &done);
400
401 result = BoolConstant(false);
402 Goto(&done);
403
404 BIND(&done);
405 return result.value();
406}
407
410 TNode<Float64T> one_half = Float64Constant(0.5);
411
412 Label return_x(this);
413
414 // Round up {x} towards Infinity.
416
417 GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
418 &return_x);
419 var_x = Float64Sub(var_x.value(), one);
420 Goto(&return_x);
421
422 BIND(&return_x);
423 return var_x.value();
424}
425
427 TVARIABLE(Float64T, var_x, x);
428 Label round_op_supported(this), round_op_fallback(this), return_x(this);
429 // Use UniqueInt32Constant instead of BoolConstant here in order to ensure
430 // that the graph structure does not depend on the value of the predicate
431 // (BoolConstant uses cached nodes).
433 &round_op_fallback);
434
435 BIND(&round_op_supported);
436 {
437 // This optional operation is used behind a static check and we rely
438 // on the dead code elimination to remove this unused unsupported
439 // instruction. We generate builtins this way in order to ensure that
440 // builtins PGO profiles are interchangeable between architectures.
441 var_x = Float64RoundUp(x);
442 Goto(&return_x);
443 }
444
445 BIND(&round_op_fallback);
446 {
449 TNode<Float64T> two_52 = Float64Constant(4503599627370496.0E0);
450 TNode<Float64T> minus_two_52 = Float64Constant(-4503599627370496.0E0);
451
452 Label return_minus_x(this);
453
454 // Check if {x} is greater than zero.
455 Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
456 Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
457 &if_xnotgreaterthanzero);
458
459 BIND(&if_xgreaterthanzero);
460 {
461 // Just return {x} unless it's in the range ]0,2^52[.
462 GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
463
464 // Round positive {x} towards Infinity.
465 var_x = Float64Sub(Float64Add(two_52, x), two_52);
466 GotoIfNot(Float64LessThan(var_x.value(), x), &return_x);
467 var_x = Float64Add(var_x.value(), one);
468 Goto(&return_x);
469 }
470
471 BIND(&if_xnotgreaterthanzero);
472 {
473 // Just return {x} unless it's in the range ]-2^52,0[
474 GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
475 GotoIfNot(Float64LessThan(x, zero), &return_x);
476
477 // Round negated {x} towards Infinity and return the result negated.
478 TNode<Float64T> minus_x = Float64Neg(x);
479 var_x = Float64Sub(Float64Add(two_52, minus_x), two_52);
480 GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
481 var_x = Float64Sub(var_x.value(), one);
482 Goto(&return_minus_x);
483 }
484
485 BIND(&return_minus_x);
486 var_x = Float64Neg(var_x.value());
487 Goto(&return_x);
488 }
489 BIND(&return_x);
490 return var_x.value();
491}
492
494 TVARIABLE(Float64T, var_x, x);
495 Label round_op_supported(this), round_op_fallback(this), return_x(this);
496 // Use UniqueInt32Constant instead of BoolConstant here in order to ensure
497 // that the graph structure does not depend on the value of the predicate
498 // (BoolConstant uses cached nodes).
500 &round_op_supported, &round_op_fallback);
501
502 BIND(&round_op_supported);
503 {
504 // This optional operation is used behind a static check and we rely
505 // on the dead code elimination to remove this unused unsupported
506 // instruction. We generate builtins this way in order to ensure that
507 // builtins PGO profiles are interchangeable between architectures.
508 var_x = Float64RoundDown(x);
509 Goto(&return_x);
510 }
511
512 BIND(&round_op_fallback);
513 {
516 TNode<Float64T> two_52 = Float64Constant(4503599627370496.0E0);
517 TNode<Float64T> minus_two_52 = Float64Constant(-4503599627370496.0E0);
518
519 Label return_minus_x(this);
520
521 // Check if {x} is greater than zero.
522 Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
523 Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
524 &if_xnotgreaterthanzero);
525
526 BIND(&if_xgreaterthanzero);
527 {
528 // Just return {x} unless it's in the range ]0,2^52[.
529 GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
530
531 // Round positive {x} towards -Infinity.
532 var_x = Float64Sub(Float64Add(two_52, x), two_52);
533 GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
534 var_x = Float64Sub(var_x.value(), one);
535 Goto(&return_x);
536 }
537
538 BIND(&if_xnotgreaterthanzero);
539 {
540 // Just return {x} unless it's in the range ]-2^52,0[
541 GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
542 GotoIfNot(Float64LessThan(x, zero), &return_x);
543
544 // Round negated {x} towards -Infinity and return the result negated.
545 TNode<Float64T> minus_x = Float64Neg(x);
546 var_x = Float64Sub(Float64Add(two_52, minus_x), two_52);
547 GotoIfNot(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
548 var_x = Float64Add(var_x.value(), one);
549 Goto(&return_minus_x);
550 }
551
552 BIND(&return_minus_x);
553 var_x = Float64Neg(var_x.value());
554 Goto(&return_x);
555 }
556 BIND(&return_x);
557 return var_x.value();
558}
559
561 TVARIABLE(Float64T, var_result);
562 Label round_op_supported(this), round_op_fallback(this), done(this);
563 // Use UniqueInt32Constant instead of BoolConstant here in order to ensure
564 // that the graph structure does not depend on the value of the predicate
565 // (BoolConstant uses cached nodes).
567 &round_op_supported, &round_op_fallback);
568
569 BIND(&round_op_supported);
570 {
571 // This optional operation is used behind a static check and we rely
572 // on the dead code elimination to remove this unused unsupported
573 // instruction. We generate builtins this way in order to ensure that
574 // builtins PGO profiles are interchangeable between architectures.
575 var_result = Float64RoundTiesEven(x);
576 Goto(&done);
577 }
578
579 BIND(&round_op_fallback);
580 {
581 // See ES#sec-touint8clamp for details.
583 TNode<Float64T> f_and_half = Float64Add(f, Float64Constant(0.5));
584
585 Label return_f(this), return_f_plus_one(this);
586
587 GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
588 GotoIf(Float64LessThan(x, f_and_half), &return_f);
589 {
590 TNode<Float64T> f_mod_2 = Float64Mod(f, Float64Constant(2.0));
591 Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
592 &return_f_plus_one);
593 }
594
595 BIND(&return_f);
596 var_result = f;
597 Goto(&done);
598
599 BIND(&return_f_plus_one);
600 var_result = Float64Add(f, Float64Constant(1.0));
601 Goto(&done);
602 }
603 BIND(&done);
604 return var_result.value();
605}
606
608 TVARIABLE(Float64T, var_x, x);
609 Label trunc_op_supported(this), trunc_op_fallback(this), return_x(this);
610 // Use UniqueInt32Constant instead of BoolConstant here in order to ensure
611 // that the graph structure does not depend on the value of the predicate
612 // (BoolConstant uses cached nodes).
614 &trunc_op_supported, &trunc_op_fallback);
615
616 BIND(&trunc_op_supported);
617 {
618 // This optional operation is used behind a static check and we rely
619 // on the dead code elimination to remove this unused unsupported
620 // instruction. We generate builtins this way in order to ensure that
621 // builtins PGO profiles are interchangeable between architectures.
622 var_x = Float64RoundTruncate(x);
623 Goto(&return_x);
624 }
625
626 BIND(&trunc_op_fallback);
627 {
630 TNode<Float64T> two_52 = Float64Constant(4503599627370496.0E0);
631 TNode<Float64T> minus_two_52 = Float64Constant(-4503599627370496.0E0);
632
633 Label return_minus_x(this);
634
635 // Check if {x} is greater than 0.
636 Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
637 Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
638 &if_xnotgreaterthanzero);
639
640 BIND(&if_xgreaterthanzero);
641 {
642 Label round_op_supported(this), round_op_fallback(this);
644 &round_op_supported, &round_op_fallback);
645 BIND(&round_op_supported);
646 {
647 // This optional operation is used behind a static check and we rely
648 // on the dead code elimination to remove this unused unsupported
649 // instruction. We generate builtins this way in order to ensure that
650 // builtins PGO profiles are interchangeable between architectures.
651 var_x = Float64RoundDown(x);
652 Goto(&return_x);
653 }
654 BIND(&round_op_fallback);
655 {
656 // Just return {x} unless it's in the range ]0,2^52[.
657 GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);
658
659 // Round positive {x} towards -Infinity.
660 var_x = Float64Sub(Float64Add(two_52, x), two_52);
661 GotoIfNot(Float64GreaterThan(var_x.value(), x), &return_x);
662 var_x = Float64Sub(var_x.value(), one);
663 Goto(&return_x);
664 }
665 }
666
667 BIND(&if_xnotgreaterthanzero);
668 {
669 Label round_op_supported(this), round_op_fallback(this);
671 &round_op_supported, &round_op_fallback);
672 BIND(&round_op_supported);
673 {
674 // This optional operation is used behind a static check and we rely
675 // on the dead code elimination to remove this unused unsupported
676 // instruction. We generate builtins this way in order to ensure that
677 // builtins PGO profiles are interchangeable between architectures.
678 var_x = Float64RoundUp(x);
679 Goto(&return_x);
680 }
681 BIND(&round_op_fallback);
682 {
      // Just return {x} unless it's in the range ]-2^52,0[.
684 GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
685 GotoIfNot(Float64LessThan(x, zero), &return_x);
686
687 // Round negated {x} towards -Infinity and return result negated.
688 TNode<Float64T> minus_x = Float64Neg(x);
689 var_x = Float64Sub(Float64Add(two_52, minus_x), two_52);
690 GotoIfNot(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
691 var_x = Float64Sub(var_x.value(), one);
692 Goto(&return_minus_x);
693 }
694 }
695
696 BIND(&return_minus_x);
697 var_x = Float64Neg(var_x.value());
698 Goto(&return_x);
699 }
700 BIND(&return_x);
701 return var_x.value();
702}
703
705 TNode<UintPtrT> value) {
706 // Taken from slow path of base::bits::CountPopulation, the comments here show
707 // C++ code and comments from there for reference.
708 // Fall back to divide-and-conquer popcount (see "Hacker's Delight" by Henry
709 // S. Warren, Jr.), chapter 5-1.
710 constexpr uintptr_t mask[] = {static_cast<uintptr_t>(0x5555555555555555),
711 static_cast<uintptr_t>(0x3333333333333333),
712 static_cast<uintptr_t>(0x0f0f0f0f0f0f0f0f)};
713
714 // TNode<UintPtrT> value = Unsigned(value_word);
715 TNode<UintPtrT> lhs, rhs;
716
717 // Start with 64 buckets of 1 bits, holding values from [0,1].
718 // {value = ((value >> 1) & mask[0]) + (value & mask[0])}
719 lhs = WordAnd(WordShr(value, UintPtrConstant(1)), UintPtrConstant(mask[0]));
720 rhs = WordAnd(value, UintPtrConstant(mask[0]));
721 value = UintPtrAdd(lhs, rhs);
722
723 // Having 32 buckets of 2 bits, holding values from [0,2] now.
724 // {value = ((value >> 2) & mask[1]) + (value & mask[1])}
725 lhs = WordAnd(WordShr(value, UintPtrConstant(2)), UintPtrConstant(mask[1]));
726 rhs = WordAnd(value, UintPtrConstant(mask[1]));
727 value = UintPtrAdd(lhs, rhs);
728
729 // Having 16 buckets of 4 bits, holding values from [0,4] now.
730 // {value = ((value >> 4) & mask[2]) + (value & mask[2])}
731 lhs = WordAnd(WordShr(value, UintPtrConstant(4)), UintPtrConstant(mask[2]));
732 rhs = WordAnd(value, UintPtrConstant(mask[2]));
733 value = UintPtrAdd(lhs, rhs);
734
735 // Having 8 buckets of 8 bits, holding values from [0,8] now.
736 // From this point on, the buckets are bigger than the number of bits
  // required to hold the values, and the buckets are bigger than the maximum
738 // result, so there's no need to mask value anymore, since there's no
739 // more risk of overflow between buckets.
740 // {value = (value >> 8) + value}
741 lhs = WordShr(value, UintPtrConstant(8));
742 value = UintPtrAdd(lhs, value);
743
744 // Having 4 buckets of 16 bits, holding values from [0,16] now.
745 // {value = (value >> 16) + value}
746 lhs = WordShr(value, UintPtrConstant(16));
747 value = UintPtrAdd(lhs, value);
748
749 if (Is64()) {
750 // Having 2 buckets of 32 bits, holding values from [0,32] now.
751 // {value = (value >> 32) + value}
752 lhs = WordShr(value, UintPtrConstant(32));
753 value = UintPtrAdd(lhs, value);
754 }
755
756 // Having 1 buckets of sizeof(intptr_t) bits, holding values from [0,64] now.
757 // {return static_cast<unsigned>(value & 0xff)}
758 return Signed(WordAnd(value, UintPtrConstant(0xff)));
759}
760
763 return Word64Popcnt(value);
764 }
765
766 if (Is32()) {
767 // Unsupported.
768 UNREACHABLE();
769 }
770
773}
774
777 return Word32Popcnt(value);
778 }
779
780 if (Is32()) {
781 TNode<IntPtrT> res =
783 return ReinterpretCast<Int32T>(res);
784 } else {
786 ReinterpretCast<UintPtrT>(ChangeUint32ToUint64(value)));
787 return TruncateInt64ToInt32(ReinterpretCast<Int64T>(res));
788 }
789}
790
792 if (IsWord64CtzSupported()) {
793 return Word64Ctz(value);
794 }
795
796 if (Is32()) {
797 // Unsupported.
798 UNREACHABLE();
799 }
800
801 // Same fallback as in base::bits::CountTrailingZeros.
802 // Fall back to popcount (see "Hacker's Delight" by Henry S. Warren, Jr.),
  // chapter 5-4. On x64 this is faster than counting in a loop and faster
  // than doing binary search.
805 TNode<Word64T> lhs = Word64Not(value);
807 return PopulationCount64(Word64And(lhs, rhs));
808}
809
811 if (IsWord32CtzSupported()) {
812 return Word32Ctz(value);
813 }
814
815 if (Is32()) {
816 // Same fallback as in Word64CountTrailingZeros.
817 TNode<Word32T> lhs = Word32BitwiseNot(value);
819 return PopulationCount32(Word32And(lhs, rhs));
820 } else {
821 TNode<Int64T> res64 = CountTrailingZeros64(ChangeUint32ToUint64(value));
822 return TruncateInt64ToInt32(Signed(res64));
823 }
824}
825
827 return Word64Clz(value);
828}
829
831 return Word32Clz(value);
832}
833
834template <>
838
839template <>
843
845 TNode<TaggedIndex> value) {
846 return Signed(WordSarShiftOutZeros(BitcastTaggedToWordForTagAndSmiBits(value),
848}
849
855
857 if (SmiValuesAre32Bits()) {
859 return BitcastWordToTaggedSigned(
860 WordShl(BitcastTaggedToWordForTagAndSmiBits(value),
862 }
865 return ReinterpretCast<Smi>(value);
866}
867
870 return ReinterpretCast<TaggedIndex>(value);
871 }
872 if (SmiValuesAre32Bits()) {
874 return ReinterpretCast<TaggedIndex>(BitcastWordToTaggedSigned(
875 WordSar(BitcastTaggedToWordForTagAndSmiBits(value),
877 }
880 // Just sign-extend the lower 32 bits.
881 TNode<Int32T> raw =
882 TruncateWordToInt32(BitcastTaggedToWordForTagAndSmiBits(value));
884 BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(raw)));
885}
886
889 TNode<Int32T> raw =
890 TruncateWordToInt32(BitcastTaggedToWordForTagAndSmiBits(smi_index));
891 smi_index = BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(raw));
892 }
893 return smi_index;
894}
895
898 static_assert(!COMPRESS_POINTERS_BOOL || (kSmiShiftSize + kSmiTagSize == 1),
899 "Use shifting instead of add");
900 return BitcastWordToTaggedSigned(
901 ChangeUint32ToWord(Int32Add(value, value)));
902 }
903 return SmiTag(ChangeInt32ToIntPtr(value));
904}
905
911
913 intptr_t constant_value;
914 if (TryToIntPtrConstant(value, &constant_value)) {
915 return (static_cast<uintptr_t>(constant_value) <=
916 static_cast<uintptr_t>(Smi::kMaxValue))
919 }
920
922}
923
925 int32_t constant_value;
926 if (TryToInt32Constant(value, &constant_value) &&
927 Smi::IsValid(constant_value)) {
928 return SmiConstant(constant_value);
929 }
931 return SmiFromInt32(TruncateIntPtrToInt32(value));
932 }
933 TNode<Smi> smi =
934 BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
935 return smi;
936}
937
939 intptr_t constant_value;
940 if (TryToIntPtrConstant(value, &constant_value)) {
941 return IntPtrConstant(constant_value >> (kSmiShiftSize + kSmiTagSize));
942 }
943 TNode<IntPtrT> raw_bits = BitcastTaggedToWordForTagAndSmiBits(value);
945 return ChangeInt32ToIntPtr(Word32SarShiftOutZeros(
947 }
948 return Signed(WordSarShiftOutZeros(raw_bits, SmiShiftBitsConstant()));
949}
950
953 return Signed(Word32SarShiftOutZeros(
954 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(value)),
956 }
959}
960
962 DCHECK(SmiGreaterThanOrEqual(value, SmiConstant(0)));
963 return Unsigned(SmiToInt32(value));
964}
965
969
971 return ChangeInt32ToFloat64(SmiToInt32(value));
972}
973
977
981
984 Label* if_overflow) {
985 TNode<PairT<IntPtrT, BoolT>> pair = IntPtrAddWithOverflow(a, b);
986 TNode<BoolT> overflow = Projection<1>(pair);
987 GotoIf(overflow, if_overflow);
988 return Projection<0>(pair);
989}
990
993 Label* if_overflow) {
994 TNode<PairT<IntPtrT, BoolT>> pair = IntPtrSubWithOverflow(a, b);
995 TNode<BoolT> overflow = Projection<1>(pair);
996 GotoIf(overflow, if_overflow);
997 return Projection<0>(pair);
998}
999
1002 Label* if_overflow) {
1003 TNode<PairT<IntPtrT, BoolT>> pair = IntPtrMulWithOverflow(a, b);
1004 TNode<BoolT> overflow = Projection<1>(pair);
1005 GotoIf(overflow, if_overflow);
1006 return Projection<0>(pair);
1007}
1008
1011 Label* if_div_zero) {
1012 GotoIf(IntPtrEqual(b, IntPtrConstant(0)), if_div_zero);
1013 return IntPtrDiv(a, b);
1014}
1015
1018 Label* if_div_zero) {
1019 GotoIf(IntPtrEqual(b, IntPtrConstant(0)), if_div_zero);
1020 return IntPtrMod(a, b);
1021}
1022
1024 Label* if_overflow) {
1025 TNode<PairT<Int32T, BoolT>> pair = Int32MulWithOverflow(a, b);
1026 TNode<BoolT> overflow = Projection<1>(pair);
1027 GotoIf(overflow, if_overflow);
1028 return Projection<0>(pair);
1029}
1030
1032 Label* if_overflow) {
1033 if (SmiValuesAre32Bits()) {
1034 return BitcastWordToTaggedSigned(
1035 TryIntPtrAdd(BitcastTaggedToWordForTagAndSmiBits(lhs),
1036 BitcastTaggedToWordForTagAndSmiBits(rhs), if_overflow));
1037 } else {
1039 TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(
1040 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(lhs)),
1041 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(rhs)));
1042 TNode<BoolT> overflow = Projection<1>(pair);
1043 GotoIf(overflow, if_overflow);
1045 return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
1046 }
1047}
1048
1050 Label* if_overflow) {
1051 if (SmiValuesAre32Bits()) {
1053 IntPtrSubWithOverflow(BitcastTaggedToWordForTagAndSmiBits(lhs),
1054 BitcastTaggedToWordForTagAndSmiBits(rhs));
1055 TNode<BoolT> overflow = Projection<1>(pair);
1056 GotoIf(overflow, if_overflow);
1058 return BitcastWordToTaggedSigned(result);
1059 } else {
1061 TNode<PairT<Int32T, BoolT>> pair = Int32SubWithOverflow(
1062 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(lhs)),
1063 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(rhs)));
1064 TNode<BoolT> overflow = Projection<1>(pair);
1065 GotoIf(overflow, if_overflow);
1067 return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
1068 }
1069}
1070
1072 if (SmiValuesAre32Bits()) {
1074 IntPtrAbsWithOverflow(BitcastTaggedToWordForTagAndSmiBits(a));
1075 TNode<BoolT> overflow = Projection<1>(pair);
1076 GotoIf(overflow, if_overflow);
1078 return BitcastWordToTaggedSigned(result);
1079 } else {
1082 TNode<PairT<Int32T, BoolT>> pair = Int32AbsWithOverflow(
1083 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(a)));
1084 TNode<BoolT> overflow = Projection<1>(pair);
1085 GotoIf(overflow, if_overflow);
1087 return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(result));
1088 }
1089}
1090
1092 // TODO(danno): This could be optimized by specifically handling smi cases.
1094 Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
1095 GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
1096 GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
1097 result = NanConstant();
1098 Goto(&done);
1099 BIND(&greater_than_equal_a);
1100 result = a;
1101 Goto(&done);
1102 BIND(&greater_than_equal_b);
1103 result = b;
1104 Goto(&done);
1105 BIND(&done);
1106 return result.value();
1107}
1108
1110 // TODO(danno): This could be optimized by specifically handling smi cases.
1112 Label done(this), greater_than_equal_a(this), greater_than_equal_b(this);
1113 GotoIfNumberGreaterThanOrEqual(a, b, &greater_than_equal_a);
1114 GotoIfNumberGreaterThanOrEqual(b, a, &greater_than_equal_b);
1115 result = NanConstant();
1116 Goto(&done);
1117 BIND(&greater_than_equal_a);
1118 result = b;
1119 Goto(&done);
1120 BIND(&greater_than_equal_b);
1121 result = a;
1122 Goto(&done);
1123 BIND(&done);
1124 return result.value();
1125}
1126
// Computes {a} % {b} for two Smi inputs following JS semantics: the result
// takes the sign of the dividend {a}, so a zero remainder with negative {a}
// yields -0.0, and a zero divisor yields NaN. Returns a Smi when possible,
// otherwise a HeapNumber (-0.0, NaN, or an out-of-Smi-range remainder).
TNode<Number> CodeStubAssembler::SmiMod(TNode<Smi> a, TNode<Smi> b) {
  TVARIABLE(Number, var_result);
  Label return_result(this, &var_result),
      return_minuszero(this, Label::kDeferred),
      return_nan(this, Label::kDeferred);

  // Untag {a} and {b}.
  TNode<Int32T> int_a = SmiToInt32(a);
  TNode<Int32T> int_b = SmiToInt32(b);

  // Return NaN if {b} is zero.
  GotoIf(Word32Equal(int_b, Int32Constant(0)), &return_nan);

  // Check if {a} is non-negative.
  Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
  Branch(Int32LessThanOrEqual(Int32Constant(0), int_a), &if_aisnotnegative,
         &if_aisnegative);

  BIND(&if_aisnotnegative);
  {
    // Fast case, don't need to check any other edge cases: a non-negative
    // dividend can never produce -0.0, and the remainder fits in a Smi.
    TNode<Int32T> r = Int32Mod(int_a, int_b);
    var_result = SmiFromInt32(r);
    Goto(&return_result);
  }

  BIND(&if_aisnegative);
  {
    if (SmiValuesAre32Bits()) {
      // Check if {a} is kMinInt and {b} is -1 (only relevant if the
      // kMinInt is actually representable as a Smi). kMinInt % -1 would
      // trap in Int32Mod on some architectures; the JS result is -0.0.
      Label join(this);
      GotoIfNot(Word32Equal(int_a, Int32Constant(kMinInt)), &join);
      GotoIf(Word32Equal(int_b, Int32Constant(-1)), &return_minuszero);
      Goto(&join);
      BIND(&join);
    }

    // Perform the integer modulus operation.
    TNode<Int32T> r = Int32Mod(int_a, int_b);

    // Check if {r} is zero, and if so return -0, because we have to
    // take the sign of the left hand side {a}, which is negative.
    GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);

    // The remainder {r} can be outside the valid Smi range on 32bit
    // architectures, so we cannot just say SmiFromInt32(r) here.
    var_result = ChangeInt32ToTagged(r);
    Goto(&return_result);
  }

  BIND(&return_minuszero);
  var_result = MinusZeroConstant();
  Goto(&return_result);

  BIND(&return_nan);
  var_result = NanConstant();
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
1189
// Computes {a} * {b} for two Smi inputs. Produces a Smi when the 32-bit
// product fits; falls back to float64 multiplication and a freshly allocated
// HeapNumber on overflow, and returns -0.0 when the product is zero but
// either operand was negative (JS multiplication semantics).
TNode<Number> CodeStubAssembler::SmiMul(TNode<Smi> a, TNode<Smi> b) {
  TVARIABLE(Number, var_result);
  TVARIABLE(Float64T, var_lhs_float64);
  TVARIABLE(Float64T, var_rhs_float64);
  Label return_result(this, &var_result);

  // Both {a} and {b} are Smis. Convert them to integers and multiply.
  TNode<Int32T> lhs32 = SmiToInt32(a);
  TNode<Int32T> rhs32 = SmiToInt32(b);
  auto pair = Int32MulWithOverflow(lhs32, rhs32);

  TNode<BoolT> overflow = Projection<1>(pair);

  // Check if the multiplication overflowed.
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  BIND(&if_notoverflow);
  {
    // If the answer is zero, we may need to return -0.0, depending on the
    // input.
    Label answer_zero(this), answer_not_zero(this);
    TNode<Int32T> answer = Projection<0>(pair);
    TNode<Int32T> zero = Int32Constant(0);
    Branch(Word32Equal(answer, zero), &answer_zero, &answer_not_zero);
    BIND(&answer_not_zero);
    {
      var_result = ChangeInt32ToTagged(answer);
      Goto(&return_result);
    }
    BIND(&answer_zero);
    {
      // The sign bit of (lhs | rhs) is set iff either operand was negative;
      // in that case the JS result of the zero product is -0.0.
      TNode<Int32T> or_result = Word32Or(lhs32, rhs32);
      Label if_should_be_negative_zero(this), if_should_be_zero(this);
      Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
             &if_should_be_zero);
      BIND(&if_should_be_negative_zero);
      {
        var_result = MinusZeroConstant();
        Goto(&return_result);
      }
      BIND(&if_should_be_zero);
      {
        var_result = SmiConstant(0);
        Goto(&return_result);
      }
    }
  }
  BIND(&if_overflow);
  {
    // Overflow: redo the multiplication in float64 and box the result.
    var_lhs_float64 = SmiToFloat64(a);
    var_rhs_float64 = SmiToFloat64(b);
    TNode<Float64T> value =
        Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
    var_result = AllocateHeapNumberWithValue(value);
    Goto(&return_result);
  }

  BIND(&return_result);
  return var_result.value();
}
1250
// Attempts a fast Smi division of {dividend} by {divisor}. Jumps to {bailout}
// whenever the exact result is not representable as a Smi: division by zero,
// 0 / negative (which is -0.0), kMinInt / -1 overflow, or a non-zero
// remainder. The caller then performs floating-point division instead.
TNode<Smi> CodeStubAssembler::TrySmiDiv(TNode<Smi> dividend, TNode<Smi> divisor,
                                        Label* bailout) {
  // Both {a} and {b} are Smis. Bailout to floating point division if {divisor}
  // is zero.
  GotoIf(TaggedEqual(divisor, SmiConstant(0)), bailout);

  // Do floating point division if {dividend} is zero and {divisor} is
  // negative.
  Label dividend_is_zero(this), dividend_is_not_zero(this);
  Branch(TaggedEqual(dividend, SmiConstant(0)), &dividend_is_zero,
         &dividend_is_not_zero);

  BIND(&dividend_is_zero);
  {
    // 0 / negative is -0.0, which cannot be represented as a Smi.
    GotoIf(SmiLessThan(divisor, SmiConstant(0)), bailout);
    Goto(&dividend_is_not_zero);
  }
  BIND(&dividend_is_not_zero);

  TNode<Int32T> untagged_divisor = SmiToInt32(divisor);
  TNode<Int32T> untagged_dividend = SmiToInt32(dividend);

  // Do floating point division if {dividend} is kMinInt (or kMinInt - 1
  // if the Smi size is 31) and {divisor} is -1.
  Label divisor_is_minus_one(this), divisor_is_not_minus_one(this);
  Branch(Word32Equal(untagged_divisor, Int32Constant(-1)),
         &divisor_is_minus_one, &divisor_is_not_minus_one);

  BIND(&divisor_is_minus_one);
  {
    GotoIf(Word32Equal(
               untagged_dividend,
               Int32Constant(kSmiValueSize == 32 ? kMinInt : (kMinInt >> 1))),
           bailout);
    Goto(&divisor_is_not_minus_one);
  }
  BIND(&divisor_is_not_minus_one);

  TNode<Int32T> untagged_result = Int32Div(untagged_dividend, untagged_divisor);
  // Reconstruct dividend = result * divisor to detect truncation.
  TNode<Int32T> truncated = Int32Mul(untagged_result, untagged_divisor);

  // Do floating point division if the remainder is not 0.
  GotoIf(Word32NotEqual(untagged_dividend, truncated), bailout);

  return SmiFromInt32(untagged_result);
}
1297
// Compares two Smis in lexicographic order by calling the runtime's
// smi_lexicographic_compare C function; returns its tagged (Smi) result.
TNode<Smi> CodeStubAssembler::SmiLexicographicCompare(TNode<Smi> x,
                                                      TNode<Smi> y) {
  TNode<ExternalReference> smi_lexicographic_compare =
      ExternalConstant(ExternalReference::smi_lexicographic_compare_function());
  TNode<ExternalReference> isolate_ptr =
      ExternalConstant(ExternalReference::isolate_address());
  return CAST(CallCFunction(smi_lexicographic_compare, MachineType::AnyTagged(),
                            std::make_pair(MachineType::Pointer(), isolate_ptr),
                            std::make_pair(MachineType::AnyTagged(), x),
                            std::make_pair(MachineType::AnyTagged(), y)));
}
1309
1313 ExternalConstant(ExternalReference::debug_get_coverage_info_function());
1314 TNode<ExternalReference> isolate_ptr =
1317 std::make_pair(MachineType::Pointer(), isolate_ptr),
1318 std::make_pair(MachineType::TaggedPointer(), sfi)));
1319}
1320
// Truncates a machine-word value to its low 32 bits; on 32-bit targets the
// word already is 32 bits wide and is merely reinterpreted.
TNode<Int32T> CodeStubAssembler::TruncateWordToInt32(TNode<WordT> value) {
  if (Is64()) {
    return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
  }
  return ReinterpretCast<Int32T>(value);
}
1327
// Truncates a pointer-sized integer to its low 32 bits; a no-op
// reinterpretation on 32-bit targets.
TNode<Int32T> CodeStubAssembler::TruncateIntPtrToInt32(TNode<IntPtrT> value) {
  if (Is64()) {
    return TruncateInt64ToInt32(ReinterpretCast<Int64T>(value));
  }
  return ReinterpretCast<Int32T>(value);
}
1334
1338
// Tests the Smi tag of {a}. Only the low 32 bits are inspected, which is
// sufficient because the tag occupies the least significant bits (the
// static_assert guarantees the mask fits in 32 bits).
TNode<BoolT> CodeStubAssembler::TaggedIsSmi(TNode<MaybeObject> a) {
  static_assert(kSmiTagMask < kMaxUInt32);
  return Word32Equal(
      Word32And(TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(a)),
                Int32Constant(kSmiTagMask)),
      Int32Constant(0));
}
1346
1350
// Returns true iff {a} is a Smi with a non-negative value, i.e. both the Smi
// tag bit and the sign bit are clear. With 31-bit Smis the check can be done
// on the low 32 bits; otherwise the full word is tested.
TNode<BoolT> CodeStubAssembler::TaggedIsPositiveSmi(TNode<Object> a) {
#if defined(V8_HOST_ARCH_32_BIT) || defined(V8_31BIT_SMIS_ON_64BIT_ARCH)
  return Word32Equal(
      Word32And(
          TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(a)),
          Uint32Constant(static_cast<uint32_t>(kSmiTagMask | kSmiSignMask))),
      Int32Constant(0));
#else
  return WordEqual(WordAnd(BitcastTaggedToWordForTagAndSmiBits(a),
                           IntPtrConstant(kSmiTagMask | kSmiSignMask)),
                   IntPtrConstant(0));
#endif
}
1364
// Returns true iff {word} is aligned to {alignment}. {alignment} must be a
// power of two no larger than 32 bits, so the check can be performed on the
// low 32 bits of the word.
TNode<BoolT> CodeStubAssembler::WordIsAligned(TNode<WordT> word,
                                              size_t alignment) {
  DCHECK(base::bits::IsPowerOfTwo(alignment));
  DCHECK_LE(alignment, kMaxUInt32);
  return Word32Equal(
      Int32Constant(0),
      Word32And(TruncateWordToInt32(word),
                Uint32Constant(static_cast<uint32_t>(alignment) - 1)));
}
1374
1375#if DEBUG
1377 CodeAssembler::Bind(label, debug_info);
1378}
1379#endif // DEBUG
1380
1381void CodeStubAssembler::Bind(Label* label) { CodeAssembler::Bind(label); }
1382
1387
1389 Label* if_false) {
1390 GotoIf(TaggedIsSmi(object), if_false);
1391 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1392 Branch(IsJSReceiver(CAST(object)), if_true, if_false);
1393}
1394
1396#ifdef V8_ENABLE_FORCE_SLOW_PATH
1397 bool enable_force_slow_path = true;
1398#else
1399 bool enable_force_slow_path = false;
1400#endif
1401
1402 Label done(this);
1403 // Use UniqueInt32Constant instead of BoolConstant here in order to ensure
1404 // that the graph structure does not depend on the value of the predicate
1405 // (BoolConstant uses cached nodes).
1406 GotoIf(UniqueInt32Constant(!enable_force_slow_path), &done);
1407 {
1408 // This optional block is used behind a static check and we rely
1409 // on the dead code elimination to remove it. We generate builtins this
1410 // way in order to ensure that builtins PGO profiles are agnostic to
1411 // V8_ENABLE_FORCE_SLOW_PATH value.
1412 const TNode<ExternalReference> force_slow_path_addr =
1413 ExternalConstant(ExternalReference::force_slow_path(isolate()));
1414 const TNode<Uint8T> force_slow = Load<Uint8T>(force_slow_path_addr);
1415 Branch(force_slow, if_true, &done);
1416 }
1417 BIND(&done);
1418}
1419
1421 AllocationFlags flags,
1422 TNode<RawPtrT> top_address,
1423 TNode<RawPtrT> limit_address) {
1424 Label if_out_of_memory(this, Label::kDeferred);
1425
1426 // TODO(jgruber,jkummerow): Extract the slow paths (= probably everything
1427 // but bump pointer allocation) into a builtin to save code space. The
1428 // size_in_bytes check may be moved there as well since a non-smi
1429 // size_in_bytes probably doesn't fit into the bump pointer region
1430 // (double-check that).
1431
1432 intptr_t size_in_bytes_constant;
1433 bool size_in_bytes_is_constant = false;
1434 if (TryToIntPtrConstant(size_in_bytes, &size_in_bytes_constant)) {
1435 size_in_bytes_is_constant = true;
1436 CHECK(Internals::IsValidSmi(size_in_bytes_constant));
1437 CHECK_GT(size_in_bytes_constant, 0);
1438 } else {
1439 GotoIfNot(IsValidPositiveSmi(size_in_bytes), &if_out_of_memory);
1440 }
1441
1442 TNode<RawPtrT> top = Load<RawPtrT>(top_address);
1443 TNode<RawPtrT> limit = Load<RawPtrT>(limit_address);
1444
1445 // If there's not enough space, call the runtime.
1447 Label runtime_call(this, Label::kDeferred), no_runtime_call(this), out(this);
1448
1449 bool needs_double_alignment = flags & AllocationFlag::kDoubleAlignment;
1450
1451 {
1452 Label next(this);
1453 GotoIf(IsRegularHeapObjectSize(size_in_bytes), &next);
1454
1455 TNode<Smi> runtime_flags = SmiConstant(
1456 Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment)));
1457 result =
1458 CallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
1459 SmiTag(size_in_bytes), runtime_flags);
1460 Goto(&out);
1461
1462 BIND(&next);
1463 }
1464
1465 TVARIABLE(IntPtrT, adjusted_size, size_in_bytes);
1466
1467 if (needs_double_alignment) {
1468 Label next(this);
1470
1471 adjusted_size = IntPtrAdd(size_in_bytes, IntPtrConstant(4));
1472 Goto(&next);
1473
1474 BIND(&next);
1475 }
1476
1477 adjusted_size = AlignToAllocationAlignment(adjusted_size.value());
1478 TNode<IntPtrT> new_top =
1479 IntPtrAdd(UncheckedCast<IntPtrT>(top), adjusted_size.value());
1480
1481 Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
1482 &no_runtime_call);
1483
1484 BIND(&runtime_call);
1485 {
1486 TNode<Smi> runtime_flags = SmiConstant(
1487 Smi::FromInt(AllocateDoubleAlignFlag::encode(needs_double_alignment)));
1488 if (flags & AllocationFlag::kPretenured) {
1489 result =
1490 CallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
1491 SmiTag(size_in_bytes), runtime_flags);
1492 } else {
1493 result =
1494 CallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
1495 SmiTag(size_in_bytes), runtime_flags);
1496 }
1497 Goto(&out);
1498 }
1499
1500 // When there is enough space, return `top' and bump it up.
1501 BIND(&no_runtime_call);
1502 {
1504 new_top);
1505
1507
1508 if (needs_double_alignment) {
1509 Label next(this);
1510 GotoIf(IntPtrEqual(adjusted_size.value(), size_in_bytes), &next);
1511
1512 // Store a filler and increase the address by 4.
1514 OnePointerFillerMapConstant());
1516 Goto(&next);
1517
1518 BIND(&next);
1519 }
1520
1521 result = BitcastWordToTagged(
1522 IntPtrAdd(address.value(), IntPtrConstant(kHeapObjectTag)));
1523 Goto(&out);
1524 }
1525
1526 if (!size_in_bytes_is_constant) {
1527 BIND(&if_out_of_memory);
1528 CallRuntime(Runtime::kFatalProcessOutOfMemoryInAllocateRaw,
1530 Unreachable();
1531 }
1532
1533 BIND(&out);
1534 if (v8_flags.sticky_mark_bits && (flags & AllocationFlag::kPretenured)) {
1535 CSA_DCHECK(this, IsMarked(result.value()));
1536 }
1537 return UncheckedCast<HeapObject>(result.value());
1538}
1539
1541 TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
1542 TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
1544 return AllocateRaw(size_in_bytes, flags, top_address, limit_address);
1545}
1546
1548 TNode<IntPtrT> size_in_bytes, AllocationFlags flags,
1549 TNode<RawPtrT> top_address, TNode<RawPtrT> limit_address) {
1550#if defined(V8_HOST_ARCH_32_BIT)
1551 return AllocateRaw(size_in_bytes, flags | AllocationFlag::kDoubleAlignment,
1552 top_address, limit_address);
1553#elif defined(V8_HOST_ARCH_64_BIT)
1554#ifdef V8_COMPRESS_POINTERS
1555// TODO(ishell, v8:8875): Consider using aligned allocations once the
1556// allocation alignment inconsistency is fixed. For now we keep using
1557// unaligned access since both x64 and arm64 architectures (where pointer
1558// compression is supported) allow unaligned access to doubles and full words.
1559#endif // V8_COMPRESS_POINTERS
1560 // Allocation on 64 bit machine is naturally double aligned
1561 return AllocateRaw(size_in_bytes, flags & ~AllocationFlag::kDoubleAlignment,
1562 top_address, limit_address);
1563#else
1564#error Architecture not supported
1565#endif
1566}
1567
1569 TNode<IntPtrT> size_in_bytes, AllocationFlags flags) {
1570 DCHECK(flags == AllocationFlag::kNone ||
1572 CSA_DCHECK(this, IsRegularHeapObjectSize(size_in_bytes));
1573 return Allocate(size_in_bytes, flags);
1574}
1575
1577 AllocationFlags flags) {
1578 Comment("Allocate");
1579 if (v8_flags.single_generation) flags |= AllocationFlag::kPretenured;
1580 bool const new_space = !(flags & AllocationFlag::kPretenured);
1581 if (!(flags & AllocationFlag::kDoubleAlignment)) {
1582 TNode<HeapObject> heap_object =
1583 OptimizedAllocate(size_in_bytes, new_space ? AllocationType::kYoung
1585 if (v8_flags.sticky_mark_bits && !new_space) {
1586 CSA_DCHECK(this, IsMarked(heap_object));
1587 }
1588 return heap_object;
1589 }
1591 new_space
1592 ? ExternalReference::new_space_allocation_top_address(isolate())
1593 : ExternalReference::old_space_allocation_top_address(isolate()));
1594
1595#ifdef DEBUG
1596 // New space is optional and if disabled both top and limit return
1597 // kNullAddress.
1598 if (ExternalReference::new_space_allocation_top_address(isolate())
1599 .address() != kNullAddress) {
1600 Address raw_top_address =
1601 ExternalReference::new_space_allocation_top_address(isolate())
1602 .address();
1603 Address raw_limit_address =
1604 ExternalReference::new_space_allocation_limit_address(isolate())
1605 .address();
1606
1607 CHECK_EQ(kSystemPointerSize, raw_limit_address - raw_top_address);
1608 }
1609
1611 ExternalReference::old_space_allocation_limit_address(isolate())
1612 .address() -
1613 ExternalReference::old_space_allocation_top_address(isolate())
1614 .address());
1615#endif
1616
1617 TNode<IntPtrT> limit_address =
1620
1622 return AllocateRawDoubleAligned(size_in_bytes, flags,
1623 ReinterpretCast<RawPtrT>(top_address),
1624 ReinterpretCast<RawPtrT>(limit_address));
1625 } else {
1626 return AllocateRawUnaligned(size_in_bytes, flags,
1627 ReinterpretCast<RawPtrT>(top_address),
1628 ReinterpretCast<RawPtrT>(limit_address));
1629 }
1630}
1631
1633 AllocationFlags flags) {
1634 CHECK(flags == AllocationFlag::kNone ||
1636 DCHECK_LE(size_in_bytes, kMaxRegularHeapObjectSize);
1637 return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1638}
1639
1641 AllocationFlags flags) {
1642 return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
1643}
1644
1649
1651 Label* if_true,
1652 Label* if_false) {
1653 Label if_smi(this, Label::kDeferred), if_heapnumber(this, Label::kDeferred),
1654 if_bigint(this, Label::kDeferred);
1655
1656 // Check if {value} is a Smi.
1657 GotoIf(TaggedIsSmi(value), &if_smi);
1658
1659 TNode<HeapObject> value_heapobject = CAST(value);
1660
1661#if V8_STATIC_ROOTS_BOOL
1662 // Check if {object} is a falsey root or the true value.
1663 // Undefined is the first root, so it's the smallest possible pointer
1664 // value, which means we don't have to subtract it for the range check.
1665 ReadOnlyRoots roots(isolate());
1666 static_assert(StaticReadOnlyRoot::kFirstAllocatedRoot ==
1667 StaticReadOnlyRoot::kUndefinedValue);
1668 static_assert(StaticReadOnlyRoot::kUndefinedValue + sizeof(Undefined) ==
1669 StaticReadOnlyRoot::kNullValue);
1670 static_assert(StaticReadOnlyRoot::kNullValue + sizeof(Null) ==
1671 StaticReadOnlyRoot::kempty_string);
1672 static_assert(StaticReadOnlyRoot::kempty_string +
1674 StaticReadOnlyRoot::kFalseValue);
1675 static_assert(StaticReadOnlyRoot::kFalseValue + sizeof(False) ==
1676 StaticReadOnlyRoot::kTrueValue);
1677 TNode<Word32T> object_as_word32 =
1678 TruncateIntPtrToInt32(BitcastTaggedToWord(value_heapobject));
1679 TNode<Word32T> true_as_word32 = Int32Constant(StaticReadOnlyRoot::kTrueValue);
1680 GotoIf(Uint32LessThan(object_as_word32, true_as_word32), if_false);
1681 GotoIf(Word32Equal(object_as_word32, true_as_word32), if_true);
1682#else
1683 // Rule out false {value}.
1684 GotoIf(TaggedEqual(value, FalseConstant()), if_false);
1685
1686 // Fast path on true {value}.
1687 GotoIf(TaggedEqual(value, TrueConstant()), if_true);
1688
1689 // Check if {value} is the empty string.
1690 GotoIf(IsEmptyString(value_heapobject), if_false);
1691#endif
1692
1693 // The {value} is a HeapObject, load its map.
1694 TNode<Map> value_map = LoadMap(value_heapobject);
1695
1696 // Only null, undefined and document.all have the undetectable bit set,
1697 // so we can return false immediately when that bit is set. With static roots
1698 // we've already checked for null and undefined, but we need to check the
1699 // undetectable bit for document.all anyway on the common path and it doesn't
1700 // help to check the undetectable object protector in builtins since we can't
1701 // deopt.
1702 GotoIf(IsUndetectableMap(value_map), if_false);
1703
1704 // We still need to handle numbers specially, but all other {value}s
1705 // that make it here yield true.
1706 GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);
1707 Branch(IsBigInt(value_heapobject), &if_bigint, if_true);
1708
1709 BIND(&if_smi);
1710 {
1711 // Check if the Smi {value} is a zero.
1712 Branch(TaggedEqual(value, SmiConstant(0)), if_false, if_true);
1713 }
1714
1715 BIND(&if_heapnumber);
1716 {
1717 // Load the floating point value of {value}.
1719 value_heapobject, offsetof(HeapNumber, value_));
1720
1721 // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
1722 Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
1723 if_true, if_false);
1724 }
1725
1726 BIND(&if_bigint);
1727 {
1728 TNode<BigInt> bigint = CAST(value);
1729 TNode<Word32T> bitfield = LoadBigIntBitfield(bigint);
1731 Branch(Word32Equal(length, Int32Constant(0)), if_false, if_true);
1732 }
1733}
1734
1736 TNode<HeapObject> object, TNode<IntPtrT> field_offset) {
1737#ifdef V8_ENABLE_SANDBOX
1739 LoadObjectField<SandboxedPtrT>(object, field_offset));
1740#else
1741 return LoadObjectField<RawPtrT>(object, field_offset);
1742#endif // V8_ENABLE_SANDBOX
1743}
1744
1747 TNode<RawPtrT> pointer) {
1748#ifdef V8_ENABLE_SANDBOX
1750
1751 // Ensure pointer points into the sandbox.
1752 TNode<ExternalReference> sandbox_base_address =
1753 ExternalConstant(ExternalReference::sandbox_base_address());
1754 TNode<ExternalReference> sandbox_end_address =
1755 ExternalConstant(ExternalReference::sandbox_end_address());
1756 TNode<UintPtrT> sandbox_base = Load<UintPtrT>(sandbox_base_address);
1757 TNode<UintPtrT> sandbox_end = Load<UintPtrT>(sandbox_end_address);
1758 CSA_CHECK(this, UintPtrGreaterThanOrEqual(sbx_ptr, sandbox_base));
1759 CSA_CHECK(this, UintPtrLessThan(sbx_ptr, sandbox_end));
1760
1762#else
1764#endif // V8_ENABLE_SANDBOX
1765}
1766
1768#ifdef V8_ENABLE_SANDBOX
1769 // TODO(chromium:1218005) consider creating a LoadSandboxedPointerConstant()
1770 // if more of these constants are required later on.
1771 TNode<ExternalReference> empty_backing_store_buffer =
1772 ExternalConstant(ExternalReference::empty_backing_store_buffer());
1773 return Load<RawPtrT>(empty_backing_store_buffer);
1774#else
1776#endif // V8_ENABLE_SANDBOX
1777}
1778
1780 TNode<HeapObject> object, TNode<IntPtrT> field_offset) {
1781#ifdef V8_ENABLE_SANDBOX
1782 TNode<Uint64T> raw_value = LoadObjectField<Uint64T>(object, field_offset);
1783 TNode<Uint64T> shift_amount = Uint64Constant(kBoundedSizeShift);
1784 TNode<Uint64T> decoded_value = Word64Shr(raw_value, shift_amount);
1785 return ReinterpretCast<UintPtrT>(decoded_value);
1786#else
1787 return LoadObjectField<UintPtrT>(object, field_offset);
1788#endif // V8_ENABLE_SANDBOX
1789}
1790
1793 TNode<UintPtrT> value) {
1794#ifdef V8_ENABLE_SANDBOX
1796 value, IntPtrConstant(kMaxSafeBufferSizeForSandbox)));
1797 TNode<Uint64T> raw_value = ReinterpretCast<Uint64T>(value);
1798 TNode<Uint64T> shift_amount = Uint64Constant(kBoundedSizeShift);
1799 TNode<Uint64T> encoded_value = Word64Shl(raw_value, shift_amount);
1800 StoreObjectFieldNoWriteBarrier<Uint64T>(object, offset, encoded_value);
1801#else
1803#endif // V8_ENABLE_SANDBOX
1804}
1805
1806#ifdef V8_ENABLE_SANDBOX
1808 ExternalPointerTagRange tag_range) {
1809 if (IsSharedExternalPointerType(tag_range)) {
1810 TNode<ExternalReference> table_address_address = ExternalConstant(
1811 ExternalReference::shared_external_pointer_table_address_address(
1812 isolate()));
1814 Load(MachineType::Pointer(), table_address_address));
1815 }
1816 return ExternalConstant(
1817 ExternalReference::external_pointer_table_address(isolate()));
1818}
1819#endif // V8_ENABLE_SANDBOX
1820
1823 ExternalPointerTagRange tag_range) {
1824#ifdef V8_ENABLE_SANDBOX
1825 DCHECK(!tag_range.IsEmpty());
1826 TNode<RawPtrT> external_pointer_table_address =
1827 ExternalPointerTableAddress(tag_range);
1829 Load(MachineType::Pointer(), external_pointer_table_address,
1831
1834
1835 // Use UniqueUint32Constant instead of Uint32Constant here in order to ensure
1836 // that the graph structure does not depend on the configuration-specific
1837 // constant value (Uint32Constant uses cached nodes).
1838 TNode<Uint32T> index =
1839 Word32Shr(handle, UniqueUint32Constant(kExternalPointerIndexShift));
1842
1843 // We don't expect to see empty fields here. If this is ever needed, consider
1844 // using an dedicated empty value entry for those tags instead (i.e. an entry
1845 // with the right tag and nullptr payload).
1846 DCHECK(!ExternalPointerCanBeEmpty(tag_range));
1847
1848 TNode<IntPtrT> entry = Load<IntPtrT>(table, table_offset);
1849 if (tag_range.Size() == 1) {
1850 // The common and simple case: we expect exactly one tag.
1853 tag_bits = UncheckedCast<IntPtrT>(
1855 TNode<Uint32T> tag =
1857 TNode<Uint32T> expected_tag = Uint32Constant(tag_range.first);
1858 CSA_SBXCHECK(this, Word32Equal(expected_tag, tag));
1859 } else {
1860 // Not currently supported. Implement once needed.
1862 UNREACHABLE();
1863 }
1866#else
1867 return LoadObjectField<RawPtrT>(object, offset);
1868#endif // V8_ENABLE_SANDBOX
1869}
1870
1873 TNode<RawPtrT> pointer,
1874 ExternalPointerTag tag) {
1875#ifdef V8_ENABLE_SANDBOX
1877 TNode<RawPtrT> external_pointer_table_address =
1880 Load(MachineType::Pointer(), external_pointer_table_address,
1884
1885 // Use UniqueUint32Constant instead of Uint32Constant here in order to ensure
1886 // that the graph structure does not depend on the configuration-specific
1887 // constant value (Uint32Constant uses cached nodes).
1888 TNode<Uint32T> index =
1889 Word32Shr(handle, UniqueUint32Constant(kExternalPointerIndexShift));
1892
1895 value, UintPtrConstant((uint64_t{tag} << kExternalPointerTagShift) |
1898 value);
1899#else
1901#endif // V8_ENABLE_SANDBOX
1902}
1903
// Loads a trusted pointer field. With the sandbox enabled the field holds an
// indirect pointer handle that is resolved through a pointer table selected
// by {tag}; otherwise it is a plain tagged field.
TNode<TrustedObject> CodeStubAssembler::LoadTrustedPointerFromObject(
    TNode<HeapObject> object, int field_offset, IndirectPointerTag tag) {
#ifdef V8_ENABLE_SANDBOX
  return LoadIndirectPointerFromObject(object, field_offset, tag);
#else
  return LoadObjectField<TrustedObject>(object, field_offset);
#endif  // V8_ENABLE_SANDBOX
}
1912
// Loads a code pointer field as a Code object: a trusted pointer load with
// the kCodeIndirectPointerTag.
TNode<Code> CodeStubAssembler::LoadCodePointerFromObject(
    TNode<HeapObject> object, int field_offset) {
  return CAST(LoadTrustedPointerFromObject(
      object, field_offset, kCodeIndirectPointerTag));
}
1918
1919#ifdef V8_ENABLE_LEAPTIERING
1920
1921TNode<UintPtrT> CodeStubAssembler::ComputeJSDispatchTableEntryOffset(
1923 TNode<Uint32T> index =
1924 Word32Shr(handle, Uint32Constant(kJSDispatchHandleShift));
1925 // We're using a 32-bit shift here to reduce code size, but for that we need
1926 // to be sure that the offset will always fit into a 32-bit integer.
1927 static_assert(kJSDispatchTableReservationSize <= 4ULL * GB);
1930 return offset;
1931}
1932
1933TNode<Code> CodeStubAssembler::LoadCodeObjectFromJSDispatchTable(
1934 TNode<JSDispatchHandleT> handle) {
1935 TNode<RawPtrT> table =
1936 ExternalConstant(ExternalReference::js_dispatch_table_address());
1937 TNode<UintPtrT> offset = ComputeJSDispatchTableEntryOffset(handle);
1938 offset =
1939 UintPtrAdd(offset, UintPtrConstant(JSDispatchEntry::kCodeObjectOffset));
1940 TNode<UintPtrT> value = Load<UintPtrT>(table, offset);
1941 // The LSB is used as marking bit by the js dispatch table, so here we have
1942 // to set it using a bitwise OR as it may or may not be set.
1944 WordShr(value, UintPtrConstant(JSDispatchEntry::kObjectPointerShift)),
1946 return CAST(BitcastWordToTagged(value));
1947}
1948
1949TNode<Uint16T> CodeStubAssembler::LoadParameterCountFromJSDispatchTable(
1950 TNode<JSDispatchHandleT> handle) {
1951 TNode<RawPtrT> table =
1952 ExternalConstant(ExternalReference::js_dispatch_table_address());
1953 TNode<UintPtrT> offset = ComputeJSDispatchTableEntryOffset(handle);
1955 UintPtrConstant(JSDispatchEntry::kParameterCountOffset));
1956 static_assert(JSDispatchEntry::kParameterCountSize == 2);
1957 return Load<Uint16T>(table, offset);
1958}
1959
1960#endif // V8_ENABLE_LEAPTIERING
1961
1962#ifdef V8_ENABLE_SANDBOX
1963
1964TNode<TrustedObject> CodeStubAssembler::LoadIndirectPointerFromObject(
1965 TNode<HeapObject> object, int field_offset, IndirectPointerTag tag) {
1966 TNode<IndirectPointerHandleT> handle =
1967 LoadObjectField<IndirectPointerHandleT>(object, field_offset);
1968 return ResolveIndirectPointerHandle(handle, tag);
1969}
1970
// Distinguishes trusted pointer handles from code pointer handles: a handle
// refers to the trusted pointer table iff the code-pointer marker bit is
// clear.
TNode<BoolT> CodeStubAssembler::IsTrustedPointerHandle(
    TNode<IndirectPointerHandleT> handle) {
  return Word32Equal(Word32And(handle, Int32Constant(kCodePointerHandleMarker)),
                     Int32Constant(0));
}
1976
1977TNode<TrustedObject> CodeStubAssembler::ResolveIndirectPointerHandle(
1978 TNode<IndirectPointerHandleT> handle, IndirectPointerTag tag) {
1979 // The tag implies which pointer table to use.
1980 if (tag == kUnknownIndirectPointerTag) {
1981 // In this case we have to rely on the handle marking to determine which
1982 // pointer table to use.
1983 return Select<TrustedObject>(
1984 IsTrustedPointerHandle(handle),
1985 [=, this] { return ResolveTrustedPointerHandle(handle, tag); },
1986 [=, this] { return ResolveCodePointerHandle(handle); });
1987 } else if (tag == kCodeIndirectPointerTag) {
1988 return ResolveCodePointerHandle(handle);
1989 } else {
1990 return ResolveTrustedPointerHandle(handle, tag);
1991 }
1992}
1993
1994TNode<Code> CodeStubAssembler::ResolveCodePointerHandle(
1995 TNode<IndirectPointerHandleT> handle) {
1996 TNode<RawPtrT> table = LoadCodePointerTableBase();
1997 TNode<UintPtrT> offset = ComputeCodePointerTableEntryOffset(handle);
2000 TNode<UintPtrT> value = Load<UintPtrT>(table, offset);
2001 // The LSB is used as marking bit by the code pointer table, so here we have
2002 // to set it using a bitwise OR as it may or may not be set.
2003 value =
2005 return CAST(BitcastWordToTagged(value));
2006}
2007
2008TNode<TrustedObject> CodeStubAssembler::ResolveTrustedPointerHandle(
2009 TNode<IndirectPointerHandleT> handle, IndirectPointerTag tag) {
2010 TNode<RawPtrT> table = ExternalConstant(
2011 ExternalReference::trusted_pointer_table_base_address(isolate()));
2012 TNode<Uint32T> index =
2014 // We're using a 32-bit shift here to reduce code size, but for that we need
2015 // to be sure that the offset will always fit into a 32-bit integer.
2016 static_assert(kTrustedPointerTableReservationSize <= 4ULL * GB);
2017 TNode<UintPtrT> offset = ChangeUint32ToWord(
2019 TNode<UintPtrT> value = Load<UintPtrT>(table, offset);
2020 // Untag the pointer and remove the marking bit in one operation.
2023 return CAST(BitcastWordToTagged(value));
2024}
2025
2026TNode<UintPtrT> CodeStubAssembler::ComputeCodePointerTableEntryOffset(
2027 TNode<IndirectPointerHandleT> handle) {
2028 TNode<Uint32T> index =
2030 // We're using a 32-bit shift here to reduce code size, but for that we need
2031 // to be sure that the offset will always fit into a 32-bit integer.
2032 static_assert(kCodePointerTableReservationSize <= 4ULL * GB);
2033 TNode<UintPtrT> offset = ChangeUint32ToWord(
2035 return offset;
2036}
2037
2038TNode<RawPtrT> CodeStubAssembler::LoadCodeEntrypointViaCodePointerField(
2039 TNode<HeapObject> object, TNode<IntPtrT> field_offset,
2040 CodeEntrypointTag tag) {
2041 TNode<IndirectPointerHandleT> handle =
2042 LoadObjectField<IndirectPointerHandleT>(object, field_offset);
2043 return LoadCodeEntryFromIndirectPointerHandle(handle, tag);
2044}
2045
2046TNode<RawPtrT> CodeStubAssembler::LoadCodeEntryFromIndirectPointerHandle(
2047 TNode<IndirectPointerHandleT> handle, CodeEntrypointTag tag) {
2048 TNode<RawPtrT> table = LoadCodePointerTableBase();
2049 TNode<UintPtrT> offset = ComputeCodePointerTableEntryOffset(handle);
2050 TNode<UintPtrT> entry = Load<UintPtrT>(table, offset);
2051 if (tag != 0) {
2052 entry = UncheckedCast<UintPtrT>(WordXor(entry, UintPtrConstant(tag)));
2053 }
2055}
2056
2057TNode<RawPtrT> CodeStubAssembler::LoadCodePointerTableBase() {
2058#ifdef V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
2059 // Embed the code pointer table address into the code.
2060 return ExternalConstant(
2061 ExternalReference::code_pointer_table_base_address(isolate()));
2062#else
2063 // Embed the code pointer table address into the code.
2064 return ExternalConstant(
2065 ExternalReference::global_code_pointer_table_base_address());
2066#endif // V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES
2067}
2068#endif // V8_ENABLE_SANDBOX
2069
2071 TNode<JSFunction> callee, TNode<JSDispatchHandleT> dispatch_handle) {
2072 TNode<Uint16T> dynamic_parameter_count;
2073#ifdef V8_ENABLE_LEAPTIERING
2074 dynamic_parameter_count =
2075 LoadParameterCountFromJSDispatchTable(dispatch_handle);
2076#else
2077 // TODO(olivf): Remove once leaptiering is supported everywhere.
2078 TNode<SharedFunctionInfo> shared = LoadJSFunctionSharedFunctionInfo(callee);
2079 dynamic_parameter_count =
2080 LoadSharedFunctionInfoFormalParameterCountWithReceiver(shared);
2081#endif
2082 SetDynamicJSParameterCount(dynamic_parameter_count);
2083}
2084
2089
2094
2099
2106
2108 TNode<HeapObject> object, int offset) {
2110 CSA_DCHECK(this, Int32GreaterThanOrEqual(value, Int32Constant(0)));
2111 return Signed(ChangeUint32ToWord(value));
2112}
2113
2115 TNode<HeapObject> object, int offset) {
2116 // Please use LoadMap(object) instead.
2118 if (SmiValuesAre32Bits()) {
2119#if V8_TARGET_LITTLE_ENDIAN
2120 offset += 4;
2121#endif
2122 return LoadObjectField<Int32T>(object, offset);
2123 } else {
2124 return SmiToInt32(LoadObjectField<Smi>(object, offset));
2125 }
2126}
2127
// Loads the float64 payload of a HeapNumber. Also accepts the hole, whose raw
// numeric value lives at the same offset (enforced by the static_assert).
TNode<Float64T> CodeStubAssembler::LoadHeapNumberValue(
    TNode<HeapObject> object) {
  CSA_DCHECK(this, Word32Or(IsHeapNumber(object), IsTheHole(object)));
  static_assert(offsetof(HeapNumber, value_) == Hole::kRawNumericValueOffset);
  return LoadObjectField<Float64T>(object, offsetof(HeapNumber, value_));
}
2134
2139
2146
2148 RootIndex map_idx = Map::TryGetMapRootIdxFor(instance_type).value();
2149 return HeapConstantNoHole(
2150 i::Cast<Map>(isolate()->roots_table().handle_at(map_idx)));
2151}
2152
2155#ifdef V8_MAP_PACKING
2156 // Check the loaded map is unpacked. i.e. the lowest two bits != 0b10
2157 CSA_DCHECK(this,
2159 IntPtrConstant(Internals::kMapWordXorMask)),
2160 IntPtrConstant(Internals::kMapWordSignature)));
2161#endif
2162 return map;
2163}
2164
2168
// Checks whether {object} has the given instance type. With static roots,
// instance types that correspond to a single root map are checked by
// comparing maps directly, avoiding the instance-type load.
TNode<BoolT> CodeStubAssembler::HasInstanceType(TNode<HeapObject> object,
                                                InstanceType instance_type) {
  if (V8_STATIC_ROOTS_BOOL) {
    if (std::optional<RootIndex> expected_map =
            Map::TryGetMapRootIdxFor(instance_type)) {
      TNode<Map> map = LoadMap(object);
      return TaggedEqual(map, LoadRoot(*expected_map));
    }
  }
  return InstanceTypeEqual(LoadInstanceType(object), instance_type);
}
2180
// Negated counterpart of HasInstanceType: true iff {object} does NOT have the
// given instance type, using the same static-roots map-comparison fast path.
TNode<BoolT> CodeStubAssembler::DoesntHaveInstanceType(
    TNode<HeapObject> object, InstanceType instance_type) {
  if (V8_STATIC_ROOTS_BOOL) {
    if (std::optional<RootIndex> expected_map =
            Map::TryGetMapRootIdxFor(instance_type)) {
      TNode<Map> map = LoadMap(object);
      return TaggedNotEqual(map, LoadRoot(*expected_map));
    }
  }
  return Word32NotEqual(LoadInstanceType(object), Int32Constant(instance_type));
}
2192
// Returns true when {any_tagged} is a Smi, or is a HeapObject whose instance
// type differs from {type}.
TNode<BoolT> CodeStubAssembler::TaggedDoesntHaveInstanceType(
    TNode<HeapObject> any_tagged, InstanceType type) {
  /* return Phi <TaggedIsSmi(val), DoesntHaveInstanceType(val, type)> */
  TNode<BoolT> tagged_is_smi = TaggedIsSmi(any_tagged);
  return Select<BoolT>(
      tagged_is_smi, [=]() { return tagged_is_smi; },
      [=, this]() { return DoesntHaveInstanceType(any_tagged, type); });
}
2201
2203 TNode<BoolT> is_special =
2205 uint32_t mask = Map::Bits1::HasNamedInterceptorBit::kMask |
2206 Map::Bits1::IsAccessCheckNeededBit::kMask;
2207 USE(mask);
2208 // Interceptors or access checks imply special receiver.
2209 CSA_DCHECK(this,
2211 is_special, Int32TrueConstant()));
2212 return is_special;
2213}
2214
2221
// Jumps to {if_slow} when property access on objects with this {map} cannot
// take a fast path: string-wrapper elements kinds, special receivers
// (interceptors / access checks), or dictionary-mode maps.
void CodeStubAssembler::GotoIfMapHasSlowProperties(TNode<Map> map,
                                                   Label* if_slow) {
  GotoIf(IsStringWrapperElementsKind(map), if_slow);
  GotoIf(IsSpecialReceiverMap(map), if_slow);
  GotoIf(IsDictionaryMap(map), if_slow);
}
2228
2230 TNode<JSReceiver> object, bool skip_empty_check) {
2231 CSA_SLOW_DCHECK(this, Word32BinaryNot(IsDictionaryMap(LoadMap(object))));
2232 TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
2233 if (skip_empty_check) {
2234 return CAST(properties);
2235 } else {
2236 // TODO(ishell): use empty_property_array instead of empty_fixed_array here.
2237 return Select<HeapObject>(
2238 TaggedIsSmi(properties),
2239 [=, this] { return EmptyFixedArrayConstant(); },
2240 [=, this] { return CAST(properties); });
2241 }
2242}
2243
2245 TNode<JSReceiver> object) {
2246 CSA_SLOW_DCHECK(this, IsDictionaryMap(LoadMap(object)));
2247 TNode<Object> properties = LoadJSReceiverPropertiesOrHash(object);
2248 NodeGenerator<HeapObject> make_empty = [=, this]() -> TNode<HeapObject> {
2250 return EmptySwissPropertyDictionaryConstant();
2251 } else {
2252 return EmptyPropertyDictionaryConstant();
2253 }
2254 };
2255 NodeGenerator<HeapObject> cast_properties = [=, this] {
2256 TNode<HeapObject> dict = CAST(properties);
2257 CSA_DCHECK(this,
2259 return dict;
2260 };
2261 return Select<HeapObject>(TaggedIsSmi(properties), make_empty,
2262 cast_properties);
2263}
2264
2267 CSA_DCHECK(this, IsJSArgumentsObjectWithLength(context, array));
2268 constexpr int offset = JSStrictArgumentsObject::kLengthOffset;
2269 static_assert(offset == JSSloppyArgumentsObject::kLengthOffset);
2270 return LoadObjectField(array, offset);
2271}
2272
2274 TNode<Number> length = LoadJSArrayLength(array);
2277 LoadElementsKind(array),
2280 // JSArray length is always a positive Smi for fast arrays.
2281 CSA_SLOW_DCHECK(this, TaggedIsPositiveSmi(length));
2282 return CAST(length);
2283}
2284
2290
2296
2304
2306 TNode<FeedbackVector> vector) {
2307 TNode<Int32T> length =
2308 LoadObjectField<Int32T>(vector, FeedbackVector::kLengthOffset);
2309 return ChangePositiveInt32ToIntPtr(length);
2310}
2311
2316
2322
2324 TNode<WeakFixedArray> array) {
2325 TNode<Int32T> length =
2327 CSA_DCHECK(this, Int32GreaterThanOrEqual(length, Int32Constant(0)));
2328 return Unsigned(length);
2329}
2330
2332 TNode<BytecodeArray> array) {
2333 TNode<Int32T> value =
2334 LoadAndUntagToWord32ObjectField(array, BytecodeArray::kLengthOffset);
2335 CSA_DCHECK(this, Int32GreaterThanOrEqual(value, Int32Constant(0)));
2336 return Unsigned(value);
2337}
2338
2340 TNode<DescriptorArray> array) {
2342 array, DescriptorArray::kNumberOfDescriptorsOffset));
2343}
2344
2350
2355
2360
2364
2366 return LoadObjectField<Uint16T>(map, Map::kInstanceTypeOffset);
2367}
2368
2373
2377
2381
2385
2390
2392 TNode<Map> map) {
2393 // See Map::GetInObjectPropertiesStartInWords() for details.
2394 CSA_DCHECK(this, IsJSObjectMap(map));
2396 map, Map::kInobjectPropertiesStartOrConstructorFunctionIndexOffset));
2397}
2398
2400 TNode<IntPtrT> used_or_unused =
2401 ChangeInt32ToIntPtr(LoadMapUsedOrUnusedInstanceSizeInWords(map));
2402
2403 return Select<IntPtrT>(
2404 UintPtrGreaterThanOrEqual(used_or_unused,
2406 [=] { return used_or_unused; },
2407 [=, this] { return LoadMapInstanceSizeInWords(map); });
2408}
2409
2414
2416 TNode<Map> map) {
2417 // See Map::GetConstructorFunctionIndex() for details.
2420 map, Map::kInobjectPropertiesStartOrConstructorFunctionIndexOffset));
2421}
2422
2426 map, Map::kConstructorOrBackPointerOrNativeContextOffset));
2427
2428 Label done(this), loop(this, &result);
2429 Goto(&loop);
2430 BIND(&loop);
2431 {
2432 GotoIf(TaggedIsSmi(result.value()), &done);
2433 TNode<BoolT> is_map_type =
2434 InstanceTypeEqual(LoadInstanceType(CAST(result.value())), MAP_TYPE);
2435 GotoIfNot(is_map_type, &done);
2436 result =
2437 LoadObjectField(CAST(result.value()),
2438 Map::kConstructorOrBackPointerOrNativeContextOffset);
2439 Goto(&loop);
2440 }
2441 BIND(&done);
2442 return result.value();
2443}
2444
2449
2452 map, Map::kConstructorOrBackPointerOrNativeContextOffset));
2453 return Select<Object>(
2454 IsMap(object), [=] { return object; },
2455 [=, this] { return UndefinedConstant(); });
2456}
2457
2459 TNode<Map> map, TNode<Int32T> instance_type, Label* bailout) {
2460 // This check can have false positives, since it applies to any
2461 // JSPrimitiveWrapper type.
2462 GotoIf(IsCustomElementsReceiverInstanceType(instance_type), bailout);
2463
2465 GotoIf(IsSetWord32(bit_field3, Map::Bits3::IsDictionaryMapBit::kMask),
2466 bailout);
2467
2468 return bit_field3;
2469}
2470
2472 TNode<JSReceiver> receiver, Label* if_no_hash) {
2473 TVARIABLE(Uint32T, var_hash);
2474 Label done(this), if_smi(this), if_property_array(this),
2475 if_swiss_property_dictionary(this), if_property_dictionary(this),
2476 if_fixed_array(this);
2477
2478 TNode<Object> properties_or_hash =
2479 LoadObjectField(receiver, JSReceiver::kPropertiesOrHashOffset);
2480 GotoIf(TaggedIsSmi(properties_or_hash), &if_smi);
2481
2482 TNode<HeapObject> properties = CAST(properties_or_hash);
2483 TNode<Uint16T> properties_instance_type = LoadInstanceType(properties);
2484
2485 GotoIf(InstanceTypeEqual(properties_instance_type, PROPERTY_ARRAY_TYPE),
2486 &if_property_array);
2488 GotoIf(
2489 InstanceTypeEqual(properties_instance_type, SWISS_NAME_DICTIONARY_TYPE),
2490 &if_swiss_property_dictionary);
2491 }
2492 Branch(InstanceTypeEqual(properties_instance_type, NAME_DICTIONARY_TYPE),
2493 &if_property_dictionary, &if_fixed_array);
2494
2495 BIND(&if_fixed_array);
2496 {
2498 Goto(&done);
2499 }
2500
2501 BIND(&if_smi);
2502 {
2503 var_hash = PositiveSmiToUint32(CAST(properties_or_hash));
2504 Goto(&done);
2505 }
2506
2507 BIND(&if_property_array);
2508 {
2510 properties, PropertyArray::kLengthAndHashOffset);
2511 var_hash = DecodeWord32<PropertyArray::HashField>(length_and_hash);
2512 Goto(&done);
2513 }
2515 BIND(&if_swiss_property_dictionary);
2516 {
2517 var_hash = LoadSwissNameDictionaryHash(CAST(properties));
2518 CSA_DCHECK(this, Uint32LessThanOrEqual(var_hash.value(),
2520 Goto(&done);
2521 }
2522 }
2523
2524 BIND(&if_property_dictionary);
2525 {
2527 CAST(properties), NameDictionary::kObjectHashIndex)));
2528 Goto(&done);
2529 }
2530
2531 BIND(&done);
2532 if (if_no_hash != nullptr) {
2533 GotoIf(Word32Equal(var_hash.value(),
2535 if_no_hash);
2536 }
2537 return var_hash.value();
2538}
2539
2545
2547 Label* if_hash_not_computed) {
2548 TNode<Uint32T> raw_hash_field = LoadNameRawHashField(name);
2549 if (if_hash_not_computed != nullptr) {
2551 if_hash_not_computed);
2552 }
2553 return DecodeWord32<Name::HashBits>(raw_hash_field);
2554}
2555
2557 TVARIABLE(Uint32T, var_raw_hash);
2558
2559 Label if_forwarding_index(this, Label::kDeferred), done(this);
2560
2561 TNode<Uint32T> raw_hash_field = LoadNameRawHashField(name);
2563 &if_forwarding_index);
2564
2565 var_raw_hash = raw_hash_field;
2566 Goto(&done);
2567
2568 BIND(&if_forwarding_index);
2569 {
2570 CSA_DCHECK(this,
2572 raw_hash_field, Name::HashFieldType::kForwardingIndex));
2573 TNode<ExternalReference> function =
2574 ExternalConstant(ExternalReference::raw_hash_from_forward_table());
2575 const TNode<ExternalReference> isolate_ptr =
2578 function, MachineType::Uint32(),
2579 std::make_pair(MachineType::Pointer(), isolate_ptr),
2580 std::make_pair(
2583
2584 var_raw_hash = result;
2585 Goto(&done);
2586 }
2587
2588 BIND(&done);
2589 return var_raw_hash.value();
2590}
2591
2595
2599
2604
2607 return LoadObjectField(object, JSPrimitiveWrapper::kValueOffset);
2608}
2609
2611 Label* if_smi, Label* if_cleared,
2612 Label* if_weak, Label* if_strong,
2613 TVariable<Object>* extracted) {
2614 Label inner_if_smi(this), inner_if_strong(this);
2615
2616 GotoIf(TaggedIsSmi(maybe_object), &inner_if_smi);
2617
2618 GotoIf(IsCleared(maybe_object), if_cleared);
2619
2620 TNode<HeapObjectReference> object_ref = CAST(maybe_object);
2621
2622 GotoIf(IsStrong(object_ref), &inner_if_strong);
2623
2624 *extracted = GetHeapObjectAssumeWeak(maybe_object);
2625 Goto(if_weak);
2626
2627 BIND(&inner_if_smi);
2628 *extracted = CAST(maybe_object);
2629 Goto(if_smi);
2630
2631 BIND(&inner_if_strong);
2632 *extracted = CAST(maybe_object);
2633 Goto(if_strong);
2634}
2635
2637#ifdef V8_MAP_PACKING
2638 // Test if the map is an unpacked and valid map
2639 CSA_DCHECK(this, IsMap(LoadMap(object)));
2640#endif
2641}
2642
2649
2651 return IsNotSetWord32(
2652 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(value)),
2654}
2655
2657 TNode<MaybeObject> value, Label* if_not_strong) {
2658 GotoIfNot(IsStrong(value), if_not_strong);
2659 return CAST(value);
2660}
2661
2663 TNode<HeapObjectReference> value, Label* if_not_strong) {
2664 GotoIfNot(IsStrong(value), if_not_strong);
2665 return ReinterpretCast<HeapObject>(value);
2666}
2667
2674
2677 return IsSetWord32(
2678 TruncateIntPtrToInt32(BitcastTaggedToWordForTagAndSmiBits(value)),
2680}
2681
2686
2688 TNode<MaybeObject> value) {
2689 CSA_DCHECK(this, IsWeakOrCleared(value));
2690 CSA_DCHECK(this, IsNotCleared(value));
2691 return UncheckedCast<HeapObject>(BitcastWordToTagged(WordAnd(
2692 BitcastMaybeObjectToWord(value), IntPtrConstant(~kWeakHeapObjectMask))));
2693}
2694
2696 TNode<MaybeObject> value, Label* if_cleared) {
2697 GotoIf(IsCleared(value), if_cleared);
2698 return GetHeapObjectAssumeWeak(value);
2699}
2700
2701// This version generates
2702// (maybe_object & ~mask) == value
2703// It works for non-Smi |maybe_object| and for both Smi and HeapObject values
2704// but requires a big constant for ~mask.
2706 TNode<MaybeObject> maybe_object, TNode<Object> value) {
2707 CSA_DCHECK(this, TaggedIsNotSmi(maybe_object));
2709 return Word32Equal(
2710 Word32And(TruncateWordToInt32(BitcastMaybeObjectToWord(maybe_object)),
2711 Uint32Constant(~static_cast<uint32_t>(kWeakHeapObjectMask))),
2713 } else {
2714 return WordEqual(WordAnd(BitcastMaybeObjectToWord(maybe_object),
2716 BitcastTaggedToWord(value));
2717 }
2718}
2719
2720// This version generates
2721// maybe_object == (heap_object | mask)
2722// It works for any |maybe_object| values and generates a better code because it
2723// uses a small constant for mask.
2725 TNode<MaybeObject> maybe_object, TNode<HeapObject> heap_object) {
2727 return Word32Equal(
2728 TruncateWordToInt32(BitcastMaybeObjectToWord(maybe_object)),
2731 } else {
2732 return WordEqual(BitcastMaybeObjectToWord(maybe_object),
2733 WordOr(BitcastTaggedToWord(heap_object),
2735 }
2736}
2737
2743
2748
2749template <>
2753
2754template <>
2759
2760template <>
2765
2766template <>
2771
2772template <>
2776
2777template <>
2781
2782template <>
2788
2789template <>
2794
2795template <>
2800
2801template <typename Array, typename TIndex, typename TValue>
2803 int array_header_size,
2804 TNode<TIndex> index_node,
2805 int additional_offset) {
2806 // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
2807 static_assert(
2808 std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, UintPtrT> ||
2809 std::is_same_v<TIndex, IntPtrT> ||
2810 std::is_same_v<TIndex, TaggedIndex>,
2811 "Only Smi, UintPtrT, IntPtrT or TaggedIndex indices are allowed");
2813 IntPtrConstant(0)));
2814 DCHECK(IsAligned(additional_offset, kTaggedSize));
2815 int32_t header_size = array_header_size + additional_offset - kHeapObjectTag;
2817 ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS, header_size);
2819 array_header_size));
2820 constexpr MachineType machine_type = MachineTypeOf<TValue>::value;
2821 return UncheckedCast<TValue>(LoadFromObject(machine_type, array, offset));
2822}
2823
2838
2839template <typename TIndex>
2841 TNode<FixedArray> object, TNode<TIndex> index, int additional_offset,
2842 CheckBounds check_bounds) {
2843 // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
2844 static_assert(
2845 std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, UintPtrT> ||
2846 std::is_same_v<TIndex, IntPtrT> ||
2847 std::is_same_v<TIndex, TaggedIndex>,
2848 "Only Smi, UintPtrT, IntPtrT or TaggedIndex indexes are allowed");
2849 CSA_DCHECK(this, IsFixedArraySubclass(object));
2851
2852 if (NeedsBoundsCheck(check_bounds)) {
2853 FixedArrayBoundsCheck(object, index, additional_offset);
2854 }
2856 object, OFFSET_OF_DATA_START(FixedArray), index, additional_offset);
2857 return CAST(element);
2858}
2859
2862 int, CheckBounds);
2865 TNode<TaggedIndex>, int,
2866 CheckBounds);
2869 TNode<UintPtrT>, int,
2870 CheckBounds);
2873 TNode<IntPtrT>, int,
2874 CheckBounds);
2875
2877 TNode<Smi> index,
2878 int additional_offset) {
2879 if (!v8_flags.fixed_array_bounds_checks) return;
2880 DCHECK(IsAligned(additional_offset, kTaggedSize));
2881 TNode<Smi> effective_index;
2882 Tagged<Smi> constant_index;
2883 bool index_is_constant = TryToSmiConstant(index, &constant_index);
2884 if (index_is_constant) {
2885 effective_index = SmiConstant(Smi::ToInt(constant_index) +
2886 additional_offset / kTaggedSize);
2887 } else {
2888 effective_index =
2889 SmiAdd(index, SmiConstant(additional_offset / kTaggedSize));
2890 }
2891 CSA_CHECK(this, SmiBelow(effective_index, LoadFixedArrayBaseLength(array)));
2892}
2893
2895 TNode<IntPtrT> index,
2896 int additional_offset) {
2897 if (!v8_flags.fixed_array_bounds_checks) return;
2898 DCHECK(IsAligned(additional_offset, kTaggedSize));
2899 // IntPtrAdd does constant-folding automatically.
2900 TNode<IntPtrT> effective_index =
2901 IntPtrAdd(index, IntPtrConstant(additional_offset / kTaggedSize));
2902 CSA_CHECK(this, UintPtrLessThan(effective_index,
2904}
2905
2907 TNode<PropertyArray> object, TNode<IntPtrT> index) {
2908 int additional_offset = 0;
2909 return CAST(LoadArrayElement(object, PropertyArray::kHeaderSize, index,
2910 additional_offset));
2911}
2912
2914 TNode<TaggedIndex> index,
2915 int additional_offset) {
2916 if (!v8_flags.fixed_array_bounds_checks) return;
2917 DCHECK(IsAligned(additional_offset, kTaggedSize));
2918 // IntPtrAdd does constant-folding automatically.
2919 TNode<IntPtrT> effective_index =
2921 IntPtrConstant(additional_offset / kTaggedSize));
2922 CSA_CHECK(this, UintPtrLessThan(effective_index,
2924}
2925
2933
2935 TNode<JSTypedArray> typed_array) {
2936 // Data pointer = external_pointer + static_cast<Tagged_t>(base_pointer).
2937 TNode<RawPtrT> external_pointer =
2939
2940 TNode<IntPtrT> base_pointer;
2942 TNode<Int32T> compressed_base =
2943 LoadObjectField<Int32T>(typed_array, JSTypedArray::kBasePointerOffset);
2944 // Zero-extend TaggedT to WordT according to current compression scheme
2945 // so that the addition with |external_pointer| (which already contains
2946 // compensated offset value) below will decompress the tagged value.
2947 // See JSTypedArray::ExternalPointerCompensationForOnHeapArray() for
2948 // details.
2949 base_pointer = Signed(ChangeUint32ToWord(compressed_base));
2950 } else {
2951 base_pointer =
2952 LoadObjectField<IntPtrT>(typed_array, JSTypedArray::kBasePointerOffset);
2953 }
2954 return RawPtrAdd(external_pointer, base_pointer);
2955}
2956
2958 TNode<RawPtrT> data_pointer, TNode<IntPtrT> offset) {
2959 if (Is64()) {
2960 TNode<IntPtrT> value = Load<IntPtrT>(data_pointer, offset);
2961 return BigIntFromInt64(value);
2962 } else {
2963 DCHECK(!Is64());
2964#if defined(V8_TARGET_BIG_ENDIAN)
2965 TNode<IntPtrT> high = Load<IntPtrT>(data_pointer, offset);
2968#else
2969 TNode<IntPtrT> low = Load<IntPtrT>(data_pointer, offset);
2972#endif
2973 return BigIntFromInt32Pair(low, high);
2974 }
2975}
2976
2978 TNode<IntPtrT> high) {
2979 DCHECK(!Is64());
2980 TVARIABLE(BigInt, var_result);
2982 TVARIABLE(IntPtrT, var_high, high);
2983 TVARIABLE(IntPtrT, var_low, low);
2984 Label high_zero(this), negative(this), allocate_one_digit(this),
2985 allocate_two_digits(this), if_zero(this), done(this);
2986
2987 GotoIf(IntPtrEqual(var_high.value(), IntPtrConstant(0)), &high_zero);
2988 Branch(IntPtrLessThan(var_high.value(), IntPtrConstant(0)), &negative,
2989 &allocate_two_digits);
2990
2991 BIND(&high_zero);
2992 Branch(IntPtrEqual(var_low.value(), IntPtrConstant(0)), &if_zero,
2993 &allocate_one_digit);
2994
2995 BIND(&negative);
2996 {
2997 var_sign = Int32Constant(BigInt::SignBits::encode(true));
2998 // We must negate the value by computing "0 - (high|low)", performing
2999 // both parts of the subtraction separately and manually taking care
3000 // of the carry bit (which is 1 iff low != 0).
3001 var_high = IntPtrSub(IntPtrConstant(0), var_high.value());
3002 Label carry(this), no_carry(this);
3003 Branch(IntPtrEqual(var_low.value(), IntPtrConstant(0)), &no_carry, &carry);
3004 BIND(&carry);
3005 var_high = IntPtrSub(var_high.value(), IntPtrConstant(1));
3006 Goto(&no_carry);
3007 BIND(&no_carry);
3008 var_low = IntPtrSub(IntPtrConstant(0), var_low.value());
3009 // var_high was non-zero going into this block, but subtracting the
3010 // carry bit from it could bring us back onto the "one digit" path.
3011 Branch(IntPtrEqual(var_high.value(), IntPtrConstant(0)),
3012 &allocate_one_digit, &allocate_two_digits);
3013 }
3014
3015 BIND(&allocate_one_digit);
3016 {
3017 var_result = AllocateRawBigInt(IntPtrConstant(1));
3018 StoreBigIntBitfield(var_result.value(),
3019 Word32Or(var_sign.value(),
3021 StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
3022 Goto(&done);
3023 }
3024
3025 BIND(&allocate_two_digits);
3026 {
3027 var_result = AllocateRawBigInt(IntPtrConstant(2));
3028 StoreBigIntBitfield(var_result.value(),
3029 Word32Or(var_sign.value(),
3031 StoreBigIntDigit(var_result.value(), 0, Unsigned(var_low.value()));
3032 StoreBigIntDigit(var_result.value(), 1, Unsigned(var_high.value()));
3033 Goto(&done);
3034 }
3035
3036 BIND(&if_zero);
3037 var_result = AllocateBigInt(IntPtrConstant(0));
3038 Goto(&done);
3039
3040 BIND(&done);
3041 return var_result.value();
3042}
3043
3045 DCHECK(Is64());
3046 TVARIABLE(BigInt, var_result);
3047 Label done(this), if_positive(this), if_negative(this), if_zero(this);
3048 GotoIf(IntPtrEqual(value, IntPtrConstant(0)), &if_zero);
3049 var_result = AllocateRawBigInt(IntPtrConstant(1));
3050 Branch(IntPtrGreaterThan(value, IntPtrConstant(0)), &if_positive,
3051 &if_negative);
3052
3053 BIND(&if_positive);
3054 {
3055 StoreBigIntBitfield(var_result.value(),
3058 StoreBigIntDigit(var_result.value(), 0, Unsigned(value));
3059 Goto(&done);
3060 }
3061
3062 BIND(&if_negative);
3063 {
3064 StoreBigIntBitfield(var_result.value(),
3067 StoreBigIntDigit(var_result.value(), 0,
3068 Unsigned(IntPtrSub(IntPtrConstant(0), value)));
3069 Goto(&done);
3070 }
3071
3072 BIND(&if_zero);
3073 {
3074 var_result = AllocateBigInt(IntPtrConstant(0));
3075 Goto(&done);
3076 }
3077
3078 BIND(&done);
3079 return var_result.value();
3080}
3081
3083 TNode<RawPtrT> data_pointer, TNode<IntPtrT> offset) {
3084 Label if_zero(this), done(this);
3085 if (Is64()) {
3086 TNode<UintPtrT> value = Load<UintPtrT>(data_pointer, offset);
3087 return BigIntFromUint64(value);
3088 } else {
3089 DCHECK(!Is64());
3090#if defined(V8_TARGET_BIG_ENDIAN)
3091 TNode<UintPtrT> high = Load<UintPtrT>(data_pointer, offset);
3094#else
3095 TNode<UintPtrT> low = Load<UintPtrT>(data_pointer, offset);
3098#endif
3099 return BigIntFromUint32Pair(low, high);
3100 }
3101}
3102
3104 TNode<UintPtrT> high) {
3105 DCHECK(!Is64());
3106 TVARIABLE(BigInt, var_result);
3107 Label high_zero(this), if_zero(this), done(this);
3108
3109 GotoIf(IntPtrEqual(high, IntPtrConstant(0)), &high_zero);
3110 var_result = AllocateBigInt(IntPtrConstant(2));
3111 StoreBigIntDigit(var_result.value(), 0, low);
3112 StoreBigIntDigit(var_result.value(), 1, high);
3113 Goto(&done);
3114
3115 BIND(&high_zero);
3116 GotoIf(IntPtrEqual(low, IntPtrConstant(0)), &if_zero);
3117 var_result = AllocateBigInt(IntPtrConstant(1));
3118 StoreBigIntDigit(var_result.value(), 0, low);
3119 Goto(&done);
3120
3121 BIND(&if_zero);
3122 var_result = AllocateBigInt(IntPtrConstant(0));
3123 Goto(&done);
3124
3125 BIND(&done);
3126 return var_result.value();
3127}
3128
3130 DCHECK(Is64());
3131 TVARIABLE(BigInt, var_result);
3132 Label done(this), if_zero(this);
3133 GotoIf(IntPtrEqual(value, IntPtrConstant(0)), &if_zero);
3134 var_result = AllocateBigInt(IntPtrConstant(1));
3135 StoreBigIntDigit(var_result.value(), 0, value);
3136 Goto(&done);
3137
3138 BIND(&if_zero);
3139 var_result = AllocateBigInt(IntPtrConstant(0));
3140 Goto(&done);
3141 BIND(&done);
3142 return var_result.value();
3143}
3144
3146 TNode<RawPtrT> data_pointer, TNode<UintPtrT> index,
3147 ElementsKind elements_kind) {
3149 ElementOffsetFromIndex(Signed(index), elements_kind, 0);
3150 switch (elements_kind) {
3151 case UINT8_ELEMENTS: /* fall through */
3152 case UINT8_CLAMPED_ELEMENTS:
3153 return SmiFromInt32(Load<Uint8T>(data_pointer, offset));
3154 case INT8_ELEMENTS:
3155 return SmiFromInt32(Load<Int8T>(data_pointer, offset));
3156 case UINT16_ELEMENTS:
3157 return SmiFromInt32(Load<Uint16T>(data_pointer, offset));
3158 case INT16_ELEMENTS:
3159 return SmiFromInt32(Load<Int16T>(data_pointer, offset));
3160 case UINT32_ELEMENTS:
3161 return ChangeUint32ToTagged(Load<Uint32T>(data_pointer, offset));
3162 case INT32_ELEMENTS:
3163 return ChangeInt32ToTagged(Load<Int32T>(data_pointer, offset));
3164 case FLOAT16_ELEMENTS:
3167 case FLOAT32_ELEMENTS:
3169 ChangeFloat32ToFloat64(Load<Float32T>(data_pointer, offset)));
3170 case FLOAT64_ELEMENTS:
3171 return AllocateHeapNumberWithValue(Load<Float64T>(data_pointer, offset));
3172 case BIGINT64_ELEMENTS:
3173 return LoadFixedBigInt64ArrayElementAsTagged(data_pointer, offset);
3174 case BIGUINT64_ELEMENTS:
3175 return LoadFixedBigUint64ArrayElementAsTagged(data_pointer, offset);
3176 default:
3177 UNREACHABLE();
3178 }
3179}
3180
3182 TNode<RawPtrT> data_pointer, TNode<UintPtrT> index,
3183 TNode<Int32T> elements_kind) {
3184 TVARIABLE(Numeric, var_result);
3185 Label done(this), if_unknown_type(this, Label::kDeferred);
3186 int32_t elements_kinds[] = {
3187#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) TYPE##_ELEMENTS,
3189#undef TYPED_ARRAY_CASE
3190 };
3191
3192#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) Label if_##type##array(this);
3194#undef TYPED_ARRAY_CASE
3195
3196 Label* elements_kind_labels[] = {
3197#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) &if_##type##array,
3199 // The same labels again for RAB / GSAB. We dispatch RAB / GSAB elements
3200 // kinds to the corresponding non-RAB / GSAB elements kinds.
3202#undef TYPED_ARRAY_CASE
3203 };
3204 static_assert(arraysize(elements_kinds) == arraysize(elements_kind_labels));
3205
3206 Switch(elements_kind, &if_unknown_type, elements_kinds, elements_kind_labels,
3207 arraysize(elements_kinds));
3208
3209 BIND(&if_unknown_type);
3210 Unreachable();
3211
3212#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
3213 BIND(&if_##type##array); \
3214 { \
3215 var_result = LoadFixedTypedArrayElementAsTagged(data_pointer, index, \
3216 TYPE##_ELEMENTS); \
3217 Goto(&done); \
3218 }
3220#undef TYPED_ARRAY_CASE
3221
3222 BIND(&done);
3223 return var_result.value();
3224}
3225
3226template <typename TIndex>
3228 TNode<FeedbackVector> feedback_vector, TNode<TIndex> slot,
3229 int additional_offset) {
3230 int32_t header_size = FeedbackVector::kRawFeedbackSlotsOffset +
3231 additional_offset - kHeapObjectTag;
3233 ElementOffsetFromIndex(slot, HOLEY_ELEMENTS, header_size);
3235 this, IsOffsetInBounds(offset, LoadFeedbackVectorLength(feedback_vector),
3236 FeedbackVector::kHeaderSize));
3237 return Load<MaybeObject>(feedback_vector, offset);
3238}
3239
3241 TNode<FeedbackVector> feedback_vector, TNode<TaggedIndex> slot,
3242 int additional_offset);
3244 TNode<FeedbackVector> feedback_vector, TNode<IntPtrT> slot,
3245 int additional_offset);
3247 TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot,
3248 int additional_offset);
3249
3250template <typename Array>
3252 TNode<Array> object, int array_header_size, TNode<IntPtrT> index,
3253 int additional_offset) {
3254 DCHECK(IsAligned(additional_offset, kTaggedSize));
3255 int endian_correction = 0;
3256#if V8_TARGET_LITTLE_ENDIAN
3257 if (SmiValuesAre32Bits()) endian_correction = 4;
3258#endif
3259 int32_t header_size = array_header_size + additional_offset - kHeapObjectTag +
3260 endian_correction;
3262 ElementOffsetFromIndex(index, HOLEY_ELEMENTS, header_size);
3264 array_header_size + endian_correction));
3265 if (SmiValuesAre32Bits()) {
3266 return Load<Int32T>(object, offset);
3267 } else {
3268 return SmiToInt32(Load<Smi>(object, offset));
3269 }
3270}
3271
3273 TNode<FixedArray> object, TNode<IntPtrT> index, int additional_offset) {
3276 object, OFFSET_OF_DATA_START(FixedArray), index, additional_offset);
3277}
3278
3280 TNode<WeakFixedArray> object, TNode<IntPtrT> index, int additional_offset) {
3282 additional_offset);
3283}
3284
3297
3298#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3300CodeStubAssembler::LoadFixedDoubleArrayElementWithUndefinedCheck(
3301 TNode<FixedDoubleArray> object, TNode<IntPtrT> index, Label* if_undefined,
3302 Label* if_hole, MachineType machine_type) {
3303 int32_t header_size = OFFSET_OF_DATA_START(FixedDoubleArray) - kHeapObjectTag;
3305 ElementOffsetFromIndex(index, HOLEY_DOUBLE_ELEMENTS, header_size);
3306 CSA_DCHECK(this,
3310 return LoadDoubleWithUndefinedAndHoleCheck(object, offset, if_undefined,
3311 if_hole, machine_type);
3312}
3313#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3314
3316 TNode<FixedArrayBase> elements, TNode<IntPtrT> index,
3317 TNode<Int32T> elements_kind, Label* if_accessor, Label* if_hole) {
3318 TVARIABLE(Object, var_result);
3319 Label done(this), if_packed(this), if_holey(this), if_packed_double(this),
3320 if_holey_double(this), if_dictionary(this, Label::kDeferred);
3321
3322 int32_t kinds[] = {
3323 // Handled by if_packed.
3326 // Handled by if_holey.
3329 // Handled by if_packed_double.
3331 // Handled by if_holey_double.
3333 Label* labels[] = {// PACKED_{SMI,}_ELEMENTS
3334 &if_packed, &if_packed, &if_packed, &if_packed, &if_packed,
3335 // HOLEY_{SMI,}_ELEMENTS
3336 &if_holey, &if_holey, &if_holey, &if_holey, &if_holey,
3337 // PACKED_DOUBLE_ELEMENTS
3338 &if_packed_double,
3339 // HOLEY_DOUBLE_ELEMENTS
3340 &if_holey_double};
3341 Switch(elements_kind, &if_dictionary, kinds, labels, arraysize(kinds));
3342
3343 BIND(&if_packed);
3344 {
3345 var_result = LoadFixedArrayElement(CAST(elements), index, 0);
3346 Goto(&done);
3347 }
3348
3349 BIND(&if_holey);
3350 {
3351 var_result = LoadFixedArrayElement(CAST(elements), index);
3352 Branch(TaggedEqual(var_result.value(), TheHoleConstant()), if_hole, &done);
3353 }
3354
3355 BIND(&if_packed_double);
3356 {
3357 var_result = AllocateHeapNumberWithValue(
3358 LoadFixedDoubleArrayElement(CAST(elements), index));
3359 Goto(&done);
3360 }
3361
3362 BIND(&if_holey_double);
3363 {
3364#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3365 Label if_undefined(this);
3366 TNode<Float64T> float_value = LoadFixedDoubleArrayElementWithUndefinedCheck(
3367 CAST(elements), index, &if_undefined, if_hole);
3368 var_result = AllocateHeapNumberWithValue(float_value);
3369 Goto(&done);
3370
3371 BIND(&if_undefined);
3372 {
3373 var_result = UndefinedConstant();
3374 Goto(&done);
3375 }
3376#else
3377 var_result = AllocateHeapNumberWithValue(
3378 LoadFixedDoubleArrayElement(CAST(elements), index, if_hole));
3379 Goto(&done);
3380#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3381 }
3382
3383 BIND(&if_dictionary);
3384 {
3385 CSA_DCHECK(this, IsDictionaryElementsKind(elements_kind));
3386 var_result = BasicLoadNumberDictionaryElement(CAST(elements), index,
3387 if_accessor, if_hole);
3388 Goto(&done);
3389 }
3390
3391 BIND(&done);
3392 return var_result.value();
3393}
3394
3397 // TODO(ishell): Compare only the upper part for the hole once the
3398 // compiler is able to fold addition of already complex |offset| with
3399 // |kIeeeDoubleExponentWordOffset| into one addressing mode.
3400 if (Is64()) {
3402 return Word64Equal(element, Int64Constant(kHoleNanInt64));
3403 } else {
3404 TNode<Uint32T> element_upper = Load<Uint32T>(
3406 return Word32Equal(element_upper, Int32Constant(kHoleNanUpper32));
3407 }
3408}
3409
3410#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3411TNode<BoolT> CodeStubAssembler::IsDoubleUndefined(TNode<Object> base,
3413 // TODO(ishell): Compare only the upper part for the hole once the
3414 // compiler is able to fold addition of already complex |offset| with
3415 // |kIeeeDoubleExponentWordOffset| into one addressing mode.
3416 if (Is64()) {
3417 TNode<Uint64T> element = Load<Uint64T>(base, offset);
3418 return Word64Equal(element, Int64Constant(kUndefinedNanInt64));
3419 } else {
3420 TNode<Uint32T> element_upper = Load<Uint32T>(
3422 return Word32Equal(element_upper, Int32Constant(kUndefinedNanUpper32));
3423 }
3424}
3425
3426TNode<BoolT> CodeStubAssembler::IsDoubleUndefined(TNode<Float64T> value) {
3427 TNode<Int64T> bits = BitcastFloat64ToInt64(value);
3428 return Word64Equal(bits, Int64Constant(kUndefinedNanInt64));
3429}
3430#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3431
3433 TNode<Object> base, TNode<IntPtrT> offset, Label* if_hole,
3434 MachineType machine_type) {
3435 if (if_hole) {
3436 GotoIf(IsDoubleHole(base, offset), if_hole);
3437 }
3438 if (machine_type.IsNone()) {
3439 // This means the actual value is not needed.
3440 return TNode<Float64T>();
3441#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3442 } else {
3443 CSA_DCHECK(this, Word32BinaryNot(IsDoubleUndefined(base, offset)));
3444#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3445 }
3446 return UncheckedCast<Float64T>(Load(machine_type, base, offset));
3447}
3448
3449#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
// Loads a float64 at |base| + |offset|, first dispatching on the two sentinel
// NaN bit patterns: jumps to |if_hole| (when non-null) if the slot holds the
// hole pattern, and to |if_undefined| if it holds the undefined pattern.
// If |machine_type| is None, only the checks are emitted and no value is
// loaded. Only compiled when V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE is
// enabled.
TNode<Float64T> CodeStubAssembler::LoadDoubleWithUndefinedAndHoleCheck(
    TNode<Object> base, TNode<IntPtrT> offset, Label* if_undefined,
    Label* if_hole, MachineType machine_type) {
  // Callers must always handle the undefined sentinel; the hole check is
  // optional (pass nullptr to skip it).
  DCHECK_NOT_NULL(if_undefined);
  if (if_hole) {
    GotoIf(IsDoubleHole(base, offset), if_hole);
  }
  GotoIf(IsDoubleUndefined(base, offset), if_undefined);
  if (machine_type.IsNone()) {
    // This means the actual value is not needed.
    return TNode<Float64T>();
  }
  return UncheckedCast<Float64T>(Load(machine_type, base, offset));
}
3464#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3465
3467 return CAST(LoadContextElement(context, Context::SCOPE_INFO_INDEX));
3468}
3469
3476
3483
3490
3492 TNode<Context> context) {
3493 TNode<Map> map = LoadMap(context);
3494 return CAST(LoadObjectField(
3495 map, Map::kConstructorOrBackPointerOrNativeContextOffset));
3496}
3497
3500 TNode<Map> module_map = CAST(
3501 LoadContextElement(native_context, Context::MODULE_CONTEXT_MAP_INDEX));
3502 TVariable<Object> cur_context(context, this);
3503
3504 Label context_found(this);
3505
3506 Label context_search(this, &cur_context);
3507
3508 // Loop until cur_context->map() is module_map.
3509 Goto(&context_search);
3510 BIND(&context_search);
3511 {
3512 CSA_DCHECK(this, Word32BinaryNot(
3513 TaggedEqual(cur_context.value(), native_context)));
3514 GotoIf(TaggedEqual(LoadMap(CAST(cur_context.value())), module_map),
3515 &context_found);
3516
3517 cur_context =
3518 LoadContextElement(CAST(cur_context.value()), Context::PREVIOUS_INDEX);
3519 Goto(&context_search);
3520 }
3521
3522 BIND(&context_found);
3523 return UncheckedCast<Context>(cur_context.value());
3524}
3525
3527 const TNode<Context> module_context = LoadModuleContext(context);
3528 const TNode<HeapObject> module =
3529 CAST(LoadContextElement(module_context, Context::EXTENSION_INDEX));
3530 const TNode<Object> import_meta =
3531 LoadObjectField(module, SourceTextModule::kImportMetaOffset);
3532
3533 TVARIABLE(Object, return_value, import_meta);
3534
3535 Label end(this);
3536 GotoIfNot(IsTheHole(import_meta), &end);
3537
3538 return_value = CallRuntime(Runtime::kGetImportMetaObject, context);
3539 Goto(&end);
3540
3541 BIND(&end);
3542 return return_value.value();
3543}
3544
3547 TNode<JSFunction> object_function =
3548 CAST(LoadContextElement(native_context, Context::OBJECT_FUNCTION_INDEX));
3549 return CAST(LoadJSFunctionPrototypeOrInitialMap(object_function));
3550}
3551
3553 TNode<IntPtrT> number_of_properties,
3554 Label* runtime) {
3555 CSA_DCHECK(this, UintPtrLessThan(number_of_properties,
3557 TNode<WeakFixedArray> cache =
3558 CAST(LoadContextElement(native_context, Context::MAP_CACHE_INDEX));
3559 TNode<MaybeObject> value =
3560 LoadWeakFixedArrayElement(cache, number_of_properties, 0);
3561 TNode<Map> result = CAST(GetHeapObjectAssumeWeak(value, runtime));
3562 return result;
3563}
3564
3567 TNode<Map> map = CAST(LoadContextElement(
3568 native_context, Context::SLOW_OBJECT_WITH_NULL_PROTOTYPE_MAP));
3569 return map;
3570}
3571
3580
3586
3588 const TNode<SharedFunctionInfo> shared_function_info =
3590 function, JSFunction::kSharedFunctionInfoOffset);
3591
3592 const TNode<Uint32T> function_kind =
3594 LoadObjectField<Uint32T>(shared_function_info,
3595 SharedFunctionInfo::kFlagsOffset));
3596 return function_kind;
3597}
3598
// NOTE(review): scraped view — the line with the return type and function
// name (internal 3599) is missing; only the parameter tail is visible. The
// in-body comment and the call site at internal line 3626
// (IsGeneratorFunction(function)) identify this as IsGeneratorFunction.
//
// Loads the function's FunctionKind from its SharedFunctionInfo flags and
// returns whether it falls in the generator-function range of the kind enum
// (async-concise-generator-method ... concise-generator-method), mirroring
// the C++ predicate IsGeneratorFunction(FunctionKind).
3600 TNode<JSFunction> function) {
3601 const TNode<Uint32T> function_kind = LoadFunctionKind(function);
3602
3603 // See IsGeneratorFunction(FunctionKind kind).
3604 return IsInRange(
3605 function_kind,
3606 static_cast<uint32_t>(FunctionKind::kAsyncConciseGeneratorMethod),
3607 static_cast<uint32_t>(FunctionKind::kConciseGeneratorMethod));
3608}
3609
3611 TNode<HeapObject> object) {
3612 // Only JSFunction maps may have HasPrototypeSlotBit set.
3614 LoadMapBitField(LoadMap(object)));
3615}
3616
// NOTE(review): scraped view — the line with the function name (internal
// 3617) is missing. The call site at internal line 3634
// (BranchIfHasPrototypeProperty(function, map_bit_field, ...)) identifies it.
//
// Branches to if_true when the function has a "prototype" property:
// either its map has both HasPrototypeSlotBit and IsConstructorBit set, or
// it is a generator function (checked via the SharedFunctionInfo kind).
3618 TNode<JSFunction> function, TNode<Int32T> function_map_bit_field,
3619 Label* if_true, Label* if_false) {
3620 // (has_prototype_slot() && IsConstructor()) ||
3621 // IsGeneratorFunction(shared()->kind())
3622 uint32_t mask = Map::Bits1::HasPrototypeSlotBit::kMask |
3623 Map::Bits1::IsConstructorBit::kMask;
3624
3625 GotoIf(IsAllSetWord32(function_map_bit_field, mask), if_true);
3626 Branch(IsGeneratorFunction(function), if_true, if_false);
3627}
3628
3630 TNode<JSFunction> function, TNode<Map> map, Label* runtime) {
3631 // !has_prototype_property() || has_non_instance_prototype()
3632 TNode<Int32T> map_bit_field = LoadMapBitField(map);
3633 Label next_check(this);
3634 BranchIfHasPrototypeProperty(function, map_bit_field, &next_check, runtime);
3635 BIND(&next_check);
3637 runtime);
3638}
3639
3641 TNode<JSFunction> function, Label* if_bailout) {
3644 LoadMapBitField(LoadMap(function))));
3646 function, JSFunction::kPrototypeOrInitialMapOffset);
3647 GotoIf(IsTheHole(proto_or_map), if_bailout);
3648
3649 TVARIABLE(HeapObject, var_result, proto_or_map);
3650 Label done(this, &var_result);
3651 GotoIfNot(IsMap(proto_or_map), &done);
3652
3653 var_result = LoadMapPrototype(CAST(proto_or_map));
3654 Goto(&done);
3655
3656 BIND(&done);
3657 return var_result.value();
3658}
3659
3661#ifdef V8_ENABLE_LEAPTIERING
3663 function, JSFunction::kDispatchHandleOffset);
3664 return LoadCodeObjectFromJSDispatchTable(dispatch_handle);
3665#else
3666 return LoadCodePointerFromObject(function, JSFunction::kCodeOffset);
3667#endif // V8_ENABLE_LEAPTIERING
3668}
3669
3672#ifdef V8_ENABLE_SANDBOX
3673 TNode<IndirectPointerHandleT> trusted_data_handle =
3675 sfi, SharedFunctionInfo::kTrustedFunctionDataOffset);
3676
3677 return Select<Object>(
3678 Word32Equal(trusted_data_handle,
3680 [=, this] { return SmiConstant(0); },
3681 [=, this] {
3682 return ResolveIndirectPointerHandle(trusted_data_handle,
3684 });
3685#else
3687 sfi, SharedFunctionInfo::kTrustedFunctionDataOffset);
3688#endif
3689}
3690
3694 sfi, SharedFunctionInfo::kUntrustedFunctionDataOffset);
3695}
3696
3702
3705 return LoadObjectField<Smi>(sfi,
3706 SharedFunctionInfo::kUntrustedFunctionDataOffset);
3707}
3708
3712 sfi, SharedFunctionInfo::kTrustedFunctionDataOffset,
3714
3715 TVARIABLE(HeapObject, var_result, function_data);
3716
3717 Label check_for_interpreter_data(this, &var_result);
3718 Label done(this, &var_result);
3719
3720 GotoIfNot(HasInstanceType(var_result.value(), CODE_TYPE),
3721 &check_for_interpreter_data);
3722 {
3723 TNode<Code> code = CAST(var_result.value());
3724#ifdef DEBUG
3725 TNode<Int32T> code_flags =
3726 LoadObjectField<Int32T>(code, Code::kFlagsOffset);
3727 CSA_DCHECK(
3729 Int32Constant(static_cast<int>(CodeKind::BASELINE))));
3730#endif // DEBUG
3732 code, Code::kDeoptimizationDataOrInterpreterDataOffset));
3733 var_result = baseline_data;
3734 }
3735 Goto(&check_for_interpreter_data);
3736
3737 BIND(&check_for_interpreter_data);
3738
3739 GotoIfNot(HasInstanceType(var_result.value(), INTERPRETER_DATA_TYPE), &done);
3741 CAST(var_result.value()), InterpreterData::kBytecodeArrayOffset));
3742 var_result = bytecode_array;
3743 Goto(&done);
3744
3745 BIND(&done);
3746 // We need an explicit check here since we use the
3747 // kUnknownIndirectPointerTag above and so don't have any type guarantees.
3748 CSA_SBXCHECK(this, HasInstanceType(var_result.value(), BYTECODE_ARRAY_TYPE));
3749 return CAST(var_result.value());
3750}
3751
3752#ifdef V8_ENABLE_WEBASSEMBLY
3754CodeStubAssembler::LoadSharedFunctionInfoWasmFunctionData(
3757 sfi, SharedFunctionInfo::kTrustedFunctionDataOffset,
3758 kWasmFunctionDataIndirectPointerTag));
3759}
3760
// NOTE(review): scraped view — interior line 3769 is missing here; by analogy
// with the intact WasmJSFunctionData variant below it is presumably the
// opening `CSA_SBXCHECK(` of the check whose arguments appear on line 3770 —
// confirm against the actual source.
//
// Loads the SFI's WasmFunctionData and narrows it to
// WasmExportedFunctionData, with an (SBX) instance-type check because the
// trusted-pointer tagging scheme cannot yet express the subtype directly.
3761TNode<WasmExportedFunctionData>
3762CodeStubAssembler::LoadSharedFunctionInfoWasmExportedFunctionData(
3763 TNode<SharedFunctionInfo> sfi) {
3764 TNode<WasmFunctionData> function_data =
3765 LoadSharedFunctionInfoWasmFunctionData(sfi);
3766 // TODO(saelo): it would be nice if we could use LoadTrustedPointerFromObject
3767 // with a kWasmExportedFunctionDataIndirectPointerTag to avoid the SBXCHECK,
3768 // but for that our tagging scheme first needs to support type hierarchies.
3770 this, HasInstanceType(function_data, WASM_EXPORTED_FUNCTION_DATA_TYPE));
3771 return CAST(function_data);
3772}
3773
// Loads the SFI's WasmFunctionData and narrows it to WasmJSFunctionData.
// The narrowing is guarded by an SBXCHECK on the instance type: the data is
// fetched under the generic WasmFunctionData accessor, so without a
// hierarchy-aware trusted-pointer tag an explicit type check is required to
// keep the cast sandbox-safe.
3774TNode<WasmJSFunctionData>
3775CodeStubAssembler::LoadSharedFunctionInfoWasmJSFunctionData(
3776 TNode<SharedFunctionInfo> sfi) {
3777 TNode<WasmFunctionData> function_data =
3778 LoadSharedFunctionInfoWasmFunctionData(sfi);
3779 // TODO(saelo): it would be nice if we could use LoadTrustedPointerFromObject
3780 // with a kWasmJSFunctionDataIndirectPointerTag to avoid the SBXCHECK, but
3781 // for that our tagging scheme first needs to support type hierarchies.
3782 CSA_SBXCHECK(this,
3783 HasInstanceType(function_data, WASM_JS_FUNCTION_DATA_TYPE));
3784 return CAST(function_data);
3785}
3786#endif // V8_ENABLE_WEBASSEMBLY
3787
// NOTE(review): scraped view — the line with the return type and function
// name (internal 3788) is missing. Body reads the 16-bit parameter-size
// field of a BytecodeArray; presumably a parameter-count accessor — confirm
// against the actual source.
3789 TNode<BytecodeArray> bytecode_array) {
3790 return LoadObjectField<Uint16T>(bytecode_array,
3791 BytecodeArray::kParameterSizeOffset);
3792}
3793
3799
3806
// NOTE(review): scraped view — the line with the function name and first
// parameter (internal 3807) is missing; `object` must be declared there.
// Stores `value` into the HeapNumber's value_ field without a write barrier
// (a float64 payload is not a tagged pointer, so no barrier is needed).
// Under the experimental undefined-double scheme, asserts the value is not
// the special "undefined" double pattern.
3808 TNode<Float64T> value) {
3809#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3810 CSA_DCHECK(this, Word32Equal(Int32Constant(0), IsDoubleUndefined(value)));
3811#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
3812 StoreObjectFieldNoWriteBarrier(object, offsetof(HeapNumber, value_), value);
3813}
3814
3819
3825
3832
3835 TNode<Object> value) {
3836 int const_offset;
3837 if (TryToInt32Constant(offset, &const_offset)) {
3838 StoreObjectField(object, const_offset, value);
3839 } else {
3841 }
3842}
3843
3850
3857
3861#ifdef V8_ENABLE_SANDBOX
3862 StoreIndirectPointerField(object, offset, tag, value);
3863#else
3864 StoreObjectField(object, offset, value);
3865#endif // V8_ENABLE_SANDBOX
3866}
3867
3871#ifdef V8_ENABLE_SANDBOX
3873#else
3874 StoreObjectFieldNoWriteBarrier(object, offset, value);
3875#endif // V8_ENABLE_SANDBOX
3876}
3877
3879 int offset) {
3880#ifdef V8_ENABLE_SANDBOX
3883#else
3885#endif
3886}
3887
3894
3897 TNode<Object> value) {
3898 CSA_DCHECK(this,
3902 IntPtrConstant(0)));
3903 int const_offset;
3904 if (TryToInt32Constant(offset, &const_offset)) {
3905 StoreObjectField(object, const_offset, value);
3906 } else {
3908 }
3909}
3910
3912 OptimizedStoreMap(object, map);
3913 DcheckHasValidMap(object);
3914}
3915
3917 RootIndex map_root_index) {
3918 StoreMapNoWriteBarrier(object, CAST(LoadRoot(map_root_index)));
3919}
3920
3926
3928 int offset, RootIndex root_index) {
3929 TNode<Object> root = LoadRoot(root_index);
3931 StoreMap(object, CAST(root));
3932 } else if (RootsTable::IsImmortalImmovable(root_index)) {
3934 } else {
3935 StoreObjectField(object, offset, root);
3936 }
3937}
3938
3939template <typename TIndex>
3942 TNode<Object> value, WriteBarrierMode barrier_mode, int additional_offset) {
3943 // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
3944 static_assert(std::is_same_v<TIndex, Smi> ||
3945 std::is_same_v<TIndex, UintPtrT> ||
3946 std::is_same_v<TIndex, IntPtrT>,
3947 "Only Smi, UintPtrT or IntPtrT index is allowed");
3948 DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
3949 barrier_mode == UNSAFE_SKIP_WRITE_BARRIER ||
3950 barrier_mode == UPDATE_WRITE_BARRIER ||
3951 barrier_mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER);
3952 DCHECK(IsAligned(additional_offset, kTaggedSize));
3953 static_assert(static_cast<int>(OFFSET_OF_DATA_START(FixedArray)) ==
3954 static_cast<int>(PropertyArray::kHeaderSize));
3955 int header_size =
3956 OFFSET_OF_DATA_START(FixedArray) + additional_offset - kHeapObjectTag;
3958 ElementOffsetFromIndex(index_node, HOLEY_ELEMENTS, header_size);
3959 static_assert(static_cast<int>(offsetof(FixedArray, length_)) ==
3960 static_cast<int>(offsetof(FixedDoubleArray, length_)));
3961 static_assert(static_cast<int>(offsetof(FixedArray, length_)) ==
3962 static_cast<int>(offsetof(WeakFixedArray, length_)));
3963 static_assert(static_cast<int>(offsetof(FixedArray, length_)) ==
3964 static_cast<int>(PropertyArray::kLengthAndHashOffset));
3965 // Check that index_node + additional_offset <= object.length.
3966 // TODO(cbruni): Use proper LoadXXLength helpers
3967 CSA_DCHECK(
3968 this,
3970 offset,
3971 Select<IntPtrT>(
3972 IsPropertyArray(object),
3973 [=, this] {
3975 object, PropertyArray::kLengthAndHashOffset);
3978 },
3979 [=, this] {
3982 }),
3984 if (barrier_mode == SKIP_WRITE_BARRIER) {
3986 } else if (barrier_mode == UNSAFE_SKIP_WRITE_BARRIER) {
3988 value);
3989 } else if (barrier_mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER) {
3990 StoreEphemeronKey(object, offset, value);
3991 } else {
3992 Store(object, offset, value);
3993 }
3994}
3995
3996template V8_EXPORT_PRIVATE void
3999 WriteBarrierMode, int);
4000
4001template V8_EXPORT_PRIVATE void
4004 WriteBarrierMode, int);
4005
4006template V8_EXPORT_PRIVATE void
4009 WriteBarrierMode, int);
4010
4011template <typename TIndex>
4014 CheckBounds check_bounds) {
4015 // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
4016 static_assert(std::is_same_v<TIndex, Smi> ||
4017 std::is_same_v<TIndex, UintPtrT> ||
4018 std::is_same_v<TIndex, IntPtrT>,
4019 "Only Smi, UintPtrT or IntPtrT index is allowed");
4020 if (NeedsBoundsCheck(check_bounds)) {
4021 FixedArrayBoundsCheck(object, index, 0);
4022 }
4027 // Make sure we do not store signalling NaNs into double arrays.
4028 TNode<Float64T> value_silenced = Float64SilenceNaN(value);
4029 StoreNoWriteBarrier(rep, object, offset, value_silenced);
4030}
4031
4032// Export the Smi version which is used outside of code-stub-assembler.
4035
4037 TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot,
4038 TNode<AnyTaggedT> value, WriteBarrierMode barrier_mode,
4039 int additional_offset) {
4040 DCHECK(IsAligned(additional_offset, kTaggedSize));
4041 DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
4042 barrier_mode == UNSAFE_SKIP_WRITE_BARRIER ||
4043 barrier_mode == UPDATE_WRITE_BARRIER);
4044 int header_size = FeedbackVector::kRawFeedbackSlotsOffset +
4045 additional_offset - kHeapObjectTag;
4047 ElementOffsetFromIndex(Signed(slot), HOLEY_ELEMENTS, header_size);
4048 // Check that slot <= feedback_vector.length.
4049 CSA_DCHECK(this,
4051 FeedbackVector::kHeaderSize),
4052 SmiFromIntPtr(offset), feedback_vector);
4053 if (barrier_mode == SKIP_WRITE_BARRIER) {
4055 value);
4056 } else if (barrier_mode == UNSAFE_SKIP_WRITE_BARRIER) {
4058 offset, value);
4059 } else {
4060 Store(feedback_vector, offset, value);
4061 }
4062}
4063
4065 TNode<Map> map,
4066 Label* bailout) {
4067 // Disallow pushing onto prototypes. It might be the JSArray prototype.
4068 // Disallow pushing onto non-extensible objects.
4069 Comment("Disallow pushing onto prototypes");
4071
4072 EnsureArrayLengthWritable(context, map, bailout);
4073
4076 return Signed(kind);
4077}
4078
4081 TVariable<FixedArrayBase>* var_elements, TNode<BInt> growth,
4082 Label* bailout) {
4083 Label fits(this, var_elements);
4084 TNode<BInt> capacity =
4086
4087 TNode<BInt> new_length = IntPtrOrSmiAdd(growth, length);
4088 GotoIfNot(IntPtrOrSmiGreaterThan(new_length, capacity), &fits);
4090 *var_elements = GrowElementsCapacity(array, var_elements->value(), kind, kind,
4091 capacity, new_capacity, bailout);
4092 Goto(&fits);
4093 BIND(&fits);
4094}
4095
4097 TNode<JSArray> array,
4099 TVariable<IntPtrT>* arg_index,
4100 Label* bailout) {
4101 Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
4102 Label pre_bailout(this);
4103 Label success(this);
4104 TVARIABLE(Smi, var_tagged_length, LoadFastJSArrayLength(array));
4105 TVARIABLE(BInt, var_length, SmiToBInt(var_tagged_length.value()));
4106 TVARIABLE(FixedArrayBase, var_elements, LoadElements(array));
4107
4108 // Trivial case: no values are being appended.
4109 // We have this special case here so that callers of this function can assume
4110 // that there is at least one argument if this function bails out. This may
4111 // otherwise not be the case if, due to another bug or in-sandbox memory
4112 // corruption, the JSArray's length is larger than that of its backing
4113 // FixedArray. In that case, PossiblyGrowElementsCapacity can fail even if no
4114 // element are to be appended.
4115 GotoIf(IntPtrEqual(args->GetLengthWithoutReceiver(), IntPtrConstant(0)),
4116 &success);
4117
4118 // Resize the capacity of the fixed array if it doesn't fit.
4119 TNode<IntPtrT> first = arg_index->value();
4120 TNode<BInt> growth =
4121 IntPtrToBInt(IntPtrSub(args->GetLengthWithoutReceiver(), first));
4122 PossiblyGrowElementsCapacity(kind, array, var_length.value(), &var_elements,
4123 growth, &pre_bailout);
4124
4125 // Push each argument onto the end of the array now that there is enough
4126 // capacity.
4127 CodeStubAssembler::VariableList push_vars({&var_length}, zone());
4128 TNode<FixedArrayBase> elements = var_elements.value();
4129 args->ForEach(
4130 push_vars,
4131 [&](TNode<Object> arg) {
4132 TryStoreArrayElement(kind, &pre_bailout, elements, var_length.value(),
4133 arg);
4134 Increment(&var_length);
4135 },
4136 first);
4137 {
4138 TNode<Smi> length = BIntToSmi(var_length.value());
4139 var_tagged_length = length;
4140 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
4141 Goto(&success);
4142 }
4143
4144 BIND(&pre_bailout);
4145 {
4146 TNode<Smi> length = ParameterToTagged(var_length.value());
4147 var_tagged_length = length;
4148 TNode<Smi> diff = SmiSub(length, LoadFastJSArrayLength(array));
4149 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
4150 *arg_index = IntPtrAdd(arg_index->value(), SmiUntag(diff));
4151 Goto(bailout);
4152 }
4153
4154 BIND(&success);
4155 return var_tagged_length.value();
4156}
4157
4159 TNode<FixedArrayBase> elements,
4160 TNode<BInt> index,
4161 TNode<Object> value) {
4162 if (IsSmiElementsKind(kind)) {
4163 GotoIf(TaggedIsNotSmi(value), bailout);
4164 } else if (IsDoubleElementsKind(kind)) {
4165 GotoIfNotNumber(value, bailout);
4166 }
4167
4169 StoreElement(elements, kind, index, ChangeNumberToFloat64(CAST(value)));
4170 } else {
4171 StoreElement(elements, kind, index, value);
4172 }
4173}
4174
4176 TNode<JSArray> array,
4177 TNode<Object> value,
4178 Label* bailout) {
4179 Comment("BuildAppendJSArray: ", ElementsKindToString(kind));
4180 TVARIABLE(BInt, var_length, SmiToBInt(LoadFastJSArrayLength(array)));
4181 TVARIABLE(FixedArrayBase, var_elements, LoadElements(array));
4182
4183 // Resize the capacity of the fixed array if it doesn't fit.
4185 PossiblyGrowElementsCapacity(kind, array, var_length.value(), &var_elements,
4186 growth, bailout);
4187
4188 // Push each argument onto the end of the array now that there is enough
4189 // capacity.
4190 TryStoreArrayElement(kind, bailout, var_elements.value(), var_length.value(),
4191 value);
4192 Increment(&var_length);
4193
4194 TNode<Smi> length = BIntToSmi(var_length.value());
4195 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
4196}
4197
4199 WriteBarrierMode mode) {
4201 StoreMapNoWriteBarrier(result, RootIndex::kCellMap);
4202 TNode<Cell> cell = CAST(result);
4203 StoreCellValue(cell, value, mode);
4204 return cell;
4205}
4206
// NOTE(review): scraped view — the signature line (internal 4207) is missing.
// Body returns the Cell's value slot; presumably LoadCellValue (cf. the
// StoreCellValue call at internal line 4203) — confirm against the source.
4208 return LoadObjectField(cell, Cell::kValueOffset);
4209}
4210
// NOTE(review): scraped view — the line with the function name (internal
// 4211) and interior line 4213 (likely a CSA_DCHECK on `mode`) are missing —
// confirm against the actual source. The call site at internal line 4203
// (StoreCellValue(cell, value, mode)) identifies this as StoreCellValue.
//
// Writes `value` into the Cell's value slot, emitting a write barrier only
// when mode == UPDATE_WRITE_BARRIER.
4212 WriteBarrierMode mode) {
4214
4215 if (mode == UPDATE_WRITE_BARRIER) {
4216 StoreObjectField(cell, Cell::kValueOffset, value);
4217 } else {
4218 StoreObjectFieldNoWriteBarrier(cell, Cell::kValueOffset, value);
4219 }
4220}
4221
4225 RootIndex heap_map_index = RootIndex::kHeapNumberMap;
4226 StoreMapNoWriteBarrier(result, heap_map_index);
4228}
4229
4236
4246
4248 TVARIABLE(Object, result, object);
4249 Label done(this);
4250
4251 GotoIf(TaggedIsSmi(object), &done);
4252 // TODO(leszeks): Read the field descriptor to decide if this heap number is
4253 // mutable or not.
4254 GotoIfNot(IsHeapNumber(UncheckedCast<HeapObject>(object)), &done);
4255 {
4256 // Mutable heap number found --- allocate a clone.
4257 TNode<Float64T> value =
4260 Goto(&done);
4261 }
4262
4263 BIND(&done);
4264 return result.value();
4265}
4266
4274
4276 TNode<IntPtrT> size =
4279 TNode<HeapObject> raw_result = Allocate(size);
4280 StoreMapNoWriteBarrier(raw_result, RootIndex::kBigIntMap);
4281#ifdef BIGINT_NEEDS_PADDING
4282 static_assert(arraysize(BigInt::padding_) == sizeof(int32_t));
4283 StoreObjectFieldNoWriteBarrier(raw_result, offsetof(BigInt, padding_),
4284 Int32Constant(0));
4285#endif
4286 return UncheckedCast<BigInt>(raw_result);
4287}
4288
4290 TNode<Word32T> bitfield) {
4291 StoreObjectFieldNoWriteBarrier(bigint, offsetof(BigInt, bitfield_), bitfield);
4292}
4293
4295 intptr_t digit_index,
4296 TNode<UintPtrT> digit) {
4297 CHECK_LE(0, digit_index);
4298 CHECK_LT(digit_index, BigInt::kMaxLength);
4300 bigint,
4302 static_cast<int>(digit_index) * kSystemPointerSize,
4303 digit);
4304}
4305
4314
4319
4321 intptr_t digit_index) {
4322 CHECK_LE(0, digit_index);
4323 CHECK_LT(digit_index, BigInt::kMaxLength);
4325 bigint, OFFSET_OF_DATA_START(BigInt) +
4326 static_cast<int>(digit_index) * kSystemPointerSize);
4327}
4328
4336
4338 TNode<UintPtrT> length, AllocationFlags flags) {
4339 CSA_DCHECK(this, WordNotEqual(length, IntPtrConstant(0)));
4340
4341 Comment("AllocateNonEmptyByteArray");
4342 TVARIABLE(Object, var_result);
4343
4345 Signed(length), UINT8_ELEMENTS,
4347 TNode<IntPtrT> size =
4349
4350 TNode<HeapObject> result = Allocate(size, flags);
4351
4352 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kByteArrayMap));
4353 StoreMapNoWriteBarrier(result, RootIndex::kByteArrayMap);
4355 SmiTag(Signed(length)));
4356
4357 return CAST(result);
4358}
4359
4361 AllocationFlags flags) {
4362 // TODO(ishell): unify with AllocateNonEmptyByteArray().
4363
4364 Comment("AllocateByteArray");
4365 TVARIABLE(Object, var_result);
4366
4367 // Compute the ByteArray size and check if it fits into new space.
4368 Label if_lengthiszero(this), if_sizeissmall(this),
4369 if_notsizeissmall(this, Label::kDeferred), if_join(this);
4370 GotoIf(WordEqual(length, UintPtrConstant(0)), &if_lengthiszero);
4371
4373 Signed(length), UINT8_ELEMENTS,
4375 TNode<IntPtrT> size =
4377 Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
4378 &if_sizeissmall, &if_notsizeissmall);
4379
4380 BIND(&if_sizeissmall);
4381 {
4382 // Just allocate the ByteArray in new space.
4385 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kByteArrayMap));
4386 StoreMapNoWriteBarrier(result, RootIndex::kByteArrayMap);
4388 SmiTag(Signed(length)));
4389 var_result = result;
4390 Goto(&if_join);
4391 }
4392
4393 BIND(&if_notsizeissmall);
4394 {
4395 // We might need to allocate in large object space, go to the runtime.
4397 CallRuntime(Runtime::kAllocateByteArray, NoContextConstant(),
4398 ChangeUintPtrToTagged(length));
4399 var_result = result;
4400 Goto(&if_join);
4401 }
4402
4403 BIND(&if_lengthiszero);
4404 {
4405 var_result = EmptyByteArrayConstant();
4406 Goto(&if_join);
4407 }
4408
4409 BIND(&if_join);
4410 return CAST(var_result.value());
4411}
4412
4414 uint32_t length, AllocationFlags flags) {
4415 Comment("AllocateSeqOneByteString");
4416 if (length == 0) {
4417 return EmptyStringConstant();
4418 }
4423 SmiConstant(0));
4424 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kSeqOneByteStringMap));
4425 StoreMapNoWriteBarrier(result, RootIndex::kSeqOneByteStringMap);
4427 Uint32Constant(length));
4429 offsetof(SeqOneByteString, raw_hash_field_),
4431 return CAST(result);
4432}
4433
// NOTE(review): scraped view — the signature line (internal 4434) is missing.
// Body: returns true when `object` is Smi zero, otherwise whether it is a
// Context; presumably a "zero or context" predicate — confirm against the
// actual source.
4435 return Select<BoolT>(
4436 TaggedEqual(object, SmiConstant(0)),
4437 [=, this] { return Int32TrueConstant(); },
4438 [=, this] { return IsContext(CAST(object)); });
4439}
4440
4442 uint32_t length, AllocationFlags flags) {
4443 Comment("AllocateSeqTwoByteString");
4444 if (length == 0) {
4445 return EmptyStringConstant();
4446 }
4451 SmiConstant(0));
4452 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kSeqTwoByteStringMap));
4453 StoreMapNoWriteBarrier(result, RootIndex::kSeqTwoByteStringMap);
4455 Uint32Constant(length));
4457 offsetof(SeqTwoByteString, raw_hash_field_),
4459 return CAST(result);
4460}
4461
4463 TNode<Uint32T> length,
4464 TNode<String> parent,
4466 DCHECK(map_root_index == RootIndex::kSlicedOneByteStringMap ||
4467 map_root_index == RootIndex::kSlicedTwoByteStringMap);
4469 DCHECK(RootsTable::IsImmortalImmovable(map_root_index));
4470 StoreMapNoWriteBarrier(result, map_root_index);
4472 offsetof(SlicedString, raw_hash_field_),
4475 length);
4477 parent);
4479 offset);
4480 return CAST(result);
4481}
4482
4485 return AllocateSlicedString(RootIndex::kSlicedOneByteStringMap, length,
4486 parent, offset);
4487}
4488
4491 return AllocateSlicedString(RootIndex::kSlicedTwoByteStringMap, length,
4492 parent, offset);
4493}
4494
4496 int at_least_space_for) {
4497 return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
4498}
4499
4501 TNode<IntPtrT> at_least_space_for, AllocationFlags flags) {
4503 at_least_space_for,
4504 IntPtrConstant(NameDictionary::kMaxCapacity)));
4505 TNode<IntPtrT> capacity = HashTableComputeCapacity(at_least_space_for);
4506 return AllocateNameDictionaryWithCapacity(capacity, flags);
4507}
4508
4510 TNode<IntPtrT> capacity, AllocationFlags flags) {
4511 CSA_DCHECK(this, WordIsPowerOfTwo(capacity));
4512 CSA_DCHECK(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)));
4514 TNode<IntPtrT> store_size =
4515 IntPtrAdd(TimesTaggedSize(length),
4517
4519 UncheckedCast<NameDictionary>(Allocate(store_size, flags));
4520
4521 // Initialize FixedArray fields.
4522 {
4523 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kNameDictionaryMap));
4524 StoreMapNoWriteBarrier(result, RootIndex::kNameDictionaryMap);
4526 SmiFromIntPtr(length));
4527 }
4528
4529 // Initialized HashTable fields.
4530 {
4531 TNode<Smi> zero = SmiConstant(0);
4532 StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
4535 NameDictionary::kNumberOfDeletedElementsIndex, zero,
4537 StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
4538 SmiTag(capacity), SKIP_WRITE_BARRIER);
4539 // Initialize Dictionary fields.
4540 StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
4543 StoreFixedArrayElement(result, NameDictionary::kObjectHashIndex,
4549 }
4550
4551 // Initialize NameDictionary elements.
4552 {
4554 TNode<IntPtrT> start_address = IntPtrAdd(
4555 result_word, IntPtrConstant(NameDictionary::OffsetOfElementAt(
4556 NameDictionary::kElementsStartIndex) -
4558 TNode<IntPtrT> end_address = IntPtrAdd(
4559 result_word, IntPtrSub(store_size, IntPtrConstant(kHeapObjectTag)));
4560
4561 TNode<Undefined> filler = UndefinedConstant();
4562 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kUndefinedValue));
4563
4564 StoreFieldsNoWriteBarrier(start_address, end_address, filler);
4565 }
4566
4567 return result;
4568}
4569
4571 int at_least_space_for) {
4572 TNode<HeapObject> dict;
4574 dict = AllocateSwissNameDictionary(at_least_space_for);
4575 } else {
4576 dict = AllocateNameDictionary(at_least_space_for);
4577 }
4579}
4580
4582 TNode<IntPtrT> at_least_space_for, AllocationFlags flags) {
4583 TNode<HeapObject> dict;
4585 dict = AllocateSwissNameDictionary(at_least_space_for);
4586 } else {
4587 dict = AllocateNameDictionary(at_least_space_for, flags);
4588 }
4590}
4591
4603
4605 TNode<NameDictionary> dictionary, Label* large_object_fallback) {
4606 Comment("Copy boilerplate property dict");
4607 TNode<IntPtrT> capacity =
4610 GotoIf(UintPtrGreaterThan(
4611 capacity, IntPtrConstant(NameDictionary::kMaxRegularCapacity)),
4612 large_object_fallback);
4613 TNode<NameDictionary> properties =
4616 CopyFixedArrayElements(PACKED_ELEMENTS, dictionary, properties, length,
4618 return properties;
4619}
4620
4621template <typename CollectionType>
4623 TNode<IntPtrT> capacity) {
4624 capacity = IntPtrRoundUpToPowerOfTwo32(capacity);
4625 capacity =
4626 IntPtrMax(capacity, IntPtrConstant(CollectionType::kInitialCapacity));
4628}
4629
4630template <typename CollectionType>
4632 TNode<IntPtrT> capacity) {
4633 CSA_DCHECK(this, WordIsPowerOfTwo(capacity));
4634 CSA_DCHECK(this,
4636 capacity, IntPtrConstant(CollectionType::kInitialCapacity)));
4637 CSA_DCHECK(this,
4638 IntPtrLessThanOrEqual(
4639 capacity, IntPtrConstant(CollectionType::MaxCapacity())));
4640
4641 static_assert(CollectionType::kLoadFactor == 2);
4642 TNode<IntPtrT> bucket_count = Signed(WordShr(capacity, IntPtrConstant(1)));
4643 TNode<IntPtrT> data_table_length =
4644 IntPtrMul(capacity, IntPtrConstant(CollectionType::kEntrySize));
4645
4646 TNode<IntPtrT> data_table_start_index = IntPtrAdd(
4647 IntPtrConstant(CollectionType::HashTableStartIndex()), bucket_count);
4648 TNode<IntPtrT> fixed_array_length =
4649 IntPtrAdd(data_table_start_index, data_table_length);
4650
4651 // Allocate the table and add the proper map.
4652 const ElementsKind elements_kind = HOLEY_ELEMENTS;
4653 TNode<Map> fixed_array_map =
4654 HeapConstantNoHole(CollectionType::GetMap(isolate()->roots_table()));
4655 TNode<CollectionType> table =
4656 CAST(AllocateFixedArray(elements_kind, fixed_array_length,
4657 AllocationFlag::kNone, fixed_array_map));
4658
4659 Comment("Initialize the OrderedHashTable fields.");
4660 const WriteBarrierMode barrier_mode = SKIP_WRITE_BARRIER;
4661 UnsafeStoreFixedArrayElement(table, CollectionType::NumberOfElementsIndex(),
4662 SmiConstant(0), barrier_mode);
4664 CollectionType::NumberOfDeletedElementsIndex(),
4665 SmiConstant(0), barrier_mode);
4666 UnsafeStoreFixedArrayElement(table, CollectionType::NumberOfBucketsIndex(),
4667 SmiFromIntPtr(bucket_count), barrier_mode);
4668
4669 TNode<IntPtrT> object_address = BitcastTaggedToWord(table);
4670
4671 static_assert(CollectionType::HashTableStartIndex() ==
4672 CollectionType::NumberOfBucketsIndex() + 1);
4673
4674 TNode<Smi> not_found_sentinel = SmiConstant(CollectionType::kNotFound);
4675
4676 intptr_t const_capacity;
4677 if (TryToIntPtrConstant(capacity, &const_capacity) &&
4678 const_capacity == CollectionType::kInitialCapacity) {
4679 int const_bucket_count =
4680 static_cast<int>(const_capacity / CollectionType::kLoadFactor);
4681 int const_data_table_length =
4682 static_cast<int>(const_capacity * CollectionType::kEntrySize);
4683 int const_data_table_start_index = static_cast<int>(
4684 CollectionType::HashTableStartIndex() + const_bucket_count);
4685
4686 Comment("Fill the buckets with kNotFound (constant capacity).");
4687 for (int i = 0; i < const_bucket_count; i++) {
4689 CollectionType::HashTableStartIndex() + i,
4690 not_found_sentinel, barrier_mode);
4691 }
4692
4693 Comment("Fill the data table with undefined (constant capacity).");
4694 for (int i = 0; i < const_data_table_length; i++) {
4695 UnsafeStoreFixedArrayElement(table, const_data_table_start_index + i,
4696 UndefinedConstant(), barrier_mode);
4697 }
4698 } else {
4699 Comment("Fill the buckets with kNotFound.");
4700 TNode<IntPtrT> buckets_start_address =
4701 IntPtrAdd(object_address,
4703 CollectionType::HashTableStartIndex()) -
4705 TNode<IntPtrT> buckets_end_address =
4706 IntPtrAdd(buckets_start_address, TimesTaggedSize(bucket_count));
4707
4708 StoreFieldsNoWriteBarrier(buckets_start_address, buckets_end_address,
4709 not_found_sentinel);
4710
4711 Comment("Fill the data table with undefined.");
4712 TNode<IntPtrT> data_start_address = buckets_end_address;
4713 TNode<IntPtrT> data_end_address = IntPtrAdd(
4714 object_address,
4715 IntPtrAdd(
4717 TimesTaggedSize(fixed_array_length)));
4718
4719 StoreFieldsNoWriteBarrier(data_start_address, data_end_address,
4720 UndefinedConstant());
4721
4722#ifdef DEBUG
4723 TNode<IntPtrT> ptr_diff =
4724 IntPtrSub(data_end_address, buckets_start_address);
4726 TNode<IntPtrT> array_data_fields = IntPtrSub(
4727 array_length, IntPtrConstant(CollectionType::HashTableStartIndex()));
4728 TNode<IntPtrT> expected_end =
4729 IntPtrAdd(data_start_address,
4731 capacity, IntPtrConstant(CollectionType::kEntrySize))));
4732
4733 CSA_DCHECK(this, IntPtrEqual(ptr_diff, TimesTaggedSize(array_data_fields)));
4734 CSA_DCHECK(this, IntPtrEqual(expected_end, data_end_address));
4735#endif
4736 }
4737
4738 return table;
4739}
4740
4750
4755
4760
4765
4770
4772 TNode<Map> map, std::optional<TNode<HeapObject>> properties,
4773 std::optional<TNode<FixedArray>> elements, AllocationFlags flags,
4774 SlackTrackingMode slack_tracking_mode) {
4775 CSA_DCHECK(this, Word32BinaryNot(IsJSFunctionMap(map)));
4776 CSA_DCHECK(this, Word32BinaryNot(InstanceTypeEqual(LoadMapInstanceType(map),
4777 JS_GLOBAL_OBJECT_TYPE)));
4778 TNode<IntPtrT> instance_size =
4780 TNode<HeapObject> object = AllocateInNewSpace(instance_size, flags);
4781 StoreMapNoWriteBarrier(object, map);
4782 InitializeJSObjectFromMap(object, map, instance_size, properties, elements,
4783 slack_tracking_mode);
4784 return CAST(object);
4785}
4786
4788 TNode<HeapObject> object, TNode<Map> map, TNode<IntPtrT> instance_size,
4789 std::optional<TNode<HeapObject>> properties,
4790 std::optional<TNode<FixedArray>> elements,
4791 SlackTrackingMode slack_tracking_mode) {
4792 // This helper assumes that the object is in new-space, as guarded by the
4793 // check in AllocatedJSObjectFromMap.
4794 if (!properties) {
4795 CSA_DCHECK(this, Word32BinaryNot(IsDictionaryMap((map))));
4796 StoreObjectFieldRoot(object, JSObject::kPropertiesOrHashOffset,
4797 RootIndex::kEmptyFixedArray);
4798 } else {
4799 CSA_DCHECK(this, Word32Or(Word32Or(IsPropertyArray(*properties),
4800 IsPropertyDictionary(*properties)),
4801 IsEmptyFixedArray(*properties)));
4802 StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOrHashOffset,
4803 *properties);
4804 }
4805 if (!elements) {
4806 StoreObjectFieldRoot(object, JSObject::kElementsOffset,
4807 RootIndex::kEmptyFixedArray);
4808 } else {
4809 StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset,
4810 *elements);
4811 }
4812 switch (slack_tracking_mode) {
4814 return;
4815 case kNoSlackTracking:
4816 return InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
4817 case kWithSlackTracking:
4818 return InitializeJSObjectBodyWithSlackTracking(object, map,
4819 instance_size);
4820 }
4821}
4822
4824 TNode<HeapObject> object, TNode<Map> map, TNode<IntPtrT> instance_size,
4825 int start_offset) {
4826 static_assert(Map::kNoSlackTracking == 0);
4828 LoadMapBitField3(map)));
4829 InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), instance_size,
4830 RootIndex::kUndefinedValue);
4831}
4832
4834 TNode<HeapObject> object, TNode<Map> map, TNode<IntPtrT> instance_size) {
4835 Comment("InitializeJSObjectBodyNoSlackTracking");
4836
4837 // Perform in-object slack tracking if requested.
4838 int start_offset = JSObject::kHeaderSize;
4840 Label end(this), slack_tracking(this), complete(this, Label::kDeferred);
4841 static_assert(Map::kNoSlackTracking == 0);
4843 &slack_tracking);
4844 Comment("No slack tracking");
4845 InitializeJSObjectBodyNoSlackTracking(object, map, instance_size);
4846 Goto(&end);
4847
4848 BIND(&slack_tracking);
4849 {
4850 Comment("Decrease construction counter");
4851 // Slack tracking is only done on initial maps.
4852 CSA_DCHECK(this, IsUndefined(LoadMapBackPointer(map)));
4853 static_assert(Map::Bits3::ConstructionCounterBits::kLastUsedBit == 31);
4854 TNode<Word32T> new_bit_field3 = Int32Sub(
4855 bit_field3,
4856 Int32Constant(1 << Map::Bits3::ConstructionCounterBits::kShift));
4857
4858 // The object still has in-object slack therefore the |unsed_or_unused|
4859 // field contain the "used" value.
4860 TNode<IntPtrT> used_size =
4862 map, Map::kUsedOrUnusedInstanceSizeInWordsOffset))));
4863
4864 Comment("Initialize filler fields");
4865 InitializeFieldsWithRoot(object, used_size, instance_size,
4866 RootIndex::kOnePointerFillerMap);
4867
4868 Comment("Initialize undefined fields");
4869 InitializeFieldsWithRoot(object, IntPtrConstant(start_offset), used_size,
4870 RootIndex::kUndefinedValue);
4871
4872 static_assert(Map::kNoSlackTracking == 0);
4874 &complete);
4875
4876 // Setting ConstructionCounterBits to 0 requires taking the
4877 // map_updater_access mutex, which we can't do from CSA, so we only manually
4878 // update ConstructionCounterBits when its result is non-zero; otherwise we
4879 // let the runtime do it (with the GotoIf right above this comment).
4880 StoreObjectFieldNoWriteBarrier(map, Map::kBitField3Offset, new_bit_field3);
4881 static_assert(Map::kSlackTrackingCounterEnd == 1);
4882
4883 Goto(&end);
4884 }
4885
4886 // Finalize the instance size.
4887 BIND(&complete);
4888 {
4889 // CompleteInobjectSlackTracking doesn't allocate and thus doesn't need a
4890 // context.
4891 CallRuntime(Runtime::kCompleteInobjectSlackTrackingForMap,
4892 NoContextConstant(), map);
4893 Goto(&end);
4894 }
4895
4896 BIND(&end);
4897}
4898
4900 TNode<IntPtrT> end_address,
4901 TNode<Object> value) {
4902 Comment("StoreFieldsNoWriteBarrier");
4903 CSA_DCHECK(this, WordIsAligned(start_address, kTaggedSize));
4904 CSA_DCHECK(this, WordIsAligned(end_address, kTaggedSize));
4906 start_address, end_address,
4907 [=, this](TNode<IntPtrT> current) {
4909 value);
4910 },
4912}
4913
4915 CSA_DCHECK(this, IsFixedArrayMap(LoadMap(array)));
4916 Label done(this);
4917 // The empty fixed array is not modifiable anyway. And we shouldn't change its
4918 // Map.
4919 GotoIf(TaggedEqual(array, EmptyFixedArrayConstant()), &done);
4920 StoreMap(array, FixedCOWArrayMapConstant());
4921 Goto(&done);
4922 BIND(&done);
4923}
4924
4930
// NOTE(review): Corrupted extract — missing original lines 4931 (the
// signature of this AllocateJSArray overload taking pre-existing elements)
// and 4940-4941 (the base_size adjustment performed when |allocation_site|
// is present). Left byte-identical; restore from upstream before editing.
// Purpose (inferred): allocates an uninitialized JSArray of |base_size| and
// installs the caller-provided |elements| backing store without a write
// barrier.
4932 TNode<Map> array_map, TNode<FixedArrayBase> elements, TNode<Smi> length,
4933 std::optional<TNode<AllocationSite>> allocation_site,
4934 int array_header_size) {
4935 Comment("begin allocation of JSArray passing in elements");
4936 CSA_SLOW_DCHECK(this, TaggedIsPositiveSmi(length));
4937
4938 int base_size = array_header_size;
4939 if (allocation_site) {
4942 }
4943
4944 TNode<IntPtrT> size = IntPtrConstant(base_size);
4946 AllocateUninitializedJSArray(array_map, length, allocation_site, size);
4947 StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset, elements);
4948 return result;
4949}
4950
4951namespace {
4952
4953// To prevent GC between the array and elements allocation, the elements
4954// object allocation is folded together with the js-array allocation.
// NOTE(review): Corrupted extract — original lines 4958 and 4960 (most of the
// function body: the return expression around the BitcastWordToTagged at
// 4959) are missing. Left byte-identical; restore from upstream before
// editing.
4955TNode<FixedArrayBase> InnerAllocateElements(CodeStubAssembler* csa,
4956 TNode<JSArray> js_array,
4957 int offset) {
4959 csa->BitcastWordToTagged(csa->IntPtrAdd(
4961}
4962
4963} // namespace
4964
// NOTE(review): Corrupted extract — missing original lines 4965 (signature),
// 4967, 4977-4978 and 4981-4982 (both arms of the alignment adjustment in
// the not_aligned branch). From the use of kObjectAlignment8GbHeap this
// appears to round |value| up to the allocation alignment. Left
// byte-identical; restore from upstream before editing.
4966 TNode<IntPtrT> value) {
4968
4969 Label not_aligned(this), is_aligned(this);
4970 TVARIABLE(IntPtrT, result, value);
4971
4972 Branch(WordIsAligned(value, kObjectAlignment8GbHeap), &is_aligned,
4973 &not_aligned);
4974
4975 BIND(&not_aligned);
4976 {
4979 } else {
4980 result =
4983 }
4984 Goto(&is_aligned);
4985 }
4986
4987 BIND(&is_aligned);
4988 return result.value();
4989}
4990
// NOTE(review): Corrupted extract — the embedded numbering skips 4992 (rest
// of the qualified function name after the return type at 4991), 5032-5033
// (the base_size adjustment when |allocation_site| is present), 5040 (the
// first half of the |size| computation completed at 5041) and 5057 (the
// IsDoubleElementsKind condition whose then-branch starts at 5058). Left
// byte-identical; restore from upstream before editing.
// Purpose (per comments in body): allocates a JSArray plus its elements
// store, folding both into one new-space allocation when the total size is a
// regular heap object; otherwise the elements go to large-object space
// first.
4991std::pair<TNode<JSArray>, TNode<FixedArrayBase>>
4993 ElementsKind kind, TNode<Map> array_map, TNode<Smi> length,
4994 std::optional<TNode<AllocationSite>> allocation_site,
4995 TNode<IntPtrT> capacity, AllocationFlags allocation_flags,
4996 int array_header_size) {
4997 Comment("begin allocation of JSArray with elements");
4998 CSA_SLOW_DCHECK(this, TaggedIsPositiveSmi(length));
4999
5000 TVARIABLE(JSArray, array);
5001 TVARIABLE(FixedArrayBase, elements);
5002
5003 Label out(this), empty(this), nonempty(this);
5004
5005 int capacity_int;
5006 if (TryToInt32Constant(capacity, &capacity_int)) {
5007 if (capacity_int == 0) {
5008 TNode<FixedArray> empty_array = EmptyFixedArrayConstant();
5009 array = AllocateJSArray(array_map, empty_array, length, allocation_site,
5010 array_header_size);
5011 return {array.value(), empty_array};
5012 } else {
5013 Goto(&nonempty);
5014 }
5015 } else {
5016 Branch(WordEqual(capacity, IntPtrConstant(0)), &empty, &nonempty);
5017
5018 BIND(&empty);
5019 {
5020 TNode<FixedArray> empty_array = EmptyFixedArrayConstant();
5021 array = AllocateJSArray(array_map, empty_array, length, allocation_site,
5022 array_header_size);
5023 elements = empty_array;
5024 Goto(&out);
5025 }
5026 }
5027
5028 BIND(&nonempty);
5029 {
5030 int base_size = ALIGN_TO_ALLOCATION_ALIGNMENT(array_header_size);
5031 if (allocation_site) {
5034 }
5035
5036 const int elements_offset = base_size;
5037
5038 // Compute space for elements
5039 base_size += OFFSET_OF_DATA_START(FixedArray);
5041 ElementOffsetFromIndex(capacity, kind, base_size));
5042
5043 // For very large arrays in which the requested allocation exceeds the
5044 // maximal size of a regular heap object, we cannot use the allocation
5045 // folding trick. Instead, we first allocate the elements in large object
5046 // space, and then allocate the JSArray (and possibly the allocation
5047 // memento) in new space.
5048 Label next(this);
5049 GotoIf(IsRegularHeapObjectSize(size), &next);
5050
5051 CSA_CHECK(this, IsValidFastJSArrayCapacity(capacity));
5052
5053 // Allocate and initialize the elements first. Full initialization is
5054 // needed because the upcoming JSArray allocation could trigger GC.
5055 elements = AllocateFixedArray(kind, capacity, allocation_flags);
5056
5058 FillEntireFixedDoubleArrayWithZero(CAST(elements.value()), capacity);
5059 } else {
5060 FillEntireFixedArrayWithSmiZero(kind, CAST(elements.value()), capacity);
5061 }
5062
5063 // The JSArray and possibly allocation memento next. Note that
5064 // allocation_flags are *not* passed on here and the resulting JSArray
5065 // will always be in new space.
5066 array = AllocateJSArray(array_map, elements.value(), length,
5067 allocation_site, array_header_size);
5068
5069 Goto(&out);
5070
5071 BIND(&next);
5072
5073 // Fold all objects into a single new space allocation.
5074 array =
5075 AllocateUninitializedJSArray(array_map, length, allocation_site, size);
5076 elements = InnerAllocateElements(this, array.value(), elements_offset);
5077
5078 StoreObjectFieldNoWriteBarrier(array.value(), JSObject::kElementsOffset,
5079 elements.value());
5080
5081 // Setup elements object.
5082 static_assert(FixedArrayBase::kHeaderSize == 2 * kTaggedSize);
5083 RootIndex elements_map_index = IsDoubleElementsKind(kind)
5084 ? RootIndex::kFixedDoubleArrayMap
5085 : RootIndex::kFixedArrayMap;
5086 DCHECK(RootsTable::IsImmortalImmovable(elements_map_index));
5087 StoreMapNoWriteBarrier(elements.value(), elements_map_index);
5088
5089 CSA_DCHECK(this, WordNotEqual(capacity, IntPtrConstant(0)));
5090 TNode<Smi> capacity_smi = SmiTag(capacity);
5091 StoreObjectFieldNoWriteBarrier(elements.value(),
5092 offsetof(FixedArray, length_), capacity_smi);
5093 Goto(&out);
5094 }
5095
5096 BIND(&out);
5097 return {array.value(), elements.value()};
5098}
5099
5101 TNode<Map> array_map, TNode<Smi> length,
5102 std::optional<TNode<AllocationSite>> allocation_site,
5103 TNode<IntPtrT> size_in_bytes) {
5104 CSA_SLOW_DCHECK(this, TaggedIsPositiveSmi(length));
5105
5106 // Allocate space for the JSArray and the elements FixedArray in one go.
5107 TNode<HeapObject> array = AllocateInNewSpace(size_in_bytes);
5108
5109 StoreMapNoWriteBarrier(array, array_map);
5110 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
5111 StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
5112 RootIndex::kEmptyFixedArray);
5113
5114 if (allocation_site) {
5117 array,
5118 IntPtrConstant(ALIGN_TO_ALLOCATION_ALIGNMENT(JSArray::kHeaderSize)),
5119 *allocation_site);
5120 }
5121
5122 return CAST(array);
5123}
5124
// NOTE(review): Corrupted extract — original line 5125 (the signature of
// this AllocateJSArray overload taking an ElementsKind and capacity) is
// missing. Left byte-identical; restore from upstream before editing.
// Purpose (inferred): allocates array + elements via
// AllocateUninitializedJSArrayWithElements, then hole-fills any non-empty
// elements store.
5126 ElementsKind kind, TNode<Map> array_map, TNode<IntPtrT> capacity,
5127 TNode<Smi> length, std::optional<TNode<AllocationSite>> allocation_site,
5128 AllocationFlags allocation_flags) {
5129 CSA_SLOW_DCHECK(this, TaggedIsPositiveSmi(length));
5130
5131 TNode<JSArray> array;
5132 TNode<FixedArrayBase> elements;
5133
5134 std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
5135 kind, array_map, length, allocation_site, capacity, allocation_flags);
5136
5137 Label out(this), nonempty(this);
5138
5139 Branch(WordEqual(capacity, IntPtrConstant(0)), &out, &nonempty);
5140
5141 BIND(&nonempty);
5142 {
5143 FillFixedArrayWithValue(kind, elements, IntPtrConstant(0), capacity,
5144 RootIndex::kTheHoleValue);
5145 Goto(&out);
5146 }
5147
5148 BIND(&out);
5149 return array;
5150}
5151
// NOTE(review): Corrupted extract — missing original lines 5152 (signature),
// 5155 (a parameter, plausibly the count given its use at 5165), 5160 (the
// native_context load used at 5161), 5163 (the ExtractFixedArray call head)
// and 5169 (the AllocateJSArray call head). Left byte-identical; restore
// from upstream before editing.
// Purpose (inferred): extracts the [begin, begin+count) slice of |array|'s
// elements and wraps it in a new JSArray with the canonical map for the
// source's elements kind.
5153 TNode<JSArray> array,
5154 TNode<BInt> begin,
5156 TNode<Map> original_array_map = LoadMap(array);
5157 TNode<Int32T> elements_kind = LoadMapElementsKind(original_array_map);
5158
5159 // Use the canonical map for the Array's ElementsKind
5161 TNode<Map> array_map = LoadJSArrayElementsMap(elements_kind, native_context);
5162
5164 LoadElements(array), std::optional<TNode<BInt>>(begin),
5165 std::optional<TNode<BInt>>(count),
5166 std::optional<TNode<BInt>>(std::nullopt),
5167 ExtractFixedArrayFlag::kAllFixedArrays, nullptr, elements_kind);
5168
5170 array_map, new_elements, ParameterToTagged(count), std::nullopt);
5171 return result;
5172}
5173
// NOTE(review): Corrupted extract — missing original lines 5174 (signature),
// 5191 (the condition initializing |need_conversion|), 5204 (an
// ExtractFixedArrayFlag argument), 5236-5240 (parts of the nonextensible
// elements-kind handling) and 5247 (the native_context load used at 5249).
// Left byte-identical; restore from upstream before editing.
// Purpose (inferred): clones |array|'s elements store — optionally converting
// holes to undefined (switching to PACKED_ELEMENTS when any hole was
// converted) — and allocates a new JSArray around the copy.
5175 TNode<Context> context, TNode<JSArray> array,
5176 std::optional<TNode<AllocationSite>> allocation_site,
5177 HoleConversionMode convert_holes) {
5178 // TODO(dhai): we should be able to assert IsFastJSArray(array) here, but this
5179 // function is also used to copy boilerplates even when the no-elements
5180 // protector is invalid. This function should be renamed to reflect its uses.
5181
5182 TNode<Number> length = LoadJSArrayLength(array);
5183 TNode<FixedArrayBase> new_elements;
5184 TVARIABLE(FixedArrayBase, var_new_elements);
5185 TVARIABLE(Int32T, var_elements_kind, LoadMapElementsKind(LoadMap(array)));
5186
5187 Label allocate_jsarray(this), holey_extract(this),
5188 allocate_jsarray_main(this);
5189
5190 bool need_conversion =
5192 if (need_conversion) {
5193 // We need to take care of holes, if the array is of holey elements kind.
5194 GotoIf(IsHoleyFastElementsKindForRead(var_elements_kind.value()),
5195 &holey_extract);
5196 }
5197
5198 // Simple extraction that preserves holes.
5199 new_elements = ExtractFixedArray(
5200 LoadElements(array),
5201 std::optional<TNode<BInt>>(IntPtrOrSmiConstant<BInt>(0)),
5202 std::optional<TNode<BInt>>(TaggedToParameter<BInt>(CAST(length))),
5203 std::optional<TNode<BInt>>(std::nullopt),
5205 var_elements_kind.value());
5206 var_new_elements = new_elements;
5207 Goto(&allocate_jsarray);
5208
5209 if (need_conversion) {
5210 BIND(&holey_extract);
5211 // Convert holes to undefined.
5212 TVARIABLE(BoolT, var_holes_converted, Int32FalseConstant());
5213 // Copy |array|'s elements store. The copy will be compatible with the
5214 // original elements kind unless there are holes in the source. Any holes
5215 // get converted to undefined, hence in that case the copy is compatible
5216 // only with PACKED_ELEMENTS and HOLEY_ELEMENTS, and we will choose
5217 // PACKED_ELEMENTS. Also, if we want to replace holes, we must not use
5218 // ExtractFixedArrayFlag::kDontCopyCOW.
5219 new_elements = ExtractFixedArray(
5220 LoadElements(array),
5221 std::optional<TNode<BInt>>(IntPtrOrSmiConstant<BInt>(0)),
5222 std::optional<TNode<BInt>>(TaggedToParameter<BInt>(CAST(length))),
5223 std::optional<TNode<BInt>>(std::nullopt),
5224 ExtractFixedArrayFlag::kAllFixedArrays, &var_holes_converted);
5225 var_new_elements = new_elements;
5226 // If the array type didn't change, use the original elements kind.
5227 GotoIfNot(var_holes_converted.value(), &allocate_jsarray);
5228 // Otherwise use PACKED_ELEMENTS for the target's elements kind.
5229 var_elements_kind = Int32Constant(PACKED_ELEMENTS);
5230 Goto(&allocate_jsarray);
5231 }
5232
5233 BIND(&allocate_jsarray);
5234
5235 // Handle any nonextensible elements kinds
5237 var_elements_kind.value(),
5239 GotoIf(IsElementsKindLessThanOrEqual(var_elements_kind.value(),
5241 &allocate_jsarray_main);
5242 var_elements_kind = Int32Constant(PACKED_ELEMENTS);
5243 Goto(&allocate_jsarray_main);
5244
5245 BIND(&allocate_jsarray_main);
5246 // Use the canonical map for the chosen elements kind.
5248 TNode<Map> array_map =
5249 LoadJSArrayElementsMap(var_elements_kind.value(), native_context);
5250
5251 TNode<JSArray> result = AllocateJSArray(array_map, var_new_elements.value(),
5252 CAST(length), allocation_site);
5253 return result;
5254}
5255
5256template <typename TIndex>
// NOTE(review): Corrupted extract — missing original lines 5257-5258 head
// (the return type and qualified name of this AllocateFixedArray template;
// only the trailing parameter list from 5259 is visible), 5266, 5280 (the
// arguments of the OOM CallRuntime), 5288, 5303, 5306 and 5309 (the
// length-field store whose arguments continue at 5310). Left byte-identical;
// restore from upstream before editing.
// Purpose (inferred): allocates a FixedArray/FixedDoubleArray of |capacity|
// (bounds-checked against FixedArray::kMaxLength, aborting via the OOM
// runtime call otherwise), installs the map and the tagged length field.
5259 std::optional<TNode<Map>> fixed_array_map) {
5260 static_assert(std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, IntPtrT>,
5261 "Only Smi or IntPtrT capacity is allowed");
5262 Comment("AllocateFixedArray");
5263 CSA_DCHECK(this,
5264 IntPtrOrSmiGreaterThan(capacity, IntPtrOrSmiConstant<TIndex>(0)));
5265
5267 constexpr intptr_t kMaxLength = FixedArray::kMaxLength;
5268
5269 intptr_t capacity_constant;
5270 if (ToParameterConstant(capacity, &capacity_constant)) {
5271 CHECK_LE(capacity_constant, kMaxLength);
5272 } else {
5273 Label if_out_of_memory(this, Label::kDeferred), next(this);
5274 Branch(IntPtrOrSmiGreaterThan(capacity, IntPtrOrSmiConstant<TIndex>(
5275 static_cast<int>(kMaxLength))),
5276 &if_out_of_memory, &next);
5277
5278 BIND(&if_out_of_memory);
5279 CallRuntime(Runtime::kFatalProcessOutOfMemoryInvalidArrayLength,
5281 Unreachable();
5282
5283 BIND(&next);
5284 }
5285
5286 TNode<IntPtrT> total_size = GetFixedArrayAllocationSize(capacity, kind);
5287
5289 TNode<HeapObject> array = Allocate(total_size, flags);
5290 if (fixed_array_map) {
5291 // Conservatively only skip the write barrier if there are no allocation
5292 // flags, this ensures that the object hasn't ended up in LOS. Note that the
5293 // fixed array map is currently always immortal and technically wouldn't
5294 // need the write barrier even in LOS, but it's better to not take chances
5295 // in case this invariant changes later, since it's difficult to enforce
5296 // locally here.
5297 if (flags == AllocationFlag::kNone) {
5298 StoreMapNoWriteBarrier(array, *fixed_array_map);
5299 } else {
5300 StoreMap(array, *fixed_array_map);
5301 }
5302 } else {
5304 ? RootIndex::kFixedDoubleArrayMap
5305 : RootIndex::kFixedArrayMap;
5307 StoreMapNoWriteBarrier(array, map_index);
5308 }
5310 ParameterToTagged(capacity));
5311 return UncheckedCast<FixedArrayBase>(array);
5312}
5313
5314// There is no need to export the Smi version since it is only used inside
5315// code-stub-assembler.
5319 std::optional<TNode<Map>>);
5320
5321template <typename TIndex>
// NOTE(review): Corrupted extract — missing original lines 5322-5323 head
// (return type / qualified name; this is the ExtractToFixedArray template
// called at 5495), 5331, 5379 (the AllocateFixedArray call head for
// |to_elements|), 5391-5393 (the page-flags DCHECK condition) and 5405-5407
// (the CopyElements argument tail). Left byte-identical; restore from
// upstream before editing.
// Purpose (inferred): copies [first, first+count) of |source| into a new
// young-generation FixedArray, short-circuiting to the COW source itself
// when kDontCopyCOW permits and first == 0.
5324 TNode<TIndex> capacity, TNode<Map> source_map, ElementsKind from_kind,
5325 AllocationFlags allocation_flags, ExtractFixedArrayFlags extract_flags,
5326 HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted,
5327 std::optional<TNode<Int32T>> source_elements_kind) {
5328 static_assert(std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, IntPtrT>,
5329 "Only Smi or IntPtrT first, count, and capacity are allowed");
5330
5332 CSA_DCHECK(this,
5333 IntPtrOrSmiNotEqual(IntPtrOrSmiConstant<TIndex>(0), capacity));
5334 CSA_DCHECK(this, TaggedEqual(source_map, LoadMap(source)));
5335
5336 TVARIABLE(FixedArrayBase, var_result);
5337 TVARIABLE(Map, var_target_map, source_map);
5338
5339 Label done(this, {&var_result}), is_cow(this),
5340 new_space_handler(this, {&var_target_map});
5341
5342 // If source_map is either FixedDoubleArrayMap, or FixedCOWArrayMap but
5343 // we can't just use COW, use FixedArrayMap as the target map. Otherwise, use
5344 // source_map as the target map.
5345 if (IsDoubleElementsKind(from_kind)) {
5346 CSA_DCHECK(this, IsFixedDoubleArrayMap(source_map));
5347 var_target_map = FixedArrayMapConstant();
5348 Goto(&new_space_handler);
5349 } else {
5350 CSA_DCHECK(this, Word32BinaryNot(IsFixedDoubleArrayMap(source_map)));
5351 Branch(TaggedEqual(var_target_map.value(), FixedCOWArrayMapConstant()),
5352 &is_cow, &new_space_handler);
5353
5354 BIND(&is_cow);
5355 {
5356 // |source| is a COW array, so we don't actually need to allocate a new
5357 // array unless:
5358 // 1) |extract_flags| forces us to, or
5359 // 2) we're asked to extract only part of the |source| (|first| != 0).
5360 if (extract_flags & ExtractFixedArrayFlag::kDontCopyCOW) {
5361 Branch(IntPtrOrSmiNotEqual(IntPtrOrSmiConstant<TIndex>(0), first),
5362 &new_space_handler, [&] {
5363 var_result = source;
5364 Goto(&done);
5365 });
5366 } else {
5367 var_target_map = FixedArrayMapConstant();
5368 Goto(&new_space_handler);
5369 }
5370 }
5371 }
5372
5373 BIND(&new_space_handler);
5374 {
5375 Comment("Copy FixedArray in young generation");
5376 // We use PACKED_ELEMENTS to tell AllocateFixedArray and
5377 // CopyFixedArrayElements that we want a FixedArray.
5378 const ElementsKind to_kind = PACKED_ELEMENTS;
5380 to_kind, capacity, allocation_flags, var_target_map.value());
5381 var_result = to_elements;
5382
5383#if !defined(V8_ENABLE_SINGLE_GENERATION) && !V8_ENABLE_STICKY_MARK_BITS_BOOL
5384#ifdef DEBUG
5385 TNode<IntPtrT> object_word = BitcastTaggedToWord(to_elements);
5386 TNode<IntPtrT> object_page_header = MemoryChunkFromAddress(object_word);
5387 TNode<IntPtrT> page_flags = Load<IntPtrT>(
5388 object_page_header, IntPtrConstant(MemoryChunk::FlagsOffset()));
5389 CSA_DCHECK(
5390 this,
5392 WordAnd(page_flags,
5394 IntPtrConstant(0)));
5395#endif
5396#endif
5397
5398 if (convert_holes == HoleConversionMode::kDontConvert &&
5399 !IsDoubleElementsKind(from_kind)) {
5400 // We can use CopyElements (memcpy) because we don't need to replace or
5401 // convert any values. Since {to_elements} is in new-space, CopyElements
5402 // will efficiently use memcpy.
5403 FillFixedArrayWithValue(to_kind, to_elements, count, capacity,
5404 RootIndex::kTheHoleValue);
5405 CopyElements(to_kind, to_elements, IntPtrConstant(0), source,
5408 } else {
5409 CopyFixedArrayElements(from_kind, source, to_kind, to_elements, first,
5410 count, capacity, SKIP_WRITE_BARRIER, convert_holes,
5411 var_holes_converted);
5412 }
5413 Goto(&done);
5414 }
5415
5416 BIND(&done);
5417 return UncheckedCast<FixedArray>(var_result.value());
5418}
5419
5420template <typename TIndex>
// NOTE(review): Corrupted extract — missing original lines 5421-5422 head
// (return type / qualified name; the trailing Comment names it
// ExtractFixedDoubleArrayFillingHoles), 5433, 5443, 5445 (tail of the
// static_assert at 5444), 5450, 5454, 5479 (head of the hole-checking load
// completed at 5480), 5482 (head of the store completed at 5483) and 5497.
// Left byte-identical; restore from upstream before editing.
// Purpose (per comments in body): copies a FixedDoubleArray slice backwards;
// on encountering a hole it restarts via ExtractToFixedArray into a tagged
// FixedArray with undefined substituted, signalling through
// |var_holes_converted|.
5423 TNode<TIndex> capacity, TNode<Map> fixed_array_map,
5424 TVariable<BoolT>* var_holes_converted, AllocationFlags allocation_flags,
5425 ExtractFixedArrayFlags extract_flags) {
5426 static_assert(std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, IntPtrT>,
5427 "Only Smi or IntPtrT first, count, and capacity are allowed");
5428
5429 DCHECK_NE(var_holes_converted, nullptr);
5430 CSA_DCHECK(this, IsFixedDoubleArrayMap(fixed_array_map));
5431
5432 TVARIABLE(FixedArrayBase, var_result);
5434 TNode<FixedArrayBase> to_elements =
5435 AllocateFixedArray(kind, capacity, allocation_flags, fixed_array_map);
5436 var_result = to_elements;
5437 // We first try to copy the FixedDoubleArray to a new FixedDoubleArray.
5438 // |var_holes_converted| is set to False preliminarily.
5439 *var_holes_converted = Int32FalseConstant();
5440
5441 // The construction of the loop and the offsets for double elements is
5442 // extracted from CopyFixedArrayElements.
5444 static_assert(OFFSET_OF_DATA_START(FixedArray) ==
5446
5447 Comment("[ ExtractFixedDoubleArrayFillingHoles");
5448
5449 // This copy can trigger GC, so we pre-initialize the array with holes.
5451 capacity, RootIndex::kTheHoleValue);
5452
5453 const int first_element_offset =
5455 TNode<IntPtrT> first_from_element_offset =
5456 ElementOffsetFromIndex(first, kind, 0);
5457 TNode<IntPtrT> limit_offset = IntPtrAdd(first_from_element_offset,
5458 IntPtrConstant(first_element_offset));
5459 TVARIABLE(IntPtrT, var_from_offset,
5460 ElementOffsetFromIndex(IntPtrOrSmiAdd(first, count), kind,
5461 first_element_offset));
5462
5463 Label decrement(this, {&var_from_offset}), done(this);
5464 TNode<IntPtrT> to_array_adjusted =
5465 IntPtrSub(BitcastTaggedToWord(to_elements), first_from_element_offset);
5466
5467 Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
5468
5469 BIND(&decrement);
5470 {
5471 TNode<IntPtrT> from_offset =
5472 IntPtrSub(var_from_offset.value(), IntPtrConstant(kDoubleSize));
5473 var_from_offset = from_offset;
5474
5475 TNode<IntPtrT> to_offset = from_offset;
5476
5477 Label if_hole(this);
5478
5480 from_array, var_from_offset.value(), &if_hole, MachineType::Float64());
5481
5483 to_offset, value);
5484
5485 TNode<BoolT> compare = WordNotEqual(from_offset, limit_offset);
5486 Branch(compare, &decrement, &done);
5487
5488 BIND(&if_hole);
5489 // We are unlucky: there are holes! We need to restart the copy, this time
5490 // we will copy the FixedDoubleArray to a new FixedArray with undefined
5491 // replacing holes. We signal this to the caller through
5492 // |var_holes_converted|.
5493 *var_holes_converted = Int32TrueConstant();
5494 to_elements =
5495 ExtractToFixedArray(from_array, first, count, capacity, fixed_array_map,
5496 kind, allocation_flags, extract_flags,
5498 var_result = to_elements;
5499 Goto(&done);
5500 }
5501
5502 BIND(&done);
5503 Comment("] ExtractFixedDoubleArrayFillingHoles");
5504 return var_result.value();
5505}
5506
5507template <typename TIndex>
// NOTE(review): Corrupted extract — missing original lines 5508 head (return
// type / qualified name of this ExtractFixedArray template; explicit
// instantiations at 5604-5614 confirm the name), 5515-5516, 5523 (the else
// branch of the convert_holes ternary), 5531 (first argument of the
// IntPtrOrSmiSub at 5530), 5558 (the ExtractToFixedArray call head), 5571
// and 5579, 5585. Left byte-identical; restore from upstream before editing.
// Purpose (inferred): normalizes the optional first/count/capacity
// parameters, then dispatches to the FixedArray or FixedDoubleArray copy
// path (with optional hole-to-undefined conversion), returning the empty
// fixed array for zero capacity.
5509 TNode<FixedArrayBase> source, std::optional<TNode<TIndex>> first,
5510 std::optional<TNode<TIndex>> count, std::optional<TNode<TIndex>> capacity,
5511 ExtractFixedArrayFlags extract_flags, TVariable<BoolT>* var_holes_converted,
5512 std::optional<TNode<Int32T>> source_elements_kind) {
5513 static_assert(std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, IntPtrT>,
5514 "Only Smi or IntPtrT first, count, and capacity are allowed");
5517 // If we want to replace holes, ExtractFixedArrayFlag::kDontCopyCOW should
5518 // not be used, because that disables the iteration which detects holes.
5519 DCHECK_IMPLIES(var_holes_converted != nullptr,
5520 !(extract_flags & ExtractFixedArrayFlag::kDontCopyCOW));
5521 HoleConversionMode convert_holes =
5522 var_holes_converted != nullptr ? HoleConversionMode::kConvertToUndefined
5524 TVARIABLE(FixedArrayBase, var_result);
5525 auto allocation_flags = AllocationFlag::kNone;
5526 if (!first) {
5527 first = IntPtrOrSmiConstant<TIndex>(0);
5528 }
5529 if (!count) {
5530 count = IntPtrOrSmiSub(
5532
5533 CSA_DCHECK(this, IntPtrOrSmiLessThanOrEqual(IntPtrOrSmiConstant<TIndex>(0),
5534 *count));
5535 }
5536 if (!capacity) {
5537 capacity = *count;
5538 } else {
5539 CSA_DCHECK(this, Word32BinaryNot(IntPtrOrSmiGreaterThan(
5540 IntPtrOrSmiAdd(*first, *count), *capacity)));
5541 }
5542
5543 Label if_fixed_double_array(this), empty(this), done(this, &var_result);
5544 TNode<Map> source_map = LoadMap(source);
5545 GotoIf(IntPtrOrSmiEqual(IntPtrOrSmiConstant<TIndex>(0), *capacity), &empty);
5546
5547 if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
5548 if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
5549 GotoIf(IsFixedDoubleArrayMap(source_map), &if_fixed_double_array);
5550 } else {
5551 CSA_DCHECK(this, IsFixedDoubleArrayMap(source_map));
5552 }
5553 }
5554
5555 if (extract_flags & ExtractFixedArrayFlag::kFixedArrays) {
5556 // Here we can only get |source| as FixedArray, never FixedDoubleArray.
5557 // PACKED_ELEMENTS is used to signify that the source is a FixedArray.
5559 source, *first, *count, *capacity, source_map, PACKED_ELEMENTS,
5560 allocation_flags, extract_flags, convert_holes, var_holes_converted,
5561 source_elements_kind);
5562 var_result = to_elements;
5563 Goto(&done);
5564 }
5565
5566 if (extract_flags & ExtractFixedArrayFlag::kFixedDoubleArrays) {
5567 BIND(&if_fixed_double_array);
5568 Comment("Copy FixedDoubleArray");
5569
5570 if (convert_holes == HoleConversionMode::kConvertToUndefined) {
5572 source, *first, *count, *capacity, source_map, var_holes_converted,
5573 allocation_flags, extract_flags);
5574 var_result = to_elements;
5575 } else {
5576 // We use PACKED_DOUBLE_ELEMENTS to signify that both the source and
5577 // the target are FixedDoubleArray. That it is PACKED or HOLEY does not
5578 // matter.
5580 TNode<FixedArrayBase> to_elements =
5581 AllocateFixedArray(kind, *capacity, allocation_flags, source_map);
5582 FillFixedArrayWithValue(kind, to_elements, *count, *capacity,
5583 RootIndex::kTheHoleValue);
5584 CopyElements(kind, to_elements, IntPtrConstant(0), source,
5586 var_result = to_elements;
5587 }
5588
5589 Goto(&done);
5590 }
5591
5592 BIND(&empty);
5593 {
5594 Comment("Copy empty array");
5595
5596 var_result = EmptyFixedArrayConstant();
5597 Goto(&done);
5598 }
5599
5600 BIND(&done);
5601 return var_result.value();
5602}
5603
5606 TNode<FixedArrayBase>, std::optional<TNode<Smi>>, std::optional<TNode<Smi>>,
5607 std::optional<TNode<Smi>>, ExtractFixedArrayFlags, TVariable<BoolT>*,
5608 std::optional<TNode<Int32T>>);
5609
5612 TNode<FixedArrayBase>, std::optional<TNode<IntPtrT>>,
5613 std::optional<TNode<IntPtrT>>, std::optional<TNode<IntPtrT>>,
5614 ExtractFixedArrayFlags, TVariable<BoolT>*, std::optional<TNode<Int32T>>);
5615
// NOTE(review): Corrupted extract — missing original lines 5616 (signature;
// named InitializePropertyArrayLength per the call at 5636) and 5621-5622
// (the upper-bound operand of the DCHECK and the store call head completed
// at 5623). Left byte-identical; restore from upstream before editing.
// Purpose (inferred): writes SmiTag(length) into the PropertyArray's
// length-and-hash field; length must be positive.
5617 TNode<PropertyArray> property_array, TNode<IntPtrT> length) {
5618 CSA_DCHECK(this, IntPtrGreaterThan(length, IntPtrConstant(0)));
5619 CSA_DCHECK(this,
5620 IntPtrLessThanOrEqual(
5623 property_array, PropertyArray::kLengthAndHashOffset, SmiTag(length));
5624}
5625
// NOTE(review): Corrupted extract — missing original lines 5626 (signature),
// 5631 and 5633 (the Allocate call producing |array| used at 5634). Left
// byte-identical; restore from upstream before editing.
// Purpose (inferred): allocates a PropertyArray of |capacity| (> 0),
// installs the PropertyArray map and the length field.
5627 TNode<IntPtrT> capacity) {
5628 CSA_DCHECK(this, IntPtrGreaterThan(capacity, IntPtrConstant(0)));
5629 TNode<IntPtrT> total_size = GetPropertyArrayAllocationSize(capacity);
5630
5632 RootIndex map_index = RootIndex::kPropertyArrayMap;
5634 StoreMapNoWriteBarrier(array, map_index);
5635 TNode<PropertyArray> property_array = CAST(array);
5636 InitializePropertyArrayLength(property_array, capacity);
5637 return property_array;
5638}
5639
// NOTE(review): Corrupted extract — missing original lines 5640 (signature),
// 5643, 5645 (the fill-loop call head whose arguments start at 5646), 5648
// (the store inside the lambda completed at 5649) and 5651 (the loop's
// trailing arguments). Left byte-identical; restore from upstream before
// editing.
// Purpose (inferred): stores the undefined constant into every slot of
// |array| in [from_index, to_index).
5641 TNode<PropertyArray> array, TNode<IntPtrT> from_index,
5642 TNode<IntPtrT> to_index) {
5644 TNode<Undefined> value = UndefinedConstant();
5646 array, kind, from_index, to_index,
5647 [this, value](TNode<HeapObject> array, TNode<IntPtrT> offset) {
5649 value);
5650 },
5652}
5653
5654template <typename TIndex>
// NOTE(review): Corrupted extract — missing original lines 5655-5656 head
// (qualified name of this FillFixedArrayWithValue template; instantiation
// residue at 5689-5696 confirms the name), 5662, 5670 (the double-kind
// condition guarding the LoadHeapNumberValue at 5671), 5674 (the fill-loop
// call head), 5677-5679 and 5682 (the two store calls inside the lambda) and
// 5686 (trailing loop arguments). Left byte-identical; restore from upstream
// before editing.
// Purpose (inferred): fills [from_index, to_index) of |array| with the root
// value (the-hole or undefined), using the raw float64 representation for
// double elements kinds.
5657 TNode<TIndex> from_index,
5658 TNode<TIndex> to_index,
5659 RootIndex value_root_index) {
5660 static_assert(std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, IntPtrT>,
5661 "Only Smi or IntPtrT from and to are allowed");
5663 DCHECK(value_root_index == RootIndex::kTheHoleValue ||
5664 value_root_index == RootIndex::kUndefinedValue);
5666 // Determine the value to initialize the {array} based
5667 // on the {value_root_index} and the elements {kind}.
5668 TNode<Object> value = LoadRoot(value_root_index);
5669 TNode<Float64T> float_value;
5671 float_value = LoadHeapNumberValue(CAST(value));
5672 }
5675 array, kind, from_index, to_index,
5676 [this, value, float_value, kind](TNode<HeapObject> array,
5680 float_value);
5681 } else {
5683 value);
5684 }
5685 },
5687}
5688
5689template V8_EXPORT_PRIVATE void
5694 RootIndex);
5697
// NOTE(review): Corrupted extract — missing original lines 5698-5699 (the
// signature; the parallel StoreDoubleUndefined at 5720 suggests this is
// StoreDoubleHole(object, offset)), 5701-5702 (the hole-NaN constant
// selection completed at 5703 is cut), 5707, 5711 and 5713-5714 (the store
// call heads for the 64-bit and the two 32-bit halves). Left byte-identical;
// restore from upstream before editing.
5700 TNode<UintPtrT> double_hole =
5703 // TODO(danno): When we have a Float32/Float64 wrapper class that
5704 // preserves double bits during manipulation, remove this code/change
5705 // this to an indexed Float64 store.
5706 if (Is64()) {
5708 double_hole);
5709 } else {
5710 static_assert(kHoleNanLower32 == kHoleNanUpper32);
5712 double_hole);
5715 double_hole);
5716 }
5717}
5718
5719#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
// NOTE(review): Corrupted extract — missing original lines 5721 (the second
// parameter, presumably the offset stored through at 5729/5733), 5729, 5733
// and 5735-5736 (the store call heads). Left byte-identical; restore from
// upstream before editing.
// Purpose (inferred): writes the undefined-NaN bit pattern into a double
// field — one 64-bit store on 64-bit targets, two 32-bit stores otherwise.
5720void CodeStubAssembler::StoreDoubleUndefined(TNode<HeapObject> object,
5722 TNode<UintPtrT> double_undefined =
5723 Is64() ? ReinterpretCast<UintPtrT>(Int64Constant(kUndefinedNanInt64))
5724 : ReinterpretCast<UintPtrT>(Int32Constant(kUndefinedNanLower32));
5725 // TODO(danno): When we have a Float32/Float64 wrapper class that
5726 // preserves double bits during manipulation, remove this code/change
5727 // this to an indexed Float64 store.
5728 if (Is64()) {
5730 double_undefined);
5731 } else {
5732 static_assert(kUndefinedNanLower32 == kUndefinedNanUpper32);
5734 double_undefined);
5737 double_undefined);
5738 }
5739}
5740#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
5741
5753
5754#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
// NOTE(review): Corrupted extract — missing original lines 5756-5759 (the
// parameter list and start of the body) and 5761-5763 (the CSA_DCHECK
// condition and the offset computation used at 5764). Left byte-identical;
// restore from upstream before editing.
5755void CodeStubAssembler::StoreFixedDoubleArrayUndefined(
5760 CSA_DCHECK(this,
5764 StoreDoubleUndefined(array, offset);
5765}
5766#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
5767
// NOTE(review): Corrupted extract — missing original lines 5768 (signature),
// 5770 (a parameter, presumably |start| given its use at 5774/5783), 5772,
// 5775 (the array-length bound of the DCHECK), 5781-5782 (the offset
// computation completed at 5783), 5787 and 5790 (the CallCFunction head for
// memset). Left byte-identical; restore from upstream before editing.
// Purpose (inferred): zeroes |length| tagged slots of |array| starting at
// |start| by calling out to libc memset (Smi zero is the all-zero bit
// pattern).
5769 TNode<FixedArray> array,
5771 TNode<IntPtrT> length) {
5773 CSA_DCHECK(this,
5774 IntPtrLessThanOrEqual(IntPtrAdd(start, length),
5776
5777 TNode<IntPtrT> byte_length = TimesTaggedSize(length);
5778 CSA_DCHECK(this, UintPtrLessThan(length, byte_length));
5779
5780 static const int32_t fa_base_data_offset =
5783 ElementOffsetFromIndex(start, kind, fa_base_data_offset);
5784 TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array), offset);
5785
5786 // Call out to memset to perform initialization.
5788 ExternalConstant(ExternalReference::libc_memset_function());
5789 static_assert(kSizetSize == kIntptrSize);
5791 std::make_pair(MachineType::Pointer(), backing_store),
5792 std::make_pair(MachineType::IntPtr(), IntPtrConstant(0)),
5793 std::make_pair(MachineType::UintPtr(), byte_length));
5794}
5795
// NOTE(review): Corrupted extract — missing original lines 5796 (signature
// and earlier parameters; the double-size arithmetic at 5803 marks this as
// the FixedDoubleArray zero-fill counterpart of the function above),
// 5801, 5807-5808 (the offset computation completed at 5809), 5813 and 5816
// (the CallCFunction head for memset). Left byte-identical; restore from
// upstream before editing.
5797 TNode<IntPtrT> length) {
5799 CSA_DCHECK(this,
5800 IntPtrLessThanOrEqual(IntPtrAdd(start, length),
5802
5803 TNode<IntPtrT> byte_length = TimesDoubleSize(length);
5804 CSA_DCHECK(this, UintPtrLessThan(length, byte_length));
5805
5806 static const int32_t fa_base_data_offset =
5809 fa_base_data_offset);
5810 TNode<IntPtrT> backing_store = IntPtrAdd(BitcastTaggedToWord(array), offset);
5811
5812 // Call out to memset to perform initialization.
5814 ExternalConstant(ExternalReference::libc_memset_function());
5815 static_assert(kSizetSize == kIntptrSize);
5817 std::make_pair(MachineType::Pointer(), backing_store),
5818 std::make_pair(MachineType::IntPtr(), IntPtrConstant(0)),
5819 std::make_pair(MachineType::UintPtr(), byte_length));
5820}
5821
// NOTE(review): Corrupted extract — missing original lines 5822 (signature;
// named TrySkipWriteBarrier per the call at 5858), 5824 and 5826-5827 (the
// computation of |may_need_write_barrier| started at 5825). Left
// byte-identical; restore from upstream before editing.
// Purpose (inferred): branches to |if_needs_write_barrier| when the object
// may require one; otherwise falls through.
5823 Label* if_needs_write_barrier) {
5825 TNode<BoolT> may_need_write_barrier =
5828 // TODO(olivf): Also skip the WB with V8_ENABLE_STICKY_MARK_BITS if the mark
5829 // bit is set.
5830 GotoIf(may_need_write_barrier, if_needs_write_barrier);
5831}
5832
// NOTE(review): Corrupted extract — missing original lines 5833 (signature;
// a MoveElements-style overlapping move per the memmove call at 5873),
// 5846, 5850, 5853 (bounds operands of the CSA_DCHECKs), 5862, 5864, 5874
// (the CallCFunction head for memmove), 5890 (second operand of the IntPtrMul
// at 5889), 5905-5906 and 5913-5914 (trailing BuildFastArrayForEach
// arguments). Left byte-identical; restore from upstream before editing.
// Purpose (inferred): moves |length| elements within one backing store from
// src_index to dst_index via memmove when the write barrier can be skipped;
// otherwise performs an element-wise loop, walking forward or backward
// depending on the direction of overlap.
5834 TNode<FixedArrayBase> elements,
5835 TNode<IntPtrT> dst_index,
5836 TNode<IntPtrT> src_index,
5837 TNode<IntPtrT> length) {
5838 Label finished(this);
5839 Label needs_barrier(this);
5840#ifdef V8_DISABLE_WRITE_BARRIERS
5841 const bool needs_barrier_check = false;
5842#else
5843 const bool needs_barrier_check = !IsDoubleElementsKind(kind);
5844#endif // V8_DISABLE_WRITE_BARRIERS
5845
5847 CSA_DCHECK(this, IsFixedArrayWithKind(elements, kind));
5848 CSA_DCHECK(this,
5849 IntPtrLessThanOrEqual(IntPtrAdd(dst_index, length),
5851 CSA_DCHECK(this,
5852 IntPtrLessThanOrEqual(IntPtrAdd(src_index, length),
5854
5855 // The write barrier can be ignored if {dst_elements} is in new space, or if
5856 // the elements pointer is FixedDoubleArray.
5857 if (needs_barrier_check) {
5858 TrySkipWriteBarrier(elements, &needs_barrier);
5859 }
5860
5861 const TNode<IntPtrT> source_byte_length =
5863 static const int32_t fa_base_data_offset =
5865 TNode<IntPtrT> elements_intptr = BitcastTaggedToWord(elements);
5866 TNode<IntPtrT> target_data_ptr =
5867 IntPtrAdd(elements_intptr,
5868 ElementOffsetFromIndex(dst_index, kind, fa_base_data_offset));
5869 TNode<IntPtrT> source_data_ptr =
5870 IntPtrAdd(elements_intptr,
5871 ElementOffsetFromIndex(src_index, kind, fa_base_data_offset));
5872 TNode<ExternalReference> memmove =
5873 ExternalConstant(ExternalReference::libc_memmove_function());
5875 std::make_pair(MachineType::Pointer(), target_data_ptr),
5876 std::make_pair(MachineType::Pointer(), source_data_ptr),
5877 std::make_pair(MachineType::UintPtr(), source_byte_length));
5878
5879 if (needs_barrier_check) {
5880 Goto(&finished);
5881
5882 BIND(&needs_barrier);
5883 {
5884 const TNode<IntPtrT> begin = src_index;
5885 const TNode<IntPtrT> end = IntPtrAdd(begin, length);
5886
5887 // If dst_index is less than src_index, then walk forward.
5888 const TNode<IntPtrT> delta =
5889 IntPtrMul(IntPtrSub(dst_index, begin),
5891 auto loop_body = [&](TNode<HeapObject> array, TNode<IntPtrT> offset) {
5892 const TNode<AnyTaggedT> element = Load<AnyTaggedT>(array, offset);
5893 const TNode<WordT> delta_offset = IntPtrAdd(offset, delta);
5894 Store(array, delta_offset, element);
5895 };
5896
5897 Label iterate_forward(this);
5898 Label iterate_backward(this);
5899 Branch(IntPtrLessThan(delta, IntPtrConstant(0)), &iterate_forward,
5900 &iterate_backward);
5901 BIND(&iterate_forward);
5902 {
5903 // Make a loop for the stores.
5904 BuildFastArrayForEach(elements, kind, begin, end, loop_body,
5907 Goto(&finished);
5908 }
5909
5910 BIND(&iterate_backward);
5911 {
5912 BuildFastArrayForEach(elements, kind, begin, end, loop_body,
5915 Goto(&finished);
5916 }
5917 }
5918 BIND(&finished);
5919 }
5920}
5921
// Copies `length` elements from src_elements to a distinct dst_elements
// array (the DCHECK below requires distinct arrays unless length is 0).
// Fast path uses libc memcpy; if a write barrier may be needed, falls back
// to an element-wise copy honoring `write_barrier`.
// NOTE(review): the signature line (5922) is missing — presumably
// CopyElements(ElementsKind kind, ...).
5923 TNode<FixedArrayBase> dst_elements,
5924 TNode<IntPtrT> dst_index,
5925 TNode<FixedArrayBase> src_elements,
5926 TNode<IntPtrT> src_index,
5927 TNode<IntPtrT> length,
5928 WriteBarrierMode write_barrier) {
5929 Label finished(this);
5930 Label needs_barrier(this);
5931#ifdef V8_DISABLE_WRITE_BARRIERS
5932 const bool needs_barrier_check = false;
5933#else
// Double arrays store raw bits, never pointers, so no barrier is needed.
5934 const bool needs_barrier_check = !IsDoubleElementsKind(kind);
5935#endif // V8_DISABLE_WRITE_BARRIERS
5936
// Sanity checks: both ranges in bounds, and the two arrays must not alias
// (memcpy below has undefined behavior on overlap) unless length == 0.
5938 CSA_DCHECK(this, IsFixedArrayWithKind(dst_elements, kind));
5939 CSA_DCHECK(this, IsFixedArrayWithKind(src_elements, kind));
5940 CSA_DCHECK(this, IntPtrLessThanOrEqual(
5941 IntPtrAdd(dst_index, length),
5942 LoadAndUntagFixedArrayBaseLength(dst_elements)));
5943 CSA_DCHECK(this, IntPtrLessThanOrEqual(
5944 IntPtrAdd(src_index, length),
5945 LoadAndUntagFixedArrayBaseLength(src_elements)));
5946 CSA_DCHECK(this, Word32Or(TaggedNotEqual(dst_elements, src_elements),
5947 IntPtrEqual(length, IntPtrConstant(0))));
5948
5949 // The write barrier can be ignored if {dst_elements} is in new space, or if
5950 // the elements pointer is FixedDoubleArray.
5951 if (needs_barrier_check) {
5952 TrySkipWriteBarrier(dst_elements, &needs_barrier);
5953 }
5954
// Fast path: raw memcpy of the element bytes.
// (The byte-length initializer on the dropped line 5956 is missing here.)
5955 TNode<IntPtrT> source_byte_length =
5957 static const int32_t fa_base_data_offset =
5959 TNode<IntPtrT> src_offset_start =
5960 ElementOffsetFromIndex(src_index, kind, fa_base_data_offset);
5961 TNode<IntPtrT> dst_offset_start =
5962 ElementOffsetFromIndex(dst_index, kind, fa_base_data_offset);
5963 TNode<IntPtrT> src_elements_intptr = BitcastTaggedToWord(src_elements);
5964 TNode<IntPtrT> source_data_ptr =
5965 IntPtrAdd(src_elements_intptr, src_offset_start);
5966 TNode<IntPtrT> dst_elements_intptr = BitcastTaggedToWord(dst_elements);
5967 TNode<IntPtrT> dst_data_ptr =
5968 IntPtrAdd(dst_elements_intptr, dst_offset_start);
5970 ExternalConstant(ExternalReference::libc_memcpy_function());
5972 std::make_pair(MachineType::Pointer(), dst_data_ptr),
5973 std::make_pair(MachineType::Pointer(), source_data_ptr),
5974 std::make_pair(MachineType::UintPtr(), source_byte_length));
5975
// Slow path: iterate the source range; for each element, store into the
// destination at the same logical index (source offset plus `delta`).
5976 if (needs_barrier_check) {
5977 Goto(&finished);
5978
5979 BIND(&needs_barrier);
5980 {
5981 const TNode<IntPtrT> begin = src_index;
5982 const TNode<IntPtrT> end = IntPtrAdd(begin, length);
5983 const TNode<IntPtrT> delta =
5984 IntPtrMul(IntPtrSub(dst_index, src_index),
5987 src_elements, kind, begin, end,
5989 const TNode<AnyTaggedT> element = Load<AnyTaggedT>(array, offset);
5990 const TNode<WordT> delta_offset = IntPtrAdd(offset, delta);
// Honor the caller-requested barrier mode for each store.
5991 if (write_barrier == SKIP_WRITE_BARRIER) {
5993 delta_offset, element);
5994 } else {
5995 Store(dst_elements, delta_offset, element);
5996 }
5997 },
5999 Goto(&finished);
6000 }
6001 BIND(&finished);
6002 }
6003}
6004
// Copies `length_in_tagged` tagged fields from src_object (starting at byte
// offset src_offset) to dst_object (starting at dst_offset), one field per
// loop iteration, honoring the requested WriteBarrierMode.
// NOTE(review): the loop-header line (6011) and trailing loop-config line
// (6025) were dropped by the extraction.
6005void CodeStubAssembler::CopyRange(TNode<HeapObject> dst_object, int dst_offset,
6006 TNode<HeapObject> src_object, int src_offset,
6007 TNode<IntPtrT> length_in_tagged,
6008 WriteBarrierMode mode) {
6009 // TODO(jgruber): This could be a lot more involved (e.g. better code when
6010 // write barriers can be skipped). Extend as needed.
6012 IntPtrConstant(0), length_in_tagged,
6013 [=, this](TNode<IntPtrT> index) {
// Byte offsets advance by one tagged slot per index step.
6014 TNode<IntPtrT> current_src_offset =
6015 IntPtrAdd(TimesTaggedSize(index), IntPtrConstant(src_offset));
6016 TNode<Object> value = LoadObjectField(src_object, current_src_offset);
6017 TNode<IntPtrT> current_dst_offset =
6018 IntPtrAdd(TimesTaggedSize(index), IntPtrConstant(dst_offset));
6019 if (mode == SKIP_WRITE_BARRIER) {
6020 StoreObjectFieldNoWriteBarrier(dst_object, current_dst_offset, value);
6021 } else {
6022 StoreObjectField(dst_object, current_dst_offset, value);
6023 }
6024 },
6026}
6027
// Copies `element_count` elements starting at `first_element` from
// from_array (with from_kind) into to_array (with to_kind), walking the
// range backwards from the end. Handles Smi/object/double kind conversions,
// hole handling (convert-to-undefined, propagate, or store a double hole),
// and optional write barriers. Indices are Smi or IntPtrT (TIndex).
// NOTE(review): many interior lines were dropped by the extraction (e.g.
// 6031, 6036, 6038, 6040, 6045, 6062, 6065-6066, 6075, 6082, 6113,
// 6153-6155, 6158, 6164, 6182, 6185-6187) — the structure below is
// incomplete; consult the original source before modifying.
6028template <typename TIndex>
6030 ElementsKind from_kind, TNode<FixedArrayBase> from_array,
6032 TNode<TIndex> first_element, TNode<TIndex> element_count,
6033 TNode<TIndex> capacity, WriteBarrierMode barrier_mode,
6034 HoleConversionMode convert_holes, TVariable<BoolT>* var_holes_converted) {
// var_holes_converted is only meaningful when holes are being converted.
6035 DCHECK_IMPLIES(var_holes_converted != nullptr,
6037 CSA_SLOW_DCHECK(this, IsFixedArrayWithKindOrEmpty(from_array, from_kind));
6039 static_assert(OFFSET_OF_DATA_START(FixedArray) ==
6041 static_assert(std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, IntPtrT>,
6042 "Only Smi or IntPtrT indices are allowed");
6043
6044 const int first_element_offset =
6046 Comment("[ CopyFixedArrayElements");
6047
6048 // Typed array elements are not supported.
6049 DCHECK(!IsTypedArrayElementsKind(from_kind));
6051
// Derive the copy strategy from the kind pair: whether source/destination
// store raw doubles, whether a double->object conversion happens, whether
// stores need a write barrier, and whether source and destination element
// byte offsets coincide (allowing one shared offset variable).
6052 Label done(this);
6053 bool from_double_elements = IsDoubleElementsKind(from_kind);
6054 bool to_double_elements = IsDoubleElementsKind(to_kind);
6055 bool doubles_to_objects_conversion =
6056 IsDoubleElementsKind(from_kind) && IsObjectElementsKind(to_kind);
6057 bool needs_write_barrier =
6058 doubles_to_objects_conversion ||
6059 (barrier_mode == UPDATE_WRITE_BARRIER && IsObjectElementsKind(to_kind));
6060 bool element_offset_matches =
6061 !needs_write_barrier &&
6063 IsDoubleElementsKind(from_kind) == IsDoubleElementsKind(to_kind));
6064 TNode<UintPtrT> double_hole =
6067
6068 // If copying might trigger a GC, we pre-initialize the FixedArray such that
6069 // it's always in a consistent state.
6070 if (convert_holes == HoleConversionMode::kConvertToUndefined) {
6071 DCHECK(IsObjectElementsKind(to_kind));
6072 // Use undefined for the part that we copy and holes for the rest.
6073 // Later if we run into a hole in the source we can just skip the writing
6074 // to the target and are still guaranteed that we get an undefined.
6076 element_count, RootIndex::kUndefinedValue);
6077 FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
6078 RootIndex::kTheHoleValue);
6079 } else if (doubles_to_objects_conversion) {
6080 // Pre-initialized the target with holes so later if we run into a hole in
6081 // the source we can just skip the writing to the target.
6083 capacity, RootIndex::kTheHoleValue);
6084 } else if (element_count != capacity) {
6085 FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
6086 RootIndex::kTheHoleValue);
6087 }
6088
// The loop walks offsets downward from the end of the copied range until
// reaching limit_offset (the offset of first_element).
6089 TNode<IntPtrT> first_from_element_offset =
6090 ElementOffsetFromIndex(first_element, from_kind, 0);
6091 TNode<IntPtrT> limit_offset = Signed(IntPtrAdd(
6092 first_from_element_offset, IntPtrConstant(first_element_offset)));
6093 TVARIABLE(IntPtrT, var_from_offset,
6094 ElementOffsetFromIndex(IntPtrOrSmiAdd(first_element, element_count),
6095 from_kind, first_element_offset));
6096 // This second variable is used only when the element sizes of source and
6097 // destination arrays do not match.
6098 TVARIABLE(IntPtrT, var_to_offset);
6099 if (element_offset_matches) {
6100 var_to_offset = var_from_offset.value();
6101 } else {
6102 var_to_offset =
6103 ElementOffsetFromIndex(element_count, to_kind, first_element_offset);
6104 }
6105
6106 VariableList vars({&var_from_offset, &var_to_offset}, zone());
6107 if (var_holes_converted != nullptr) vars.push_back(var_holes_converted);
6108 Label decrement(this, vars);
6109
// When offsets match, pre-bias the destination pointer so the same offset
// variable addresses both arrays.
6110 TNode<IntPtrT> to_array_adjusted =
6111 element_offset_matches
6112 ? IntPtrSub(BitcastTaggedToWord(to_array), first_from_element_offset)
6114
6115 Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
6116
6117 BIND(&decrement);
6118 {
// Step both offsets back by one element (8 bytes for doubles, one tagged
// slot otherwise).
6119 TNode<IntPtrT> from_offset = Signed(IntPtrSub(
6120 var_from_offset.value(),
6121 IntPtrConstant(from_double_elements ? kDoubleSize : kTaggedSize)));
6122 var_from_offset = from_offset;
6123
6124 TNode<IntPtrT> to_offset;
6125 if (element_offset_matches) {
6126 to_offset = from_offset;
6127 } else {
6128 to_offset = IntPtrSub(
6129 var_to_offset.value(),
6130 IntPtrConstant(to_double_elements ? kDoubleSize : kTaggedSize));
6131 var_to_offset = to_offset;
6132 }
6133
// Decide at compile time what to do when the source element is a hole.
6134 Label next_iter(this), store_double_hole(this), signal_hole(this);
6135 Label* if_hole;
6136 if (convert_holes == HoleConversionMode::kConvertToUndefined) {
6137 // The target elements array is already preinitialized with undefined
6138 // so we only need to signal that a hole was found and continue the loop.
6139 if_hole = &signal_hole;
6140 } else if (doubles_to_objects_conversion) {
6141 // The target elements array is already preinitialized with holes, so we
6142 // can just proceed with the next iteration.
6143 if_hole = &next_iter;
6144 } else if (IsDoubleElementsKind(to_kind)) {
6145 if_hole = &store_double_hole;
6146 } else {
6147 // In all the other cases don't check for holes and copy the data as is.
6148 if_hole = nullptr;
6149 }
6150
6151 if (to_double_elements) {
6152 DCHECK(!needs_write_barrier);
6154 from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
6156 to_offset, value);
6157 } else {
6159 from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
6160 if (needs_write_barrier) {
6161 CHECK_EQ(to_array, to_array_adjusted);
6162 Store(to_array_adjusted, to_offset, value);
6163 } else {
6165 to_array_adjusted, to_offset, value);
6166 }
6167 }
6168
6169 Goto(&next_iter);
6170
6171 if (if_hole == &store_double_hole) {
6172 BIND(&store_double_hole);
6173 // Don't use doubles to store the hole double, since manipulating the
6174 // signaling NaN used for the hole in C++, e.g. with base::bit_cast,
6175 // will change its value on ia32 (the x87 stack is used to return values
6176 // and stores to the stack silently clear the signalling bit).
6177 //
6178 // TODO(danno): When we have a Float32/Float64 wrapper class that
6179 // preserves double bits during manipulation, remove this code/change
6180 // this to an indexed Float64 store.
6181 if (Is64()) {
6183 to_offset, double_hole);
6184 } else {
// 32-bit: store the hole pattern as two 32-bit halves.
6186 to_offset, double_hole);
6188 IntPtrAdd(to_offset, IntPtrConstant(kInt32Size)),
6189 double_hole);
6190 }
6191 Goto(&next_iter);
6192 } else if (if_hole == &signal_hole) {
6193 // This case happens only when IsObjectElementsKind(to_kind).
6194 BIND(&signal_hole);
6195 if (var_holes_converted != nullptr) {
6196 *var_holes_converted = Int32TrueConstant();
6197 }
6198 Goto(&next_iter);
6199 }
6200
6201 BIND(&next_iter);
6202 TNode<BoolT> compare = WordNotEqual(from_offset, limit_offset);
6203 Branch(compare, &decrement, &done);
6204 }
6205
6206 BIND(&done);
6207 Comment("] CopyFixedArrayElements");
6208}
6209
// Checked cast of a HeapObject to a FixedArray: succeeds iff the object's
// map is the FixedArray map or the copy-on-write FixedArray map; otherwise
// jumps to cast_fail.
// NOTE(review): the signature line (6210) and the return statement (6218)
// are missing from this extraction.
6211 TNode<HeapObject> base, Label* cast_fail) {
6212 Label fixed_array(this);
6213 TNode<Map> map = LoadMap(base);
6214 GotoIf(TaggedEqual(map, FixedArrayMapConstant()), &fixed_array);
6215 GotoIf(TaggedNotEqual(map, FixedCOWArrayMapConstant()), cast_fail);
6216 Goto(&fixed_array);
6217 BIND(&fixed_array);
6219}
6220
// Copies `property_count` values from a PropertyArray (or empty FixedArray)
// into to_array. When the source will be kept alive (DestroySource::kNo),
// mutable HeapNumbers are cloned so the two arrays do not share them; in
// debug builds with DestroySource::kYes the source is zapped afterwards.
// NOTE(review): signature lines 6221-6222 and the loop-setup lines
// 6238-6240 / 6243-6244 were dropped by the extraction.
6223 TNode<IntPtrT> property_count,
6224 WriteBarrierMode barrier_mode,
6225 DestroySource destroy_source) {
6226 CSA_SLOW_DCHECK(this, Word32Or(IsPropertyArray(from_array),
6227 IsEmptyFixedArray(from_array)));
6228 Comment("[ CopyPropertyArrayValues");
6229
6230 bool needs_write_barrier = barrier_mode == UPDATE_WRITE_BARRIER;
6231
6232 if (destroy_source == DestroySource::kNo) {
6233 // PropertyArray may contain mutable HeapNumbers, which will be cloned on
6234 // the heap, requiring a write barrier.
6235 needs_write_barrier = true;
6236 }
6237
6241 from_array, kind, start, property_count,
6242 [this, to_array, needs_write_barrier, destroy_source](
6245
// Clone mutable HeapNumbers when the source array stays alive.
6246 if (destroy_source == DestroySource::kNo) {
6247 value = CloneIfMutablePrimitive(CAST(value));
6248 }
6249
6250 if (needs_write_barrier) {
6251 Store(to_array, offset, value);
6252 } else {
6254 value);
6255 }
6256 },
6258
6259#ifdef DEBUG
6260 // Zap {from_array} if the copying above has made it invalid.
6261 if (destroy_source == DestroySource::kYes) {
6262 Label did_zap(this);
6263 GotoIf(IsEmptyFixedArray(from_array), &did_zap);
6264 FillPropertyArrayWithUndefined(CAST(from_array), start, property_count);
6265
6266 Goto(&did_zap);
6267 BIND(&did_zap);
6268 }
6269#endif
6270 Comment("] CopyPropertyArrayValues");
6271}
6272
// Convenience overload: extract the whole of `source` (from index 0, with
// count and capacity derived from the source) with the given flags.
// NOTE(review): the signature lines (6273-6274) are missing from this
// extraction.
6275 return ExtractFixedArray(
6276 source, std::optional<TNode<BInt>>(IntPtrOrSmiConstant<BInt>(0)),
6277 std::optional<TNode<BInt>>(std::nullopt),
6278 std::optional<TNode<BInt>>(std::nullopt), flags);
6279}
6280
// Specialization for loading an element destined for a tagged-object store:
// doubles are boxed as HeapNumbers (or become undefined/holes, depending on
// build flags), tagged loads optionally jump to if_hole on the-hole.
// NOTE(review): parts of the signature (6282-6283) and a TVARIABLE line
// (6291) are missing from this extraction.
6281template <>
6284 ElementsKind to_kind, Label* if_hole) {
6285 CSA_DCHECK(this, IsFixedArrayWithKind(array, from_kind));
6286 DCHECK(!IsDoubleElementsKind(to_kind));
6287 if (IsDoubleElementsKind(from_kind)) {
6288#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
// With experimental undefined-in-doubles support, a double slot may encode
// undefined as well as the hole; both are checked by the load below.
6289 Label if_undefined(this);
6290 Label done(this);
6292
6293 TNode<Float64T> value = LoadDoubleWithUndefinedAndHoleCheck(
6294 array, offset, &if_undefined, if_hole, MachineType::Float64());
6296 Goto(&done);
6297
6298 BIND(&if_undefined);
6299 {
6300 result = UndefinedConstant();
6301 Goto(&done);
6302 }
6303
6304 BIND(&done);
6305 return result.value();
6306#else
// Box the raw double as a new HeapNumber for the tagged destination.
6307 TNode<Float64T> value =
6309 return AllocateHeapNumberWithValue(value);
6310#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
6311 } else {
6312 TNode<Object> value = Load<Object>(array, offset);
6313 if (if_hole) {
6314 GotoIf(TaggedEqual(value, TheHoleConstant()), if_hole);
6315 }
6316 return value;
6317 }
6318}
6319
// Specialization for loading an element destined for a double store:
// double sources are loaded raw (with hole check), Smis are converted via
// SmiToFloat64, and HeapNumbers are unboxed.
// NOTE(review): parts of the signature (6321-6322) and line 6328 are
// missing from this extraction.
6320template <>
6323 ElementsKind to_kind, Label* if_hole) {
6324 CSA_DCHECK(this, IsFixedArrayWithKind(array, from_kind));
6325 DCHECK(IsDoubleElementsKind(to_kind));
6326 if (IsDoubleElementsKind(from_kind)) {
6327 return LoadDoubleWithHoleCheck(array, offset, if_hole,
6329 } else {
6330 TNode<Object> value = Load<Object>(array, offset);
6331 if (if_hole) {
6332 GotoIf(TaggedEqual(value, TheHoleConstant()), if_hole);
6333 }
6334 if (IsSmiElementsKind(from_kind)) {
6335 return SmiToFloat64(CAST(value));
6336 }
6337 return LoadHeapNumberValue(CAST(value));
6338 }
6339}
6340
// Computes a grown elements capacity: new = old + old/2 + padding
// (i.e. 1.5x growth plus a constant from the dropped line 6350).
// NOTE(review): the signature line (6342) and the padding constant line
// (6350) are missing from this extraction.
6341template <typename TIndex>
6343 TNode<TIndex> old_capacity) {
6344 static_assert(std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, IntPtrT>,
6345 "Only Smi or IntPtrT old_capacity is allowed");
6346 Comment("TryGrowElementsCapacity");
6347 TNode<TIndex> half_old_capacity = WordOrSmiShr(old_capacity, 1);
6348 TNode<TIndex> new_capacity = IntPtrOrSmiAdd(half_old_capacity, old_capacity);
6349 TNode<TIndex> padding =
6351 return IntPtrOrSmiAdd(new_capacity, padding);
6352}
6353
6358
6369
// Grows the elements backing store of `object` so that `key` fits. Bails out
// to the runtime when the gap between key and current capacity is too large;
// otherwise computes a new capacity covering key+1 and delegates to
// GrowElementsCapacity (same from/to kind).
// NOTE(review): signature lines 6371-6372 and the max_gap / new-capacity
// initializer lines (6377, 6380, 6385) were dropped by the extraction.
6370template <typename TIndex>
6373 TNode<TIndex> key, TNode<TIndex> capacity, Label* bailout) {
6374 static_assert(std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, IntPtrT>,
6375 "Only Smi or IntPtrT key and capacity nodes are allowed");
6376 Comment("TryGrowElementsCapacity");
6378
6379 // If the gap growth is too big, fall back to the runtime.
6381 TNode<TIndex> max_capacity = IntPtrOrSmiAdd(capacity, max_gap);
6382 GotoIf(UintPtrOrSmiGreaterThanOrEqual(key, max_capacity), bailout);
6383
6384 // Calculate the capacity of the new backing store.
6386 IntPtrOrSmiAdd(key, IntPtrOrSmiConstant<TIndex>(1)));
6387
6388 return GrowElementsCapacity(object, elements, kind, kind, capacity,
6389 new_capacity, bailout);
6390}
6391
// Allocates a new FixedArray(Base) of new_capacity elements (to_kind),
// copies the existing `capacity` elements over (from_kind), installs it as
// the object's elements, and returns it. Bails out when the allocation
// would exceed the maximum new-space-allocatable size.
// NOTE(review): signature lines 6393-6394 and the max_size constant line
// (6404) were dropped by the extraction.
6392template <typename TIndex>
6395 ElementsKind from_kind, ElementsKind to_kind, TNode<TIndex> capacity,
6396 TNode<TIndex> new_capacity, Label* bailout) {
6397 static_assert(std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, IntPtrT>,
6398 "Only Smi or IntPtrT capacities are allowed");
6399 Comment("[ GrowElementsCapacity");
6400 CSA_SLOW_DCHECK(this, IsFixedArrayWithKindOrEmpty(elements, from_kind));
6401
6402 // If size of the allocation for the new capacity doesn't fit in a page
6403 // that we can bump-pointer allocate from, fall back to the runtime.
6405 GotoIf(UintPtrOrSmiGreaterThanOrEqual(new_capacity,
6406 IntPtrOrSmiConstant<TIndex>(max_size)),
6407 bailout);
6408
6409 // Allocate the new backing store.
6410 TNode<FixedArrayBase> new_elements =
6411 AllocateFixedArray(to_kind, new_capacity);
6412
6413 // Copy the elements from the old elements store to the new.
6414 // The size-check above guarantees that the |new_elements| is allocated
6415 // in new space so we can skip the write barrier.
6416 CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
6417 new_capacity, SKIP_WRITE_BARRIER);
6418
6419 StoreObjectField(object, JSObject::kElementsOffset, new_elements);
6420 Comment("] GrowElementsCapacity");
6421 return new_elements;
6422}
6423
6427
6428namespace {
6429
6430// Helper function for folded memento allocation.
6431// Memento objects are designed to be put right after the objects they are
6432// tracking on. So memento allocations have to be folded together with previous
6433// object allocations.
// Returns the address immediately after `previous` (previous + offset,
// bitcast back to a tagged HeapObject). The parameter lines (6435-6436) and
// the address computation line (6438) were dropped by the extraction.
6434TNode<HeapObject> InnerAllocateMemento(CodeStubAssembler* csa,
6437 return csa->UncheckedCast<HeapObject>(csa->BitcastWordToTagged(
6439}
6440
6441} // namespace
6442
// Writes an AllocationMemento immediately after `base` (at
// base_allocation_size) pointing at `allocation_site`, and — when
// allocation-site pretenuring is enabled — bumps the site's creation count.
// NOTE(review): the signature line (6443), a DCHECK/flag line (6446), and
// store/load helper-call lines (6451, 6454, 6458) were dropped by the
// extraction.
6444 TNode<HeapObject> base, TNode<IntPtrT> base_allocation_size,
6445 TNode<AllocationSite> allocation_site) {
6447 Comment("[Initialize AllocationMemento");
6448 TNode<HeapObject> memento =
6449 InnerAllocateMemento(this, base, base_allocation_size);
6450 StoreMapNoWriteBarrier(memento, RootIndex::kAllocationMementoMap);
6452 memento, offsetof(AllocationMemento, allocation_site_), allocation_site);
6453 if (v8_flags.allocation_site_pretenuring) {
6455 allocation_site, offsetof(AllocationSite, pretenure_create_count_));
6456
// Non-atomic increment: allocation-site counters tolerate races.
// (presumably — confirm against the original source)
6457 TNode<Int32T> incremented_count = Int32Add(count, Int32Constant(1));
6459 allocation_site, offsetof(AllocationSite, pretenure_create_count_),
6460 incremented_count);
6461 }
6462 Comment("]");
6463}
6464
// Converts a tagged value to an IntPtr if it is a Smi or a HeapNumber whose
// value is exactly representable as int32; otherwise jumps to
// if_not_possible.
// NOTE(review): the signature line (6465) and the HeapNumber-value load
// (6476) were dropped by the extraction.
6466 TNode<Object> acc, Label* if_not_possible) {
6467 TVARIABLE(IntPtrT, acc_intptr);
6468 Label is_not_smi(this), have_int32(this);
6469
// Smi fast path: untag directly.
6470 GotoIfNot(TaggedIsSmi(acc), &is_not_smi);
6471 acc_intptr = SmiUntag(CAST(acc));
6472 Goto(&have_int32);
6473
6474 BIND(&is_not_smi);
6475 GotoIfNot(IsHeapNumber(CAST(acc)), if_not_possible);
6477 TNode<Int32T> value32 = RoundFloat64ToInt32(value);
6478 TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
// Round-trip check: only accept doubles that are exactly an int32.
6479 GotoIfNot(Float64Equal(value, value64), if_not_possible);
6480 acc_intptr = ChangeInt32ToIntPtr(value32);
6481 Goto(&have_int32);
6482
6483 BIND(&have_int32);
6484 return acc_intptr.value();
6485}
6486
// Returns the Float64 value of a tagged Number (Smi or HeapNumber); jumps to
// if_valueisnotnumber for anything else, and (with experimental
// undefined-in-doubles) optionally to if_valueisundefined for undefined.
// NOTE(review): the signature line (6487) was dropped by the extraction.
6488 TNode<Object> value,
6489#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
6490 Label* if_valueisundefined,
6491#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
6492 Label* if_valueisnotnumber) {
6493 return Select<Float64T>(
6494 TaggedIsSmi(value), [&]() { return SmiToFloat64(CAST(value)); },
6495 [&]() {
6496#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
// Undefined handling is opt-in: a null label means callers want the
// regular not-a-number path for undefined.
6497 if (if_valueisundefined) {
6498 GotoIf(IsUndefined(value), if_valueisundefined);
6499 }
6500#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
6501 GotoIfNot(IsHeapNumber(CAST(value)), if_valueisnotnumber);
6502 return LoadHeapNumberValue(CAST(value));
6503 });
6504}
6505
// Converts any tagged value to Float64, calling NonNumberToNumber (which may
// run arbitrary JS via valueOf/toString) and looping until the result is a
// Number.
// NOTE(review): the signature line (6506) was dropped by the extraction.
6507 TNode<Context> context, TNode<Object> value) {
6508 // We might need to loop once due to ToNumber conversion.
6509 TVARIABLE(Object, var_value, value);
6510 TVARIABLE(Float64T, var_result);
6511 Label loop(this, &var_value), done_loop(this, &var_result);
6512 Goto(&loop);
6513 BIND(&loop);
6514 {
6515 Label if_valueisnotnumber(this, Label::kDeferred);
6516
6517 // Load the current {value}.
6518 value = var_value.value();
6519
6520 // Convert {value} to Float64 if it is a number and convert it to a number
6521 // otherwise.
6522 var_result = TryTaggedToFloat64(value,
6523#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
// No special undefined handling here: undefined goes through ToNumber
// (yielding NaN) like any other non-number.
6524 nullptr,
6525#endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
6526 &if_valueisnotnumber);
6527 Goto(&done_loop);
6528
6529 BIND(&if_valueisnotnumber);
6530 {
6531 // Convert the {value} to a Number first.
6532 var_value = CallBuiltin(Builtin::kNonNumberToNumber, context, value);
6533 Goto(&loop);
6534 }
6535 }
6536 BIND(&done_loop);
6537 return var_result.value();
6538}
6539
// Truncates any tagged value to a 32-bit word via the kToNumber conversion
// path (no BigInt labels, no feedback).
// NOTE(review): the signature line (6540) and the Impl-call line (6544)
// were dropped by the extraction.
6541 TNode<Object> value) {
6542 TVARIABLE(Word32T, var_result);
6543 Label done(this);
6545 context, value, &done, &var_result, IsKnownTaggedPointer::kNo, {});
6546 BIND(&done);
6547 return var_result.value();
6548}
6549
6550// Truncate {value} to word32 and jump to {if_number} if it is a Number,
6551// or find that it is a BigInt and jump to {if_bigint}.
// Feedback-free variant; delegates to the kToNumeric Impl. The signature
// line (6552) and the Impl-call line (6556) were dropped by the extraction.
6553 TNode<Context> context, TNode<Object> value, Label* if_number,
6554 TVariable<Word32T>* var_word32, Label* if_bigint, Label* if_bigint64,
6555 TVariable<BigInt>* var_maybe_bigint) {
6557 context, value, if_number, var_word32, IsKnownTaggedPointer::kNo, {},
6558 if_bigint, if_bigint64, var_maybe_bigint);
6559}
6560
6561// Truncate {value} to word32 and jump to {if_number} if it is a Number,
6562// or find that it is a BigInt and jump to {if_bigint}. In either case,
6563// store the type feedback in {var_feedback}.
// Same as TaggedToWord32OrBigInt but threads FeedbackValues through. The
// signature line (6564) and the Impl-call line (6568) were dropped by the
// extraction.
6565 TNode<Context> context, TNode<Object> value, Label* if_number,
6566 TVariable<Word32T>* var_word32, Label* if_bigint, Label* if_bigint64,
6567 TVariable<BigInt>* var_maybe_bigint, const FeedbackValues& feedback) {
6569 context, value, if_number, var_word32, IsKnownTaggedPointer::kNo,
6570 feedback, if_bigint, if_bigint64, var_maybe_bigint);
6571}
6572
6573// Truncate {pointer} to word32 and jump to {if_number} if it is a Number,
6574// or find that it is a BigInt and jump to {if_bigint}. In either case,
6575// store the type feedback in {var_feedback}.
// Variant for values already known to be HeapObjects (skips the Smi fast
// path via IsKnownTaggedPointer::kYes). The signature line (6576) and the
// Impl-call line (6580) were dropped by the extraction.
6577 TNode<Context> context, TNode<HeapObject> pointer, Label* if_number,
6578 TVariable<Word32T>* var_word32, Label* if_bigint, Label* if_bigint64,
6579 TVariable<BigInt>* var_maybe_bigint, const FeedbackValues& feedback) {
6581 context, pointer, if_number, var_word32, IsKnownTaggedPointer::kYes,
6582 feedback, if_bigint, if_bigint64, var_maybe_bigint);
6583}
6584
// Shared implementation behind the TaggedToWord32OrBigInt* entry points.
// Loops converting {value} until it is a Number (jump to if_number with the
// truncated word32 in *var_word32) or — for kToNumeric — a BigInt (jump to
// if_bigint / if_bigint64, optionally capturing it in *var_maybe_bigint).
// Records BinaryOperationFeedback along the way when feedback is supplied,
// and re-throws with feedback updated if the conversion builtin throws.
// NOTE(review): the method-name line (6586) and a few feedback-constant
// lines (6608, 6638, 6658, 6669, 6686, 6705) were dropped by the
// extraction.
6585template <Object::Conversion conversion>
6587 TNode<Context> context, TNode<Object> value, Label* if_number,
6588 TVariable<Word32T>* var_word32,
6589 IsKnownTaggedPointer is_known_tagged_pointer,
6590 const FeedbackValues& feedback, Label* if_bigint, Label* if_bigint64,
6591 TVariable<BigInt>* var_maybe_bigint) {
6592 // We might need to loop after conversion.
6593 TVARIABLE(Object, var_value, value);
6594 TVARIABLE(Object, var_exception);
6595 OverwriteFeedback(feedback.var_feedback, BinaryOperationFeedback::kNone);
6596 VariableList loop_vars({&var_value}, zone());
6597 if (feedback.var_feedback != nullptr) {
6598 loop_vars.push_back(feedback.var_feedback);
6599 }
6600 Label loop(this, loop_vars);
6601 Label if_exception(this, Label::kDeferred);
6602 if (is_known_tagged_pointer == IsKnownTaggedPointer::kNo) {
6603 GotoIf(TaggedIsNotSmi(value), &loop);
6604
6605 // {value} is a Smi.
6606 *var_word32 = SmiToInt32(CAST(value));
6607 CombineFeedback(feedback.var_feedback,
6609 Goto(if_number);
6610 } else {
6611 Goto(&loop);
6612 }
6613 BIND(&loop);
6614 {
6615 value = var_value.value();
6616 Label not_smi(this), is_heap_number(this), is_oddball(this),
6617 maybe_bigint64(this), is_bigint(this), check_if_smi(this);
6618
6619 TNode<HeapObject> value_heap_object = CAST(value);
6620 TNode<Map> map = LoadMap(value_heap_object);
6621 GotoIf(IsHeapNumberMap(map), &is_heap_number);
6622 TNode<Uint16T> instance_type = LoadMapInstanceType(map);
6623 if (conversion == Object::Conversion::kToNumeric) {
// On 64-bit, small BigInts can be truncated directly (maybe_bigint64);
// callers opt in by passing if_bigint64.
6624 if (Is64() && if_bigint64) {
6625 GotoIf(IsBigIntInstanceType(instance_type), &maybe_bigint64);
6626 } else {
6627 GotoIf(IsBigIntInstanceType(instance_type), &is_bigint);
6628 }
6629 }
6630
6631 // Not HeapNumber (or BigInt if conversion == kToNumeric).
6632 {
6633 if (feedback.var_feedback != nullptr) {
6634 // We do not require an Or with earlier feedback here because once we
6635 // convert the value to a Numeric, we cannot reach this path. We can
6636 // only reach this path on the first pass when the feedback is kNone.
6637 CSA_DCHECK(this, SmiEqual(feedback.var_feedback->value(),
6639 }
6640 GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &is_oddball);
6641 // Not an oddball either -> convert.
6642 auto builtin = conversion == Object::Conversion::kToNumeric
6643 ? Builtin::kNonNumberToNumeric
6644 : Builtin::kNonNumberToNumber;
// With feedback we must catch exceptions from the conversion builtin so
// feedback can be written before re-throwing.
6645 if (feedback.var_feedback != nullptr) {
6646 ScopedExceptionHandler handler(this, &if_exception, &var_exception);
6647 var_value = CallBuiltin(builtin, context, value);
6648 } else {
6649 var_value = CallBuiltin(builtin, context, value);
6650 }
6651 OverwriteFeedback(feedback.var_feedback, BinaryOperationFeedback::kAny);
6652 Goto(&check_if_smi);
6653
6654 if (feedback.var_feedback != nullptr) {
6655 BIND(&if_exception);
6656 DCHECK(feedback.slot != nullptr);
6657 DCHECK(feedback.maybe_feedback_vector != nullptr);
6659 (*feedback.maybe_feedback_vector)(), *feedback.slot,
6660 feedback.update_mode);
6661 CallRuntime(Runtime::kReThrow, context, var_exception.value());
6662 Unreachable();
6663 }
6664
// Oddballs (true/false/null/undefined) carry a cached to_number_ value.
6665 BIND(&is_oddball);
6666 var_value =
6667 LoadObjectField(value_heap_object, offsetof(Oddball, to_number_));
6668 OverwriteFeedback(feedback.var_feedback,
6670 Goto(&check_if_smi);
6671 }
6672
6673 BIND(&is_heap_number);
6674 *var_word32 = TruncateHeapNumberValueToWord32(CAST(value));
6675 CombineFeedback(feedback.var_feedback, BinaryOperationFeedback::kNumber);
6676 Goto(if_number);
6677
6678 if (conversion == Object::Conversion::kToNumeric) {
6679 if (Is64() && if_bigint64) {
6680 BIND(&maybe_bigint64);
// Large BigInts fall through to the generic BigInt path.
6681 GotoIfLargeBigInt(CAST(value), &is_bigint);
6682 if (var_maybe_bigint) {
6683 *var_maybe_bigint = CAST(value);
6684 }
6685 CombineFeedback(feedback.var_feedback,
6687 Goto(if_bigint64);
6688 }
6689
6690 BIND(&is_bigint);
6691 if (var_maybe_bigint) {
6692 *var_maybe_bigint = CAST(value);
6693 }
6694 CombineFeedback(feedback.var_feedback, BinaryOperationFeedback::kBigInt);
6695 Goto(if_bigint);
6696 }
6697
// After a conversion builtin/oddball lookup, the result may be a Smi.
6698 BIND(&check_if_smi);
6699 value = var_value.value();
6700 GotoIf(TaggedIsNotSmi(value), &loop);
6701
6702 // {value} is a Smi.
6703 *var_word32 = SmiToInt32(CAST(value));
6704 CombineFeedback(feedback.var_feedback,
6706 Goto(if_number);
6707 }
6708}
6709
// Truncates a tagged Number (Smi or HeapNumber — no other inputs) to int32.
// NOTE(review): the signature line (6710) was dropped by the extraction.
6711 TVARIABLE(Int32T, var_result);
6712 Label done(this), if_heapnumber(this);
6713 GotoIfNot(TaggedIsSmi(number), &if_heapnumber);
6714 var_result = SmiToInt32(CAST(number));
6715 Goto(&done);
6716
6717 BIND(&if_heapnumber);
6718 TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
6719 var_result = Signed(TruncateFloat64ToWord32(value));
6720 Goto(&done);
6721
6722 BIND(&done);
6723 return var_result.value();
6724}
6725
// Loads a HeapNumber's float64 value and truncates it to int32 (JS ToInt32
// truncation semantics via TruncateFloat64ToWord32).
// NOTE(review): the signature line (6726) was dropped by the extraction.
6727 TNode<HeapNumber> object) {
6728 TNode<Float64T> value = LoadHeapNumberValue(object);
6729 return Signed(TruncateFloat64ToWord32(value));
6730}
6731
// Converts a HeapNumber to a Smi when exactly representable; otherwise jumps
// to not_smi. Delegates to TryFloat64ToSmi.
// NOTE(review): the signature line (6732) was dropped by the extraction.
6733 Label* not_smi) {
6734 TNode<Float64T> value = LoadHeapNumberValue(number);
6735 return TryFloat64ToSmi(value, not_smi);
6736}
6737
// Converts a Float32 to a Smi when the value is exactly an int32 that fits
// in a Smi and is not -0.0; otherwise jumps to not_smi.
// NOTE(review): the signature line (6738) and a comment line (6756) were
// dropped by the extraction.
6739 Label* not_smi) {
6740 TNode<Int32T> ivalue = TruncateFloat32ToInt32(value);
6741 TNode<Float32T> fvalue = RoundInt32ToFloat32(ivalue);
6742
6743 Label if_int32(this);
6744
// Round-trip check rejects non-integral and out-of-int32-range values.
6745 GotoIfNot(Float32Equal(value, fvalue), not_smi);
6746 GotoIfNot(Word32Equal(ivalue, Int32Constant(0)), &if_int32);
6747 // if (value == -0.0)
// Distinguish -0.0 from +0.0 via the sign bit of the raw float bits.
6748 Branch(Int32LessThan(UncheckedCast<Int32T>(BitcastFloat32ToInt32(value)),
6749 Int32Constant(0)),
6750 not_smi, &if_int32);
6751
6752 BIND(&if_int32);
6753 if (SmiValuesAre32Bits()) {
6754 return SmiTag(ChangeInt32ToIntPtr(ivalue));
6755 } else {
// 31-bit Smis: tagging doubles the value, so use an overflow-checked add.
6757 TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(ivalue, ivalue);
6758 TNode<BoolT> overflow = Projection<1>(pair);
6759 GotoIf(overflow, not_smi);
6760 return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
6761 }
6762}
6763
// Converts a Float64 to a Smi when the value is exactly an int32 that fits
// in a Smi and is not -0.0; otherwise jumps to not_smi.
// NOTE(review): the signature line (6764) was dropped by the extraction.
// The TVARIABLE on line 6776 appears unused on the visible paths.
6765 Label* not_smi) {
6766 TNode<Int32T> value32 = RoundFloat64ToInt32(value);
6767 TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
6768
6769 Label if_int32(this);
// Round-trip check rejects non-integral and out-of-int32-range values.
6770 GotoIfNot(Float64Equal(value, value64), not_smi);
6771 GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_int32);
// Zero case: reject -0.0, detected by the sign bit in the high word.
6772 Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)),
6773 Int32Constant(0)),
6774 not_smi, &if_int32);
6775
6776 TVARIABLE(Number, var_result);
6777 BIND(&if_int32);
6778 if (SmiValuesAre32Bits()) {
6779 return SmiTag(ChangeInt32ToIntPtr(value32));
6780 } else {
// 31-bit Smis: tagging doubles the value, so use an overflow-checked add.
6782 TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value32, value32);
6783 TNode<BoolT> overflow = Projection<1>(pair);
6784 GotoIf(overflow, not_smi);
6785 return BitcastWordToTaggedSigned(ChangeInt32ToIntPtr(Projection<0>(pair)));
6786 }
6787}
6788
// Converts a Float64 to Int32 when the value is exactly representable and
// not -0.0; otherwise jumps to if_failed.
// NOTE(review): the signature line (6789) was dropped by the extraction.
6790 Label* if_failed) {
6791 TNode<Int32T> value32 = RoundFloat64ToInt32(value);
6792 TNode<Float64T> value64 = ChangeInt32ToFloat64(value32);
6793 Label if_int32(this);
// Round-trip check rejects non-integral and out-of-range values.
6794 GotoIfNot(Float64Equal(value, value64), if_failed);
6795 GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_int32);
6796 // if (value == -0.0)
// The sign bit lives in the high word; negative high word on zero => -0.0.
6797 Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)),
6798 Int32Constant(0)),
6799 if_failed, &if_int32);
6800 BIND(&if_int32);
6801 return value32;
6802}
6803
// 64-bit only: converts a Float64 to an AdditiveSafeInteger when it is an
// exact integer, not -0.0, and within the additive-safe range (checked by
// shifting the biased value right by 53 bits); otherwise jumps to if_failed.
// NOTE(review): the signature start (6804) and the shift-expression lines
// (6820-6821) were dropped by the extraction.
6805 TNode<Float64T> value, Label* if_failed) {
6806 DCHECK(Is64());
6807 TNode<Int64T> value_int64 = TruncateFloat64ToInt64(value);
6808 TNode<Float64T> value_roundtrip = ChangeInt64ToFloat64(value_int64);
6809 Label if_int64(this);
// Round-trip check rejects non-integral values.
6810 GotoIfNot(Float64Equal(value, value_roundtrip), if_failed);
6811 GotoIfNot(Word64Equal(value_int64, Int64Constant(0)), &if_int64);
6812
6813 // if (value == -0.0)
// +0.0 has an all-zero bit pattern; -0.0 does not.
6814 Branch(Word64Equal(BitcastFloat64ToInt64(value), Int64Constant(0)), &if_int64,
6815 if_failed);
6816
6817 BIND(&if_int64);
6818 // Check if AdditiveSafeInteger: (value - kMinAdditiveSafeInteger) >> 53 == 0
6819 TNode<Int64T> shifted_value =
6822 GotoIfNot(Word64Equal(shifted_value, Int64Constant(0)), if_failed);
6823 return UncheckedCast<AdditiveSafeIntegerT>(value_int64);
6824}
6825
// Predicate form of TryFloat64ToAdditiveSafeInteger: returns true iff the
// Float64 is an additive-safe integer. Always false on 32-bit targets.
// NOTE(review): the signature line (6826) and the TVARIABLE declaration of
// `result` (6830, presumably initialized to false) were dropped by the
// extraction.
6827 if (!Is64()) return BoolConstant(false);
6828
6829 Label done(this);
6831
// On failure the Try helper jumps straight to done, leaving `result` at
// its initial (false) value.
6832 TryFloat64ToAdditiveSafeInteger(value, &done);
6833 result = BoolConstant(true);
6834 Goto(&done);
6835
6836 BIND(&done);
6837 return result.value();
6838}
6839
6841 TNode<Float64T> value) {
6842 TVARIABLE(Float16RawBitsT, float16_out);
6843 Label truncate_op_supported(this), truncate_op_fallback(this),
6844 return_out(this);
6845 // See Float64Ceil for the reason there is a branch for the static constant
6846 // (PGO profiles).
6848 &truncate_op_supported, &truncate_op_fallback);
6849
6850 BIND(&truncate_op_supported);
6851 {
6852 float16_out = TruncateFloat64ToFloat16RawBits(value);
6853 Goto(&return_out);
6854 }
6855
6856 // This is a verbatim CSA implementation of DoubleToFloat16.
6857 //
6858 // The 64-bit and 32-bit paths are implemented separately, but the algorithm
6859 // is the same in both cases. The 32-bit version requires manual pairwise
6860 // operations.
6861 BIND(&truncate_op_fallback);
6862 if (Is64()) {
6863 TVARIABLE(Uint16T, out);
6864 TNode<Int64T> signed_in = BitcastFloat64ToInt64(value);
6865
6866 // Take the absolute value of the input.
6868 TNode<Word64T> in = Word64Xor(signed_in, sign);
6869
6870 Label if_infinity_or_nan(this), if_finite(this), done(this);
6871 Branch(Uint64GreaterThanOrEqual(in,
6873 &if_infinity_or_nan, &if_finite);
6874
6875 BIND(&if_infinity_or_nan);
6876 {
6877 // Result is infinity or NaN.
6878 out = Select<Uint16T>(
6879 Uint64GreaterThan(in, Uint64Constant(kFP64Infinity)),
6880 [=, this] { return Uint16Constant(kFP16qNaN); }, // NaN->qNaN
6881 [=, this] { return Uint16Constant(kFP16Infinity); }); // Inf->Inf
6882 Goto(&done);
6883 }
6884
6885 BIND(&if_finite);
6886 {
6887 // Result is a (de)normalized number or zero.
6888
6889 Label if_denormal(this), not_denormal(this);
6890 Branch(Uint64LessThan(in, Uint64Constant(kFP16DenormalThreshold)),
6891 &if_denormal, &not_denormal);
6892
6893 BIND(&if_denormal);
6894 {
6895 // Result is a denormal or zero. Use the magic value and FP addition to
6896 // align 10 mantissa bits at the bottom of the float. Depends on FP
6897 // addition being round-to-nearest-even.
6898 TNode<Float64T> temp = Float64Add(
6899 BitcastInt64ToFloat64(ReinterpretCast<Int64T>(in)),
6902 Uint64Sub(ReinterpretCast<Uint64T>(BitcastFloat64ToInt64(temp)),
6904 Goto(&done);
6905 }
6906
6907 BIND(&not_denormal);
6908 {
6909 // Result is not a denormal.
6910
6911 // Remember if the result mantissa will be odd before rounding.
6914 Uint64Constant(1)));
6915
6916 // Update the exponent and round to nearest even.
6917 //
6918 // Rounding to nearest even is handled in two parts. First, adding
6919 // kFP64To16RebiasExponentAndRound has the effect of rebiasing the
6920 // exponent and that if any of the lower 41 bits of the mantissa are
6921 // set, the 11th mantissa bit from the front becomes set. Second, adding
6922 // mant_odd ensures ties are rounded to even.
6923 TNode<Uint64T> temp1 =
6926 TNode<Uint64T> temp2 = Uint64Add(temp1, mant_odd);
6927
6930
6931 Goto(&done);
6932 }
6933 }
6934
6935 BIND(&done);
6938 out.value()));
6939 } else {
6940 TVARIABLE(Uint16T, out);
6941 TNode<Word32T> signed_in_hi_word = Float64ExtractHighWord32(value);
6942 TNode<Word32T> in_lo_word = Float64ExtractLowWord32(value);
6943
6944 // Take the absolute value of the input.
6947 TNode<Word32T> in_hi_word = Word32Xor(signed_in_hi_word, sign);
6948
6949 Label if_infinity_or_nan(this), if_finite(this), done(this);
6950 Branch(Uint32GreaterThanOrEqual(
6951 in_hi_word,
6953 &if_infinity_or_nan, &if_finite);
6954
6955 BIND(&if_infinity_or_nan);
6956 {
6957 // Result is infinity or NaN.
6958 out = Select<Uint16T>(
6959 Uint32GreaterThan(in_hi_word,
6961 [=, this] { return Uint16Constant(kFP16qNaN); }, // NaN->qNaN
6962 [=, this] { return Uint16Constant(kFP16Infinity); }); // Inf->Inf
6963 Goto(&done);
6964 }
6965
6966 BIND(&if_finite);
6967 {
6968 // Result is a (de)normalized number or zero.
6969
6970 Label if_denormal(this), not_denormal(this);
6971 Branch(Uint32LessThan(in_hi_word, Uint64HighWordConstantNoLowWord(
6973 &if_denormal, &not_denormal);
6974
6975 BIND(&if_denormal);
6976 {
6977 // Result is a denormal or zero. Use the magic value and FP addition to
6978 // align 10 mantissa bits at the bottom of the float. Depends on FP
6979 // addition being round-to-nearest-even.
6980 TNode<Float64T> double_in = Float64InsertHighWord32(
6981 Float64InsertLowWord32(Float64Constant(0), in_lo_word), in_hi_word);
6982 TNode<Float64T> temp = Float64Add(
6983 double_in,
6986 Float64ExtractLowWord32(temp), Float64ExtractHighWord32(temp),
6989
6990 Goto(&done);
6991 }
6992
6993 BIND(&not_denormal);
6994 {
6995 // Result is not a denormal.
6996
6997 // Remember if the result mantissa will be odd before rounding.
7000 kFP16MantissaBits - 32)),
7001 Uint32Constant(1)));
7002
7003 // Update the exponent and round to nearest even.
7004 //
7005 // Rounding to nearest even is handled in two parts. First, adding
7006 // kFP64To16RebiasExponentAndRound has the effect of rebiasing the
7007 // exponent and that if any of the lower 41 bits of the mantissa are
7008 // set, the 11th mantissa bit from the front becomes set. Second, adding
7009 // mant_odd ensures ties are rounded to even.
7011 in_lo_word, in_hi_word,
7015 Int32PairAdd(Projection<0>(temp1), Projection<1>(temp1), mant_odd,
7016 Int32Constant(0));
7017
7019 Projection<1>(temp2),
7021
7022 Goto(&done);
7023 }
7024 }
7025
7026 BIND(&done);
7028 Word32Or(Word32Shr(sign, Int32Constant(16)), out.value()));
7029 }
7030 Goto(&return_out);
7031
7032 BIND(&return_out);
7033 return float16_out.value();
7034}
7035
7040
7045
7050
7055
7057 Label not_smi(this), done(this);
7058 TVARIABLE(Number, var_result);
7059 var_result = TryFloat32ToSmi(value, &not_smi);
7060 Goto(&done);
7061
7062 BIND(&not_smi);
7063 {
7064 var_result = AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(value));
7065 Goto(&done);
7066 }
7067
7068 BIND(&done);
7069 return var_result.value();
7070}
7071
7073 Label not_smi(this), done(this);
7074 TVARIABLE(Number, var_result);
7075 var_result = TryFloat64ToSmi(value, &not_smi);
7076 Goto(&done);
7077
7078 BIND(&not_smi);
7079 {
7080 var_result = AllocateHeapNumberWithValue(value);
7081 Goto(&done);
7082 }
7083 BIND(&done);
7084 return var_result.value();
7085}
7086
7088 if (SmiValuesAre32Bits()) {
7089 return SmiTag(ChangeInt32ToIntPtr(value));
7090 }
7092 TVARIABLE(Number, var_result);
7093 TNode<PairT<Int32T, BoolT>> pair = Int32AddWithOverflow(value, value);
7094 TNode<BoolT> overflow = Projection<1>(pair);
7095 Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
7096 if_join(this);
7097 Branch(overflow, &if_overflow, &if_notoverflow);
7098 BIND(&if_overflow);
7099 {
7100 TNode<Float64T> value64 = ChangeInt32ToFloat64(value);
7102 var_result = result;
7103 Goto(&if_join);
7104 }
7105 BIND(&if_notoverflow);
7106 {
7107 TNode<IntPtrT> almost_tagged_value =
7109 TNode<Smi> result = BitcastWordToTaggedSigned(almost_tagged_value);
7110 var_result = result;
7111 Goto(&if_join);
7112 }
7113 BIND(&if_join);
7114 return var_result.value();
7115}
7116
7118 TNode<Int32T> value) {
7119 if (SmiValuesAre32Bits()) {
7120 return SmiTag(ChangeInt32ToIntPtr(value));
7121 }
7123 TNode<Int32T> result_int32 = Int32Add(value, value);
7124 TNode<IntPtrT> almost_tagged_value = ChangeInt32ToIntPtr(result_int32);
7125 TNode<Smi> result = BitcastWordToTaggedSigned(almost_tagged_value);
7126 return result;
7127}
7128
7130 Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
7131 if_join(this);
7132 TVARIABLE(Number, var_result);
7133 // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
7134 Branch(Uint32LessThan(Uint32Constant(Smi::kMaxValue), value), &if_overflow,
7135 &if_not_overflow);
7136
7137 BIND(&if_not_overflow);
7138 {
7139 // The {value} is definitely in valid Smi range.
7140 var_result = SmiTag(Signed(ChangeUint32ToWord(value)));
7141 }
7142 Goto(&if_join);
7143
7144 BIND(&if_overflow);
7145 {
7146 TNode<Float64T> float64_value = ChangeUint32ToFloat64(value);
7147 var_result = AllocateHeapNumberWithValue(float64_value);
7148 }
7149 Goto(&if_join);
7150
7151 BIND(&if_join);
7152 return var_result.value();
7153}
7154
7156 Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
7157 if_join(this);
7158 TVARIABLE(Number, var_result);
7159 // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
7160 Branch(UintPtrLessThan(UintPtrConstant(Smi::kMaxValue), value), &if_overflow,
7161 &if_not_overflow);
7162
7163 BIND(&if_not_overflow);
7164 {
7165 // The {value} is definitely in valid Smi range.
7166 var_result = SmiTag(Signed(value));
7167 }
7168 Goto(&if_join);
7169
7170 BIND(&if_overflow);
7171 {
7172 TNode<Float64T> float64_value = ChangeUintPtrToFloat64(value);
7173 var_result = AllocateHeapNumberWithValue(float64_value);
7174 }
7175 Goto(&if_join);
7176
7177 BIND(&if_join);
7178 return var_result.value();
7179}
7180
7184
7186 TNode<Object> value,
7187 TNode<String> method_name) {
7188 TVARIABLE(Object, var_value, value);
7189
7190 // Check if the {value} is a Smi or a HeapObject.
7191 Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
7192 if_valueisstring(this);
7193 Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
7194 BIND(&if_valueisnotsmi);
7195 {
7196 // Load the instance type of the {value}.
7197 TNode<Uint16T> value_instance_type = LoadInstanceType(CAST(value));
7198
7199 // Check if the {value} is already String.
7200 Label if_valueisnotstring(this, Label::kDeferred);
7201 Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
7202 &if_valueisnotstring);
7203 BIND(&if_valueisnotstring);
7204 {
7205 // Check if the {value} is null.
7206 Label if_valueisnullorundefined(this, Label::kDeferred);
7207 GotoIf(IsNullOrUndefined(value), &if_valueisnullorundefined);
7208 // Convert the {value} to a String.
7209 var_value = CallBuiltin(Builtin::kToString, context, value);
7210 Goto(&if_valueisstring);
7211
7212 BIND(&if_valueisnullorundefined);
7213 {
7214 // The {value} is either null or undefined.
7215 ThrowTypeError(context, MessageTemplate::kCalledOnNullOrUndefined,
7216 method_name);
7217 }
7218 }
7219 }
7220 BIND(&if_valueissmi);
7221 {
7222 // The {value} is a Smi, convert it to a String.
7223 var_value = CallBuiltin(Builtin::kNumberToString, context, value);
7224 Goto(&if_valueisstring);
7225 }
7226 BIND(&if_valueisstring);
7227 return CAST(var_value.value());
7228}
7229
7230// This has platform-specific and ill-defined behavior for negative inputs.
7232 TNode<Number> value) {
7233 TVARIABLE(Uint32T, var_result);
7234 Label if_smi(this), if_heapnumber(this, Label::kDeferred), done(this);
7235 Branch(TaggedIsSmi(value), &if_smi, &if_heapnumber);
7236 BIND(&if_smi);
7237 {
7238 var_result = Unsigned(SmiToInt32(CAST(value)));
7239 Goto(&done);
7240 }
7241 BIND(&if_heapnumber);
7242 {
7243 var_result = ChangeFloat64ToUint32(LoadHeapNumberValue(CAST(value)));
7244 Goto(&done);
7245 }
7246 BIND(&done);
7247 return var_result.value();
7248}
7249
7252 Label smi(this);
7253 Label done(this, &result);
7254 GotoIf(TaggedIsSmi(value), &smi);
7256 Goto(&done);
7257
7258 BIND(&smi);
7259 {
7260 result = SmiToFloat64(CAST(value));
7261 Goto(&done);
7262 }
7263
7264 BIND(&done);
7265 return result.value();
7266}
7267
7269 TNode<Context> context, TNode<HeapObject> input) {
7270 return Select<Int32T>(
7271 IsHeapNumber(input),
7272 [=, this] {
7273 return Signed(TruncateFloat64ToWord32(LoadHeapNumberValue(input)));
7274 },
7275 [=, this] {
7277 CAST(CallBuiltin(Builtin::kNonNumberToNumber, context, input)));
7278 });
7279}
7280
7282 TNode<Object> input) {
7283 TVARIABLE(Float64T, var_result);
7284 Label end(this), not_smi(this);
7285
7286 GotoIfNot(TaggedIsSmi(input), &not_smi);
7287 var_result = SmiToFloat64(CAST(input));
7288 Goto(&end);
7289
7290 BIND(&not_smi);
7291 var_result = Select<Float64T>(
7292 IsHeapNumber(CAST(input)),
7293 [=, this] { return LoadHeapNumberValue(CAST(input)); },
7294 [=, this] {
7295 return ChangeNumberToFloat64(
7296 CAST(CallBuiltin(Builtin::kNonNumberToNumber, context, input)));
7297 });
7298 Goto(&end);
7299
7300 BIND(&end);
7301 return var_result.value();
7302}
7303
7307
7311
7315
7317 TNode<JSAny> input_value,
7318 PrimitiveType primitive_type,
7319 char const* method_name) {
7320 // We might need to loop once due to JSPrimitiveWrapper unboxing.
7321 TVARIABLE(JSAny, var_value, input_value);
7322 Label loop(this, &var_value), done_loop(this),
7323 done_throw(this, Label::kDeferred);
7324 Goto(&loop);
7325 BIND(&loop);
7326 {
7327 // Check if the {value} is a Smi or a HeapObject.
7328 GotoIf(
7329 TaggedIsSmi(var_value.value()),
7330 (primitive_type == PrimitiveType::kNumber) ? &done_loop : &done_throw);
7331
7332 TNode<HeapObject> value = CAST(var_value.value());
7333
7334 // Load the map of the {value}.
7335 TNode<Map> value_map = LoadMap(value);
7336
7337 // Load the instance type of the {value}.
7338 TNode<Uint16T> value_instance_type = LoadMapInstanceType(value_map);
7339
7340 // Check if {value} is a JSPrimitiveWrapper.
7341 Label if_valueiswrapper(this, Label::kDeferred), if_valueisnotwrapper(this);
7342 Branch(InstanceTypeEqual(value_instance_type, JS_PRIMITIVE_WRAPPER_TYPE),
7343 &if_valueiswrapper, &if_valueisnotwrapper);
7344
7345 BIND(&if_valueiswrapper);
7346 {
7347 // Load the actual value from the {value}.
7348 var_value =
7349 CAST(LoadObjectField(value, JSPrimitiveWrapper::kValueOffset));
7350 Goto(&loop);
7351 }
7352
7353 BIND(&if_valueisnotwrapper);
7354 {
7355 switch (primitive_type) {
7357 GotoIf(TaggedEqual(value_map, BooleanMapConstant()), &done_loop);
7358 break;
7360 GotoIf(TaggedEqual(value_map, HeapNumberMapConstant()), &done_loop);
7361 break;
7363 GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
7364 break;
7366 GotoIf(TaggedEqual(value_map, SymbolMapConstant()), &done_loop);
7367 break;
7368 }
7369 Goto(&done_throw);
7370 }
7371 }
7372
7373 BIND(&done_throw);
7374 {
7375 const char* primitive_name = nullptr;
7376 switch (primitive_type) {
7378 primitive_name = "Boolean";
7379 break;
7381 primitive_name = "Number";
7382 break;
7384 primitive_name = "String";
7385 break;
7387 primitive_name = "Symbol";
7388 break;
7389 }
7390 CHECK_NOT_NULL(primitive_name);
7391
7392 // The {value} is not a compatible receiver for this method.
7393 ThrowTypeError(context, MessageTemplate::kNotGeneric, method_name,
7394 primitive_name);
7395 }
7396
7397 BIND(&done_loop);
7398 return var_value.value();
7399}
7400
7402 TNode<Object> value,
7403 InstanceType instance_type,
7404 char const* method_name) {
7405 Label out(this), throw_exception(this, Label::kDeferred);
7406
7407 GotoIf(TaggedIsSmi(value), &throw_exception);
7408
7409 // Load the instance type of the {value}.
7410 TNode<Map> map = LoadMap(CAST(value));
7411 const TNode<Uint16T> value_instance_type = LoadMapInstanceType(map);
7412
7413 Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
7414 &throw_exception);
7415
7416 // The {value} is not a compatible receiver for this method.
7417 BIND(&throw_exception);
7418 ThrowTypeError(context, MessageTemplate::kIncompatibleMethodReceiver,
7419 StringConstant(method_name), value);
7420
7421 BIND(&out);
7422}
7423
7425 TNode<Object> value,
7426 MessageTemplate msg_template,
7427 const char* method_name) {
7428 Label done(this), throw_exception(this, Label::kDeferred);
7429
7430 GotoIf(TaggedIsSmi(value), &throw_exception);
7431
7432 Branch(JSAnyIsNotPrimitive(CAST(value)), &done, &throw_exception);
7433
7434 // The {value} is not a compatible receiver for this method.
7435 BIND(&throw_exception);
7436 ThrowTypeError(context, msg_template, StringConstant(method_name), value);
7437
7438 BIND(&done);
7439}
7440
7442 TNode<Object> value,
7443 const char* method_name) {
7444 Label out(this), throw_exception(this, Label::kDeferred);
7445
7446 GotoIf(TaggedIsSmi(value), &throw_exception);
7447 Branch(IsCallable(CAST(value)), &out, &throw_exception);
7448
7449 // The {value} is not a compatible receiver for this method.
7450 BIND(&throw_exception);
7451 ThrowTypeError(context, MessageTemplate::kCalledNonCallable, method_name);
7452
7453 BIND(&out);
7454}
7455
7457 MessageTemplate message,
7458 std::optional<TNode<Object>> arg0,
7459 std::optional<TNode<Object>> arg1,
7460 std::optional<TNode<Object>> arg2) {
7461 TNode<Smi> template_index = SmiConstant(static_cast<int>(message));
7462 if (!arg0) {
7463 CallRuntime(Runtime::kThrowRangeError, context, template_index);
7464 } else if (!arg1) {
7465 CallRuntime(Runtime::kThrowRangeError, context, template_index, *arg0);
7466 } else if (!arg2) {
7467 CallRuntime(Runtime::kThrowRangeError, context, template_index, *arg0,
7468 *arg1);
7469 } else {
7470 CallRuntime(Runtime::kThrowRangeError, context, template_index, *arg0,
7471 *arg1, *arg2);
7472 }
7473 Unreachable();
7474}
7475
7477 MessageTemplate message,
7478 char const* arg0, char const* arg1) {
7479 std::optional<TNode<Object>> arg0_node;
7480 if (arg0) arg0_node = StringConstant(arg0);
7481 std::optional<TNode<Object>> arg1_node;
7482 if (arg1) arg1_node = StringConstant(arg1);
7483 ThrowTypeError(context, message, arg0_node, arg1_node);
7484}
7485
7487 MessageTemplate message,
7488 std::optional<TNode<Object>> arg0,
7489 std::optional<TNode<Object>> arg1,
7490 std::optional<TNode<Object>> arg2) {
7491 TNode<Smi> template_index = SmiConstant(static_cast<int>(message));
7492 if (!arg0) {
7493 CallRuntime(Runtime::kThrowTypeError, context, template_index);
7494 } else if (!arg1) {
7495 CallRuntime(Runtime::kThrowTypeError, context, template_index, *arg0);
7496 } else if (!arg2) {
7497 CallRuntime(Runtime::kThrowTypeError, context, template_index, *arg0,
7498 *arg1);
7499 } else {
7500 CallRuntime(Runtime::kThrowTypeError, context, template_index, *arg0, *arg1,
7501 *arg2);
7502 }
7503 Unreachable();
7504}
7505
7507 CallRuntime(Runtime::kTerminateExecution, context);
7508 Unreachable();
7509}
7510
7519 CSA_DCHECK(this, Word32Or(IsTheHole(message),
7521 JS_MESSAGE_OBJECT_TYPE)));
7524 StoreFullTaggedNoWriteBarrier(pending_message, message);
7525}
7526
7528 TNode<HeapObject> pending_message = GetPendingMessage();
7529 return TaggedEqual(pending_message,
7530 LoadRoot(RootIndex::kTerminationException));
7531}
7532
7534 TNode<ExternalReference> continuation_data =
7535 IsolateField(IsolateFieldId::kContinuationPreservedEmbedderData);
7536 return LoadFullTagged(continuation_data);
7537}
7538
7540 TNode<Object> value) {
7541 TNode<ExternalReference> continuation_data =
7542 IsolateField(IsolateFieldId::kContinuationPreservedEmbedderData);
7543 StoreFullTaggedNoWriteBarrier(continuation_data, value);
7544}
7545
7547 int type) {
7548 return Word32Equal(instance_type, Int32Constant(type));
7549}
7550
7554
7558
7560 int kMask =
7561 Map::Bits3::IsExtensibleBit::kMask | Map::Bits3::IsPrototypeMapBit::kMask;
7562 int kExpected = Map::Bits3::IsExtensibleBit::kMask;
7564 Int32Constant(kExpected));
7565}
7566
7570
7574
7578
7581 TNode<PropertyCell> cell = NoElementsProtectorConstant();
7582 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7583 return TaggedEqual(cell_value, invalid);
7584}
7585
7588 TNode<PropertyCell> cell = MegaDOMProtectorConstant();
7589 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7590 return TaggedEqual(cell_value, invalid);
7591}
7592
7595 TNode<PropertyCell> cell = ArrayIteratorProtectorConstant();
7596 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7597 return TaggedEqual(cell_value, invalid);
7598}
7599
7602 TNode<PropertyCell> cell = PromiseResolveProtectorConstant();
7603 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7604 return TaggedEqual(cell_value, invalid);
7605}
7606
7609 TNode<PropertyCell> cell = PromiseThenProtectorConstant();
7610 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7611 return TaggedEqual(cell_value, invalid);
7612}
7613
7616 TNode<PropertyCell> cell = ArraySpeciesProtectorConstant();
7617 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7618 return TaggedEqual(cell_value, invalid);
7619}
7620
7623 TNode<PropertyCell> cell = IsConcatSpreadableProtectorConstant();
7624 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7625 return TaggedEqual(cell_value, invalid);
7626}
7627
7630 TNode<PropertyCell> cell = TypedArraySpeciesProtectorConstant();
7631 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7632 return TaggedEqual(cell_value, invalid);
7633}
7634
7637 TNode<PropertyCell> cell = RegExpSpeciesProtectorConstant();
7638 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7639 return TaggedEqual(cell_value, invalid);
7640}
7641
7644 TNode<PropertyCell> cell = PromiseSpeciesProtectorConstant();
7645 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7646 return TaggedEqual(cell_value, invalid);
7647}
7648
7652 TNode<PropertyCell> cell = NumberStringNotRegexpLikeProtectorConstant();
7653 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7654 return TaggedEqual(cell_value, invalid);
7655}
7656
7659 TNode<PropertyCell> cell = SetIteratorProtectorConstant();
7660 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7661 return TaggedEqual(cell_value, invalid);
7662}
7663
7666 TNode<PropertyCell> cell = MapIteratorProtectorConstant();
7667 TNode<Object> cell_value = LoadObjectField(cell, PropertyCell::kValueOffset);
7668 return TaggedEqual(cell_value, invalid);
7669}
7670
7672 TNode<Context> context, TNode<Map> map) {
7674 const TNode<Object> initial_array_prototype = LoadContextElement(
7675 native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
7677 return TaggedEqual(proto, initial_array_prototype);
7678}
7679
7681 TNode<Context> context, TNode<Map> map) {
7683 const TNode<Object> typed_array_prototype =
7684 LoadContextElement(native_context, Context::TYPED_ARRAY_PROTOTYPE_INDEX);
7686 TNode<HeapObject> proto_of_proto = Select<HeapObject>(
7687 IsJSObject(proto), [=, this] { return LoadMapPrototype(LoadMap(proto)); },
7688 [=, this] { return NullConstant(); });
7689 return TaggedEqual(proto_of_proto, typed_array_prototype);
7690}
7691
7694 TNode<PropertyCell> cell = StringWrapperToPrimitiveProtectorConstant();
7695 StoreObjectField(cell, PropertyCell::kValueOffset, invalid);
7696}
7697
7699 TNode<Context> context, TNode<Map> map) {
7701 const TNode<Object> arguments_map = LoadContextElement(
7702 native_context, Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
7703 return TaggedEqual(arguments_map, map);
7704}
7705
7707 TNode<Context> context, TNode<Map> map) {
7709 const TNode<Object> arguments_map = LoadContextElement(
7710 native_context, Context::SLOW_ALIASED_ARGUMENTS_MAP_INDEX);
7711 return TaggedEqual(arguments_map, map);
7712}
7713
7715 TNode<Map> map) {
7717 const TNode<Object> arguments_map =
7718 LoadContextElement(native_context, Context::SLOPPY_ARGUMENTS_MAP_INDEX);
7719 return TaggedEqual(arguments_map, map);
7720}
7721
7723 TNode<Map> map) {
7725 const TNode<Object> arguments_map =
7726 LoadContextElement(native_context, Context::STRICT_ARGUMENTS_MAP_INDEX);
7727 return TaggedEqual(arguments_map, map);
7728}
7729
7731 return Select<BoolT>(
7732 TaggedIsSmi(object), [=, this] { return Int32FalseConstant(); },
7733 [=, this] {
7735 });
7736}
7737
7741
7743 return Select<BoolT>(
7744 TaggedIsSmi(object), [=, this] { return Int32FalseConstant(); },
7745 [=, this] { return IsCode(UncheckedCast<HeapObject>(object)); });
7746}
7747
7749 return HasInstanceType(object, CODE_TYPE);
7750}
7751
7755
7759
7763
7765 TNode<Int32T> instance_type) {
7766 static_assert(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
7767 return Int32LessThanOrEqual(instance_type,
7769}
7770
7772 TNode<Int32T> instance_type) {
7773 return Int32LessThanOrEqual(instance_type,
7775}
7776
7778 TNode<Int32T> instance_type) {
7780 return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
7781}
7782
7784 TNode<Int32T> instance_type) {
7785 return InstanceTypeEqual(instance_type, JS_TEMPORAL_INSTANT_TYPE);
7786}
7787
7795
7803
7812
7820
7822 TNode<Int32T> instance_type) {
7823 CSA_DCHECK(this, IsStringInstanceType(instance_type));
7824 static_assert(kIsIndirectStringMask == 0x1);
7825 static_assert(kIsIndirectStringTag == 0x1);
7826 return UncheckedCast<BoolT>(
7828}
7829
7837
7839 TNode<Int32T> instance_type) {
7840 CSA_DCHECK(this, IsStringInstanceType(instance_type));
7841 static_assert(kUncachedExternalStringTag != 0);
7842 return IsSetWord32(instance_type, kUncachedExternalStringMask);
7843}
7844
7846 TNode<Int32T> instance_type) {
7847 static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
7848 return Int32GreaterThanOrEqual(instance_type,
7849 Int32Constant(FIRST_JS_RECEIVER_TYPE));
7850}
7851
7853#if V8_STATIC_ROOTS_BOOL
7855 Int32Constant(StaticReadOnlyRoot::kSeqOneByteStringMap));
7856#else
7858#endif
7859}
7860
7862#if V8_STATIC_ROOTS_BOOL
7863 // Both sequential string maps are allocated at the start of the read only
7864 // heap, so we can use a single comparison to check for them.
7865 static_assert(
7866 InstanceTypeChecker::kUniqueMapRangeOfStringType::kSeqString.first == 0);
7867 return IsInRange(
7869 InstanceTypeChecker::kUniqueMapRangeOfStringType::kSeqString.first,
7870 InstanceTypeChecker::kUniqueMapRangeOfStringType::kSeqString.second);
7871#else
7873#endif
7874}
7875
7877#if V8_STATIC_ROOTS_BOOL
7878 return IsInRange(
7880 InstanceTypeChecker::kUniqueMapRangeOfStringType::kExternalString.first,
7881 InstanceTypeChecker::kUniqueMapRangeOfStringType::kExternalString.second);
7882#else
7884#endif
7885}
7886
7888#if V8_STATIC_ROOTS_BOOL
7889 return IsInRange(
7891 InstanceTypeChecker::kUniqueMapRangeOfStringType::kUncachedExternalString
7892 .first,
7893 InstanceTypeChecker::kUniqueMapRangeOfStringType::kUncachedExternalString
7894 .second);
7895#else
7897#endif
7898}
7899
7901#if V8_STATIC_ROOTS_BOOL
7903
7904 // These static asserts make sure that the following bit magic on the map word
7905 // is safe. See the definition of kStringMapEncodingMask for an explanation.
7906#define VALIDATE_STRING_MAP_ENCODING_BIT(instance_type, size, name, Name) \
7907 static_assert( \
7908 ((instance_type & kStringEncodingMask) == kOneByteStringTag) == \
7909 ((StaticReadOnlyRoot::k##Name##Map & \
7910 InstanceTypeChecker::kStringMapEncodingMask) == \
7911 InstanceTypeChecker::kOneByteStringMapBit)); \
7912 static_assert( \
7913 ((instance_type & kStringEncodingMask) == kTwoByteStringTag) == \
7914 ((StaticReadOnlyRoot::k##Name##Map & \
7915 InstanceTypeChecker::kStringMapEncodingMask) == \
7916 InstanceTypeChecker::kTwoByteStringMapBit));
7917 STRING_TYPE_LIST(VALIDATE_STRING_MAP_ENCODING_BIT)
7918#undef VALIDATE_STRING_TYPE_RANGES
7919
7920 return Word32Equal(
7922 Int32Constant(InstanceTypeChecker::kStringMapEncodingMask)),
7923 Int32Constant(InstanceTypeChecker::kOneByteStringMapBit));
7924#else
7926#endif
7927}
7928
7932
7934#if V8_STATIC_ROOTS_BOOL
7935 // Assuming this is only called with primitive objects or js receivers.
7937 IsJSReceiverMap(map)));
7938 // All primitive object's maps are allocated at the start of the read only
7939 // heap. Thus JS_RECEIVER's must have maps with larger (compressed) addresses.
7940 return Uint32GreaterThanOrEqual(
7943#else
7944 return IsJSReceiverMap(map);
7945#endif
7946}
7947
7951
7953#if V8_STATIC_ROOTS_BOOL
7954 return JSAnyIsNotPrimitiveMap(LoadMap(object));
7955#else
7956 return IsJSReceiver(object);
7957#endif
7958}
7959
7963
7965 // TODO(ishell): consider using Select<BoolT>() here.
7966 return UncheckedCast<BoolT>(Word32Or(IsUndefined(value), IsNull(value)));
7967}
7968
7970 TNode<Int32T> instance_type) {
7971 return InstanceTypeEqual(instance_type, JS_GLOBAL_PROXY_TYPE);
7972}
7973
7977
7981
7985
7987 TNode<Int32T> instance_type) {
7988 static_assert(LAST_JS_OBJECT_TYPE == LAST_TYPE);
7989 return Int32GreaterThanOrEqual(instance_type,
7990 Int32Constant(FIRST_JS_OBJECT_TYPE));
7991}
7992
7994 TNode<Int32T> instance_type) {
7995 return InstanceTypeEqual(instance_type, JS_API_OBJECT_TYPE);
7996}
7997
8001
8005
8009
8013
8016 JS_FINALIZATION_REGISTRY_TYPE);
8017}
8018
8023
8027
8031
8033 return HasInstanceType(object, JS_PROXY_TYPE);
8034}
8035
8037 return HasInstanceType(object, JS_STRING_ITERATOR_TYPE);
8038}
8039
8041 return HasInstanceType(object, JS_SHADOW_REALM_TYPE);
8042}
8043
8045 TNode<HeapObject> object) {
8046 return HasInstanceType(object, JS_REG_EXP_STRING_ITERATOR_TYPE);
8047}
8048
8050 return HasInstanceType(object, MAP_TYPE);
8051}
8052
8054 TNode<Int32T> instance_type) {
8055 return InstanceTypeEqual(instance_type, JS_PRIMITIVE_WRAPPER_TYPE);
8056}
8057
8061
8065
8067 return HasInstanceType(object, JS_WRAPPED_FUNCTION_TYPE);
8068}
8069
8071 TNode<Int32T> instance_type) {
8072 return InstanceTypeEqual(instance_type, JS_ARRAY_TYPE);
8073}
8074
8078
8082
8084 return HasInstanceType(object, JS_ARRAY_ITERATOR_TYPE);
8085}
8086
8088 TNode<Int32T> instance_type) {
8089 return IsInRange(instance_type, FIRST_ALWAYS_SHARED_SPACE_JS_OBJECT_TYPE,
8090 LAST_ALWAYS_SHARED_SPACE_JS_OBJECT_TYPE);
8091}
8092
8094 TNode<Int32T> instance_type) {
8095 return InstanceTypeEqual(instance_type, JS_SHARED_ARRAY_TYPE);
8096}
8097
8101
8105
8107 return Select<BoolT>(
8108 TaggedIsSmi(object), [=, this] { return Int32FalseConstant(); },
8109 [=, this] {
8110 TNode<HeapObject> heap_object = CAST(object);
8111 return IsJSSharedArray(heap_object);
8112 });
8113}
8114
8116 TNode<Int32T> instance_type) {
8117 return InstanceTypeEqual(instance_type, JS_SHARED_STRUCT_TYPE);
8118}
8119
8123
8127
8129 return Select<BoolT>(
8130 TaggedIsSmi(object), [=, this] { return Int32FalseConstant(); },
8131 [=, this] {
8132 TNode<HeapObject> heap_object = CAST(object);
8133 return IsJSSharedStruct(heap_object);
8134 });
8135}
8136
8138 TNode<HeapObject> object) {
8139 return HasInstanceType(object, JS_ASYNC_GENERATOR_OBJECT_TYPE);
8140}
8141
8143 return HasInstanceType(object, FIXED_ARRAY_TYPE);
8144}
8145
8147 TNode<Uint16T> instance_type = LoadInstanceType(object);
8148 return UncheckedCast<BoolT>(
8149 Word32And(Int32GreaterThanOrEqual(instance_type,
8150 Int32Constant(FIRST_FIXED_ARRAY_TYPE)),
8151 Int32LessThanOrEqual(instance_type,
8152 Int32Constant(LAST_FIXED_ARRAY_TYPE))));
8153}
8154
8156 TNode<HeapObject> object) {
8157 TNode<Uint16T> instance_type = LoadInstanceType(object);
8159 Int32LessThan(instance_type, Int32Constant(FIRST_WEAK_FIXED_ARRAY_TYPE)),
8160 Int32GreaterThan(instance_type,
8161 Int32Constant(LAST_WEAK_FIXED_ARRAY_TYPE))));
8162}
8163
8165 return HasInstanceType(object, PROPERTY_ARRAY_TYPE);
8166}
8167
8169 TNode<HeapObject> object) {
8170 TNode<Uint16T> instance_type = LoadInstanceType(object);
8171 return IsInRange(instance_type, FIRST_PROMISE_REACTION_JOB_TASK_TYPE,
8172 LAST_PROMISE_REACTION_JOB_TASK_TYPE);
8173}
8174
8175// This complicated check is due to elements oddities. If a smi array is empty
8176// after Array.p.shift, it is replaced by the empty array constant. If it is
8177// later filled with a double element, we try to grow it but pass in a double
8178// elements kind. Usually this would cause a size mismatch (since the source
8179// fixed array has HOLEY_ELEMENTS and destination has
8180// HOLEY_DOUBLE_ELEMENTS), but we don't have to worry about it when the
8181// source array is empty.
8182 // TODO(jgruber): It might be worth creating an empty_double_array constant to
8183// simplify this case.
8186 Label out(this);
8187 TVARIABLE(BoolT, var_result, Int32TrueConstant());
8188
8189 GotoIf(IsFixedArrayWithKind(object, kind), &out);
8190
8191 const TNode<Smi> length = LoadFixedArrayBaseLength(object);
8192 GotoIf(SmiEqual(length, SmiConstant(0)), &out);
8193
8194 var_result = Int32FalseConstant();
8195 Goto(&out);
8196
8197 BIND(&out);
8198 return var_result.value();
8199}
8200
8204 return IsFixedDoubleArray(object);
8205 } else {
8208 return IsFixedArraySubclass(object);
8209 }
8210}
8211
8215
8217 return IsPropertyCellMap(LoadMap(object));
8218}
8219
8221 TNode<Int32T> instance_type) {
8222 return InstanceTypeEqual(instance_type, HEAP_NUMBER_TYPE);
8223}
8224
8226 return Select<BoolT>(
8227 TaggedIsSmi(object), [=, this] { return Int32TrueConstant(); },
8228 [=, this] {
8229 return Word32BinaryNot(IsHoleInstanceType(
8231 });
8232}
8233
8235 TNode<Int32T> instance_type) {
8236 return InstanceTypeEqual(instance_type, HOLE_TYPE);
8237}
8238
8242
8244 TNode<Int32T> instance_type) {
8245 return InstanceTypeEqual(instance_type, ODDBALL_TYPE);
8246}
8247
8249#if V8_STATIC_ROOTS_BOOL
8250 TNode<Map> map = LoadMap(object);
8251 TNode<Word32T> map_as_word32 = ReinterpretCast<Word32T>(map);
8252 static_assert(InstanceTypeChecker::kStringMapUpperBound + Map::kSize ==
8253 StaticReadOnlyRoot::kSymbolMap);
8254 return Uint32LessThanOrEqual(map_as_word32,
8255 Int32Constant(StaticReadOnlyRoot::kSymbolMap));
8256#else
8257 return IsNameInstanceType(LoadInstanceType(object));
8258#endif
8259}
8260
8262 TNode<Int32T> instance_type) {
8263 return Int32LessThanOrEqual(instance_type, Int32Constant(LAST_NAME_TYPE));
8264}
8265
8267#if V8_STATIC_ROOTS_BOOL
8268 TNode<Map> map = LoadMap(object);
8269 TNode<Word32T> map_as_word32 =
8271 return Uint32LessThanOrEqual(
8272 map_as_word32, Int32Constant(InstanceTypeChecker::kStringMapUpperBound));
8273#else
8274 return IsStringInstanceType(LoadInstanceType(object));
8275#endif
8276}
8277
8281
8285
8289
8291 TNode<Int32T> instance_type) {
8292 return InstanceTypeEqual(instance_type, SYMBOL_TYPE);
8293}
8294
8303
8305 TNode<Int32T> instance_type) {
8307 Word32And(instance_type,
8310 // TODO(v8:12007): Internalized strings do not have kSharedStringTag until
8311 // the shared string table ships.
8312 return Word32Or(is_shared,
8314 IsInternalizedStringInstanceType(instance_type)));
8315}
8316
8318 TNode<Uint16T> instance_type = LoadInstanceType(object);
8319 return Select<BoolT>(
8320 IsInternalizedStringInstanceType(instance_type),
8321 [=, this] { return Int32TrueConstant(); },
8322 [=, this] { return IsSymbolInstanceType(instance_type); });
8323}
8324
8325// Semantics: guaranteed not to be an integer index (i.e. contains non-digit
8326// characters, or is outside MAX_SAFE_INTEGER/size_t range). Note that for
8327// non-TypedArray receivers, there are additional strings that must be treated
8328// as named property keys, namely the range [0xFFFFFFFF, MAX_SAFE_INTEGER].
8329// The hash could be a forwarding index to an integer index.
8330// For now we conservatively assume that all forwarded hashes could be integer
8331// indices, allowing false negatives.
8332// TODO(pthier): We could use 1 bit of the forward index to indicate whether the
8333 // forwarded hash contains an integer index, if this turns out to be a
8334// performance issue, at the cost of slowing down creating the forwarded string.
8336 TNode<Uint16T> instance_type = LoadInstanceType(object);
8337 return Select<BoolT>(
8338 IsInternalizedStringInstanceType(instance_type),
8339 [=, this] {
8340 return IsSetWord32(LoadNameRawHashField(CAST(object)),
8342 },
8343 [=, this] { return IsSymbolInstanceType(instance_type); });
8344}
8345
8346// Semantics: {object} is a Symbol, or a String that doesn't have a cached
8347// index. This returns {true} for strings containing representations of
8348// integers in the range above 9999999 (per kMaxCachedArrayIndexLength)
8349// and below MAX_SAFE_INTEGER. For CSA_DCHECKs ensuring correct usage, this is
8350// better than no checking; and we don't have a good/fast way to accurately
8351// check such strings for being within "array index" (uint32_t) range.
8353 TNode<HeapObject> object) {
8354 TNode<Uint16T> instance_type = LoadInstanceType(object);
8355 return Select<BoolT>(
8356 IsInternalizedStringInstanceType(instance_type),
8357 [=, this] {
8358 return IsSetWord32(LoadNameRawHash(CAST(object)),
8360 },
8361 [=, this] { return IsSymbolInstanceType(instance_type); });
8362}
8363
8368
8372
8374 Label* true_label) {
8375 // Small BigInts are BigInts in the range [-2^63 + 1, 2^63 - 1] so that they
8376 // can fit in 64-bit registers. Excluding -2^63 from the range makes the check
8377 // simpler and faster. The other BigInts are seen as "large".
8378 // TODO(panq): We might need to reevaluate the range of small BigInts.
8379 DCHECK(Is64());
8380 Label false_label(this);
8381 TNode<Uint32T> length =
8383 GotoIf(Word32Equal(length, Uint32Constant(0)), &false_label);
8384 GotoIfNot(Word32Equal(length, Uint32Constant(1)), true_label);
8386 WordAnd(LoadBigIntDigit(bigint, 0),
8387 UintPtrConstant(static_cast<uintptr_t>(
8388 1ULL << (sizeof(uintptr_t) * 8 - 1))))),
8389 &false_label, true_label);
8390 Bind(&false_label);
8391}
8392
8394 TNode<Int32T> instance_type) {
8395 return Int32LessThanOrEqual(instance_type,
8396 Int32Constant(LAST_PRIMITIVE_HEAP_OBJECT_TYPE));
8397}
8398
8404
8406 TNode<Uint16T> instance_type = LoadInstanceType(object);
8407 return UncheckedCast<BoolT>(
8408 Word32And(Int32GreaterThanOrEqual(instance_type,
8409 Int32Constant(FIRST_HASH_TABLE_TYPE)),
8410 Int32LessThanOrEqual(instance_type,
8411 Int32Constant(LAST_HASH_TABLE_TYPE))));
8412}
8413
8415 return HasInstanceType(object, EPHEMERON_HASH_TABLE_TYPE);
8416}
8417
8421
8423 TNode<HeapObject> object) {
8424 return HasInstanceType(object, ORDERED_NAME_DICTIONARY_TYPE);
8425}
8426
8428 return HasInstanceType(object, GLOBAL_DICTIONARY_TYPE);
8429}
8430
8432 return HasInstanceType(object, NUMBER_DICTIONARY_TYPE);
8433}
8434
8436 return HasInstanceType(object, JS_GENERATOR_OBJECT_TYPE);
8437}
8438
8440 TNode<Int32T> instance_type) {
8441 return IsInRange(instance_type,
8442 FIRST_JS_FUNCTION_OR_BOUND_FUNCTION_OR_WRAPPED_FUNCTION_TYPE,
8443 LAST_JS_FUNCTION_OR_BOUND_FUNCTION_OR_WRAPPED_FUNCTION_TYPE);
8444}
8446 TNode<Int32T> instance_type) {
8447 return IsInRange(instance_type, FIRST_JS_FUNCTION_TYPE,
8448 LAST_JS_FUNCTION_TYPE);
8449}
8450
8454
8456 return HasInstanceType(object, JS_BOUND_FUNCTION_TYPE);
8457}
8458
8462
8464 TNode<Int32T> instance_type) {
8465 return InstanceTypeEqual(instance_type, JS_TYPED_ARRAY_TYPE);
8466}
8467
8471
8475
8477 return HasInstanceType(object, JS_ARRAY_BUFFER_TYPE);
8478}
8479
8481 return HasInstanceType(object, JS_DATA_VIEW_TYPE);
8482}
8483
8485 return HasInstanceType(object, JS_RAB_GSAB_DATA_VIEW_TYPE);
8486}
8487
8489 return HasInstanceType(object, JS_REG_EXP_TYPE);
8490}
8491
8493 return Select<BoolT>(
8494 TaggedIsSmi(object), [=, this] { return Int32TrueConstant(); },
8495 [=, this] {
8496 return UncheckedCast<BoolT>(
8497 Word32Or(IsHeapNumber(CAST(object)), IsBigInt(CAST(object))));
8498 });
8499}
8500
8502 TVARIABLE(BoolT, var_result, Int32TrueConstant());
8503 Label out(this);
8504
8505 GotoIf(TaggedIsSmi(number), &out);
8506
8507 TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
8508 TNode<Float64T> smi_min =
8509 Float64Constant(static_cast<double>(Smi::kMinValue));
8510 TNode<Float64T> smi_max =
8511 Float64Constant(static_cast<double>(Smi::kMaxValue));
8512
8513 GotoIf(Float64LessThan(value, smi_min), &out);
8514 GotoIf(Float64GreaterThan(value, smi_max), &out);
8515 GotoIfNot(Float64Equal(value, value), &out); // NaN.
8516
8517 var_result = Int32FalseConstant();
8518 Goto(&out);
8519
8520 BIND(&out);
8521 return var_result.value();
8522}
8523
8525 return Select<BoolT>(
8526 TaggedIsSmi(number), [=, this] { return TaggedIsPositiveSmi(number); },
8527 [=, this] { return IsHeapNumberPositive(CAST(number)); });
8528}
8529
8530// TODO(cbruni): Use TNode<HeapNumber> instead of custom name.
8532 TNode<Float64T> value = LoadHeapNumberValue(number);
8533 TNode<Float64T> float_zero = Float64Constant(0.);
8534 return Float64GreaterThanOrEqual(value, float_zero);
8535}
8536
8538 TNode<Number> number) {
8539 return Select<BoolT>(
9540 // TODO(cbruni): Introduce TaggedIsNonNegativeSmi to avoid confusion.
8541 TaggedIsSmi(number), [=, this] { return TaggedIsPositiveSmi(number); },
8542 [=, this] {
8543 TNode<HeapNumber> heap_number = CAST(number);
8544 return Select<BoolT>(
8545 IsInteger(heap_number),
8546 [=, this] { return IsHeapNumberPositive(heap_number); },
8547 [=, this] { return Int32FalseConstant(); });
8548 });
8549}
8550
8552 return Select<BoolT>(
8553 TaggedIsSmi(number), [=, this] { return Int32TrueConstant(); },
8554 [=, this] {
8555 return Select<BoolT>(
8556 IsHeapNumber(CAST(number)),
8557 [=, this] {
8559 },
8560 [=, this] { return Int32FalseConstant(); });
8561 });
8562}
8563
8565 // Load the actual value of {number}.
8566 TNode<Float64T> number_value = LoadHeapNumberValue(number);
8567 // Truncate the value of {number} to an integer (or an infinity).
8568 TNode<Float64T> integer = Float64Trunc(number_value);
8569
8570 return Select<BoolT>(
8571 // Check if {number}s value matches the integer (ruling out the
8572 // infinities).
8573 Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0)),
8574 [=, this] {
8575 // Check if the {integer} value is in safe integer range.
8576 return Float64LessThanOrEqual(Float64Abs(integer),
8578 },
8579 [=, this] { return Int32FalseConstant(); });
8580}
8581
8583 return Select<BoolT>(
8584 TaggedIsSmi(number), [=, this] { return Int32TrueConstant(); },
8585 [=, this] {
8586 return Select<BoolT>(
8587 IsHeapNumber(CAST(number)),
8588 [=, this] { return IsInteger(UncheckedCast<HeapNumber>(number)); },
8589 [=, this] { return Int32FalseConstant(); });
8590 });
8591}
8592
8594 TNode<Float64T> number_value = LoadHeapNumberValue(number);
8595 // Truncate the value of {number} to an integer (or an infinity).
8596 TNode<Float64T> integer = Float64Trunc(number_value);
8597 // Check if {number}s value matches the integer (ruling out the infinities).
8598 return Float64Equal(Float64Sub(number_value, integer), Float64Constant(0.0));
8599}
8600
8602 // Check that the HeapNumber is a valid uint32
8603 return Select<BoolT>(
8604 IsHeapNumberPositive(number),
8605 [=, this] {
8606 TNode<Float64T> value = LoadHeapNumberValue(number);
8607 TNode<Uint32T> int_value = TruncateFloat64ToWord32(value);
8608 return Float64Equal(value, ChangeUint32ToFloat64(int_value));
8609 },
8610 [=, this] { return Int32FalseConstant(); });
8611}
8612
8614 return Select<BoolT>(
8615 TaggedIsSmi(number), [=, this] { return TaggedIsPositiveSmi(number); },
8616 [=, this] { return IsHeapNumberUint32(CAST(number)); });
8617}
8618
8627
8628template <typename TIndex>
8630 TNode<TIndex> element_count, int base_size) {
8631 static_assert(std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, IntPtrT>,
8632 "Only Smi or IntPtrT element_count is allowed");
8633 int max_newspace_elements =
8634 (kMaxRegularHeapObjectSize - base_size) / kTaggedSize;
8635 return IntPtrOrSmiGreaterThan(
8636 element_count, IntPtrOrSmiConstant<TIndex>(max_newspace_elements));
8637}
8638
8640 TNode<UintPtrT> index) {
8641 CSA_DCHECK(this, UintPtrLessThan(index, LoadStringLengthAsWord(string)));
8642
8643 TVARIABLE(Uint16T, var_result);
8644
8645 Label return_result(this), if_runtime(this, Label::kDeferred),
8646 if_stringistwobyte(this), if_stringisonebyte(this);
8647
8648 ToDirectStringAssembler to_direct(state(), string);
8649 to_direct.TryToDirect(&if_runtime);
8650 const TNode<UintPtrT> offset =
8651 UintPtrAdd(index, Unsigned(to_direct.offset()));
8652 const TNode<BoolT> is_one_byte = to_direct.IsOneByte();
8653 const TNode<RawPtrT> string_data = to_direct.PointerToData(&if_runtime);
8654
8655 // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
8656 Branch(is_one_byte, &if_stringisonebyte, &if_stringistwobyte);
8657
8658 BIND(&if_stringisonebyte);
8659 {
8660 var_result = Load<Uint8T>(string_data, offset);
8661 Goto(&return_result);
8662 }
8663
8664 BIND(&if_stringistwobyte);
8665 {
8666 var_result = Load<Uint16T>(string_data, WordShl(offset, IntPtrConstant(1)));
8667 Goto(&return_result);
8668 }
8669
8670 BIND(&if_runtime);
8671 {
8673 CallRuntime(Runtime::kStringCharCodeAt, NoContextConstant(), string,
8674 ChangeUintPtrToTagged(index));
8676 Goto(&return_result);
8677 }
8678
8679 BIND(&return_result);
8680 return var_result.value();
8681}
8682
8684 TVARIABLE(String, var_result);
8685
8686 // Check if the {code} is a one-byte char code.
8687 Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
8688 if_done(this);
8689 Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
8690 &if_codeisonebyte, &if_codeistwobyte);
8691 BIND(&if_codeisonebyte);
8692 {
8693 // Load the isolate wide single character string cache.
8694 TNode<FixedArray> cache = SingleCharacterStringTableConstant();
8695 TNode<IntPtrT> code_index = Signed(ChangeUint32ToWord(code));
8696
8697 TNode<Object> entry = UnsafeLoadFixedArrayElement(cache, code_index);
8698 CSA_DCHECK(this, Word32BinaryNot(IsUndefined(entry)));
8699
8700 // Return the entry from the {cache}.
8701 var_result = CAST(entry);
8702 Goto(&if_done);
8703 }
8704
8705 BIND(&if_codeistwobyte);
8706 {
8707 // Allocate a new SeqTwoByteString for {code}.
8712 code);
8713 var_result = result;
8714 Goto(&if_done);
8715 }
8716
8717 BIND(&if_done);
8718 return var_result.value();
8719}
8720
8722 compiler::CodeAssemblerState* state, TNode<String> string, Flags flags)
8723 : CodeStubAssembler(state),
8724 var_string_(string, this),
8726 var_map_(LoadMap(string), this),
8727#else
8728 var_instance_type_(LoadInstanceType(string), this),
8729#endif
8730 var_offset_(IntPtrConstant(0), this),
8731 var_is_external_(Int32Constant(0), this),
8732 flags_(flags) {
8733}
8734
8736 Label dispatch(this, {&var_string_, &var_offset_,
8737#if V8_STATIC_ROOTS_BOOL
8738 &var_map_
8739#else
8741#endif
8742 });
8743 Label if_iscons(this);
8744 Label if_isexternal(this);
8745 Label if_issliced(this);
8746 Label if_isthin(this);
8747 Label out(this);
8748
8749#if V8_STATIC_ROOTS_BOOL
8750 // The seq string check is in the dispatch.
8751 Goto(&dispatch);
8752#else
8754 &dispatch);
8755#endif
8756
8757 // Dispatch based on string representation.
8758 BIND(&dispatch);
8759 {
8760#if V8_STATIC_ROOTS_BOOL
8761 TNode<Int32T> map_bits =
8762 TruncateIntPtrToInt32(BitcastTaggedToWord(var_map_.value()));
8763
8764 using StringTypeRange = InstanceTypeChecker::kUniqueMapRangeOfStringType;
8765 // Check the string map ranges in dense increasing order, to avoid needing
8766 // to subtract away the lower bound. Do these couple of range checks instead
8767 // of a switch, since we can make them all single dense compares.
8768 static_assert(StringTypeRange::kSeqString.first == 0);
8769 GotoIf(Uint32LessThanOrEqual(
8770 map_bits, Int32Constant(StringTypeRange::kSeqString.second)),
8771 &out, GotoHint::kLabel);
8772
8773 static_assert(StringTypeRange::kSeqString.second + Map::kSize ==
8774 StringTypeRange::kExternalString.first);
8775 GotoIf(
8776 Uint32LessThanOrEqual(
8777 map_bits, Int32Constant(StringTypeRange::kExternalString.second)),
8778 &if_isexternal);
8779
8780 static_assert(StringTypeRange::kExternalString.second + Map::kSize ==
8781 StringTypeRange::kConsString.first);
8782 GotoIf(Uint32LessThanOrEqual(
8783 map_bits, Int32Constant(StringTypeRange::kConsString.second)),
8784 &if_iscons);
8785
8786 static_assert(StringTypeRange::kConsString.second + Map::kSize ==
8787 StringTypeRange::kSlicedString.first);
8788 GotoIf(Uint32LessThanOrEqual(
8789 map_bits, Int32Constant(StringTypeRange::kSlicedString.second)),
8790 &if_issliced);
8791
8792 static_assert(StringTypeRange::kSlicedString.second + Map::kSize ==
8793 StringTypeRange::kThinString.first);
8794 // No need to check for thin strings, they're the last string map.
8795 static_assert(StringTypeRange::kThinString.second ==
8796 InstanceTypeChecker::kStringMapUpperBound);
8797 Goto(&if_isthin);
8798#else
8799 int32_t values[] = {
8802 };
8803 Label* labels[] = {
8804 &out, &if_iscons, &if_isexternal, &if_issliced, &if_isthin,
8805 };
8806 static_assert(arraysize(values) == arraysize(labels));
8807
8808 const TNode<Int32T> representation = Word32And(
8810 Switch(representation, if_bailout, values, labels, arraysize(values));
8811#endif
8812 }
8813
8814 // Cons string. Check whether it is flat, then fetch first part.
8815 // Flat cons strings have an empty second part.
8816 BIND(&if_iscons);
8817 {
8818 const TNode<String> string = var_string_.value();
8819 GotoIfNot(IsEmptyString(LoadObjectField<String>(
8820 string, offsetof(ConsString, second_))),
8821 if_bailout, GotoHint::kFallthrough);
8822
8823 const TNode<String> lhs =
8824 LoadObjectField<String>(string, offsetof(ConsString, first_));
8825 var_string_ = lhs;
8826#if V8_STATIC_ROOTS_BOOL
8827 var_map_ = LoadMap(lhs);
8828#else
8830#endif
8831
8832 Goto(&dispatch);
8833 }
8834
8835 // Sliced string. Fetch parent and correct start index by offset.
8836 BIND(&if_issliced);
8837 {
8838 if (!v8_flags.string_slices || (flags_ & kDontUnpackSlicedStrings)) {
8839 Goto(if_bailout);
8840 } else {
8841 const TNode<String> string = var_string_.value();
8843 string, offsetof(SlicedString, offset_));
8844 var_offset_ = IntPtrAdd(var_offset_.value(), sliced_offset);
8845
8846 const TNode<String> parent =
8847 LoadObjectField<String>(string, offsetof(SlicedString, parent_));
8848 var_string_ = parent;
8849#if V8_STATIC_ROOTS_BOOL
8850 var_map_ = LoadMap(parent);
8851#else
8853#endif
8854
8855 Goto(&dispatch);
8856 }
8857 }
8858
8859 // Thin string. Fetch the actual string.
8860 BIND(&if_isthin);
8861 {
8862 const TNode<String> string = var_string_.value();
8863 const TNode<String> actual_string =
8864 LoadObjectField<String>(string, offsetof(ThinString, actual_));
8865
8866 var_string_ = actual_string;
8867#if V8_STATIC_ROOTS_BOOL
8868 var_map_ = LoadMap(actual_string);
8869#else
8870 var_instance_type_ = LoadInstanceType(actual_string);
8871#endif
8872
8873 Goto(&dispatch);
8874 }
8875
8876 // External string.
8877 BIND(&if_isexternal);
8879 Goto(&out);
8880
8881 BIND(&out);
8882 return var_string_.value();
8883}
8884
8886 Label flatten_in_runtime(this, Label::kDeferred),
8887 unreachable(this, Label::kDeferred), out(this);
8888
8889 TryToDirect(&flatten_in_runtime);
8890 Goto(&out);
8891
8892 BIND(&flatten_in_runtime);
8893 var_string_ = CAST(CallRuntime(Runtime::kFlattenString, NoContextConstant(),
8894 var_string_.value()));
8895#if V8_STATIC_ROOTS_BOOL
8896 var_map_ = LoadMap(var_string_.value());
8897#else
8899#endif
8900
8901 TryToDirect(&unreachable);
8902 Goto(&out);
8903
8904 BIND(&unreachable);
8905 Unreachable();
8906
8907 BIND(&out);
8908 return var_string_.value();
8909}
8910
8912#if V8_STATIC_ROOTS_BOOL
8913 return IsOneByteStringMap(var_map_.value());
8914#else
8916#endif
8917}
8918
8920 StringPointerKind ptr_kind, Label* if_bailout) {
8921 CHECK(ptr_kind == PTR_TO_DATA || ptr_kind == PTR_TO_STRING);
8922
8923 TVARIABLE(RawPtrT, var_result);
8924 Label out(this), if_issequential(this), if_isexternal(this, Label::kDeferred);
8925 Branch(is_external(), &if_isexternal, &if_issequential);
8926
8927 BIND(&if_issequential);
8928 {
8929 static_assert(OFFSET_OF_DATA_START(SeqOneByteString) ==
8933 if (ptr_kind == PTR_TO_DATA) {
8937 }
8938 var_result = result;
8939 Goto(&out);
8940 }
8941
8942 BIND(&if_isexternal);
8943 {
8944#if V8_STATIC_ROOTS_BOOL
8945 GotoIf(IsUncachedExternalStringMap(var_map_.value()), if_bailout);
8946#else
8948 if_bailout);
8949#endif
8950
8951 TNode<String> string = var_string_.value();
8953 if (ptr_kind == PTR_TO_STRING) {
8957 }
8958 var_result = result;
8959 Goto(&out);
8960 }
8961
8962 BIND(&out);
8963 return var_result.value();
8964}
8965
8967 Label runtime(this, Label::kDeferred);
8968 Label end(this);
8969
8970 TVARIABLE(Number, var_result);
8971
8972 // Check if string has a cached array index.
8973 TNode<Uint32T> raw_hash_field = LoadNameRawHashField(input);
8975 &runtime);
8976
8977 var_result = SmiTag(Signed(
8979 Goto(&end);
8980
8981 BIND(&runtime);
8982 {
8983 var_result =
8984 CAST(CallRuntime(Runtime::kStringToNumber, NoContextConstant(), input));
8985 Goto(&end);
8986 }
8987
8988 BIND(&end);
8989 return var_result.value();
8990}
8991
8993 Label* bailout) {
8995 TVARIABLE(Smi, smi_input);
8996 Label if_smi(this), not_smi(this), if_heap_number(this), done(this, &result);
8997
8998 // Load the number string cache.
8999 TNode<FixedArray> number_string_cache = NumberStringCacheConstant();
9000
9001 // Make the hash mask from the length of the number string cache. It
9002 // contains two elements (number and string) for each cache entry.
9003 TNode<Uint32T> number_string_cache_length =
9004 LoadAndUntagFixedArrayBaseLengthAsUint32(number_string_cache);
9007 Int32Sub(Word32Shr(number_string_cache_length, one), one);
9008
9009 GotoIfNot(TaggedIsSmi(input), &if_heap_number);
9010 smi_input = CAST(input);
9011 Goto(&if_smi);
9012
9013 BIND(&if_heap_number);
9014 TNode<HeapNumber> heap_number_input = CAST(input);
9015 {
9016 Comment("NumberToString - HeapNumber");
9017 // Try normalizing the HeapNumber.
9018 smi_input = TryHeapNumberToSmi(heap_number_input, &not_smi);
9019 Goto(&if_smi);
9020 }
9021 BIND(&if_smi);
9022 {
9023 Comment("NumberToString - Smi");
9024 // Load the smi key, make sure it matches the smi we're looking for.
9025 TNode<Word32T> hash = Word32And(SmiToInt32(smi_input.value()), mask);
9026 TNode<IntPtrT> entry_index =
9027 Signed(ChangeUint32ToWord(Int32Add(hash, hash)));
9028 TNode<Object> smi_key =
9029 UnsafeLoadFixedArrayElement(number_string_cache, entry_index);
9030 Label if_smi_cache_missed(this);
9031 GotoIf(TaggedNotEqual(smi_key, smi_input.value()), &if_smi_cache_missed);
9032
9033 // Smi match, return value from cache entry.
9034 result = CAST(UnsafeLoadFixedArrayElement(number_string_cache, entry_index,
9035 kTaggedSize));
9036 Goto(&done);
9037
9038 BIND(&if_smi_cache_missed);
9039 {
9040 Label store_to_cache(this);
9041
9042 // Bailout when the cache is not full-size.
9043 const int kFullCacheSize =
9045 Branch(Uint32LessThan(number_string_cache_length,
9046 Uint32Constant(kFullCacheSize)),
9047 bailout, &store_to_cache);
9048
9049 BIND(&store_to_cache);
9050 {
9051 // Generate string and update string hash field.
9052 result = IntToDecimalString(SmiToInt32(smi_input.value()));
9053
9054 // Store string into cache.
9055 StoreFixedArrayElement(number_string_cache, entry_index,
9056 smi_input.value());
9057 StoreFixedArrayElement(number_string_cache,
9058 IntPtrAdd(entry_index, IntPtrConstant(1)),
9059 result.value());
9060 Goto(&done);
9061 }
9062 }
9063 }
9064
9065 BIND(&not_smi);
9066 {
9067 // Make a hash from the two 32-bit values of the double.
9068 TNode<Int32T> low = LoadObjectField<Int32T>(heap_number_input,
9069 offsetof(HeapNumber, value_));
9071 heap_number_input, offsetof(HeapNumber, value_) + kIntSize);
9072 TNode<Word32T> hash = Word32And(Word32Xor(low, high), mask);
9073 TNode<IntPtrT> entry_index =
9074 Signed(ChangeUint32ToWord(Int32Add(hash, hash)));
9075
9076 // Cache entry's key must be a heap number
9077 TNode<Object> number_key =
9078 UnsafeLoadFixedArrayElement(number_string_cache, entry_index);
9079 GotoIf(TaggedIsSmi(number_key), bailout);
9080 TNode<HeapObject> number_key_heap_object = CAST(number_key);
9081 GotoIfNot(IsHeapNumber(number_key_heap_object), bailout);
9082
9083 // Cache entry's key must match the heap number value we're looking for.
9085 number_key_heap_object, offsetof(HeapNumber, value_));
9087 number_key_heap_object, offsetof(HeapNumber, value_) + kIntSize);
9088 GotoIfNot(Word32Equal(low, low_compare), bailout);
9089 GotoIfNot(Word32Equal(high, high_compare), bailout);
9090
9091 // Heap number match, return value from cache entry.
9092 result = CAST(UnsafeLoadFixedArrayElement(number_string_cache, entry_index,
9093 kTaggedSize));
9094 Goto(&done);
9095 }
9096 BIND(&done);
9097 return result.value();
9098}
9099
9102 Label runtime(this, Label::kDeferred), done(this, &result);
9103
9104 GotoIfForceSlowPath(&runtime);
9105
9106 result = NumberToString(input, &runtime);
9107 Goto(&done);
9108
9109 BIND(&runtime);
9110 {
9111 // No cache entry, go to the runtime.
9112 result = CAST(
9113 CallRuntime(Runtime::kNumberToStringSlow, NoContextConstant(), input));
9114 Goto(&done);
9115 }
9116 BIND(&done);
9117 return result.value();
9118}
9119
9122 BigIntHandling bigint_handling) {
9123 CSA_DCHECK(this, Word32BinaryNot(IsHeapNumber(input)));
9124
9125 TVARIABLE(HeapObject, var_input, input);
9126 TVARIABLE(Numeric, var_result);
9127 TVARIABLE(Uint16T, instance_type, LoadInstanceType(var_input.value()));
9128 Label end(this), if_inputisreceiver(this, Label::kDeferred),
9129 if_inputisnotreceiver(this);
9130
9131 // We need to handle JSReceiver first since we might need to do two
9132 // conversions due to ToPrimitive.
9133 Branch(IsJSReceiverInstanceType(instance_type.value()), &if_inputisreceiver,
9134 &if_inputisnotreceiver);
9135
9136 BIND(&if_inputisreceiver);
9137 {
9138 // The {var_input.value()} is a JSReceiver, we need to convert it to a
9139 // Primitive first using the ToPrimitive type conversion, preferably
9140 // yielding a Number.
9141 Builtin builtin =
9143 TNode<Object> result = CallBuiltin(builtin, context, var_input.value());
9144
9145 // Check if the {result} is already a Number/Numeric.
9146 Label if_done(this), if_notdone(this);
9148 : IsNumeric(result),
9149 &if_done, &if_notdone);
9150
9151 BIND(&if_done);
9152 {
9153 // The ToPrimitive conversion already gave us a Number/Numeric, so
9154 // we're done.
9155 var_result = CAST(result);
9156 Goto(&end);
9157 }
9158
9159 BIND(&if_notdone);
9160 {
9161 // We now have a Primitive {result}, but it's not yet a
9162 // Number/Numeric.
9163 var_input = CAST(result);
9164 // We have a new input. Redo the check and reload instance_type.
9165 CSA_DCHECK(this, Word32BinaryNot(IsHeapNumber(var_input.value())));
9166 instance_type = LoadInstanceType(var_input.value());
9167 Goto(&if_inputisnotreceiver);
9168 }
9169 }
9170
9171 BIND(&if_inputisnotreceiver);
9172 {
9173 Label not_plain_primitive(this), if_inputisbigint(this),
9174 if_inputisother(this, Label::kDeferred);
9175
9176 // String and Oddball cases.
9177 TVARIABLE(Number, var_result_number);
9178 TryPlainPrimitiveNonNumberToNumber(var_input.value(), &var_result_number,
9179 &not_plain_primitive);
9180 var_result = var_result_number.value();
9181 Goto(&end);
9182
9183 BIND(&not_plain_primitive);
9184 {
9185 Branch(IsBigIntInstanceType(instance_type.value()), &if_inputisbigint,
9186 &if_inputisother);
9187
9188 BIND(&if_inputisbigint);
9189 {
9190 if (mode == Object::Conversion::kToNumeric) {
9191 var_result = CAST(var_input.value());
9192 Goto(&end);
9193 } else {
9195 if (bigint_handling == BigIntHandling::kThrow) {
9196 Goto(&if_inputisother);
9197 } else {
9199 var_result = CAST(CallRuntime(Runtime::kBigIntToNumber, context,
9200 var_input.value()));
9201 Goto(&end);
9202 }
9203 }
9204 }
9205
9206 BIND(&if_inputisother);
9207 {
9208 // The {var_input.value()} is something else (e.g. Symbol), let the
9209 // runtime figure out the correct exception. Note: We cannot tail call
9210 // to the runtime here, as js-to-wasm trampolines also use this code
9211 // currently, and they declare all outgoing parameters as untagged,
9212 // while we would push a tagged object here.
9213 auto function_id = mode == Object::Conversion::kToNumber
9214 ? Runtime::kToNumber
9215 : Runtime::kToNumeric;
9216 var_result = CAST(CallRuntime(function_id, context, var_input.value()));
9217 Goto(&end);
9218 }
9219 }
9220 }
9221
9222 BIND(&end);
9223 if (mode == Object::Conversion::kToNumber) {
9224 CSA_DCHECK(this, IsNumber(var_result.value()));
9225 }
9226 return var_result.value();
9227}
9228
9230 TNode<Context> context, TNode<HeapObject> input,
9231 BigIntHandling bigint_handling) {
9233 context, input, Object::Conversion::kToNumber, bigint_handling));
9234}
9235
9237 TNode<HeapObject> input, TVariable<Number>* var_result, Label* if_bailout) {
9238 CSA_DCHECK(this, Word32BinaryNot(IsHeapNumber(input)));
9239 Label done(this);
9240
9241 // Dispatch on the {input} instance type.
9242 TNode<Uint16T> input_instance_type = LoadInstanceType(input);
9243 Label if_inputisstring(this);
9244 GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
9245 GotoIfNot(InstanceTypeEqual(input_instance_type, ODDBALL_TYPE), if_bailout);
9246
9247 // The {input} is an Oddball, we just need to load the Number value of it.
9248 *var_result = LoadObjectField<Number>(input, offsetof(Oddball, to_number_));
9249 Goto(&done);
9250
9251 BIND(&if_inputisstring);
9252 {
9253 // The {input} is a String, use the fast stub to convert it to a Number.
9254 *var_result = StringToNumber(CAST(input));
9255 Goto(&done);
9256 }
9257
9258 BIND(&done);
9259}
9260
9266
9268 TNode<Object> input,
9269 BigIntHandling bigint_handling) {
9270 return CAST(ToNumberOrNumeric([context] { return context; }, input, nullptr,
9272 bigint_handling));
9273}
9274
9276 TNode<Object> input) {
9277 TVARIABLE(Number, var_result);
9278 Label end(this), not_smi(this, Label::kDeferred);
9279
9280 GotoIfNot(TaggedIsSmi(input), &not_smi);
9281 var_result = CAST(input);
9282 Goto(&end);
9283
9284 BIND(&not_smi);
9285 {
9286 var_result = Select<Number>(
9287 IsHeapNumber(CAST(input)), [=, this] { return CAST(input); },
9288 [=, this] {
9289 return CAST(CallBuiltin(Builtin::kNonNumberToNumber, context, input));
9290 });
9291 Goto(&end);
9292 }
9293
9294 BIND(&end);
9295 return var_result.value();
9296}
9297
9299 LazyNode<Context> context, TNode<Object> input,
9300 TVariable<Smi>* var_type_feedback, Object::Conversion mode,
9301 BigIntHandling bigint_handling) {
9302 TVARIABLE(Numeric, var_result);
9303 Label end(this);
9304
9305 Label not_smi(this, Label::kDeferred);
9306 GotoIfNot(TaggedIsSmi(input), &not_smi);
9307 TNode<Smi> input_smi = CAST(input);
9308 var_result = input_smi;
9309 if (var_type_feedback) {
9311 }
9312 Goto(&end);
9313
9314 BIND(&not_smi);
9315 {
9316 Label not_heap_number(this, Label::kDeferred);
9317 TNode<HeapObject> input_ho = CAST(input);
9318 GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
9319
9320 TNode<HeapNumber> input_hn = CAST(input_ho);
9321 var_result = input_hn;
9322 if (var_type_feedback) {
9323 *var_type_feedback = SmiConstant(BinaryOperationFeedback::kNumber);
9324 }
9325 Goto(&end);
9326
9327 BIND(&not_heap_number);
9328 {
9329 if (mode == Object::Conversion::kToNumeric) {
9330 // Special case for collecting BigInt feedback.
9331 Label not_bigint(this);
9332 GotoIfNot(IsBigInt(input_ho), &not_bigint);
9333 {
9334 var_result = CAST(input_ho);
9335 *var_type_feedback = SmiConstant(BinaryOperationFeedback::kBigInt);
9336 Goto(&end);
9337 }
9338 BIND(&not_bigint);
9339 }
9340 var_result = NonNumberToNumberOrNumeric(context(), input_ho, mode,
9341 bigint_handling);
9342 if (var_type_feedback) {
9343 *var_type_feedback = SmiConstant(BinaryOperationFeedback::kAny);
9344 }
9345 Goto(&end);
9346 }
9347 }
9348
9349 BIND(&end);
9350 return var_result.value();
9351}
9352
9354 TVARIABLE(Number, var_result);
9355 Label end(this), fallback(this);
9356
9357 Label not_smi(this, Label::kDeferred);
9358 GotoIfNot(TaggedIsSmi(input), &not_smi);
9359 TNode<Smi> input_smi = CAST(input);
9360 var_result = input_smi;
9361 Goto(&end);
9362
9363 BIND(&not_smi);
9364 {
9365 Label not_heap_number(this, Label::kDeferred);
9366 TNode<HeapObject> input_ho = CAST(input);
9367 GotoIfNot(IsHeapNumber(input_ho), &not_heap_number);
9368
9369 TNode<HeapNumber> input_hn = CAST(input_ho);
9370 var_result = input_hn;
9371 Goto(&end);
9372
9373 BIND(&not_heap_number);
9374 {
9375 TryPlainPrimitiveNonNumberToNumber(input_ho, &var_result, &fallback);
9376 Goto(&end);
9377 BIND(&fallback);
9378 Unreachable();
9379 }
9380 }
9381
9382 BIND(&end);
9383 return var_result.value();
9384}
9385
9387 TNode<Object> input) {
9388 TVARIABLE(BigInt, var_result);
9389 Label if_bigint(this), done(this), if_throw(this);
9390
9391 GotoIf(TaggedIsSmi(input), &if_throw);
9392 GotoIf(IsBigInt(CAST(input)), &if_bigint);
9393 var_result = CAST(CallRuntime(Runtime::kToBigInt, context, input));
9394 Goto(&done);
9395
9396 BIND(&if_bigint);
9397 var_result = CAST(input);
9398 Goto(&done);
9399
9400 BIND(&if_throw);
9401 ThrowTypeError(context, MessageTemplate::kBigIntFromObject, input);
9402
9403 BIND(&done);
9404 return var_result.value();
9405}
9406
9408 TNode<Object> input) {
9409 TVARIABLE(BigInt, var_result);
9410 Label if_bigint(this), if_not_bigint(this), done(this);
9411
9412 GotoIf(TaggedIsSmi(input), &if_not_bigint);
9413 GotoIf(IsBigInt(CAST(input)), &if_bigint);
9414 Goto(&if_not_bigint);
9415
9416 BIND(&if_bigint);
9417 var_result = CAST(input);
9418 Goto(&done);
9419
9420 BIND(&if_not_bigint);
9421 var_result =
9422 CAST(CallRuntime(Runtime::kToBigIntConvertNumber, context, input));
9423 Goto(&done);
9424
9425 BIND(&done);
9426 return var_result.value();
9427}
9428
9430 TNode<Object> value,
9431 Label* if_not_bigint, Label* if_bigint,
9432 Label* if_bigint64,
9433 TVariable<BigInt>* var_bigint,
9434 TVariable<Smi>* var_feedback) {
9435 Label done(this), is_smi(this), is_heapnumber(this), maybe_bigint64(this),
9436 is_bigint(this), is_oddball(this);
9437 GotoIf(TaggedIsSmi(value), &is_smi);
9438 TNode<HeapObject> heap_object_value = CAST(value);
9439 TNode<Map> map = LoadMap(heap_object_value);
9440 GotoIf(IsHeapNumberMap(map), &is_heapnumber);
9441 TNode<Uint16T> instance_type = LoadMapInstanceType(map);
9442 if (Is64() && if_bigint64) {
9443 GotoIf(IsBigIntInstanceType(instance_type), &maybe_bigint64);
9444 } else {
9445 GotoIf(IsBigIntInstanceType(instance_type), &is_bigint);
9446 }
9447
9448 // {heap_object_value} is not a Numeric yet.
9449 GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &is_oddball);
9450 TNode<Numeric> numeric_value = CAST(
9451 CallBuiltin(Builtin::kNonNumberToNumeric, context, heap_object_value));
9453 GotoIf(TaggedIsSmi(numeric_value), if_not_bigint);
9454 GotoIfNot(IsBigInt(CAST(numeric_value)), if_not_bigint);
9455 *var_bigint = CAST(numeric_value);
9456 Goto(if_bigint);
9457
9458 BIND(&is_smi);
9460 Goto(if_not_bigint);
9461
9462 BIND(&is_heapnumber);
9464 Goto(if_not_bigint);
9465
9466 if (Is64() && if_bigint64) {
9467 BIND(&maybe_bigint64);
9468 GotoIfLargeBigInt(CAST(value), &is_bigint);
9469 *var_bigint = CAST(value);
9471 Goto(if_bigint64);
9472 }
9473
9474 BIND(&is_bigint);
9475 *var_bigint = CAST(value);
9477 Goto(if_bigint);
9478
9479 BIND(&is_oddball);
9481 Goto(if_not_bigint);
9482}
9483
9484// ES#sec-touint32
9486 TNode<Object> input) {
9487 const TNode<Float64T> float_zero = Float64Constant(0.0);
9488 const TNode<Float64T> float_two_32 =
9489 Float64Constant(static_cast<double>(1ULL << 32));
9490
9491 Label out(this);
9492
9493 TVARIABLE(Object, var_result, input);
9494
9495 // Early exit for positive smis.
9496 {
9497 // TODO(jgruber): This branch and the recheck below can be removed once we
9498 // have a ToNumber with multiple exits.
9499 Label next(this, Label::kDeferred);
9500 Branch(TaggedIsPositiveSmi(input), &out, &next);
9501 BIND(&next);
9502 }
9503
9504 const TNode<Number> number = ToNumber(context, input);
9505 var_result = number;
9506
9507 // Perhaps we have a positive smi now.
9508 {
9509 Label next(this, Label::kDeferred);
9510 Branch(TaggedIsPositiveSmi(number), &out, &next);
9511 BIND(&next);
9512 }
9513
9514 Label if_isnegativesmi(this), if_isheapnumber(this);
9515 Branch(TaggedIsSmi(number), &if_isnegativesmi, &if_isheapnumber);
9516
9517 BIND(&if_isnegativesmi);
9518 {
9519 const TNode<Int32T> uint32_value = SmiToInt32(CAST(number));
9520 TNode<Float64T> float64_value = ChangeUint32ToFloat64(uint32_value);
9521 var_result = AllocateHeapNumberWithValue(float64_value);
9522 Goto(&out);
9523 }
9524
9525 BIND(&if_isheapnumber);
9526 {
9527 Label return_zero(this);
9528 const TNode<Float64T> value = LoadHeapNumberValue(CAST(number));
9529
9530 {
9531 // +-0.
9532 Label next(this);
9533 Branch(Float64Equal(value, float_zero), &return_zero, &next);
9534 BIND(&next);
9535 }
9536
9537 {
9538 // NaN.
9539 Label next(this);
9540 Branch(Float64Equal(value, value), &next, &return_zero);
9541 BIND(&next);
9542 }
9543
9544 {
9545 // +Infinity.
9546 Label next(this);
9547 const TNode<Float64T> positive_infinity =
9548 Float64Constant(std::numeric_limits<double>::infinity());
9549 Branch(Float64Equal(value, positive_infinity), &return_zero, &next);
9550 BIND(&next);
9551 }
9552
9553 {
9554 // -Infinity.
9555 Label next(this);
9556 const TNode<Float64T> negative_infinity =
9557 Float64Constant(-1.0 * std::numeric_limits<double>::infinity());
9558 Branch(Float64Equal(value, negative_infinity), &return_zero, &next);
9559 BIND(&next);
9560 }
9561
9562 // * Let int be the mathematical value that is the same sign as number and
9563 // whose magnitude is floor(abs(number)).
9564 // * Let int32bit be int modulo 2^32.
9565 // * Return int32bit.
9566 {
9568 x = Float64Mod(x, float_two_32);
9569 x = Float64Add(x, float_two_32);
9570 x = Float64Mod(x, float_two_32);
9571
9573 var_result = result;
9574 Goto(&out);
9575 }
9576
9577 BIND(&return_zero);
9578 {
9579 var_result = SmiConstant(0);
9580 Goto(&out);
9581 }
9582 }
9583
9584 BIND(&out);
9585 return CAST(var_result.value());
9586}
9587
9589 TNode<Object> input) {
9590 TVARIABLE(Object, var_result, input);
9591 Label stub_call(this, Label::kDeferred), out(this);
9592
9593 GotoIf(TaggedIsSmi(input), &stub_call);
9594 Branch(IsString(CAST(input)), &out, &stub_call);
9595
9596 BIND(&stub_call);
9597 var_result = CallBuiltin(Builtin::kToString, context, input);
9598 Goto(&out);
9599
9600 BIND(&out);
9601 return CAST(var_result.value());
9602}
9603
9605 TNode<Object> input) {
9606 return CAST(CallBuiltin(Builtin::kToObject, context, input));
9607}
9608
9610 TNode<Object> input) {
9612 Label if_isreceiver(this), if_isnotreceiver(this, Label::kDeferred);
9613 Label done(this);
9614
9615 BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
9616
9617 BIND(&if_isreceiver);
9618 {
9619 result = CAST(input);
9620 Goto(&done);
9621 }
9622
9623 BIND(&if_isnotreceiver);
9624 {
9625 result = ToObject(context, input);
9626 Goto(&done);
9627 }
9628
9629 BIND(&done);
9630 return result.value();
9631}
9632
9634 TNode<Object> input) {
9635 TNode<Smi> smi_zero = SmiConstant(0);
9636 return Select<Number>(
9637 TaggedIsSmi(input), [=, this] { return SmiMax(CAST(input), smi_zero); },
9638 [=, this] {
9639 return CAST(CallBuiltin(Builtin::kToLength, context, input));
9640 });
9641}
9642
9647
9649 uint32_t shift, uint32_t mask) {
9650 DCHECK_EQ((mask >> shift) << shift, mask);
9651 if ((std::numeric_limits<uint32_t>::max() >> shift) ==
9652 ((std::numeric_limits<uint32_t>::max() & mask) >> shift)) {
9653 return Unsigned(Word32Shr(word32, static_cast<int>(shift)));
9654 } else {
9655 return Unsigned(Word32And(Word32Shr(word32, static_cast<int>(shift)),
9656 Int32Constant(mask >> shift)));
9657 }
9658}
9659
9661 uintptr_t mask) {
9662 DCHECK_EQ((mask >> shift) << shift, mask);
9663 if ((std::numeric_limits<uintptr_t>::max() >> shift) ==
9664 ((std::numeric_limits<uintptr_t>::max() & mask) >> shift)) {
9665 return Unsigned(WordShr(word, static_cast<int>(shift)));
9666 } else {
9667 return Unsigned(WordAnd(WordShr(word, static_cast<int>(shift)),
9668 IntPtrConstant(mask >> shift)));
9669 }
9670}
9671
9673 TNode<Uint32T> value,
9674 uint32_t shift, uint32_t mask,
9675 bool starts_as_zero) {
9676 DCHECK_EQ((mask >> shift) << shift, mask);
9677 // Ensure the {value} fits fully in the mask.
9678 CSA_DCHECK(this, Uint32LessThanOrEqual(value, Uint32Constant(mask >> shift)));
9679 TNode<Word32T> encoded_value = Word32Shl(value, Int32Constant(shift));
9680 TNode<Word32T> masked_word;
9681 if (starts_as_zero) {
9682 CSA_DCHECK(this, Word32Equal(Word32And(word, Int32Constant(~mask)), word));
9683 masked_word = word;
9684 } else {
9685 masked_word = Word32And(word, Int32Constant(~mask));
9686 }
9687 return Word32Or(masked_word, encoded_value);
9688}
9689
9691 TNode<UintPtrT> value,
9692 uint32_t shift, uintptr_t mask,
9693 bool starts_as_zero) {
9694 DCHECK_EQ((mask >> shift) << shift, mask);
9695 // Ensure the {value} fits fully in the mask.
9696 CSA_DCHECK(this,
9697 UintPtrLessThanOrEqual(value, UintPtrConstant(mask >> shift)));
9698 TNode<WordT> encoded_value = WordShl(value, static_cast<int>(shift));
9699 TNode<WordT> masked_word;
9700 if (starts_as_zero) {
9701 CSA_DCHECK(this, WordEqual(WordAnd(word, UintPtrConstant(~mask)), word));
9702 masked_word = word;
9703 } else {
9704 masked_word = WordAnd(word, UintPtrConstant(~mask));
9705 }
9706 return WordOr(masked_word, encoded_value);
9707}
9708
9710 if (v8_flags.native_code_counters && counter->Enabled()) {
9711 TNode<ExternalReference> counter_address =
9714 Int32Constant(value));
9715 }
9716}
9717
9719 DCHECK_GT(delta, 0);
9720 if (v8_flags.native_code_counters && counter->Enabled()) {
9721 TNode<ExternalReference> counter_address =
9723 // This operation has to be exactly 32-bit wide in case the external
9724 // reference table redirects the counter to a uint32_t dummy_stats_counter_
9725 // field.
9726 TNode<Int32T> value = Load<Int32T>(counter_address);
9727 value = Int32Add(value, Int32Constant(delta));
9728 StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
9729 }
9730}
9731
9733 DCHECK_GT(delta, 0);
9734 if (v8_flags.native_code_counters && counter->Enabled()) {
9735 TNode<ExternalReference> counter_address =
9737 // This operation has to be exactly 32-bit wide in case the external
9738 // reference table redirects the counter to a uint32_t dummy_stats_counter_
9739 // field.
9740 TNode<Int32T> value = Load<Int32T>(counter_address);
9741 value = Int32Sub(value, Int32Constant(delta));
9742 StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
9743 }
9744}
9745
9746template <typename TIndex>
9748 *variable =
9749 IntPtrOrSmiAdd(variable->value(), IntPtrOrSmiConstant<TIndex>(value));
9750}
9751
9752// Instantiate Increment for Smi and IntPtrT.
9753// TODO(v8:9708): Consider renaming to [Smi|IntPtrT|RawPtrT]Increment.
9755 int value);
9757 TVariable<IntPtrT>* variable, int value);
9759 TVariable<RawPtrT>* variable, int value);
9760
9764
9766 TVariable<IntPtrT>* var_index,
9767 Label* if_keyisunique,
9768 TVariable<Name>* var_unique,
9769 Label* if_bailout,
9770 Label* if_notinternalized) {
9771 Comment("TryToName");
9772
9773 TVARIABLE(Int32T, var_instance_type);
9774 Label if_keyisnotindex(this);
9775 *var_index = TryToIntptr(key, &if_keyisnotindex, &var_instance_type);
9776 Goto(if_keyisindex);
9777
9778 BIND(&if_keyisnotindex);
9779 {
9780 Label if_symbol(this), if_string(this),
9781 if_keyisother(this, Label::kDeferred);
9782
9783 // Symbols are unique.
9784 GotoIf(IsSymbolInstanceType(var_instance_type.value()), &if_symbol);
9785
9786 // Miss if |key| is not a String.
9787 static_assert(FIRST_NAME_TYPE == FIRST_TYPE);
9788 Branch(IsStringInstanceType(var_instance_type.value()), &if_string,
9789 &if_keyisother);
9790
9791 // Symbols are unique.
9792 BIND(&if_symbol);
9793 {
9794 *var_unique = CAST(key);
9795 Goto(if_keyisunique);
9796 }
9797
9798 BIND(&if_string);
9799 {
9800 TVARIABLE(Uint32T, var_raw_hash);
9801 Label check_string_hash(this, {&var_raw_hash});
9802
9803 // TODO(v8:12007): LoadNameRawHashField() should be an acquire load.
9804 var_raw_hash = LoadNameRawHashField(CAST(key));
9805 Goto(&check_string_hash);
9806 BIND(&check_string_hash);
9807 {
9808 Label if_thinstring(this), if_has_cached_index(this),
9809 if_forwarding_index(this, Label::kDeferred);
9810
9811 TNode<Uint32T> raw_hash_field = var_raw_hash.value();
9812 GotoIf(IsClearWord32(raw_hash_field,
9814 &if_has_cached_index);
9815 // No cached array index. If the string knows that it contains an index,
9816 // then it must be an uncacheable index. Handle this case in the
9817 // runtime.
9819 raw_hash_field, Name::HashFieldType::kIntegerIndex),
9820 if_bailout);
9821
9823 GotoIf(IsSetWord32(var_instance_type.value(), kThinStringTagBit),
9824 &if_thinstring);
9825
9826 // Check if the hash field encodes a forwarding index.
9829 &if_forwarding_index);
9830
9831 // Finally, check if |key| is internalized.
9832 static_assert(kNotInternalizedTag != 0);
9833 GotoIf(IsSetWord32(var_instance_type.value(), kIsNotInternalizedMask),
9834 if_notinternalized != nullptr ? if_notinternalized : if_bailout);
9835
9836 *var_unique = CAST(key);
9837 Goto(if_keyisunique);
9838
9839 BIND(&if_thinstring);
9840 {
9841 *var_unique =
9842 LoadObjectField<String>(CAST(key), offsetof(ThinString, actual_));
9843 Goto(if_keyisunique);
9844 }
9845
9846 BIND(&if_forwarding_index);
9847 {
9848 Label if_external(this), if_internalized(this);
9850 raw_hash_field, true),
9851 &if_external, &if_internalized);
9852 BIND(&if_external);
9853 {
9854 // We know nothing about external forwarding indices, so load the
9855 // forwarded hash and check all possibilities again.
9857 ExternalReference::raw_hash_from_forward_table());
9858 const TNode<ExternalReference> isolate_ptr =
9861 function, MachineType::Uint32(),
9862 std::make_pair(MachineType::Pointer(), isolate_ptr),
9863 std::make_pair(MachineType::Int32(),
9865 raw_hash_field))));
9866
9867 var_raw_hash = result;
9868 Goto(&check_string_hash);
9869 }
9870
9871 BIND(&if_internalized);
9872 {
9873 // Integer indices are not overwritten with internalized forwarding
9874 // indices, so we are guaranteed forwarding to a unique name.
9875 CSA_DCHECK(this,
9877 raw_hash_field, false));
9879 ExternalReference::string_from_forward_table());
9880 const TNode<ExternalReference> isolate_ptr =
9883 function, MachineType::AnyTagged(),
9884 std::make_pair(MachineType::Pointer(), isolate_ptr),
9885 std::make_pair(MachineType::Int32(),
9887 raw_hash_field))));
9888
9889 *var_unique = CAST(result);
9890 Goto(if_keyisunique);
9891 }
9892 }
9893
9894 BIND(&if_has_cached_index);
9895 {
9896 TNode<IntPtrT> index =
9898 raw_hash_field));
9899 CSA_DCHECK(this, IntPtrLessThan(index, IntPtrConstant(INT_MAX)));
9900 *var_index = index;
9901 Goto(if_keyisindex);
9902 }
9903 }
9904 }
9905
9906 BIND(&if_keyisother);
9907 {
9908 GotoIfNot(InstanceTypeEqual(var_instance_type.value(), ODDBALL_TYPE),
9909 if_bailout);
9910 *var_unique =
9911 LoadObjectField<String>(CAST(key), offsetof(Oddball, to_string_));
9912 Goto(if_keyisunique);
9913 }
9914 }
9915}
9916
9918 TNode<RawPtrT> sink,
9920 TNode<Int32T> length) {
9921 TNode<ExternalReference> function =
9922 ExternalConstant(ExternalReference::string_write_to_flat_one_byte());
9923 CallCFunction(function, std::nullopt,
9924 std::make_pair(MachineType::AnyTagged(), source),
9925 std::make_pair(MachineType::Pointer(), sink),
9926 std::make_pair(MachineType::Int32(), start),
9927 std::make_pair(MachineType::Int32(), length));
9928}
9929
9931 TNode<RawPtrT> sink,
9933 TNode<Int32T> length) {
9934 TNode<ExternalReference> function =
9935 ExternalConstant(ExternalReference::string_write_to_flat_two_byte());
9936 CallCFunction(function, std::nullopt,
9937 std::make_pair(MachineType::AnyTagged(), source),
9938 std::make_pair(MachineType::Pointer(), sink),
9939 std::make_pair(MachineType::Int32(), start),
9940 std::make_pair(MachineType::Int32(), length));
9941}
9942
9945 TNode<ExternalReference> function =
9946 ExternalConstant(ExternalReference::external_one_byte_string_get_chars());
9949 std::make_pair(MachineType::AnyTagged(), string)));
9950}
9951
9954 TNode<ExternalReference> function =
9955 ExternalConstant(ExternalReference::external_two_byte_string_get_chars());
9958 std::make_pair(MachineType::AnyTagged(), string)));
9959}
9960
9962#ifdef V8_INTL_SUPPORT
9963 TNode<RawPtrT> ptr =
9964 ExternalConstant(ExternalReference::intl_ascii_collation_weights_l1());
9965 return ReinterpretCast<RawPtr<Uint8T>>(ptr);
9966#else
9967 UNREACHABLE();
9968#endif
9969}
9971#ifdef V8_INTL_SUPPORT
9972 TNode<RawPtrT> ptr =
9973 ExternalConstant(ExternalReference::intl_ascii_collation_weights_l3());
9974 return ReinterpretCast<RawPtr<Uint8T>>(ptr);
9975#else
9976 UNREACHABLE();
9977#endif
9978}
9979
9981 TNode<String> string, Label* if_index, TVariable<IntPtrT>* var_index,
9982 Label* if_internalized, TVariable<Name>* var_internalized,
9983 Label* if_not_internalized, Label* if_bailout) {
9985 ExternalReference::try_string_to_index_or_lookup_existing());
9986 const TNode<ExternalReference> isolate_ptr =
9990 std::make_pair(MachineType::Pointer(), isolate_ptr),
9991 std::make_pair(MachineType::AnyTagged(), string)));
9992 Label internalized(this);
9993 GotoIf(TaggedIsNotSmi(result), &internalized);
9994 TNode<IntPtrT> word_result = SmiUntag(CAST(result));
9996 if_not_internalized);
9998 if_bailout);
9999 *var_index = word_result;
10000 Goto(if_index);
10001
10002 BIND(&internalized);
10003 *var_internalized = CAST(result);
10004 Goto(if_internalized);
10005}
10006
10007template <typename Dictionary>
10009 int field_index) {
10010 TNode<IntPtrT> entry_index =
10011 IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
10012 return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
10013 field_index));
10014}
10015
10016template <typename T>
10019 int additional_offset) {
10021 object, DescriptorArray::kHeaderSize, index, additional_offset);
10022}
10023
10028
10030 TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
10031 const int kKeyToDetailsOffset =
10034 container, DescriptorArray::kHeaderSize, key_index, kKeyToDetailsOffset));
10035}
10036
10038 TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
10039 const int kKeyToValueOffset =
10041 return LoadDescriptorArrayElement<Object>(container, key_index,
10042 kKeyToValueOffset);
10043}
10044
10046 TNode<DescriptorArray> container, TNode<IntPtrT> key_index) {
10047 const int kKeyToValueOffset =
10049 return LoadDescriptorArrayElement<MaybeObject>(container, key_index,
10050 kKeyToValueOffset);
10051}
10052
10058
10065
10067 TNode<DescriptorArray> container, int descriptor_entry) {
10069 container, IntPtrConstant(0),
10070 DescriptorArray::ToKeyIndex(descriptor_entry) * kTaggedSize));
10071}
10072
10074 TNode<DescriptorArray> container, TNode<IntPtrT> descriptor_entry) {
10076 container, DescriptorArray::kHeaderSize,
10077 DescriptorEntryToIndex(descriptor_entry),
10079}
10080
10082 TNode<DescriptorArray> container, int descriptor_entry) {
10084 container, DescriptorArray::kHeaderSize, IntPtrConstant(0),
10085 DescriptorArray::ToDetailsIndex(descriptor_entry) * kTaggedSize));
10086}
10087
10094
10096 TNode<DescriptorArray> container, int descriptor_entry) {
10098 container, IntPtrConstant(0),
10099 DescriptorArray::ToValueIndex(descriptor_entry) * kTaggedSize);
10100}
10101
10108
10109// Loads the value for the entry with the given key_index.
10110// Returns a tagged value.
10111template <class ContainerType>
10113 TNode<ContainerType> container, TNode<IntPtrT> key_index) {
10114 static_assert(!std::is_same_v<ContainerType, DescriptorArray>,
10115 "Use the non-templatized version for DescriptorArray");
10116 const int kKeyToValueOffset =
10117 (ContainerType::kEntryValueIndex - ContainerType::kEntryKeyIndex) *
10119 return LoadFixedArrayElement(container, key_index, kKeyToValueOffset);
10120}
10121
10122template <>
10124 TNode<SwissNameDictionary> container, TNode<IntPtrT> key_index) {
10127
10128 return Load<Object>(container, offset_minus_tag);
10129}
10130
10131template <class ContainerType>
10133 TNode<ContainerType> container, TNode<IntPtrT> key_index) {
10134 static_assert(!std::is_same_v<ContainerType, DescriptorArray>,
10135 "Use the non-templatized version for DescriptorArray");
10136 const int kKeyToDetailsOffset =
10137 (ContainerType::kEntryDetailsIndex - ContainerType::kEntryKeyIndex) *
10139 return Unsigned(LoadAndUntagToWord32FixedArrayElement(container, key_index,
10140 kKeyToDetailsOffset));
10141}
10142
10143template <>
10145 TNode<SwissNameDictionary> container, TNode<IntPtrT> key_index) {
10146 TNode<IntPtrT> capacity =
10147 ChangeInt32ToIntPtr(LoadSwissNameDictionaryCapacity(container));
10148 return LoadSwissNameDictionaryPropertyDetails(container, capacity, key_index);
10149}
10150
10151// Stores the details for the entry with the given key_index.
10152// |details| must be a Smi.
10153template <class ContainerType>
10155 TNode<IntPtrT> key_index,
10156 TNode<Smi> details) {
10157 const int kKeyToDetailsOffset =
10158 (ContainerType::kEntryDetailsIndex - ContainerType::kEntryKeyIndex) *
10160 StoreFixedArrayElement(container, key_index, details, kKeyToDetailsOffset);
10161}
10162
10163template <>
10165 TNode<SwissNameDictionary> container, TNode<IntPtrT> key_index,
10166 TNode<Smi> details) {
10167 TNode<IntPtrT> capacity =
10168 ChangeInt32ToIntPtr(LoadSwissNameDictionaryCapacity(container));
10169 TNode<Uint8T> details_byte = UncheckedCast<Uint8T>(SmiToInt32(details));
10170 StoreSwissNameDictionaryPropertyDetails(container, capacity, key_index,
10171 details_byte);
10172}
10173
10174// Stores the value for the entry with the given key_index.
10175template <class ContainerType>
10177 TNode<IntPtrT> key_index,
10178 TNode<Object> value,
10179 WriteBarrierMode write_barrier) {
10180 const int kKeyToValueOffset =
10181 (ContainerType::kEntryValueIndex - ContainerType::kEntryKeyIndex) *
10183 StoreFixedArrayElement(container, key_index, value, write_barrier,
10184 kKeyToValueOffset);
10185}
10186
10187template <>
10189 TNode<SwissNameDictionary> container, TNode<IntPtrT> key_index,
10190 TNode<Object> value, WriteBarrierMode write_barrier) {
10193
10195 switch (write_barrier) {
10197 case SKIP_WRITE_BARRIER:
10199 break;
10202 break;
10203 default:
10204 // We shouldn't see anything else.
10205 UNREACHABLE();
10206 }
10207 StoreToObject(MachineRepresentation::kTagged, container, offset_minus_tag,
10208 value, mode);
10209}
10210
10217
10219 TNode<NameDictionary> container, TNode<IntPtrT> key_index);
10221 TNode<GlobalDictionary> container, TNode<IntPtrT> key_index);
10223 TNode<NameDictionary> container, TNode<IntPtrT> key_index);
10225 TNode<NameDictionary> container, TNode<IntPtrT> key_index,
10226 TNode<Smi> details);
10228 TNode<NameDictionary> container, TNode<IntPtrT> key_index,
10229 TNode<Object> value, WriteBarrierMode write_barrier);
10230
10231// This must be kept in sync with HashTableBase::ComputeCapacity().
10233 TNode<IntPtrT> at_least_space_for) {
10235 IntPtrAdd(at_least_space_for, WordShr(at_least_space_for, 1)));
10237}
10238
10240 TNode<IntPtrT> right) {
10241 intptr_t left_constant;
10242 intptr_t right_constant;
10243 if (TryToIntPtrConstant(left, &left_constant) &&
10244 TryToIntPtrConstant(right, &right_constant)) {
10245 return IntPtrConstant(std::max(left_constant, right_constant));
10246 }
10247 return SelectConstant<IntPtrT>(IntPtrGreaterThanOrEqual(left, right), left,
10248 right);
10249}
10250
10252 TNode<IntPtrT> right) {
10253 intptr_t left_constant;
10254 intptr_t right_constant;
10255 if (TryToIntPtrConstant(left, &left_constant) &&
10256 TryToIntPtrConstant(right, &right_constant)) {
10257 return IntPtrConstant(std::min(left_constant, right_constant));
10258 }
10259 return SelectConstant<IntPtrT>(IntPtrLessThanOrEqual(left, right), left,
10260 right);
10261}
10262
10264 TNode<UintPtrT> right) {
10265 intptr_t left_constant;
10266 intptr_t right_constant;
10267 if (TryToIntPtrConstant(left, &left_constant) &&
10268 TryToIntPtrConstant(right, &right_constant)) {
10269 return UintPtrConstant(std::min(static_cast<uintptr_t>(left_constant),
10270 static_cast<uintptr_t>(right_constant)));
10271 }
10272 return SelectConstant<UintPtrT>(UintPtrLessThanOrEqual(left, right), left,
10273 right);
10274}
10275
10276template <>
10279 CSA_DCHECK(this, Word32Or(IsTheHole(key), IsName(key)));
10280 return key;
10281}
10282
10283template <>
10285 TNode<HeapObject> key) {
10286 TNode<PropertyCell> property_cell = CAST(key);
10287 return CAST(LoadObjectField(property_cell, PropertyCell::kNameOffset));
10288}
10289
10290template <>
10292 TNode<HeapObject> key) {
10293 CSA_DCHECK(this, IsName(key));
10294 return key;
10295}
10296
10297// The implementation should be in sync with NameToIndexHashTable::Lookup.
10299 TNode<NameToIndexHashTable> table, TNode<Name> name, Label* not_found) {
10300 TVARIABLE(IntPtrT, var_entry);
10301 Label index_found(this, {&var_entry});
10302 NameDictionaryLookup<NameToIndexHashTable>(table, name, &index_found,
10303 &var_entry, not_found,
10305 BIND(&index_found);
10306 TNode<Smi> value =
10307 CAST(LoadValueByKeyIndex<NameToIndexHashTable>(table, var_entry.value()));
10308 return SmiToIntPtr(value);
10309}
10310
10311template <typename Dictionary>
10313 TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found,
10314 TVariable<IntPtrT>* var_name_index, Label* if_not_found, LookupMode mode) {
10315 static_assert(std::is_same_v<Dictionary, NameDictionary> ||
10316 std::is_same_v<Dictionary, GlobalDictionary> ||
10317 std::is_same_v<Dictionary, NameToIndexHashTable>,
10318 "Unexpected NameDictionary");
10319 DCHECK_IMPLIES(var_name_index != nullptr,
10320 MachineType::PointerRepresentation() == var_name_index->rep());
10321 DCHECK_IMPLIES(mode == kFindInsertionIndex, if_found == nullptr);
10322 Comment("NameDictionaryLookup");
10323 CSA_DCHECK(this, IsUniqueName(unique_name));
10325
10326 Label if_not_computed(this, Label::kDeferred);
10327
10328 TNode<IntPtrT> capacity =
10331 TNode<UintPtrT> hash =
10332 ChangeUint32ToWord(LoadNameHash(unique_name, &if_not_computed));
10333
10334 // See Dictionary::FirstProbe().
10336 TNode<IntPtrT> initial_entry = Signed(WordAnd(hash, mask));
10337 TNode<Undefined> undefined = UndefinedConstant();
10338
10339 // Appease the variable merging algorithm for "Goto(&loop)" below.
10340 if (var_name_index) *var_name_index = IntPtrConstant(0);
10341
10342 TVARIABLE(IntPtrT, var_count, count);
10343 TVARIABLE(IntPtrT, var_entry, initial_entry);
10344 VariableList loop_vars({&var_count, &var_entry}, zone());
10345 if (var_name_index) loop_vars.push_back(var_name_index);
10346 Label loop(this, loop_vars);
10347 Goto(&loop);
10348 BIND(&loop);
10349 {
10350 Label next_probe(this);
10351 TNode<IntPtrT> entry = var_entry.value();
10352
10354 if (var_name_index) *var_name_index = index;
10355
10356 TNode<HeapObject> current =
10357 CAST(UnsafeLoadFixedArrayElement(dictionary, index));
10358 GotoIf(TaggedEqual(current, undefined), if_not_found);
10359 switch (mode) {
10361 GotoIf(TaggedEqual(current, TheHoleConstant()), if_not_found);
10362 break;
10363 case kFindExisting:
10365 if (Dictionary::TodoShape::kMatchNeedsHoleCheck) {
10366 GotoIf(TaggedEqual(current, TheHoleConstant()), &next_probe);
10367 }
10368 current = LoadName<Dictionary>(current);
10369 GotoIf(TaggedEqual(current, unique_name), if_found);
10370 break;
10371 }
10372 Goto(&next_probe);
10373
10374 BIND(&next_probe);
10375 // See Dictionary::NextProbe().
10376 Increment(&var_count);
10377 entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
10378
10379 var_entry = entry;
10380 Goto(&loop);
10381 }
10382
10383 BIND(&if_not_computed);
10384 {
10385 // Strings will only have the forwarding index with experimental shared
10386 // memory features turned on. To minimize affecting the fast path, the
10387 // forwarding index branch defers both fetching the actual hash value and
10388 // the dictionary lookup to the runtime.
10389 NameDictionaryLookupWithForwardIndex(dictionary, unique_name, if_found,
10390 var_name_index, if_not_found, mode);
10391 }
10392}
10393
10394// Instantiate template methods to workaround GCC compilation issue.
10395template V8_EXPORT_PRIVATE void
10399 Label*, LookupMode);
10403
10404template <typename Dictionary>
10406 TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found,
10407 TVariable<IntPtrT>* var_name_index, Label* if_not_found, LookupMode mode) {
10408 using ER = ExternalReference; // To avoid super long lines below.
10409 ER func_ref;
10410 if constexpr (std::is_same_v<Dictionary, NameDictionary>) {
10411 func_ref = mode == kFindInsertionIndex
10412 ? ER::name_dictionary_find_insertion_entry_forwarded_string()
10413 : ER::name_dictionary_lookup_forwarded_string();
10414 } else if constexpr (std::is_same_v<Dictionary, GlobalDictionary>) {
10415 func_ref =
10416 mode == kFindInsertionIndex
10417 ? ER::global_dictionary_find_insertion_entry_forwarded_string()
10418 : ER::global_dictionary_lookup_forwarded_string();
10419 } else {
10420 auto ref0 =
10421 ER::name_to_index_hashtable_find_insertion_entry_forwarded_string();
10422 auto ref1 = ER::name_to_index_hashtable_lookup_forwarded_string();
10423 func_ref = mode == kFindInsertionIndex ? ref0 : ref1;
10424 }
10425 const TNode<ER> function = ExternalConstant(func_ref);
10426 const TNode<ER> isolate_ptr = ExternalConstant(ER::isolate_address());
10429 std::make_pair(MachineType::Pointer(), isolate_ptr),
10430 std::make_pair(MachineType::TaggedPointer(), dictionary),
10431 std::make_pair(MachineType::TaggedPointer(), unique_name)));
10432
10433 if (var_name_index) *var_name_index = EntryToIndex<Dictionary>(entry);
10434 switch (mode) {
10436 CSA_DCHECK(
10437 this,
10438 WordNotEqual(entry,
10439 IntPtrConstant(InternalIndex::NotFound().raw_value())));
10440 Goto(if_not_found);
10441 break;
10442 case kFindExisting:
10443 GotoIf(IntPtrEqual(entry,
10445 if_not_found);
10446 Goto(if_found);
10447 break;
10450 InternalIndex::NotFound().raw_value())),
10451 if_found);
10452 NameDictionaryLookupWithForwardIndex(dictionary, unique_name, if_found,
10453 var_name_index, if_not_found,
10455 break;
10456 }
10457}
10458
10460 const TNode<ExternalReference> function_addr =
10461 ExternalConstant(ExternalReference::compute_integer_hash());
10462 const TNode<ExternalReference> isolate_ptr =
10464
10465 MachineType type_ptr = MachineType::Pointer();
10466 MachineType type_uint32 = MachineType::Uint32();
10467 MachineType type_int32 = MachineType::Int32();
10468
10470 function_addr, type_uint32, std::make_pair(type_ptr, isolate_ptr),
10471 std::make_pair(type_int32, TruncateIntPtrToInt32(key))));
10472}
10473
10474template <>
10476 TNode<SwissNameDictionary> dictionary, TNode<Name> unique_name,
10477 Label* if_found, TVariable<IntPtrT>* var_name_index, Label* if_not_found,
10478 LookupMode mode) {
10479 // TODO(pthier): Support mode kFindExistingOrInsertionIndex for
10480 // SwissNameDictionary.
10481 SwissNameDictionaryFindEntry(dictionary, unique_name, if_found,
10482 var_name_index, if_not_found);
10483}
10484
10486 TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
10487 Label* if_found, TVariable<IntPtrT>* var_entry, Label* if_not_found) {
10488 CSA_DCHECK(this, IsNumberDictionary(dictionary));
10490 Comment("NumberDictionaryLookup");
10492
10493 TNode<IntPtrT> capacity =
10496
10498 TNode<Float64T> key_as_float64 = RoundIntPtrToFloat64(intptr_index);
10499
10500 // See Dictionary::FirstProbe().
10502 TNode<IntPtrT> initial_entry = Signed(WordAnd(hash, mask));
10503
10504 TNode<Undefined> undefined = UndefinedConstant();
10505 TNode<Hole> the_hole = TheHoleConstant();
10506
10507 TVARIABLE(IntPtrT, var_count, count);
10508 Label loop(this, {&var_count, var_entry});
10509 *var_entry = initial_entry;
10510 Goto(&loop);
10511 BIND(&loop);
10512 {
10513 TNode<IntPtrT> entry = var_entry->value();
10514
10516 TNode<Object> current = UnsafeLoadFixedArrayElement(dictionary, index);
10517 GotoIf(TaggedEqual(current, undefined), if_not_found);
10518 Label next_probe(this);
10519 {
10520 Label if_currentissmi(this), if_currentisnotsmi(this);
10521 Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
10522 BIND(&if_currentissmi);
10523 {
10524 TNode<IntPtrT> current_value = SmiUntag(CAST(current));
10525 Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
10526 }
10527 BIND(&if_currentisnotsmi);
10528 {
10529 GotoIf(TaggedEqual(current, the_hole), &next_probe);
10530 // Current must be the Number.
10531 TNode<Float64T> current_value = LoadHeapNumberValue(CAST(current));
10532 Branch(Float64Equal(current_value, key_as_float64), if_found,
10533 &next_probe);
10534 }
10535 }
10536
10537 BIND(&next_probe);
10538 // See Dictionary::NextProbe().
10539 Increment(&var_count);
10540 entry = Signed(WordAnd(IntPtrAdd(entry, var_count.value()), mask));
10541
10542 *var_entry = entry;
10543 Goto(&loop);
10544 }
10545}
10546
10548 TNode<NumberDictionary> dictionary, TNode<IntPtrT> intptr_index,
10549 Label* not_data, Label* if_hole) {
10550 TVARIABLE(IntPtrT, var_entry);
10551 Label if_found(this);
10552 NumberDictionaryLookup(dictionary, intptr_index, &if_found, &var_entry,
10553 if_hole);
10554 BIND(&if_found);
10555
10556 // Check that the value is a data property.
10557 TNode<IntPtrT> index = EntryToIndex<NumberDictionary>(var_entry.value());
10558 TNode<Uint32T> details = LoadDetailsByKeyIndex(dictionary, index);
10560 // TODO(jkummerow): Support accessors without missing?
10561 GotoIfNot(
10563 not_data);
10564 // Finally, load the value.
10565 return CAST(LoadValueByKeyIndex(dictionary, index));
10566}
10567
10568template <class Dictionary>
10571 TVariable<IntPtrT>* var_key_index) {
10572 UNREACHABLE();
10573}
10574
10575template <>
10578 TVariable<IntPtrT>* var_key_index) {
10579 Label done(this);
10580 NameDictionaryLookup<NameDictionary>(dictionary, key, nullptr, var_key_index,
10581 &done, kFindInsertionIndex);
10582 BIND(&done);
10583}
10584
10585template <class Dictionary>
10588 TNode<IntPtrT> index,
10589 TNode<Smi> enum_index) {
10590 UNREACHABLE(); // Use specializations instead.
10591}
10592
10593template <>
10595 TNode<NameDictionary> dictionary, TNode<Name> name, TNode<Object> value,
10596 TNode<IntPtrT> index, TNode<Smi> enum_index) {
10597 // This should only be used for adding, not updating existing mappings.
10598 CSA_DCHECK(this,
10599 Word32Or(TaggedEqual(LoadFixedArrayElement(dictionary, index),
10600 UndefinedConstant()),
10601 TaggedEqual(LoadFixedArrayElement(dictionary, index),
10602 TheHoleConstant())));
10603
10604 // Store name and value.
10605 StoreFixedArrayElement(dictionary, index, name);
10606 StoreValueByKeyIndex<NameDictionary>(dictionary, index, value);
10607
10608 // Prepare details of the new property.
10611
10612 // We ignore overflow of |enum_index| here and accept potentially
10613 // broken enumeration order (https://crbug.com/41432983).
10614 enum_index = UnsignedSmiShl(enum_index,
10616 // We OR over the actual index below, so we expect the initial value to be 0.
10617 DCHECK_EQ(0, d.dictionary_index());
10618 TVARIABLE(Smi, var_details, SmiOr(SmiConstant(d.AsSmi()), enum_index));
10619
10620 // Private names must be marked non-enumerable.
10621 Label not_private(this, &var_details);
10622 GotoIfNot(IsPrivateSymbol(name), &not_private);
10623 TNode<Smi> dont_enum = UnsignedSmiShl(
10625 var_details = SmiOr(var_details.value(), dont_enum);
10626 Goto(&not_private);
10627 BIND(&not_private);
10628
10629 // Finally, store the details.
10631 var_details.value());
10632}
10633
10634template <>
10640
10641template <class Dictionary>
10644 Label* bailout, std::optional<TNode<IntPtrT>> insertion_index) {
10645 CSA_DCHECK(this, Word32BinaryNot(IsEmptyPropertyDictionary(dictionary)));
10646 TNode<Smi> capacity = GetCapacity<Dictionary>(dictionary);
10648 TNode<Smi> new_nof = SmiAdd(nof, SmiConstant(1));
10649 // Require 33% to still be free after adding additional_elements.
10650 // Computing "x + (x >> 1)" on a Smi x does not return a valid Smi!
10651 // But that's OK here because it's only used for a comparison.
10652 TNode<Smi> required_capacity_pseudo_smi = SmiAdd(new_nof, SmiShr(new_nof, 1));
10653 GotoIf(SmiBelow(capacity, required_capacity_pseudo_smi), bailout);
10654 // Require rehashing if more than 50% of free elements are deleted elements.
10656 CSA_DCHECK(this, SmiAbove(capacity, new_nof));
10657 TNode<Smi> half_of_free_elements = SmiShr(SmiSub(capacity, new_nof), 1);
10658 GotoIf(SmiAbove(deleted, half_of_free_elements), bailout);
10659
10660 TNode<Smi> enum_index = GetNextEnumerationIndex<Dictionary>(dictionary);
10661 TNode<Smi> new_enum_index = SmiAdd(enum_index, SmiConstant(1));
10662 TNode<Smi> max_enum_index =
10664 GotoIf(SmiAbove(new_enum_index, max_enum_index), bailout);
10665
10666 // No more bailouts after this point.
10667 // Operations from here on can have side effects.
10668
10669 SetNextEnumerationIndex<Dictionary>(dictionary, new_enum_index);
10670 SetNumberOfElements<Dictionary>(dictionary, new_nof);
10671
10672 if (insertion_index.has_value()) {
10673 InsertEntry<Dictionary>(dictionary, key, value, *insertion_index,
10674 enum_index);
10675 } else {
10676 TVARIABLE(IntPtrT, var_key_index);
10677 FindInsertionEntry<Dictionary>(dictionary, key, &var_key_index);
10678 InsertEntry<Dictionary>(dictionary, key, value, var_key_index.value(),
10679 enum_index);
10680 }
10681}
10682
10683template <>
10686 Label* bailout, std::optional<TNode<IntPtrT>> insertion_index) {
10689
10692 TNode<Uint8T> details_byte_enum =
10694 TNode<Uint8T> details_byte_dont_enum =
10696
10697 Label not_private(this);
10698 TVARIABLE(Uint8T, var_details, details_byte_enum);
10699
10700 GotoIfNot(IsPrivateSymbol(key), &not_private);
10701 var_details = details_byte_dont_enum;
10702 Goto(&not_private);
10703
10704 BIND(&not_private);
10705 // TODO(pthier): Use insertion_index if it was provided.
10706 SwissNameDictionaryAdd(dictionary, key, value, var_details.value(), bailout);
10707}
10708
10711 std::optional<TNode<IntPtrT>>);
10712
10713template <class Dictionary>
10715 TNode<Dictionary> dictionary) {
10716 return CAST(
10717 LoadFixedArrayElement(dictionary, Dictionary::kNumberOfElementsIndex));
10718}
10719
10720template <>
10722 TNode<SwissNameDictionary> dictionary) {
10723 TNode<IntPtrT> capacity =
10724 ChangeInt32ToIntPtr(LoadSwissNameDictionaryCapacity(dictionary));
10725 return SmiFromIntPtr(
10726 LoadSwissNameDictionaryNumberOfElements(dictionary, capacity));
10727}
10728
10730 TNode<NameDictionary> dictionary);
10732 TNode<NumberDictionary> dictionary);
10734 TNode<GlobalDictionary> dictionary);
10735
10736template <>
10741
10742template <>
10748
10749template <>
10751 TNode<SwissNameDictionary> dictionary) {
10752 // TODO(pthier): Add flags to swiss dictionaries.
10753 Unreachable();
10754 return SmiConstant(0);
10755}
10756
10757template <>
10759 TNode<SwissNameDictionary> dictionary, TNode<Smi> flags) {
10760 // TODO(pthier): Add flags to swiss dictionaries.
10761 Unreachable();
10762}
10763
10764namespace {
10765// TODO(leszeks): Remove once both TransitionArray and DescriptorArray are
10766// HeapObjectLayout.
10767template <typename Array>
10768struct OffsetOfArrayDataStart;
10769template <>
10770struct OffsetOfArrayDataStart<TransitionArray> {
10771 static constexpr int value = OFFSET_OF_DATA_START(TransitionArray);
10772};
10773template <>
10774struct OffsetOfArrayDataStart<DescriptorArray> {
10775 static constexpr int value = DescriptorArray::kHeaderSize;
10776};
10777} // namespace
10778
// Emits a linear scan over the first |number_of_valid_entries| entries of
// |array|, comparing each key slot against |unique_name|. On a match, jumps
// to |if_found| with *|var_name_index| already set to the matching key
// index; otherwise falls through to |if_not_found| after the scan.
// NOTE(review): this view of the file elides a few lines of the function
// (the signature line and the loop-builder call header among them); the code
// below is left byte-identical, comments only.
10779 template <typename Array>
10781 TNode<Array> array,
10782 TNode<Uint32T> number_of_valid_entries,
10783 Label* if_found,
10784 TVariable<IntPtrT>* var_name_index,
10785 Label* if_not_found) {
10786 static_assert(std::is_base_of_v<FixedArray, Array> ||
10787 std::is_base_of_v<WeakFixedArray, Array> ||
10788 std::is_base_of_v<DescriptorArray, Array>,
10789 "T must be a descendant of FixedArray or a WeakFixedArray");
10790 Comment("LookupLinear");
10791 CSA_DCHECK(this, IsUniqueName(unique_name));
  // Key slots live at [ToKeyIndex(0), ToKeyIndex(number_of_valid_entries)),
  // spaced kEntrySize apart.
10792 TNode<IntPtrT> first_inclusive = IntPtrConstant(Array::ToKeyIndex(0));
10793 TNode<IntPtrT> factor = IntPtrConstant(Array::kEntrySize);
10794 TNode<IntPtrT> last_exclusive = IntPtrAdd(
10795 first_inclusive,
10796 IntPtrMul(ChangeInt32ToIntPtr(number_of_valid_entries), factor));
10797
  // Negative step of -kEntrySize with kPre advance: the scan walks from the
  // last valid entry down to the first, comparing by tagged identity (valid
  // because |unique_name| is a unique name, per the CSA_DCHECK above).
10799 last_exclusive, first_inclusive,
10800 [=, this](TNode<IntPtrT> name_index) {
10802 array, OffsetOfArrayDataStart<Array>::value, name_index);
10803 TNode<Name> candidate_name = CAST(element);
  // The out-variable is set unconditionally; it is only meaningful on
  // the |if_found| path.
10804 *var_name_index = name_index;
10805 GotoIf(TaggedEqual(candidate_name, unique_name), if_found);
10806 },
10807 -Array::kEntrySize, LoopUnrollingMode::kYes, IndexAdvanceMode::kPre);
10808 Goto(if_not_found);
10809}
10810
10811template <>
10815
10816template <>
10820
10821template <>
10823 TNode<DescriptorArray> descriptors) {
10824 return Unsigned(LoadNumberOfDescriptors(descriptors));
10825}
10826
10827template <>
10829 TNode<TransitionArray> transitions) {
10830 TNode<Uint32T> length = LoadAndUntagWeakFixedArrayLengthAsUint32(transitions);
10831 return Select<Uint32T>(
10832 Uint32LessThan(length, Uint32Constant(TransitionArray::kFirstIndex)),
10833 [=, this] { return Unsigned(Int32Constant(0)); },
10834 [=, this] {
10836 transitions, OFFSET_OF_DATA_START(WeakFixedArray),
10838 });
10839}
10840
10841template <typename Array>
10843 TNode<Uint32T> entry_index) {
10844 TNode<Int32T> entry_size = Int32Constant(Array::kEntrySize);
10845 TNode<Word32T> index = Int32Mul(entry_index, entry_size);
10846 return ChangeInt32ToIntPtr(index);
10847}
10848
10849template <typename Array>
10851 return IntPtrAdd(IntPtrConstant(Array::ToKeyIndex(0)),
10852 EntryIndexToIndex<Array>(entry_index));
10853}
10854
10859
10860template <>
10862 TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
10863 TNode<Uint32T> details =
10864 DescriptorArrayGetDetails(descriptors, descriptor_number);
10866}
10867
10868template <>
10870 TNode<TransitionArray> transitions, TNode<Uint32T> transition_number) {
10871 return transition_number;
10872}
10873
10874template <typename Array>
10876 TNode<Uint32T> entry_index) {
10877 static_assert(std::is_base_of_v<TransitionArray, Array> ||
10878 std::is_base_of_v<DescriptorArray, Array>,
10879 "T must be a descendant of DescriptorArray or TransitionArray");
10880 const int key_offset = Array::ToKeyIndex(0) * kTaggedSize;
10881 TNode<MaybeObject> element =
10882 LoadArrayElement(array, OffsetOfArrayDataStart<Array>::value,
10883 EntryIndexToIndex<Array>(entry_index), key_offset);
10884 return CAST(element);
10885}
10886
10891
10893 TNode<DescriptorArray> descriptors, TNode<Uint32T> descriptor_number) {
10894 const int details_offset = DescriptorArray::ToDetailsIndex(0) * kTaggedSize;
10896 descriptors, DescriptorArray::kHeaderSize,
10897 EntryIndexToIndex<DescriptorArray>(descriptor_number), details_offset));
10898}
10899
// Hash-ordered binary search over the sorted-key permutation of |array|
// (DescriptorArray or TransitionArray). Phase 1 narrows [var_low, var_high]
// to the first sorted position whose key hash is >= hash(unique_name);
// phase 2 linearly scans the run of entries sharing that hash, comparing
// tagged identity against |unique_name|. Jumps to |if_found| with
// *|var_name_index| set to the key index, else to |if_not_found|.
// NOTE(review): one line is elided in this view (the initializer of
// |limit|); code is left byte-identical, comments only.
10900 template <typename Array>
10902 TNode<Array> array,
10903 TNode<Uint32T> number_of_valid_entries,
10904 Label* if_found,
10905 TVariable<IntPtrT>* var_name_index,
10906 Label* if_not_found) {
10907 Comment("LookupBinary");
10908 TVARIABLE(Uint32T, var_low, Unsigned(Int32Constant(0)));
10909 TNode<Uint32T> limit =
10911 TVARIABLE(Uint32T, var_high, limit);
  // The name's hash must already be computed; zero is not a valid hash here.
10912 TNode<Uint32T> hash = LoadNameHashAssumeComputed(unique_name);
10913 CSA_DCHECK(this, Word32NotEqual(hash, Int32Constant(0)));
10914
10915 // Assume non-empty array.
10916 CSA_DCHECK(this, Uint32LessThanOrEqual(var_low.value(), var_high.value()));
10917
10918 int max_entries = MaxNumberOfEntries<Array>();
10919
  // C++-level choice of midpoint formula: the simple (low+high)/2 is only
  // safe when indices cannot overflow a 31-bit add.
10920 auto calculate_mid = [&](TNode<Uint32T> low, TNode<Uint32T> high) {
10921 if (max_entries < kMaxInt31) {
10922 // mid = (low + high) / 2.
10923 return Unsigned(Word32Shr(Int32Add(low, high), 1));
10924 } else {
10925 // mid = low + (high - low) / 2.
10926 return Unsigned(Int32Add(low, Word32Shr(Int32Sub(high, low), 1)));
10927 }
10928 };
10929
  // Phase 1: lower-bound search on the hash.
10930 Label binary_loop(this, {&var_high, &var_low});
10931 Goto(&binary_loop);
10932 BIND(&binary_loop);
10933 {
10934 TNode<Uint32T> mid = calculate_mid(var_low.value(), var_high.value());
10935 // mid_name = array->GetSortedKey(mid).
10936 TNode<Uint32T> sorted_key_index = GetSortedKeyIndex<Array>(array, mid);
10937 TNode<Name> mid_name = GetKey<Array>(array, sorted_key_index);
10938
10939 TNode<Uint32T> mid_hash = LoadNameHashAssumeComputed(mid_name);
10940
10941 Label mid_greater(this), mid_less(this), merge(this);
10942 Branch(Uint32GreaterThanOrEqual(mid_hash, hash), &mid_greater, &mid_less);
10943 BIND(&mid_greater);
10944 {
10945 var_high = mid;
10946 Goto(&merge);
10947 }
10948 BIND(&mid_less);
10949 {
10950 var_low = Unsigned(Int32Add(mid, Int32Constant(1)));
10951 Goto(&merge);
10952 }
10953 BIND(&merge);
10954 GotoIf(Word32NotEqual(var_low.value(), var_high.value()), &binary_loop);
10955 }
10956
  // Phase 2: scan the (possibly more than one) entries with equal hash.
10957 Label scan_loop(this, &var_low);
10958 Goto(&scan_loop);
10959 BIND(&scan_loop);
10960 {
10961 GotoIf(Int32GreaterThan(var_low.value(), limit), if_not_found);
10962
10963 TNode<Uint32T> sort_index =
10964 GetSortedKeyIndex<Array>(array, var_low.value());
10965 TNode<Name> current_name = GetKey<Array>(array, sort_index);
10966 TNode<Uint32T> current_hash = LoadNameHashAssumeComputed(current_name);
10967 GotoIf(Word32NotEqual(current_hash, hash), if_not_found);
10968 Label next(this);
10969 GotoIf(TaggedNotEqual(current_name, unique_name), &next);
  // A name match beyond |number_of_valid_entries| does not count as found.
10970 GotoIf(Uint32GreaterThanOrEqual(sort_index, number_of_valid_entries),
10971 if_not_found);
10972 *var_name_index = ToKeyIndex<Array>(sort_index);
10973 Goto(if_found);
10974
10975 BIND(&next);
10976 var_low = Unsigned(Int32Add(var_low.value(), Int32Constant(1)));
10977 Goto(&scan_loop);
10978 }
10979}
10980
10982 TNode<Context> context, TNode<Map> map, TNode<JSObject> object,
10984 Label* bailout) {
10987
10988 TVARIABLE(DescriptorArray, var_descriptors, LoadMapDescriptors(map));
10989 TNode<Uint32T> nof_descriptors =
10991
10992 TVARIABLE(BoolT, var_stable, Int32TrueConstant());
10993
10994 TVARIABLE(BoolT, var_has_symbol, Int32FalseConstant());
10995 // false - iterate only string properties, true - iterate only symbol
10996 // properties
10997 TVARIABLE(BoolT, var_is_symbol_processing_loop, Int32FalseConstant());
10998 TVARIABLE(IntPtrT, var_start_key_index,
11000 // Note: var_end_key_index is exclusive for the loop
11001 TVARIABLE(IntPtrT, var_end_key_index,
11002 ToKeyIndex<DescriptorArray>(nof_descriptors));
11003 VariableList list({&var_descriptors, &var_stable, &var_has_symbol,
11004 &var_is_symbol_processing_loop, &var_start_key_index,
11005 &var_end_key_index},
11006 zone());
11007 Label descriptor_array_loop(this, list);
11008
11009 Goto(&descriptor_array_loop);
11010 BIND(&descriptor_array_loop);
11011
11013 list, var_start_key_index.value(), var_end_key_index.value(),
11014 [&](TNode<IntPtrT> descriptor_key_index) {
11015 TNode<Name> next_key =
11016 LoadKeyByKeyIndex(var_descriptors.value(), descriptor_key_index);
11017
11018 TVARIABLE(Object, var_value_or_accessor, SmiConstant(0));
11019 Label next_iteration(this);
11020
11021 if (mode == kEnumerationOrder) {
11022 // |next_key| is either a string or a symbol
11023 // Skip strings or symbols depending on
11024 // |var_is_symbol_processing_loop|.
11025 Label if_string(this), if_symbol(this), if_name_ok(this);
11026 Branch(IsSymbol(next_key), &if_symbol, &if_string);
11027 BIND(&if_symbol);
11028 {
11029 // Process symbol property when |var_is_symbol_processing_loop| is
11030 // true.
11031 GotoIf(var_is_symbol_processing_loop.value(), &if_name_ok);
11032 // First iteration need to calculate smaller range for processing
11033 // symbols
11034 Label if_first_symbol(this);
11035 // var_end_key_index is still inclusive at this point.
11036 var_end_key_index = descriptor_key_index;
11037 Branch(var_has_symbol.value(), &next_iteration, &if_first_symbol);
11038 BIND(&if_first_symbol);
11039 {
11040 var_start_key_index = descriptor_key_index;
11041 var_has_symbol = Int32TrueConstant();
11042 Goto(&next_iteration);
11043 }
11044 }
11045 BIND(&if_string);
11046 {
11047 CSA_DCHECK(this, IsString(next_key));
11048 // Process string property when |var_is_symbol_processing_loop| is
11049 // false.
11050 Branch(var_is_symbol_processing_loop.value(), &next_iteration,
11051 &if_name_ok);
11052 }
11053 BIND(&if_name_ok);
11054 }
11055 {
11056 TVARIABLE(Map, var_map);
11057 TVARIABLE(HeapObject, var_meta_storage);
11058 TVARIABLE(IntPtrT, var_entry);
11059 TVARIABLE(Uint32T, var_details);
11060 Label if_found(this);
11061
11062 Label if_found_fast(this), if_found_dict(this);
11063
11064 Label if_stable(this), if_not_stable(this);
11065 Branch(var_stable.value(), &if_stable, &if_not_stable);
11066 BIND(&if_stable);
11067 {
11068 // Directly decode from the descriptor array if |object| did not
11069 // change shape.
11070 var_map = map;
11071 var_meta_storage = var_descriptors.value();
11072 var_entry = Signed(descriptor_key_index);
11073 Goto(&if_found_fast);
11074 }
11075 BIND(&if_not_stable);
11076 {
11077 // If the map did change, do a slower lookup. We are still
11078 // guaranteed that the object has a simple shape, and that the key
11079 // is a name.
11080 var_map = LoadMap(object);
11081 TryLookupPropertyInSimpleObject(object, var_map.value(), next_key,
11082 &if_found_fast, &if_found_dict,
11083 &var_meta_storage, &var_entry,
11084 &next_iteration, bailout);
11085 }
11086
11087 BIND(&if_found_fast);
11088 {
11089 TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
11090 TNode<IntPtrT> name_index = var_entry.value();
11091
11092 // Skip non-enumerable properties.
11093 var_details = LoadDetailsByKeyIndex(descriptors, name_index);
11094 GotoIf(IsSetWord32(var_details.value(),
11096 &next_iteration);
11097
11098 LoadPropertyFromFastObject(object, var_map.value(), descriptors,
11099 name_index, var_details.value(),
11100 &var_value_or_accessor);
11101 Goto(&if_found);
11102 }
11103 BIND(&if_found_dict);
11104 {
11105 TNode<PropertyDictionary> dictionary =
11106 CAST(var_meta_storage.value());
11107 TNode<IntPtrT> entry = var_entry.value();
11108
11109 TNode<Uint32T> details = LoadDetailsByKeyIndex(dictionary, entry);
11110 // Skip non-enumerable properties.
11111 GotoIf(
11113 &next_iteration);
11114
11115 var_details = details;
11116 var_value_or_accessor =
11117 LoadValueByKeyIndex<PropertyDictionary>(dictionary, entry);
11118 Goto(&if_found);
11119 }
11120
11121 // Here we have details and value which could be an accessor.
11122 BIND(&if_found);
11123 {
11124 TNode<Object> value_or_accessor = var_value_or_accessor.value();
11125 body(next_key, [&]() {
11126 TVARIABLE(Object, var_value);
11127 Label value_ready(this), slow_load(this, Label::kDeferred);
11128
11129 var_value = CallGetterIfAccessor(
11130 value_or_accessor, object, var_details.value(), context,
11131 object, next_key, &slow_load, kCallJSGetterUseCachedName);
11132 Goto(&value_ready);
11133
11134 BIND(&slow_load);
11135 var_value =
11136 CallRuntime(Runtime::kGetProperty, context, object, next_key);
11137 Goto(&value_ready);
11138
11139 BIND(&value_ready);
11140 return var_value.value();
11141 });
11142
11143 // Check if |object| is still stable, i.e. the descriptors in the
11144 // preloaded |descriptors| are still the same modulo in-place
11145 // representation changes.
11146 GotoIfNot(var_stable.value(), &next_iteration);
11147 var_stable = TaggedEqual(LoadMap(object), map);
11148 // Reload the descriptors just in case the actual array changed, and
11149 // any of the field representations changed in-place.
11150 var_descriptors = LoadMapDescriptors(map);
11151
11152 Goto(&next_iteration);
11153 }
11154 }
11155 BIND(&next_iteration);
11156 },
11157 DescriptorArray::kEntrySize, LoopUnrollingMode::kNo,
11158 IndexAdvanceMode::kPost);
11159
11160 if (mode == kEnumerationOrder) {
11161 Label done(this);
11162 GotoIf(var_is_symbol_processing_loop.value(), &done);
11163 GotoIfNot(var_has_symbol.value(), &done);
11164 // All string properties are processed, now process symbol properties.
11165 var_is_symbol_processing_loop = Int32TrueConstant();
11166 // Add DescriptorArray::kEntrySize to make the var_end_key_index exclusive
11167 // as BuildFastLoop() expects.
11168 Increment(&var_end_key_index, DescriptorArray::kEntrySize);
11169 Goto(&descriptor_array_loop);
11170
11171 BIND(&done);
11172 }
11173}
11174
11175TNode<Object> CodeStubAssembler::GetConstructor(TNode<Map> map) {
11176 TVARIABLE(HeapObject, var_maybe_constructor);
11177 var_maybe_constructor = map;
11178 Label loop(this, &var_maybe_constructor), done(this);
11179 GotoIfNot(IsMap(var_maybe_constructor.value()), &done);
11180 Goto(&loop);
11181
11182 BIND(&loop);
11183 {
11184 var_maybe_constructor = CAST(
11185 LoadObjectField(var_maybe_constructor.value(),
11186 Map::kConstructorOrBackPointerOrNativeContextOffset));
11187 GotoIf(IsMap(var_maybe_constructor.value()), &loop);
11188 Goto(&done);
11189 }
11190
11191 BIND(&done);
11192 return var_maybe_constructor.value();
11193}
11194
11195TNode<NativeContext> CodeStubAssembler::GetCreationContextFromMap(
11196 TNode<Map> map, Label* if_bailout) {
11197 TNode<Map> meta_map = LoadMap(map);
11198 TNode<Object> maybe_context =
11199 LoadMapConstructorOrBackPointerOrNativeContext(meta_map);
11200 GotoIf(IsNull(maybe_context), if_bailout);
11201 return CAST(maybe_context);
11202}
11203
11204TNode<NativeContext> CodeStubAssembler::GetCreationContext(
11205 TNode<JSReceiver> receiver, Label* if_bailout) {
11206 return GetCreationContextFromMap(LoadMap(receiver), if_bailout);
11207}
11208
// Returns the native context ("function realm") of the callable |receiver|:
// unwraps JSProxy (via its target), JSBoundFunction (via its bound target
// function) and JSWrappedFunction (via its wrapped target) in a loop until a
// JSFunction or other ordinary receiver is reached, then returns that
// object's creation context. A revoked proxy (handler is not a JSReceiver)
// throws TypeError kProxyRevoked. Jumps to |if_bailout| when the final map's
// meta map carries a null native-context slot (see GetCreationContextFromMap).
11209TNode<NativeContext> CodeStubAssembler::GetFunctionRealm(
11210 TNode<Context> context, TNode<JSReceiver> receiver, Label* if_bailout) {
11211 TVARIABLE(JSReceiver, current);
11212 TVARIABLE(Map, current_map);
11213 Label loop(this, VariableList({&current}, zone())), if_proxy(this),
11214 if_simple_case(this), if_bound_function(this), if_wrapped_function(this),
11215 proxy_revoked(this, Label::kDeferred);
11216 CSA_DCHECK(this, IsCallable(receiver));
11217 current = receiver;
11218 Goto(&loop);
11219
  // Dispatch on the instance type of the current receiver; anything that is
  // not a proxy/bound/wrapped function falls into the simple case.
11220 BIND(&loop);
11221 {
11222 current_map = LoadMap(current.value());
11223 TNode<Int32T> instance_type = LoadMapInstanceType(current_map.value());
11224 GotoIf(IsJSFunctionInstanceType(instance_type), &if_simple_case);
11225 GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), &if_proxy);
11226 GotoIf(InstanceTypeEqual(instance_type, JS_BOUND_FUNCTION_TYPE),
11227 &if_bound_function);
11228 GotoIf(InstanceTypeEqual(instance_type, JS_WRAPPED_FUNCTION_TYPE),
11229 &if_wrapped_function);
11230 Goto(&if_simple_case);
11231 }
11232
  // Proxy: recurse into the proxy target, unless the proxy was revoked.
11233 BIND(&if_proxy);
11234 {
11235 TNode<JSProxy> proxy = CAST(current.value());
11236 TNode<HeapObject> handler =
11237 CAST(LoadObjectField(proxy, JSProxy::kHandlerOffset));
11238 // Proxy is revoked.
11239 GotoIfNot(IsJSReceiver(handler), &proxy_revoked);
11240 TNode<JSReceiver> target =
11241 CAST(LoadObjectField(proxy, JSProxy::kTargetOffset));
11242 current = target;
11243 Goto(&loop);
11244 }
11245
11246 BIND(&proxy_revoked);
11247 { ThrowTypeError(context, MessageTemplate::kProxyRevoked, "apply"); }
11248
  // Bound function: continue with the bound target function.
11249 BIND(&if_bound_function);
11250 {
11251 TNode<JSBoundFunction> bound_function = CAST(current.value());
11252 TNode<JSReceiver> target = CAST(LoadObjectField(
11253 bound_function, JSBoundFunction::kBoundTargetFunctionOffset));
11254 current = target;
11255 Goto(&loop);
11256 }
11257
  // Wrapped function (ShadowRealm): continue with the wrapped target.
11258 BIND(&if_wrapped_function);
11259 {
11260 TNode<JSWrappedFunction> wrapped_function = CAST(current.value());
11261 TNode<JSReceiver> target = CAST(LoadObjectField(
11262 wrapped_function, JSWrappedFunction::kWrappedTargetFunctionOffset));
11263 current = target;
11264 Goto(&loop);
11265 }
11266
11267 BIND(&if_simple_case);
11268 {
11269 // Load native context from the meta map.
11270 return GetCreationContextFromMap(current_map.value(), if_bailout);
11271 }
11272}
11273
11274void CodeStubAssembler::DescriptorLookup(TNode<Name> unique_name,
11275 TNode<DescriptorArray> descriptors,
11276 TNode<Uint32T> bitfield3,
11277 Label* if_found,
11278 TVariable<IntPtrT>* var_name_index,
11279 Label* if_not_found) {
11280 Comment("DescriptorArrayLookup");
11281 TNode<Uint32T> nof =
11282 DecodeWord32<Map::Bits3::NumberOfOwnDescriptorsBits>(bitfield3);
11283 Lookup<DescriptorArray>(unique_name, descriptors, nof, if_found,
11284 var_name_index, if_not_found);
11285}
11286
11287void CodeStubAssembler::TransitionLookup(TNode<Name> unique_name,
11288 TNode<TransitionArray> transitions,
11289 Label* if_found,
11290 TVariable<IntPtrT>* var_name_index,
11291 Label* if_not_found) {
11292 Comment("TransitionArrayLookup");
11293 TNode<Uint32T> number_of_valid_transitions =
11294 NumberOfEntries<TransitionArray>(transitions);
11295 Lookup<TransitionArray>(unique_name, transitions, number_of_valid_transitions,
11296 if_found, var_name_index, if_not_found);
11297}
11298
// Finds |unique_name| in |array| (a DescriptorArray or TransitionArray):
// small arrays are scanned linearly, larger ones via hash-ordered binary
// search. On success jumps to |if_found| with *|var_name_index| holding the
// matching key index; otherwise jumps to |if_not_found|.
11299template <typename Array>
11300void CodeStubAssembler::Lookup(TNode<Name> unique_name, TNode<Array> array,
11301 TNode<Uint32T> number_of_valid_entries,
11302 Label* if_found,
11303 TVariable<IntPtrT>* var_name_index,
11304 Label* if_not_found) {
11305 Comment("ArrayLookup");
  // C++-level (compile-time graph construction) check: if the caller did not
  // supply a count node, compute it from the array itself.
11306 if (!number_of_valid_entries) {
11307 number_of_valid_entries = NumberOfEntries(array);
11308 }
  // An empty array can never contain the name.
11309 GotoIf(Word32Equal(number_of_valid_entries, Int32Constant(0)), if_not_found);
11310 Label linear_search(this), binary_search(this);
  // Cut-over point between the O(n) scan and the O(log n) binary search.
11311 const int kMaxElementsForLinearSearch = 32;
11312 Branch(Uint32LessThanOrEqual(number_of_valid_entries,
11313 Int32Constant(kMaxElementsForLinearSearch)),
11314 &linear_search, &binary_search);
11315 BIND(&linear_search);
11316 {
11317 LookupLinear<Array>(unique_name, array, number_of_valid_entries, if_found,
11318 var_name_index, if_not_found);
11319 }
11320 BIND(&binary_search);
11321 {
11322 LookupBinary<Array>(unique_name, array, number_of_valid_entries, if_found,
11323 var_name_index, if_not_found);
11324 }
11325}
11326
// Looks up |unique_name| on a "simple" object (per the IsSimpleObjectMap
// CSA_DCHECK). Fast maps are searched through their descriptor array
// (-> |if_found_fast|); dictionary-mode maps through the slow-properties
// dictionary (-> |if_found_dict|). In both cases *|var_meta_storage|
// receives the backing store that was searched and *|var_name_index| the
// entry index within it; a miss goes to |if_not_found|.
// NOTE(review): |bailout| is not referenced anywhere in this body —
// presumably kept for signature symmetry with callers; confirm before
// relying on it being taken.
11327void CodeStubAssembler::TryLookupPropertyInSimpleObject(
11328 TNode<JSObject> object, TNode<Map> map, TNode<Name> unique_name,
11329 Label* if_found_fast, Label* if_found_dict,
11330 TVariable<HeapObject>* var_meta_storage, TVariable<IntPtrT>* var_name_index,
11331 Label* if_not_found, Label* bailout) {
11332 CSA_DCHECK(this, IsSimpleObjectMap(map));
11333 CSA_DCHECK(this, IsUniqueNameNoCachedIndex(unique_name));
11334
  // Fast vs. dictionary mode is decided by the map's IsDictionaryMapBit.
11335 TNode<Uint32T> bit_field3 = LoadMapBitField3(map);
11336 Label if_isfastmap(this), if_isslowmap(this);
11337 Branch(IsSetWord32<Map::Bits3::IsDictionaryMapBit>(bit_field3), &if_isslowmap,
11338 &if_isfastmap);
11339 BIND(&if_isfastmap);
11340 {
11341 TNode<DescriptorArray> descriptors = LoadMapDescriptors(map);
11342 *var_meta_storage = descriptors;
11343
11344 DescriptorLookup(unique_name, descriptors, bit_field3, if_found_fast,
11345 var_name_index, if_not_found);
11346 }
11347 BIND(&if_isslowmap);
11348 {
11349 TNode<PropertyDictionary> dictionary = CAST(LoadSlowProperties(object));
11350 *var_meta_storage = dictionary;
11351
11352 NameDictionaryLookup<PropertyDictionary>(
11353 dictionary, unique_name, if_found_dict, var_name_index, if_not_found);
11354 }
11355}
11356
// Dispatches a named own-property lookup on |object|. Ordinary receivers go
// through TryLookupPropertyInSimpleObject (-> |if_found_fast| /
// |if_found_dict|). Of the "special" receiver types, only JSGlobalObject is
// handled here (-> |if_found_global| via its GlobalDictionary); global maps
// with a named interceptor or a needed access check, as well as every other
// special receiver type, bail out to |if_bailout|.
11357void CodeStubAssembler::TryLookupProperty(
11358 TNode<HeapObject> object, TNode<Map> map, TNode<Int32T> instance_type,
11359 TNode<Name> unique_name, Label* if_found_fast, Label* if_found_dict,
11360 Label* if_found_global, TVariable<HeapObject>* var_meta_storage,
11361 TVariable<IntPtrT>* var_name_index, Label* if_not_found,
11362 Label* if_bailout) {
11363 Label if_objectisspecial(this);
11364 GotoIf(IsSpecialReceiverInstanceType(instance_type), &if_objectisspecial);
11365
11366 TryLookupPropertyInSimpleObject(CAST(object), map, unique_name, if_found_fast,
11367 if_found_dict, var_meta_storage,
11368 var_name_index, if_not_found, if_bailout);
11369
11370 BIND(&if_objectisspecial);
11371 {
11372 // Handle global object here and bailout for other special objects.
11373 GotoIfNot(InstanceTypeEqual(instance_type, JS_GLOBAL_OBJECT_TYPE),
11374 if_bailout);
11375
11376 // Handle interceptors and access checks in runtime.
11377 TNode<Int32T> bit_field = LoadMapBitField(map);
11378 int mask = Map::Bits1::HasNamedInterceptorBit::kMask |
11379 Map::Bits1::IsAccessCheckNeededBit::kMask;
11380 GotoIf(IsSetWord32(bit_field, mask), if_bailout);
11381
11382 TNode<GlobalDictionary> dictionary = CAST(LoadSlowProperties(CAST(object)));
11383 *var_meta_storage = dictionary;
11384
11385 NameDictionaryLookup<GlobalDictionary>(
11386 dictionary, unique_name, if_found_global, var_name_index, if_not_found);
11387 }
11388}
11389
// HasOwnProperty-style probe for |unique_name| on |object|: delegates to
// TryLookupProperty, treating both fast and dictionary hits directly as
// |if_found|. Hits on a global object additionally dereference the property
// cell (via LoadPropertyFromGlobalDictionary), which jumps to |if_not_found|
// when the cell has been deleted. Unsupported receivers go to |if_bailout|.
11390void CodeStubAssembler::TryHasOwnProperty(TNode<HeapObject> object,
11391 TNode<Map> map,
11392 TNode<Int32T> instance_type,
11393 TNode<Name> unique_name,
11394 Label* if_found, Label* if_not_found,
11395 Label* if_bailout) {
11396 Comment("TryHasOwnProperty");
11397 CSA_DCHECK(this, IsUniqueNameNoCachedIndex(unique_name));
11398 TVARIABLE(HeapObject, var_meta_storage);
11399 TVARIABLE(IntPtrT, var_name_index);
11400
  // Fast and dictionary outcomes both mean "has the property" here, so the
  // same |if_found| label is passed twice.
11401 Label if_found_global(this);
11402 TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
11403 &if_found_global, &var_meta_storage, &var_name_index,
11404 if_not_found, if_bailout);
11405
11406 BIND(&if_found_global);
11407 {
  // The loaded value/details are discarded — only the "cell still alive"
  // side effect (branching to |if_not_found| on deletion) matters.
11408 TVARIABLE(Object, var_value);
11409 TVARIABLE(Uint32T, var_details);
11410 // Check if the property cell is not deleted.
11411 LoadPropertyFromGlobalDictionary(CAST(var_meta_storage.value()),
11412 var_name_index.value(), &var_details,
11413 &var_value, if_not_found);
11414 Goto(if_found);
11415 }
11416}
11417
11418TNode<JSAny> CodeStubAssembler::GetMethod(TNode<Context> context,
11419 TNode<JSAny> object,
11420 Handle<Name> name,
11421 Label* if_null_or_undefined) {
11422 TNode<JSAny> method = GetProperty(context, object, name);
11423
11424 GotoIf(IsUndefined(method), if_null_or_undefined);
11425 GotoIf(IsNull(method), if_null_or_undefined);
11426
11427 return method;
11428}
11429
11430TNode<JSAny> CodeStubAssembler::GetIteratorMethod(TNode<Context> context,
11431 TNode<JSAnyNotSmi> heap_obj,
11432 Label* if_iteratorundefined) {
11433 return GetMethod(context, heap_obj, isolate()->factory()->iterator_symbol(),
11434 if_iteratorundefined);
11435}
11436
// CreateAsyncFromSyncIterator(|sync_iterator|): throws (via
// Runtime::kThrowSymbolIteratorInvalid) unless |sync_iterator| is a
// JSReceiver; otherwise loads its "next" method and wraps both through the
// JSReceiver overload into a new JSAsyncFromSyncIterator object.
11437TNode<JSAny> CodeStubAssembler::CreateAsyncFromSyncIterator(
11438 TNode<Context> context, TNode<JSAny> sync_iterator) {
11439 Label not_receiver(this, Label::kDeferred);
11440 Label done(this);
11441 TVARIABLE(JSAny, return_value);
11442
  // A Smi or a non-receiver heap object is invalid input.
11443 GotoIf(TaggedIsSmi(sync_iterator), &not_receiver);
11444 GotoIfNot(IsJSReceiver(CAST(sync_iterator)), &not_receiver);
11445
  // Cache the "next" method eagerly, as required for the wrapper object.
11446 const TNode<Object> next =
11447 GetProperty(context, sync_iterator, factory()->next_string());
11448 return_value =
11449 CreateAsyncFromSyncIterator(context, CAST(sync_iterator), next);
11450 Goto(&done);
11451
11452 BIND(&not_receiver);
11453 {
11454 return_value =
11455 CallRuntime<JSAny>(Runtime::kThrowSymbolIteratorInvalid, context);
11456
11457 // Unreachable due to the Throw in runtime call.
11458 Goto(&done);
11459 }
11460
11461 BIND(&done);
11462 return return_value.value();
11463}
11464
11465TNode<JSObject> CodeStubAssembler::CreateAsyncFromSyncIterator(
11466 TNode<Context> context, TNode<JSReceiver> sync_iterator,
11467 TNode<Object> next) {
11468 const TNode<NativeContext> native_context = LoadNativeContext(context);
11469 const TNode<Map> map = CAST(LoadContextElement(
11470 native_context, Context::ASYNC_FROM_SYNC_ITERATOR_MAP_INDEX));
11471 const TNode<JSObject> iterator = AllocateJSObjectFromMap(map);
11472
11473 StoreObjectFieldNoWriteBarrier(
11474 iterator, JSAsyncFromSyncIterator::kSyncIteratorOffset, sync_iterator);
11475 StoreObjectFieldNoWriteBarrier(iterator, JSAsyncFromSyncIterator::kNextOffset,
11476 next);
11477 return iterator;
11478}
11479
11480void CodeStubAssembler::LoadPropertyFromFastObject(
11481 TNode<HeapObject> object, TNode<Map> map,
11482 TNode<DescriptorArray> descriptors, TNode<IntPtrT> name_index,
11483 TVariable<Uint32T>* var_details, TVariable<Object>* var_value) {
11484 TNode<Uint32T> details = LoadDetailsByKeyIndex(descriptors, name_index);
11485 *var_details = details;
11486
11487 LoadPropertyFromFastObject(object, map, descriptors, name_index, details,
11488 var_value);
11489}
11490
// Loads the value of a fast-mode property at |name_index| of |descriptors|
// into |var_value|, given the property's |details| word.
//
// Field properties are stored either in-object or in the out-of-object
// property backing store; double-representation fields are kept unboxed in a
// HeapNumber, so their value is re-boxed into a fresh HeapNumber before being
// returned. Properties with kDescriptor location (constants) are loaded from
// the descriptor array itself.
void CodeStubAssembler::LoadPropertyFromFastObject(
    TNode<HeapObject> object, TNode<Map> map,
    TNode<DescriptorArray> descriptors, TNode<IntPtrT> name_index,
    TNode<Uint32T> details, TVariable<Object>* var_value) {
  Comment("[ LoadPropertyFromFastObject");

  TNode<Uint32T> location =
      DecodeWord32<PropertyDetails::LocationField>(details);

  Label if_in_field(this), if_in_descriptor(this), done(this);
  Branch(Word32Equal(location, Int32Constant(static_cast<int32_t>(
                                   PropertyLocation::kField))),
         &if_in_field, &if_in_descriptor);
  BIND(&if_in_field);
  {
    // FieldIndexField is relative to the start of the in-object property
    // area; rebase it so it is relative to the object start below.
    TNode<IntPtrT> field_index =
        Signed(DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details));
    TNode<Uint32T> representation =
        DecodeWord32<PropertyDetails::RepresentationField>(details);

    // TODO(ishell): support WasmValues.
    CSA_DCHECK(this, Word32NotEqual(representation,
                                    Int32Constant(Representation::kWasmValue)));
    field_index =
        IntPtrAdd(field_index, LoadMapInobjectPropertiesStartInWords(map));
    TNode<IntPtrT> instance_size_in_words = LoadMapInstanceSizeInWords(map);

    Label if_inobject(this), if_backing_store(this);
    TVARIABLE(Float64T, var_double_value);
    Label rebox_double(this, &var_double_value);
    // Indices past the instance size live in the property backing store.
    Branch(UintPtrLessThan(field_index, instance_size_in_words), &if_inobject,
           &if_backing_store);
    BIND(&if_inobject);
    {
      Comment("if_inobject");
      TNode<IntPtrT> field_offset = TimesTaggedSize(field_index);

      Label if_double(this), if_tagged(this);
      Branch(Word32NotEqual(representation,
                            Int32Constant(Representation::kDouble)),
             &if_tagged, &if_double);
      BIND(&if_tagged);
      {
        *var_value = LoadObjectField(object, field_offset);
        Goto(&done);
      }
      BIND(&if_double);
      {
        // Double fields hold a HeapNumber; extract its float64 payload and
        // re-box it below so callers never observe the mutable box.
        TNode<HeapNumber> heap_number =
            CAST(LoadObjectField(object, field_offset));
        var_double_value = LoadHeapNumberValue(heap_number);
        Goto(&rebox_double);
      }
    }
    BIND(&if_backing_store);
    {
      Comment("if_backing_store");
      TNode<HeapObject> properties = LoadFastProperties(CAST(object), true);
      // Backing-store indices are relative to the end of the instance.
      field_index = Signed(IntPtrSub(field_index, instance_size_in_words));
      TNode<Object> value =
          LoadPropertyArrayElement(CAST(properties), field_index);

      Label if_double(this), if_tagged(this);
      Branch(Word32NotEqual(representation,
                            Int32Constant(Representation::kDouble)),
             &if_tagged, &if_double);
      BIND(&if_tagged);
      {
        *var_value = value;
        Goto(&done);
      }
      BIND(&if_double);
      {
        var_double_value = LoadHeapNumberValue(CAST(value));
        Goto(&rebox_double);
      }
    }
    BIND(&rebox_double);
    {
      Comment("rebox_double");
      TNode<HeapNumber> heap_number =
          AllocateHeapNumberWithValue(var_double_value.value());
      *var_value = heap_number;
      Goto(&done);
    }
  }
  BIND(&if_in_descriptor);
  {
    // kDescriptor location: the value is stored in the descriptor array.
    *var_value = LoadValueByKeyIndex(descriptors, name_index);
    Goto(&done);
  }
  BIND(&done);

  Comment("] LoadPropertyFromFastObject");
}
11586
11587template <typename Dictionary>
11588void CodeStubAssembler::LoadPropertyFromDictionary(
11589 TNode<Dictionary> dictionary, TNode<IntPtrT> name_index,
11590 TVariable<Uint32T>* var_details, TVariable<Object>* var_value) {
11591 Comment("LoadPropertyFromNameDictionary");
11592 *var_details = LoadDetailsByKeyIndex(dictionary, name_index);
11593 *var_value = LoadValueByKeyIndex(dictionary, name_index);
11594
11595 Comment("] LoadPropertyFromNameDictionary");
11596}
11597
11598void CodeStubAssembler::LoadPropertyFromGlobalDictionary(
11599 TNode<GlobalDictionary> dictionary, TNode<IntPtrT> name_index,
11600 TVariable<Uint32T>* var_details, TVariable<Object>* var_value,
11601 Label* if_deleted) {
11602 Comment("[ LoadPropertyFromGlobalDictionary");
11603 TNode<PropertyCell> property_cell =
11604 CAST(LoadFixedArrayElement(dictionary, name_index));
11605
11606 TNode<Object> value =
11607 LoadObjectField(property_cell, PropertyCell::kValueOffset);
11608 GotoIf(TaggedEqual(value, PropertyCellHoleConstant()), if_deleted);
11609
11610 *var_value = value;
11611
11612 TNode<Uint32T> details = Unsigned(LoadAndUntagToWord32ObjectField(
11613 property_cell, PropertyCell::kPropertyDetailsRawOffset));
11614 *var_details = details;
11615
11616 Comment("] LoadPropertyFromGlobalDictionary");
11617}
11618
// Explicit instantiations for the two property-dictionary backends: the
// default NameDictionary and the SwissNameDictionary used when
// V8_ENABLE_SWISS_NAME_DICTIONARY is enabled.
template void CodeStubAssembler::LoadPropertyFromDictionary(
    TNode<NameDictionary> dictionary, TNode<IntPtrT> name_index,
    TVariable<Uint32T>* var_details, TVariable<Object>* var_value);

template void CodeStubAssembler::LoadPropertyFromDictionary(
    TNode<SwissNameDictionary> dictionary, TNode<IntPtrT> name_index,
    TVariable<Uint32T>* var_details, TVariable<Object>* var_value);
11626
11627// |value| is the property backing store's contents, which is either a value or
11628// an accessor pair, as specified by |details|. |holder| is a JSReceiver or a
11629// PropertyCell. Returns either the original value, or the result of the getter
11630// call.
11631TNode<Object> CodeStubAssembler::CallGetterIfAccessor(
11634 TNode<Object> name, Label* if_bailout, GetOwnPropertyMode mode,
11635 ExpectedReceiverMode expected_receiver_mode) {
11636 TVARIABLE(Object, var_value, value);
11637 Label done(this), if_accessor_info(this, Label::kDeferred);
11638
11639 TNode<Uint32T> kind = DecodeWord32<PropertyDetails::KindField>(details);
11640 GotoIf(
11641 Word32Equal(kind, Int32Constant(static_cast<int>(PropertyKind::kData))),
11642 &done);
11643
11644 // Accessor case.
11645 GotoIfNot(IsAccessorPair(CAST(value)), &if_accessor_info);
11646
11647 // AccessorPair case.
11648 {
11649 if (mode == kCallJSGetterUseCachedName ||
11650 mode == kCallJSGetterDontUseCachedName) {
11651 Label if_callable(this), if_function_template_info(this);
11652 TNode<AccessorPair> accessor_pair = CAST(value);
11653 TNode<HeapObject> getter = CAST(LoadAccessorPairGetter(accessor_pair));
11654 TNode<Map> getter_map = LoadMap(getter);
11655
11656 GotoIf(IsCallableMap(getter_map), &if_callable);
11657 GotoIf(IsFunctionTemplateInfoMap(getter_map), &if_function_template_info);
11658
11659 // Return undefined if the {getter} is not callable.
11660 var_value = UndefinedConstant();
11661 Goto(&done);
11662
11663 BIND(&if_callable);
11664 {
11665 // Call the accessor. No need to check side-effect mode here, since it
11666 // will be checked later in DebugOnFunctionCall.
11667 // It's too early to convert receiver to JSReceiver at this point
11668 // (the Call builtin will do the conversion), so we ignore the
11669 // |expected_receiver_mode| here.
11670 var_value = Call(context, getter, receiver);
11671 Goto(&done);
11672 }
11673
11674 BIND(&if_function_template_info);
11675 {
11676 Label use_cached_property(this);
11677 TNode<HeapObject> cached_property_name = LoadObjectField<HeapObject>(
11678 getter, FunctionTemplateInfo::kCachedPropertyNameOffset);
11679
11680 Label* has_cached_property = mode == kCallJSGetterUseCachedName
11681 ? &use_cached_property
11682 : if_bailout;
11683 GotoIfNot(IsTheHole(cached_property_name), has_cached_property);
11684
11685 TNode<JSReceiver> js_receiver;
11686 switch (expected_receiver_mode) {
11687 case kExpectingJSReceiver:
11688 js_receiver = CAST(receiver);
11689 break;
11690 case kExpectingAnyReceiver:
11691 // TODO(ishell): in case the function template info has a signature
11692 // and receiver is not a JSReceiver the signature check in
11693 // CallFunctionTemplate builtin will fail anyway, so we can short
11694 // cut it here and throw kIllegalInvocation immediately.
11695 js_receiver = ToObject_Inline(context, receiver);
11696 break;
11697 }
11698 TNode<JSReceiver> holder_receiver = CAST(holder);
11699 TNode<NativeContext> creation_context =
11700 GetCreationContext(holder_receiver, if_bailout);
11701 TNode<Context> caller_context = context;
11702 var_value = CallBuiltin(
11703 Builtin::kCallFunctionTemplate_Generic, creation_context, getter,
11704 Int32Constant(i::JSParameterCount(0)), caller_context, js_receiver);
11705 Goto(&done);
11706
11707 if (mode == kCallJSGetterUseCachedName) {
11708 Bind(&use_cached_property);
11709
11710 var_value =
11711 GetProperty(context, holder_receiver, cached_property_name);
11712
11713 Goto(&done);
11714 }
11715 }
11716 } else {
11717 DCHECK_EQ(mode, kReturnAccessorPair);
11718 Goto(&done);
11719 }
11720 }
11721
11722 // AccessorInfo case.
11723 BIND(&if_accessor_info);
11724 {
11725 TNode<AccessorInfo> accessor_info = CAST(value);
11726 Label if_array(this), if_function(this), if_wrapper(this);
11727
11728 // Dispatch based on {holder} instance type.
11729 TNode<Map> holder_map = LoadMap(holder);
11730 TNode<Uint16T> holder_instance_type = LoadMapInstanceType(holder_map);
11731 GotoIf(IsJSArrayInstanceType(holder_instance_type), &if_array);
11732 GotoIf(IsJSFunctionInstanceType(holder_instance_type), &if_function);
11733 Branch(IsJSPrimitiveWrapperInstanceType(holder_instance_type), &if_wrapper,
11734 if_bailout);
11735
11736 // JSArray AccessorInfo case.
11737 BIND(&if_array);
11738 {
11739 // We only deal with the "length" accessor on JSArray.
11740 GotoIfNot(IsLengthString(
11741 LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
11742 if_bailout);
11743 TNode<JSArray> array = CAST(holder);
11744 var_value = LoadJSArrayLength(array);
11745 Goto(&done);
11746 }
11747
11748 // JSFunction AccessorInfo case.
11749 BIND(&if_function);
11750 {
11751 // We only deal with the "prototype" accessor on JSFunction here.
11752 GotoIfNot(IsPrototypeString(
11753 LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
11754 if_bailout);
11755
11756 TNode<JSFunction> function = CAST(holder);
11757 GotoIfPrototypeRequiresRuntimeLookup(function, holder_map, if_bailout);
11758 var_value = LoadJSFunctionPrototype(function, if_bailout);
11759 Goto(&done);
11760 }
11761
11762 // JSPrimitiveWrapper AccessorInfo case.
11763 BIND(&if_wrapper);
11764 {
11765 // We only deal with the "length" accessor on JSPrimitiveWrapper string
11766 // wrappers.
11767 GotoIfNot(IsLengthString(
11768 LoadObjectField(accessor_info, AccessorInfo::kNameOffset)),
11769 if_bailout);
11770 TNode<Object> holder_value = LoadJSPrimitiveWrapperValue(CAST(holder));
11771 GotoIfNot(TaggedIsNotSmi(holder_value), if_bailout);
11772 GotoIfNot(IsString(CAST(holder_value)), if_bailout);
11773 var_value = LoadStringLengthAsSmi(CAST(holder_value));
11774 Goto(&done);
11775 }
11776 }
11777
11778 BIND(&done);
11779 return var_value.value();
11780}
11781
11782void CodeStubAssembler::TryGetOwnProperty(
11784 TNode<Map> map, TNode<Int32T> instance_type, TNode<Name> unique_name,
11785 Label* if_found_value, TVariable<Object>* var_value, Label* if_not_found,
11786 Label* if_bailout, ExpectedReceiverMode expected_receiver_mode) {
11787 TryGetOwnProperty(context, receiver, object, map, instance_type, unique_name,
11788 if_found_value, var_value, nullptr, nullptr, if_not_found,
11789 if_bailout,
11790 receiver == object ? kCallJSGetterUseCachedName
11791 : kCallJSGetterDontUseCachedName,
11792 expected_receiver_mode);
11793}
11794
11795void CodeStubAssembler::TryGetOwnProperty(
11797 TNode<Map> map, TNode<Int32T> instance_type, TNode<Name> unique_name,
11798 Label* if_found_value, TVariable<Object>* var_value,
11799 TVariable<Uint32T>* var_details, TVariable<Object>* var_raw_value,
11800 Label* if_not_found, Label* if_bailout, GetOwnPropertyMode mode,
11801 ExpectedReceiverMode expected_receiver_mode) {
11802 DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
11803 Comment("TryGetOwnProperty");
11804 if (receiver == object) {
11805 // If |receiver| is exactly the same Node as the |object| which is
11806 // guaranteed to be JSReceiver override the |expected_receiver_mode|.
11807 expected_receiver_mode = kExpectingJSReceiver;
11808 }
11809 CSA_DCHECK(this, IsUniqueNameNoCachedIndex(unique_name));
11810 TVARIABLE(HeapObject, var_meta_storage);
11811 TVARIABLE(IntPtrT, var_entry);
11812
11813 Label if_found_fast(this), if_found_dict(this), if_found_global(this);
11814
11815 TVARIABLE(Uint32T, local_var_details);
11816 if (!var_details) {
11817 var_details = &local_var_details;
11818 }
11819 Label if_found(this);
11820
11821 TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
11822 &if_found_dict, &if_found_global, &var_meta_storage,
11823 &var_entry, if_not_found, if_bailout);
11824 BIND(&if_found_fast);
11825 {
11826 TNode<DescriptorArray> descriptors = CAST(var_meta_storage.value());
11827 TNode<IntPtrT> name_index = var_entry.value();
11828
11829 LoadPropertyFromFastObject(object, map, descriptors, name_index,
11830 var_details, var_value);
11831 Goto(&if_found);
11832 }
11833 BIND(&if_found_dict);
11834 {
11835 TNode<PropertyDictionary> dictionary = CAST(var_meta_storage.value());
11836 TNode<IntPtrT> entry = var_entry.value();
11837 LoadPropertyFromDictionary(dictionary, entry, var_details, var_value);
11838
11839 Goto(&if_found);
11840 }
11841 BIND(&if_found_global);
11842 {
11843 TNode<GlobalDictionary> dictionary = CAST(var_meta_storage.value());
11844 TNode<IntPtrT> entry = var_entry.value();
11845
11846 LoadPropertyFromGlobalDictionary(dictionary, entry, var_details, var_value,
11847 if_not_found);
11848 Goto(&if_found);
11849 }
11850 // Here we have details and value which could be an accessor.
11851 BIND(&if_found);
11852 {
11853 // TODO(ishell): Execute C++ accessor in case of accessor info
11854 if (var_raw_value) {
11855 *var_raw_value = *var_value;
11856 }
11857 TNode<Object> value = CallGetterIfAccessor(
11858 var_value->value(), object, var_details->value(), context, receiver,
11859 unique_name, if_bailout, mode, expected_receiver_mode);
11860 *var_value = value;
11861 Goto(if_found_value);
11862 }
11863}
11864
11865void CodeStubAssembler::InitializePropertyDescriptorObject(
11867 TNode<Uint32T> details, Label* if_bailout) {
11868 Label if_data_property(this), if_accessor_property(this),
11869 test_configurable(this), test_property_type(this), done(this);
11870 TVARIABLE(Smi, flags,
11871 SmiConstant(PropertyDescriptorObject::HasEnumerableBit::kMask |
11872 PropertyDescriptorObject::HasConfigurableBit::kMask));
11873
11874 { // test enumerable
11875 TNode<Uint32T> dont_enum =
11876 Uint32Constant(DONT_ENUM << PropertyDetails::AttributesField::kShift);
11877 GotoIf(Word32And(details, dont_enum), &test_configurable);
11878 flags =
11879 SmiOr(flags.value(),
11880 SmiConstant(PropertyDescriptorObject::IsEnumerableBit::kMask));
11881 Goto(&test_configurable);
11882 }
11883
11884 BIND(&test_configurable);
11885 {
11886 TNode<Uint32T> dont_delete =
11887 Uint32Constant(DONT_DELETE << PropertyDetails::AttributesField::kShift);
11888 GotoIf(Word32And(details, dont_delete), &test_property_type);
11889 flags =
11890 SmiOr(flags.value(),
11891 SmiConstant(PropertyDescriptorObject::IsConfigurableBit::kMask));
11892 Goto(&test_property_type);
11893 }
11894
11895 BIND(&test_property_type);
11896 BranchIfAccessorPair(value, &if_accessor_property, &if_data_property);
11897
11898 BIND(&if_accessor_property);
11899 {
11900 Label done_get(this), store_fields(this);
11901 TNode<AccessorPair> accessor_pair = CAST(value);
11902
11903 auto BailoutIfTemplateInfo = [this, &if_bailout](TNode<HeapObject> value) {
11905
11906 Label bind_undefined(this), return_result(this);
11907 GotoIf(IsNull(value), &bind_undefined);
11908 result = value;
11909 TNode<Map> map = LoadMap(value);
11910 // TODO(ishell): probe template instantiations cache.
11911 GotoIf(IsFunctionTemplateInfoMap(map), if_bailout);
11912 Goto(&return_result);
11913
11914 BIND(&bind_undefined);
11915 result = UndefinedConstant();
11916 Goto(&return_result);
11917
11918 BIND(&return_result);
11919 return result.value();
11920 };
11921
11922 TNode<HeapObject> getter = CAST(LoadAccessorPairGetter(accessor_pair));
11923 TNode<HeapObject> setter = CAST(LoadAccessorPairSetter(accessor_pair));
11924 getter = BailoutIfTemplateInfo(getter);
11925 setter = BailoutIfTemplateInfo(setter);
11926
11927 Label bind_undefined(this, Label::kDeferred), return_result(this);
11928 flags = SmiOr(flags.value(),
11929 SmiConstant(PropertyDescriptorObject::HasGetBit::kMask |
11930 PropertyDescriptorObject::HasSetBit::kMask));
11931 StoreObjectField(descriptor, PropertyDescriptorObject::kFlagsOffset,
11932 flags.value());
11933 StoreObjectField(descriptor, PropertyDescriptorObject::kValueOffset,
11934 NullConstant());
11935 StoreObjectField(descriptor, PropertyDescriptorObject::kGetOffset,
11936 BailoutIfTemplateInfo(getter));
11937 StoreObjectField(descriptor, PropertyDescriptorObject::kSetOffset,
11938 BailoutIfTemplateInfo(setter));
11939 Goto(&done);
11940 }
11941
11942 BIND(&if_data_property);
11943 {
11944 Label store_fields(this);
11945 flags = SmiOr(flags.value(),
11946 SmiConstant(PropertyDescriptorObject::HasValueBit::kMask |
11947 PropertyDescriptorObject::HasWritableBit::kMask));
11948 TNode<Uint32T> read_only =
11949 Uint32Constant(READ_ONLY << PropertyDetails::AttributesField::kShift);
11950 GotoIf(Word32And(details, read_only), &store_fields);
11951 flags = SmiOr(flags.value(),
11952 SmiConstant(PropertyDescriptorObject::IsWritableBit::kMask));
11953 Goto(&store_fields);
11954
11955 BIND(&store_fields);
11956 StoreObjectField(descriptor, PropertyDescriptorObject::kFlagsOffset,
11957 flags.value());
11958 StoreObjectField(descriptor, PropertyDescriptorObject::kValueOffset, value);
11959 StoreObjectField(descriptor, PropertyDescriptorObject::kGetOffset,
11960 NullConstant());
11961 StoreObjectField(descriptor, PropertyDescriptorObject::kSetOffset,
11962 NullConstant());
11963 Goto(&done);
11964 }
11965
11966 BIND(&done);
11967}
11968
11970CodeStubAssembler::AllocatePropertyDescriptorObject(TNode<Context> context) {
11971 TNode<HeapObject> result = Allocate(PropertyDescriptorObject::kSize);
11972 TNode<Map> map = GetInstanceTypeMap(PROPERTY_DESCRIPTOR_OBJECT_TYPE);
11973 StoreMapNoWriteBarrier(result, map);
11974 TNode<Smi> zero = SmiConstant(0);
11975 StoreObjectFieldNoWriteBarrier(result, PropertyDescriptorObject::kFlagsOffset,
11976 zero);
11977 TNode<Hole> the_hole = TheHoleConstant();
11978 StoreObjectFieldNoWriteBarrier(result, PropertyDescriptorObject::kValueOffset,
11979 the_hole);
11980 StoreObjectFieldNoWriteBarrier(result, PropertyDescriptorObject::kGetOffset,
11981 the_hole);
11982 StoreObjectFieldNoWriteBarrier(result, PropertyDescriptorObject::kSetOffset,
11983 the_hole);
11984 return CAST(result);
11985}
11986
11987TNode<BoolT> CodeStubAssembler::IsInterestingProperty(TNode<Name> name) {
11988 TVARIABLE(BoolT, var_result);
11989 Label return_false(this), return_true(this), return_generic(this);
11990 // TODO(ishell): consider using ReadOnlyRoots::IsNameForProtector() trick for
11991 // these strings and interesting symbols.
11992 GotoIf(IsToJSONString(name), &return_true);
11993 GotoIf(IsGetString(name), &return_true);
11994 GotoIfNot(InstanceTypeEqual(LoadMapInstanceType(LoadMap(name)), SYMBOL_TYPE),
11995 &return_false);
11996 Branch(IsSetWord32<Symbol::IsInterestingSymbolBit>(
11997 LoadObjectField<Uint32T>(name, offsetof(Symbol, flags_))),
11998 &return_true, &return_false);
11999
12000 BIND(&return_false);
12001 var_result = BoolConstant(false);
12002 Goto(&return_generic);
12003
12004 BIND(&return_true);
12005 var_result = BoolConstant(true);
12006 Goto(&return_generic);
12007
12008 BIND(&return_generic);
12009 return var_result.value();
12010}
12011
12012TNode<JSAny> CodeStubAssembler::GetInterestingProperty(
12014 Label* if_not_found) {
12015 TVARIABLE(JSAnyNotSmi, var_holder, receiver);
12016 TVARIABLE(Map, var_holder_map, LoadMap(receiver));
12017
12018 return GetInterestingProperty(context, receiver, &var_holder, &var_holder_map,
12019 name, if_not_found);
12020}
12021
12022TNode<JSAny> CodeStubAssembler::GetInterestingProperty(
12024 TVariable<JSAnyNotSmi>* var_holder, TVariable<Map>* var_holder_map,
12025 TNode<Name> name, Label* if_not_found) {
12026 CSA_DCHECK(this, IsInterestingProperty(name));
12027 // The lookup starts at the var_holder and var_holder_map must contain
12028 // var_holder's map.
12029 CSA_DCHECK(this, TaggedEqual(LoadMap((*var_holder).value()),
12030 (*var_holder_map).value()));
12031 TVARIABLE(Object, var_result, UndefinedConstant());
12032
12033 // Check if all relevant maps (including the prototype maps) don't
12034 // have any interesting properties (i.e. that none of them have the
12035 // @@toStringTag or @@toPrimitive property).
12036 Label loop(this, {var_holder, var_holder_map}),
12037 lookup(this, Label::kDeferred);
12038 Goto(&loop);
12039 BIND(&loop);
12040 {
12041 Label interesting_properties(this);
12042 TNode<JSAnyNotSmi> holder = (*var_holder).value();
12043 TNode<Map> holder_map = (*var_holder_map).value();
12044 GotoIf(IsNull(holder), if_not_found);
12045 TNode<Uint32T> holder_bit_field3 = LoadMapBitField3(holder_map);
12046 GotoIf(IsSetWord32<Map::Bits3::MayHaveInterestingPropertiesBit>(
12047 holder_bit_field3),
12048 &interesting_properties);
12049 *var_holder = LoadMapPrototype(holder_map);
12050 *var_holder_map = LoadMap((*var_holder).value());
12051 Goto(&loop);
12052 BIND(&interesting_properties);
12053 {
12054 // Check flags for dictionary objects.
12055 GotoIf(IsClearWord32<Map::Bits3::IsDictionaryMapBit>(holder_bit_field3),
12056 &lookup);
12057 // JSProxy has dictionary properties but has to be handled in runtime.
12058 GotoIf(InstanceTypeEqual(LoadMapInstanceType(holder_map), JS_PROXY_TYPE),
12059 &lookup);
12060 TNode<Object> properties =
12061 LoadObjectField(holder, JSObject::kPropertiesOrHashOffset);
12062 CSA_DCHECK(this, TaggedIsNotSmi(properties));
12063 CSA_DCHECK(this, IsPropertyDictionary(CAST(properties)));
12064 // TODO(pthier): Support swiss dictionaries.
12065 if constexpr (!V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL) {
12066 TNode<Smi> flags =
12067 GetNameDictionaryFlags<NameDictionary>(CAST(properties));
12068 GotoIf(IsSetSmi(flags,
12069 NameDictionary::MayHaveInterestingPropertiesBit::kMask),
12070 &lookup);
12071 *var_holder = LoadMapPrototype(holder_map);
12072 *var_holder_map = LoadMap((*var_holder).value());
12073 }
12074 Goto(&loop);
12075 }
12076 }
12077
12078 BIND(&lookup);
12079 return CallBuiltin<JSAny>(Builtin::kGetPropertyWithReceiver, context,
12080 (*var_holder).value(), name, receiver,
12081 SmiConstant(OnNonExistent::kReturnUndefined));
12082}
12083
12084void CodeStubAssembler::TryLookupElement(
12085 TNode<HeapObject> object, TNode<Map> map, TNode<Int32T> instance_type,
12086 TNode<IntPtrT> intptr_index, Label* if_found, Label* if_absent,
12087 Label* if_not_found, Label* if_bailout) {
12088 // Handle special objects in runtime.
12089 GotoIf(IsSpecialReceiverInstanceType(instance_type), if_bailout);
12090
12091 TNode<Int32T> elements_kind = LoadMapElementsKind(map);
12092
12093 // TODO(verwaest): Support other elements kinds as well.
12094 Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
12095 if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this),
12096 if_typedarray(this), if_rab_gsab_typedarray(this);
12097 // clang-format off
12098 int32_t values[] = {
12099 // Handled by {if_isobjectorsmi}.
12104 // Handled by {if_isdouble}.
12106 // Handled by {if_isdictionary}.
12108 // Handled by {if_isfaststringwrapper}.
12110 // Handled by {if_isslowstringwrapper}.
12112 // Handled by {if_not_found}.
12114 // Handled by {if_typed_array}.
12115 UINT8_ELEMENTS,
12116 INT8_ELEMENTS,
12117 UINT16_ELEMENTS,
12118 INT16_ELEMENTS,
12119 UINT32_ELEMENTS,
12120 INT32_ELEMENTS,
12121 FLOAT32_ELEMENTS,
12122 FLOAT64_ELEMENTS,
12123 UINT8_CLAMPED_ELEMENTS,
12124 BIGUINT64_ELEMENTS,
12125 BIGINT64_ELEMENTS,
12126 RAB_GSAB_UINT8_ELEMENTS,
12127 RAB_GSAB_INT8_ELEMENTS,
12128 RAB_GSAB_UINT16_ELEMENTS,
12129 RAB_GSAB_INT16_ELEMENTS,
12130 RAB_GSAB_UINT32_ELEMENTS,
12131 RAB_GSAB_INT32_ELEMENTS,
12132 RAB_GSAB_FLOAT32_ELEMENTS,
12133 RAB_GSAB_FLOAT64_ELEMENTS,
12134 RAB_GSAB_UINT8_CLAMPED_ELEMENTS,
12135 RAB_GSAB_BIGUINT64_ELEMENTS,
12136 RAB_GSAB_BIGINT64_ELEMENTS,
12137 };
12138 Label* labels[] = {
12139 &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
12140 &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
12141 &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
12142 &if_isobjectorsmi,
12143 &if_isdouble, &if_isdouble,
12144 &if_isdictionary,
12145 &if_isfaststringwrapper,
12146 &if_isslowstringwrapper,
12147 if_not_found,
12148 &if_typedarray,
12149 &if_typedarray,
12150 &if_typedarray,
12151 &if_typedarray,
12152 &if_typedarray,
12153 &if_typedarray,
12154 &if_typedarray,
12155 &if_typedarray,
12156 &if_typedarray,
12157 &if_typedarray,
12158 &if_typedarray,
12159 &if_rab_gsab_typedarray,
12160 &if_rab_gsab_typedarray,
12161 &if_rab_gsab_typedarray,
12162 &if_rab_gsab_typedarray,
12163 &if_rab_gsab_typedarray,
12164 &if_rab_gsab_typedarray,
12165 &if_rab_gsab_typedarray,
12166 &if_rab_gsab_typedarray,
12167 &if_rab_gsab_typedarray,
12168 &if_rab_gsab_typedarray,
12169 &if_rab_gsab_typedarray,
12170 };
12171 // clang-format on
12172 static_assert(arraysize(values) == arraysize(labels));
12173 Switch(elements_kind, if_bailout, values, labels, arraysize(values));
12174
12175 BIND(&if_isobjectorsmi);
12176 {
12177 TNode<FixedArray> elements = CAST(LoadElements(CAST(object)));
12178 TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
12179
12180 GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
12181
12182 TNode<Object> element = UnsafeLoadFixedArrayElement(elements, intptr_index);
12183 TNode<Hole> the_hole = TheHoleConstant();
12184 Branch(TaggedEqual(element, the_hole), if_not_found, if_found);
12185 }
12186 BIND(&if_isdouble);
12187 {
12188 TNode<FixedArrayBase> elements = LoadElements(CAST(object));
12189 TNode<IntPtrT> length = LoadAndUntagFixedArrayBaseLength(elements);
12190
12191 GotoIfNot(UintPtrLessThan(intptr_index, length), &if_oob);
12192
12193 // Check if the element is a double hole, but don't load it.
12194 LoadFixedDoubleArrayElement(CAST(elements), intptr_index, if_not_found,
12195 MachineType::None());
12196 Goto(if_found);
12197 }
12198 BIND(&if_isdictionary);
12199 {
12200 // Negative and too-large keys must be converted to property names.
12201 if (Is64()) {
12202 GotoIf(UintPtrLessThan(IntPtrConstant(JSObject::kMaxElementIndex),
12203 intptr_index),
12204 if_bailout);
12205 } else {
12206 GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
12207 }
12208
12209 TVARIABLE(IntPtrT, var_entry);
12210 TNode<NumberDictionary> elements = CAST(LoadElements(CAST(object)));
12211 NumberDictionaryLookup(elements, intptr_index, if_found, &var_entry,
12212 if_not_found);
12213 }
12214 BIND(&if_isfaststringwrapper);
12215 {
12216 TNode<String> string = CAST(LoadJSPrimitiveWrapperValue(CAST(object)));
12217 TNode<IntPtrT> length = LoadStringLengthAsWord(string);
12218 GotoIf(UintPtrLessThan(intptr_index, length), if_found);
12219 Goto(&if_isobjectorsmi);
12220 }
12221 BIND(&if_isslowstringwrapper);
12222 {
12223 TNode<String> string = CAST(LoadJSPrimitiveWrapperValue(CAST(object)));
12224 TNode<IntPtrT> length = LoadStringLengthAsWord(string);
12225 GotoIf(UintPtrLessThan(intptr_index, length), if_found);
12226 Goto(&if_isdictionary);
12227 }
12228 BIND(&if_typedarray);
12229 {
12230 TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(CAST(object));
12231 GotoIf(IsDetachedBuffer(buffer), if_absent);
12232
12233 TNode<UintPtrT> length = LoadJSTypedArrayLength(CAST(object));
12234 Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
12235 }
12236 BIND(&if_rab_gsab_typedarray);
12237 {
12238 TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(CAST(object));
12239 TNode<UintPtrT> length =
12240 LoadVariableLengthJSTypedArrayLength(CAST(object), buffer, if_absent);
12241 Branch(UintPtrLessThan(intptr_index, length), if_found, if_absent);
12242 }
12243 BIND(&if_oob);
12244 {
12245 // Positive OOB indices mean "not found", negative indices and indices
12246 // out of array index range must be converted to property names.
12247 if (Is64()) {
12248 GotoIf(UintPtrLessThan(IntPtrConstant(JSObject::kMaxElementIndex),
12249 intptr_index),
12250 if_bailout);
12251 } else {
12252 GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
12253 }
12254 Goto(if_not_found);
12255 }
12256}
12257
12258void CodeStubAssembler::BranchIfMaybeSpecialIndex(TNode<String> name_string,
12259 Label* if_maybe_special_index,
12260 Label* if_not_special_index) {
12261 // TODO(cwhan.tunz): Implement fast cases more.
12262
12263 // If a name is empty or too long, it's not a special index
12264 // Max length of canonical double: -X.XXXXXXXXXXXXXXXXX-eXXX
12265 const int kBufferSize = 24;
12266 TNode<Smi> string_length = LoadStringLengthAsSmi(name_string);
12267 GotoIf(SmiEqual(string_length, SmiConstant(0)), if_not_special_index);
12268 GotoIf(SmiGreaterThan(string_length, SmiConstant(kBufferSize)),
12269 if_not_special_index);
12270
12271 // If the first character of name is not a digit or '-', or we can't match it
12272 // to Infinity or NaN, then this is not a special index.
12273 TNode<Int32T> first_char = StringCharCodeAt(name_string, UintPtrConstant(0));
12274 // If the name starts with '-', it can be a negative index.
12275 GotoIf(Word32Equal(first_char, Int32Constant('-')), if_maybe_special_index);
12276 // If the name starts with 'I', it can be "Infinity".
12277 GotoIf(Word32Equal(first_char, Int32Constant('I')), if_maybe_special_index);
12278 // If the name starts with 'N', it can be "NaN".
12279 GotoIf(Word32Equal(first_char, Int32Constant('N')), if_maybe_special_index);
12280 // Finally, if the first character is not a digit either, then we are sure
12281 // that the name is not a special index.
12282 GotoIf(Uint32LessThan(first_char, Int32Constant('0')), if_not_special_index);
12283 GotoIf(Uint32LessThan(Int32Constant('9'), first_char), if_not_special_index);
12284 Goto(if_maybe_special_index);
12285}
12286
12287void CodeStubAssembler::TryPrototypeChainLookup(
12289 const LookupPropertyInHolder& lookup_property_in_holder,
12290 const LookupElementInHolder& lookup_element_in_holder, Label* if_end,
12291 Label* if_bailout, Label* if_proxy, bool handle_private_names) {
12292 // Ensure receiver is JSReceiver, otherwise bailout.
12293 GotoIf(TaggedIsSmi(receiver), if_bailout);
12294 TNode<JSAnyNotSmi> object = CAST(object_arg);
12295
12296 TNode<Map> map = LoadMap(object);
12297 TNode<Uint16T> instance_type = LoadMapInstanceType(map);
12298 {
12299 Label if_objectisreceiver(this);
12300 Branch(IsJSReceiverInstanceType(instance_type), &if_objectisreceiver,
12301 if_bailout);
12302 BIND(&if_objectisreceiver);
12303
12304 GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), if_proxy);
12305 }
12306
12307 TVARIABLE(IntPtrT, var_index);
12308 TVARIABLE(Name, var_unique);
12309
12310 Label if_keyisindex(this), if_iskeyunique(this);
12311 TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, &var_unique,
12312 if_bailout);
12313
12314 BIND(&if_iskeyunique);
12315 {
12316 TVARIABLE(JSAnyNotSmi, var_holder, object);
12317 TVARIABLE(Map, var_holder_map, map);
12318 TVARIABLE(Int32T, var_holder_instance_type, instance_type);
12319
12320 Label loop(this, {&var_holder, &var_holder_map, &var_holder_instance_type});
12321 Goto(&loop);
12322 BIND(&loop);
12323 {
12324 TNode<Map> holder_map = var_holder_map.value();
12325 TNode<Int32T> holder_instance_type = var_holder_instance_type.value();
12326
12327 Label next_proto(this), check_integer_indexed_exotic(this);
12328 lookup_property_in_holder(CAST(receiver), var_holder.value(), holder_map,
12329 holder_instance_type, var_unique.value(),
12330 &check_integer_indexed_exotic, if_bailout);
12331
12332 BIND(&check_integer_indexed_exotic);
12333 {
12334 // Bailout if it can be an integer indexed exotic case.
12335 GotoIfNot(InstanceTypeEqual(holder_instance_type, JS_TYPED_ARRAY_TYPE),
12336 &next_proto);
12337 GotoIfNot(IsString(var_unique.value()), &next_proto);
12338 BranchIfMaybeSpecialIndex(CAST(var_unique.value()), if_bailout,
12339 &next_proto);
12340 }
12341
12342 BIND(&next_proto);
12343
12344 if (handle_private_names) {
12345 // Private name lookup doesn't walk the prototype chain.
12346 GotoIf(IsPrivateSymbol(CAST(key)), if_end);
12347 }
12348
12349 TNode<JSPrototype> proto = LoadMapPrototype(holder_map);
12350
12351 GotoIf(IsNull(proto), if_end);
12352
12353 TNode<Map> proto_map = LoadMap(proto);
12354 TNode<Uint16T> proto_instance_type = LoadMapInstanceType(proto_map);
12355
12356 var_holder = proto;
12357 var_holder_map = proto_map;
12358 var_holder_instance_type = proto_instance_type;
12359 Goto(&loop);
12360 }
12361 }
12362 BIND(&if_keyisindex);
12363 {
12364 TVARIABLE(JSAnyNotSmi, var_holder, object);
12365 TVARIABLE(Map, var_holder_map, map);
12366 TVARIABLE(Int32T, var_holder_instance_type, instance_type);
12367
12368 Label loop(this, {&var_holder, &var_holder_map, &var_holder_instance_type});
12369 Goto(&loop);
12370 BIND(&loop);
12371 {
12372 Label next_proto(this);
12373 lookup_element_in_holder(CAST(receiver), var_holder.value(),
12374 var_holder_map.value(),
12375 var_holder_instance_type.value(),
12376 var_index.value(), &next_proto, if_bailout);
12377 BIND(&next_proto);
12378
12379 TNode<JSPrototype> proto = LoadMapPrototype(var_holder_map.value());
12380
12381 GotoIf(IsNull(proto), if_end);
12382
12383 TNode<Map> proto_map = LoadMap(proto);
12384 TNode<Uint16T> proto_instance_type = LoadMapInstanceType(proto_map);
12385
12386 var_holder = proto;
12387 var_holder_map = proto_map;
12388 var_holder_instance_type = proto_instance_type;
12389 Goto(&loop);
12390 }
12391 }
12392}
12393
// Walks {object}'s prototype chain and returns true iff {prototype} occurs
// on it. Proxies, access-checked maps, and maps with named interceptors are
// punted to the runtime.
TNode<Boolean> CodeStubAssembler::HasInPrototypeChain(TNode<Context> context,
                                                      TNode<HeapObject> object,
                                                      TNode<Object> prototype) {
  TVARIABLE(Boolean, var_result);
  Label return_false(this), return_true(this),
      return_runtime(this, Label::kDeferred), return_result(this);

  // Loop through the prototype chain looking for the {prototype}.
  TVARIABLE(Map, var_object_map, LoadMap(object));
  Label loop(this, &var_object_map);
  Goto(&loop);
  BIND(&loop);
  {
    // Check if we can determine the prototype directly from the {object_map}.
    Label if_objectisdirect(this), if_objectisspecial(this, Label::kDeferred);
    TNode<Map> object_map = var_object_map.value();
    TNode<Uint16T> object_instance_type = LoadMapInstanceType(object_map);
    Branch(IsSpecialReceiverInstanceType(object_instance_type),
           &if_objectisspecial, &if_objectisdirect);
    BIND(&if_objectisspecial);
    {
      // The {object_map} is a special receiver map or a primitive map, check
      // if we need to use the if_objectisspecial path in the runtime.
      GotoIf(InstanceTypeEqual(object_instance_type, JS_PROXY_TYPE),
             &return_runtime);
      TNode<Int32T> object_bitfield = LoadMapBitField(object_map);
      int mask = Map::Bits1::HasNamedInterceptorBit::kMask |
                 Map::Bits1::IsAccessCheckNeededBit::kMask;
      // Interceptors and access checks can alter lookup results, so only the
      // runtime can answer correctly for such maps.
      Branch(IsSetWord32(object_bitfield, mask), &return_runtime,
             &if_objectisdirect);
    }
    BIND(&if_objectisdirect);

    // Check the current {object} prototype.
    TNode<HeapObject> object_prototype = LoadMapPrototype(object_map);
    // Null terminates the chain: {prototype} was not found.
    GotoIf(IsNull(object_prototype), &return_false);
    GotoIf(TaggedEqual(object_prototype, prototype), &return_true);

    // Continue with the prototype.
    CSA_DCHECK(this, TaggedIsNotSmi(object_prototype));
    var_object_map = LoadMap(object_prototype);
    Goto(&loop);
  }

  BIND(&return_true);
  var_result = TrueConstant();
  Goto(&return_result);

  BIND(&return_false);
  var_result = FalseConstant();
  Goto(&return_result);

  BIND(&return_runtime);
  {
    // Fallback to the runtime implementation.
    var_result = CAST(
        CallRuntime(Runtime::kHasInPrototypeChain, context, object, prototype));
  }
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
12457
// Fast path for the OrdinaryHasInstance operation (instanceof without a
// Symbol.hasInstance trap): resolves {callable}'s "prototype" and checks
// whether it occurs on {object}'s prototype chain. Smis, non-JSFunction
// callables, and bound/remapped-prototype cases go to the runtime.
TNode<Boolean> CodeStubAssembler::OrdinaryHasInstance(
    TNode<Context> context, TNode<Object> callable_maybe_smi,
    TNode<Object> object_maybe_smi) {
  TVARIABLE(Boolean, var_result);
  Label return_runtime(this, Label::kDeferred), return_result(this);

  GotoIfForceSlowPath(&return_runtime);

  // Goto runtime if {object} is a Smi.
  GotoIf(TaggedIsSmi(object_maybe_smi), &return_runtime);

  // Goto runtime if {callable} is a Smi.
  GotoIf(TaggedIsSmi(callable_maybe_smi), &return_runtime);

  {
    // Load map of {callable}.
    TNode<HeapObject> object = CAST(object_maybe_smi);
    TNode<HeapObject> callable = CAST(callable_maybe_smi);
    TNode<Map> callable_map = LoadMap(callable);

    // Goto runtime if {callable} is not a JSFunction.
    TNode<Uint16T> callable_instance_type = LoadMapInstanceType(callable_map);
    GotoIfNot(IsJSFunctionInstanceType(callable_instance_type),
              &return_runtime);

    GotoIfPrototypeRequiresRuntimeLookup(CAST(callable), callable_map,
                                         &return_runtime);

    // Get the "prototype" (or initial map) of the {callable}.
    TNode<HeapObject> callable_prototype = LoadObjectField<HeapObject>(
        callable, JSFunction::kPrototypeOrInitialMapOffset);
    {
      Label no_initial_map(this), walk_prototype_chain(this);
      TVARIABLE(HeapObject, var_callable_prototype, callable_prototype);

      // Resolve the "prototype" if the {callable} has an initial map.
      GotoIfNot(IsMap(callable_prototype), &no_initial_map);
      var_callable_prototype = LoadObjectField<HeapObject>(
          callable_prototype, Map::kPrototypeOffset);
      Goto(&walk_prototype_chain);

      BIND(&no_initial_map);
      // {callable_prototype} is the hole if the "prototype" property hasn't
      // been requested so far.
      Branch(TaggedEqual(callable_prototype, TheHoleConstant()),
             &return_runtime, &walk_prototype_chain);

      BIND(&walk_prototype_chain);
      callable_prototype = var_callable_prototype.value();
    }

    // Loop through the prototype chain looking for the {callable} prototype.
    var_result = HasInPrototypeChain(context, object, callable_prototype);
    Goto(&return_result);
  }

  BIND(&return_runtime);
  {
    // Fallback to the runtime implementation.
    var_result = CAST(CallRuntime(Runtime::kOrdinaryHasInstance, context,
                                  callable_maybe_smi, object_maybe_smi));
  }
  Goto(&return_result);

  BIND(&return_result);
  return var_result.value();
}
12525
12526template <typename TIndex>
12527TNode<IntPtrT> CodeStubAssembler::ElementOffsetFromIndex(
12528 TNode<TIndex> index_node, ElementsKind kind, int base_size) {
12529 // TODO(v8:9708): Remove IntPtrT variant in favor of UintPtrT.
12530 static_assert(
12531 std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, TaggedIndex> ||
12532 std::is_same_v<TIndex, IntPtrT> || std::is_same_v<TIndex, UintPtrT>,
12533 "Only Smi, UintPtrT or IntPtrT index nodes are allowed");
12534 int element_size_shift = ElementsKindToShiftSize(kind);
12535 int element_size = 1 << element_size_shift;
12536 intptr_t index = 0;
12537 TNode<IntPtrT> intptr_index_node;
12538 bool constant_index = false;
12539 if (std::is_same_v<TIndex, Smi>) {
12540 TNode<Smi> smi_index_node = ReinterpretCast<Smi>(index_node);
12541 int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
12542 element_size_shift -= kSmiShiftBits;
12543 Tagged<Smi> smi_index;
12544 constant_index = TryToSmiConstant(smi_index_node, &smi_index);
12545 if (constant_index) {
12546 index = smi_index.value();
12547 } else {
12549 smi_index_node = NormalizeSmiIndex(smi_index_node);
12550 }
12551 }
12552 intptr_index_node = BitcastTaggedToWordForTagAndSmiBits(smi_index_node);
12553 } else if (std::is_same_v<TIndex, TaggedIndex>) {
12554 TNode<TaggedIndex> tagged_index_node =
12555 ReinterpretCast<TaggedIndex>(index_node);
12556 element_size_shift -= kSmiTagSize;
12557 intptr_index_node = BitcastTaggedToWordForTagAndSmiBits(tagged_index_node);
12558 constant_index = TryToIntPtrConstant(intptr_index_node, &index);
12559 } else {
12560 intptr_index_node = ReinterpretCast<IntPtrT>(index_node);
12561 constant_index = TryToIntPtrConstant(intptr_index_node, &index);
12562 }
12563 if (constant_index) {
12564 return IntPtrConstant(base_size + element_size * index);
12565 }
12566
12567 TNode<IntPtrT> shifted_index =
12568 (element_size_shift == 0)
12569 ? intptr_index_node
12570 : ((element_size_shift > 0)
12571 ? WordShl(intptr_index_node,
12572 IntPtrConstant(element_size_shift))
12573 : WordSar(intptr_index_node,
12574 IntPtrConstant(-element_size_shift)));
12575 return IntPtrAdd(IntPtrConstant(base_size), Signed(shifted_index));
12576}
12577
12578// Instantiate ElementOffsetFromIndex for Smi and IntPtrT.
12580CodeStubAssembler::ElementOffsetFromIndex<Smi>(TNode<Smi> index_node,
12582 int base_size);
12584CodeStubAssembler::ElementOffsetFromIndex<TaggedIndex>(
12585 TNode<TaggedIndex> index_node, ElementsKind kind, int base_size);
12587CodeStubAssembler::ElementOffsetFromIndex<IntPtrT>(TNode<IntPtrT> index_node,
12589 int base_size);
12590
12591TNode<BoolT> CodeStubAssembler::IsOffsetInBounds(TNode<IntPtrT> offset,
12592 TNode<IntPtrT> length,
12593 int header_size,
12595 // Make sure we point to the last field.
12596 int element_size = 1 << ElementsKindToShiftSize(kind);
12597 int correction = header_size - kHeapObjectTag - element_size;
12598 TNode<IntPtrT> last_offset = ElementOffsetFromIndex(length, kind, correction);
12599 return IntPtrLessThanOrEqual(offset, last_offset);
12600}
12601
12602TNode<HeapObject> CodeStubAssembler::LoadFeedbackCellValue(
12603 TNode<JSFunction> closure) {
12604 TNode<FeedbackCell> feedback_cell =
12605 LoadObjectField<FeedbackCell>(closure, JSFunction::kFeedbackCellOffset);
12606 return LoadObjectField<HeapObject>(feedback_cell, FeedbackCell::kValueOffset);
12607}
12608
12609TNode<HeapObject> CodeStubAssembler::LoadFeedbackVector(
12610 TNode<JSFunction> closure) {
12611 TVARIABLE(HeapObject, maybe_vector);
12612 Label if_no_feedback_vector(this), out(this);
12613
12614 maybe_vector = LoadFeedbackVector(closure, &if_no_feedback_vector);
12615 Goto(&out);
12616
12617 BIND(&if_no_feedback_vector);
12618 // If the closure doesn't have a feedback vector allocated yet, return
12619 // undefined. The FeedbackCell can contain Undefined / FixedArray (for lazy
12620 // allocations) / FeedbackVector.
12621 maybe_vector = UndefinedConstant();
12622 Goto(&out);
12623
12624 BIND(&out);
12625 return maybe_vector.value();
12626}
12627
12628TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVector(
12629 TNode<JSFunction> closure, Label* if_no_feedback_vector) {
12630 TNode<HeapObject> maybe_vector = LoadFeedbackCellValue(closure);
12631 GotoIfNot(IsFeedbackVector(maybe_vector), if_no_feedback_vector);
12632 return CAST(maybe_vector);
12633}
12634
12635TNode<ClosureFeedbackCellArray> CodeStubAssembler::LoadClosureFeedbackArray(
12636 TNode<JSFunction> closure) {
12637 TVARIABLE(HeapObject, feedback_cell_array, LoadFeedbackCellValue(closure));
12638 Label end(this);
12639
12640 // When feedback vectors are not yet allocated feedback cell contains
12641 // an array of feedback cells used by create closures.
12642 GotoIf(HasInstanceType(feedback_cell_array.value(),
12643 CLOSURE_FEEDBACK_CELL_ARRAY_TYPE),
12644 &end);
12645
12646 // Load FeedbackCellArray from feedback vector.
12647 TNode<FeedbackVector> vector = CAST(feedback_cell_array.value());
12648 feedback_cell_array = CAST(
12649 LoadObjectField(vector, FeedbackVector::kClosureFeedbackCellArrayOffset));
12650 Goto(&end);
12651
12652 BIND(&end);
12653 return CAST(feedback_cell_array.value());
12654}
12655
12656TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVectorForStub() {
12657 TNode<JSFunction> function =
12658 CAST(LoadFromParentFrame(StandardFrameConstants::kFunctionOffset));
12659 return CAST(LoadFeedbackVector(function));
12660}
12661
12662TNode<BytecodeArray> CodeStubAssembler::LoadBytecodeArrayFromBaseline() {
12663 return CAST(
12664 LoadFromParentFrame(BaselineFrameConstants::kBytecodeArrayFromFp));
12665}
12666
12667TNode<FeedbackVector> CodeStubAssembler::LoadFeedbackVectorFromBaseline() {
12668 return CAST(
12669 LoadFromParentFrame(BaselineFrameConstants::kFeedbackVectorFromFp));
12670}
12671
12672TNode<Context> CodeStubAssembler::LoadContextFromBaseline() {
12673 return CAST(LoadFromParentFrame(InterpreterFrameConstants::kContextOffset));
12674}
12675
12677CodeStubAssembler::LoadFeedbackVectorForStubWithTrampoline() {
12678 TNode<RawPtrT> frame_pointer = LoadParentFramePointer();
12679 TNode<RawPtrT> parent_frame_pointer = Load<RawPtrT>(frame_pointer);
12680 TNode<JSFunction> function = CAST(
12681 LoadFullTagged(parent_frame_pointer,
12682 IntPtrConstant(StandardFrameConstants::kFunctionOffset)));
12683 return CAST(LoadFeedbackVector(function));
12684}
12685
// Dispatches a feedback update according to {mode}: optional (the vector may
// be undefined), guaranteed (the vector must be a FeedbackVector), or no
// feedback at all (only valid in jitless builds).
void CodeStubAssembler::UpdateFeedback(TNode<Smi> feedback,
                                       TNode<HeapObject> maybe_feedback_vector,
                                       TNode<UintPtrT> slot_id,
                                       UpdateFeedbackMode mode) {
  switch (mode) {
    case UpdateFeedbackMode::kOptionalFeedback:
      // Skips the update when {maybe_feedback_vector} is undefined.
      MaybeUpdateFeedback(feedback, maybe_feedback_vector, slot_id);
      break;
    case UpdateFeedbackMode::kGuaranteedFeedback:
      CSA_DCHECK(this, IsFeedbackVector(maybe_feedback_vector));
      UpdateFeedback(feedback, CAST(maybe_feedback_vector), slot_id);
      break;
    case UpdateFeedbackMode::kNoFeedback:
#ifdef V8_JITLESS
      // Jitless builds never allocate feedback vectors.
      CSA_DCHECK(this, IsUndefined(maybe_feedback_vector));
      break;
#else
      UNREACHABLE();
#endif  // !V8_JITLESS
  }
}
12707
12708void CodeStubAssembler::MaybeUpdateFeedback(TNode<Smi> feedback,
12709 TNode<HeapObject> maybe_vector,
12710 TNode<UintPtrT> slot_id) {
12711 Label end(this);
12712 GotoIf(IsUndefined(maybe_vector), &end);
12713 {
12714 UpdateFeedback(feedback, CAST(maybe_vector), slot_id);
12715 Goto(&end);
12716 }
12717 BIND(&end);
12718}
12719
12720void CodeStubAssembler::UpdateFeedback(TNode<Smi> feedback,
12721 TNode<FeedbackVector> feedback_vector,
12722 TNode<UintPtrT> slot_id) {
12723 Label end(this);
12724
12725 // This method is used for binary op and compare feedback. These
12726 // vector nodes are initialized with a smi 0, so we can simply OR
12727 // our new feedback in place.
12728 TNode<MaybeObject> feedback_element =
12729 LoadFeedbackVectorSlot(feedback_vector, slot_id);
12730 TNode<Smi> previous_feedback = CAST(feedback_element);
12731 TNode<Smi> combined_feedback = SmiOr(previous_feedback, feedback);
12732
12733 GotoIf(SmiEqual(previous_feedback, combined_feedback), &end);
12734 {
12735 StoreFeedbackVectorSlot(feedback_vector, slot_id, combined_feedback,
12737 ReportFeedbackUpdate(feedback_vector, slot_id, "UpdateFeedback");
12738 Goto(&end);
12739 }
12740
12741 BIND(&end);
12742}
12743
// Emits a trace call for a feedback update when V8 is built with
// V8_TRACE_FEEDBACK_UPDATES; otherwise compiles to nothing.
void CodeStubAssembler::ReportFeedbackUpdate(
    TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot_id,
    const char* reason) {
#ifdef V8_TRACE_FEEDBACK_UPDATES
  // Trace the update.
  CallRuntime(Runtime::kTraceUpdateFeedback, NoContextConstant(),
              feedback_vector, SmiTag(Signed(slot_id)), StringConstant(reason));
#endif  // V8_TRACE_FEEDBACK_UPDATES
}
12753
12754void CodeStubAssembler::OverwriteFeedback(TVariable<Smi>* existing_feedback,
12755 int new_feedback) {
12756 if (existing_feedback == nullptr) return;
12757 *existing_feedback = SmiConstant(new_feedback);
12758}
12759
12760void CodeStubAssembler::CombineFeedback(TVariable<Smi>* existing_feedback,
12761 int feedback) {
12762 if (existing_feedback == nullptr) return;
12763 *existing_feedback = SmiOr(existing_feedback->value(), SmiConstant(feedback));
12764}
12765
12766void CodeStubAssembler::CombineFeedback(TVariable<Smi>* existing_feedback,
12767 TNode<Smi> feedback) {
12768 if (existing_feedback == nullptr) return;
12769 *existing_feedback = SmiOr(existing_feedback->value(), feedback);
12770}
12771
12772void CodeStubAssembler::CheckForAssociatedProtector(TNode<Name> name,
12773 Label* if_protector) {
12774 // This list must be kept in sync with LookupIterator::UpdateProtector!
12775 auto first_ptr = Unsigned(
12776 BitcastTaggedToWord(LoadRoot(RootIndex::kFirstNameForProtector)));
12777 auto last_ptr =
12778 Unsigned(BitcastTaggedToWord(LoadRoot(RootIndex::kLastNameForProtector)));
12779 auto name_ptr = Unsigned(BitcastTaggedToWord(name));
12780 GotoIf(IsInRange(name_ptr, first_ptr, last_ptr), if_protector);
12781}
12782
12783void CodeStubAssembler::DCheckReceiver(ConvertReceiverMode mode,
12785 switch (mode) {
12786 case ConvertReceiverMode::kNullOrUndefined:
12788 break;
12789 case ConvertReceiverMode::kNotNullOrUndefined:
12790 CSA_DCHECK(this, Word32BinaryNot(IsNullOrUndefined(receiver)));
12791 break;
12792 case ConvertReceiverMode::kAny:
12793 break;
12794 }
12795}
12796
12797TNode<Map> CodeStubAssembler::LoadReceiverMap(TNode<Object> receiver) {
12798 TVARIABLE(Map, value);
12799 Label vtrue(this, Label::kDeferred), vfalse(this), end(this);
12800 Branch(TaggedIsSmi(receiver), &vtrue, &vfalse);
12801
12802 BIND(&vtrue);
12803 {
12804 value = HeapNumberMapConstant();
12805 Goto(&end);
12806 }
12807 BIND(&vfalse);
12808 {
12809 value = LoadMap(UncheckedCast<HeapObject>(receiver));
12810 Goto(&end);
12811 }
12812
12813 BIND(&end);
12814 return value.value();
12815}
12816
// Converts {key} to an IntPtrT index if it is a Smi or a HeapNumber holding
// an exactly-representable integral value; otherwise jumps to
// {if_not_intptr}. Optionally reports the key's instance type (only written
// on the non-Smi path) through {var_instance_type}.
TNode<IntPtrT> CodeStubAssembler::TryToIntptr(
    TNode<Object> key, Label* if_not_intptr,
    TVariable<Int32T>* var_instance_type) {
  TVARIABLE(IntPtrT, var_intptr_key);
  Label done(this, &var_intptr_key), key_is_smi(this), key_is_heapnumber(this);
  GotoIf(TaggedIsSmi(key), &key_is_smi);

  TNode<Int32T> instance_type = LoadInstanceType(CAST(key));
  if (var_instance_type != nullptr) {
    *var_instance_type = instance_type;
  }

  Branch(IsHeapNumberInstanceType(instance_type), &key_is_heapnumber,
         if_not_intptr);

  BIND(&key_is_smi);
  {
    var_intptr_key = SmiUntag(CAST(key));
    Goto(&done);
  }

  BIND(&key_is_heapnumber);
  {
    TNode<Float64T> value = LoadHeapNumberValue(CAST(key));
#if V8_TARGET_ARCH_64_BIT
    TNode<IntPtrT> int_value =
        TNode<IntPtrT>::UncheckedCast(TruncateFloat64ToInt64(value));
#else
    TNode<IntPtrT> int_value =
        TNode<IntPtrT>::UncheckedCast(RoundFloat64ToInt32(value));
#endif
    // Bail out unless the double round-trips exactly through the integer,
    // i.e. it is integral and in range of the truncation above.
    GotoIfNot(Float64Equal(value, RoundIntPtrToFloat64(int_value)),
              if_not_intptr);
#if V8_TARGET_ARCH_64_BIT
    // We can't rely on Is64() alone because 32-bit compilers rightly complain
    // about kMaxSafeIntegerUint64 not fitting into an intptr_t.
    DCHECK(Is64());
    // TODO(jkummerow): Investigate whether we can drop support for
    // negative indices.
    GotoIfNot(IsInRange(int_value, static_cast<intptr_t>(-kMaxSafeInteger),
                        static_cast<intptr_t>(kMaxSafeIntegerUint64)),
              if_not_intptr);
#else
    DCHECK(!Is64());
#endif
    var_intptr_key = int_value;
    Goto(&done);
  }

  BIND(&done);
  return var_intptr_key.value();
}
12869
12870TNode<Context> CodeStubAssembler::LoadScriptContext(
12871 TNode<Context> context, TNode<IntPtrT> context_index) {
12872 TNode<NativeContext> native_context = LoadNativeContext(context);
12873 TNode<ScriptContextTable> script_context_table = CAST(
12874 LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX));
12875 return LoadArrayElement(script_context_table, context_index);
12876}
12877
12878namespace {
12879
12880// Converts typed array elements kind to a machine representations.
// Converts typed array elements kind to a machine representations.
// Only the fixed-width numeric kinds are supported; BigInt and non-typed
// kinds hit the UNREACHABLE default.
MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
  switch (kind) {
    case UINT8_CLAMPED_ELEMENTS:
    case UINT8_ELEMENTS:
    case INT8_ELEMENTS:
      return MachineRepresentation::kWord8;
    case UINT16_ELEMENTS:
    case INT16_ELEMENTS:
    // Float16 values are stored as their raw 16-bit pattern.
    case FLOAT16_ELEMENTS:
      return MachineRepresentation::kWord16;
    case UINT32_ELEMENTS:
    case INT32_ELEMENTS:
      return MachineRepresentation::kWord32;
    case FLOAT32_ELEMENTS:
      return MachineRepresentation::kFloat32;
    case FLOAT64_ELEMENTS:
      return MachineRepresentation::kFloat64;
    default:
      UNREACHABLE();
  }
}
12902
12903} // namespace
12904
12905// TODO(solanes): Since we can't use `if constexpr` until we enable C++17 we
12906// have to specialize the BigInt and Word32T cases. Since we can't partly
12907// specialize, we have to specialize all used combinations.
12908template <typename TIndex>
12909void CodeStubAssembler::StoreElementTypedArrayBigInt(TNode<RawPtrT> elements,
12911 TNode<TIndex> index,
12912 TNode<BigInt> value) {
12913 static_assert(
12914 std::is_same_v<TIndex, UintPtrT> || std::is_same_v<TIndex, IntPtrT>,
12915 "Only UintPtrT or IntPtrT indices is allowed");
12916 DCHECK(kind == BIGINT64_ELEMENTS || kind == BIGUINT64_ELEMENTS);
12917 TNode<IntPtrT> offset = ElementOffsetFromIndex(index, kind, 0);
12918 TVARIABLE(UintPtrT, var_low);
12919 // Only used on 32-bit platforms.
12920 TVARIABLE(UintPtrT, var_high);
12921 BigIntToRawBytes(value, &var_low, &var_high);
12922
12923 MachineRepresentation rep = WordT::kMachineRepresentation;
12924#if defined(V8_TARGET_BIG_ENDIAN)
12925 if (!Is64()) {
12926 StoreNoWriteBarrier(rep, elements, offset, var_high.value());
12927 StoreNoWriteBarrier(rep, elements,
12928 IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
12929 var_low.value());
12930 } else {
12931 StoreNoWriteBarrier(rep, elements, offset, var_low.value());
12932 }
12933#else
12934 StoreNoWriteBarrier(rep, elements, offset, var_low.value());
12935 if (!Is64()) {
12936 StoreNoWriteBarrier(rep, elements,
12937 IntPtrAdd(offset, IntPtrConstant(kSystemPointerSize)),
12938 var_high.value());
12939 }
12940#endif
12941}
12942
12943template <>
12944void CodeStubAssembler::StoreElementTypedArray(TNode<RawPtrT> elements,
12946 TNode<UintPtrT> index,
12947 TNode<BigInt> value) {
12948 StoreElementTypedArrayBigInt(elements, kind, index, value);
12949}
12950
12951template <>
12952void CodeStubAssembler::StoreElementTypedArray(TNode<RawPtrT> elements,
12954 TNode<IntPtrT> index,
12955 TNode<BigInt> value) {
12956 StoreElementTypedArrayBigInt(elements, kind, index, value);
12957}
12958
12959template <typename TIndex>
12960void CodeStubAssembler::StoreElementTypedArrayWord32(TNode<RawPtrT> elements,
12962 TNode<TIndex> index,
12963 TNode<Word32T> value) {
12964 static_assert(
12965 std::is_same_v<TIndex, UintPtrT> || std::is_same_v<TIndex, IntPtrT>,
12966 "Only UintPtrT or IntPtrT indices is allowed");
12968 if (kind == UINT8_CLAMPED_ELEMENTS) {
12969 CSA_DCHECK(this, Word32Equal(value, Word32And(Int32Constant(0xFF), value)));
12970 }
12971 TNode<IntPtrT> offset = ElementOffsetFromIndex(index, kind, 0);
12972 // TODO(cbruni): Add OOB check once typed.
12973 MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
12974 StoreNoWriteBarrier(rep, elements, offset, value);
12975}
12976
12977template <>
12978void CodeStubAssembler::StoreElementTypedArray(TNode<RawPtrT> elements,
12980 TNode<UintPtrT> index,
12981 TNode<Word32T> value) {
12982 StoreElementTypedArrayWord32(elements, kind, index, value);
12983}
12984
12985template <>
12986void CodeStubAssembler::StoreElementTypedArray(TNode<RawPtrT> elements,
12988 TNode<IntPtrT> index,
12989 TNode<Word32T> value) {
12990 StoreElementTypedArrayWord32(elements, kind, index, value);
12991}
12992
12993template <typename TArray, typename TIndex, typename TValue>
12994void CodeStubAssembler::StoreElementTypedArray(TNode<TArray> elements,
12996 TNode<TIndex> index,
12997 TNode<TValue> value) {
12998 // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants?
12999 static_assert(std::is_same_v<TIndex, Smi> ||
13000 std::is_same_v<TIndex, UintPtrT> ||
13001 std::is_same_v<TIndex, IntPtrT>,
13002 "Only Smi, UintPtrT or IntPtrT indices is allowed");
13003 static_assert(
13004 std::is_same_v<TArray, RawPtrT> || std::is_same_v<TArray, FixedArrayBase>,
13005 "Only RawPtrT or FixedArrayBase elements are allowed");
13006 static_assert(
13007 std::is_same_v<TValue, Float16RawBitsT> ||
13008 std::is_same_v<TValue, Int32T> || std::is_same_v<TValue, Float32T> ||
13009 std::is_same_v<TValue, Float64T> || std::is_same_v<TValue, Object>,
13010 "Only Int32T, Float32T, Float64T or object value "
13011 "types are allowed");
13013 TNode<IntPtrT> offset = ElementOffsetFromIndex(index, kind, 0);
13014 // TODO(cbruni): Add OOB check once typed.
13015 MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
13016 StoreNoWriteBarrier(rep, elements, offset, value);
13017}
13018
13019template <typename TIndex>
13020void CodeStubAssembler::StoreElement(TNode<FixedArrayBase> elements,
13022 TNode<Object> value) {
13023 static_assert(std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, IntPtrT>,
13024 "Only Smi or IntPtrT indices are allowed");
13027 StoreElementTypedArray(elements, kind, index, value);
13028 } else if (IsSmiElementsKind(kind)) {
13029 TNode<Smi> smi_value = CAST(value);
13030 StoreFixedArrayElement(CAST(elements), index, smi_value);
13031 } else {
13032 StoreFixedArrayElement(CAST(elements), index, value);
13033 }
13034}
13035
13036template <typename TIndex>
13037void CodeStubAssembler::StoreElement(TNode<FixedArrayBase> elements,
13039 TNode<Float64T> value) {
13040 static_assert(std::is_same_v<TIndex, Smi> || std::is_same_v<TIndex, IntPtrT>,
13041 "Only Smi or IntPtrT indices are allowed");
13043 StoreFixedDoubleArrayElement(CAST(elements), index, value);
13044}
13045
13046template <typename TIndex, typename TValue>
13047void CodeStubAssembler::StoreElement(TNode<RawPtrT> elements, ElementsKind kind,
13048 TNode<TIndex> index, TNode<TValue> value) {
13049 static_assert(std::is_same_v<TIndex, Smi> ||
13050 std::is_same_v<TIndex, IntPtrT> ||
13051 std::is_same_v<TIndex, UintPtrT>,
13052 "Only Smi, IntPtrT or UintPtrT indices are allowed");
13053 static_assert(
13054 std::is_same_v<TValue, Float16RawBitsT> ||
13055 std::is_same_v<TValue, Int32T> || std::is_same_v<TValue, Word32T> ||
13056 std::is_same_v<TValue, Float32T> ||
13057 std::is_same_v<TValue, Float64T> || std::is_same_v<TValue, BigInt>,
13058 "Only Int32T, Word32T, Float32T, Float64T or BigInt value types "
13059 "are allowed");
13060
13062 StoreElementTypedArray(elements, kind, index, value);
13063}
13064template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement(TNode<RawPtrT>,
13068template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement(TNode<RawPtrT>,
13072template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement(
13074template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement(
13076template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement(TNode<RawPtrT>,
13080template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement(
13082
13083TNode<Uint8T> CodeStubAssembler::Int32ToUint8Clamped(
13084 TNode<Int32T> int32_value) {
13085 Label done(this);
13086 TNode<Int32T> int32_zero = Int32Constant(0);
13087 TNode<Int32T> int32_255 = Int32Constant(255);
13088 TVARIABLE(Word32T, var_value, int32_value);
13089 GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
13090 var_value = int32_zero;
13091 GotoIf(Int32LessThan(int32_value, int32_zero), &done);
13092 var_value = int32_255;
13093 Goto(&done);
13094 BIND(&done);
13095 return UncheckedCast<Uint8T>(var_value.value());
13096}
13097
13098TNode<Uint8T> CodeStubAssembler::Float64ToUint8Clamped(
13099 TNode<Float64T> float64_value) {
13100 Label done(this);
13101 TVARIABLE(Word32T, var_value, Int32Constant(0));
13102 GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
13103 var_value = Int32Constant(255);
13104 GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
13105 {
13106 TNode<Float64T> rounded_value = Float64RoundToEven(float64_value);
13107 var_value = TruncateFloat64ToWord32(rounded_value);
13108 Goto(&done);
13109 }
13110 BIND(&done);
13111 return UncheckedCast<Uint8T>(var_value.value());
13112}
13113
// Converts an arbitrary JS value to the Word32 bit pattern to store into a
// typed array of the given integer elements kind, calling NonNumberToNumber
// (which may run user code) until the input is a number-like value.
template <>
TNode<Word32T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Word32T>(
    TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
  DCHECK(IsTypedArrayElementsKind(elements_kind));

  switch (elements_kind) {
    case UINT8_ELEMENTS:
    case INT8_ELEMENTS:
    case UINT16_ELEMENTS:
    case INT16_ELEMENTS:
    case UINT32_ELEMENTS:
    case INT32_ELEMENTS:
    case UINT8_CLAMPED_ELEMENTS:
      break;
    default:
      UNREACHABLE();
  }

  TVARIABLE(Word32T, var_result);
  TVARIABLE(Object, var_input, input);
  Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
      convert(this), loop(this, &var_input);
  Goto(&loop);
  BIND(&loop);
  GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
  // We can handle both HeapNumber and Oddball here, since Oddball has the
  // same layout as the HeapNumber for the HeapNumber::value field. This
  // way we can also properly optimize stores of oddballs to typed arrays.
  TNode<HeapObject> heap_object = CAST(var_input.value());
  GotoIf(IsHeapNumber(heap_object), &if_heapnumber_or_oddball);
  STATIC_ASSERT_FIELD_OFFSETS_EQUAL(offsetof(HeapNumber, value_),
                                    offsetof(Oddball, to_number_raw_));
  Branch(HasInstanceType(heap_object, ODDBALL_TYPE), &if_heapnumber_or_oddball,
         &convert);

  BIND(&if_heapnumber_or_oddball);
  {
    TNode<Float64T> value =
        LoadObjectField<Float64T>(heap_object, offsetof(HeapNumber, value_));
    if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
      var_result = Float64ToUint8Clamped(value);
    } else if (elements_kind == FLOAT16_ELEMENTS) {
      // NOTE(review): FLOAT16_ELEMENTS is rejected by the switch above
      // (default: UNREACHABLE), so this branch appears dead — confirm.
      var_result = ReinterpretCast<Word32T>(TruncateFloat64ToFloat16(value));
    } else {
      var_result = TruncateFloat64ToWord32(value);
    }
    Goto(&done);
  }

  BIND(&if_smi);
  {
    TNode<Int32T> value = SmiToInt32(CAST(var_input.value()));
    if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
      var_result = Int32ToUint8Clamped(value);
    } else if (elements_kind == FLOAT16_ELEMENTS) {
      // NOTE(review): see the FLOAT16 note above — this branch appears dead.
      var_result = ReinterpretCast<Word32T>(RoundInt32ToFloat16(value));
    } else {
      var_result = value;
    }
    Goto(&done);
  }

  BIND(&convert);
  {
    // May invoke arbitrary user code (valueOf/toString), then retry.
    var_input = CallBuiltin(Builtin::kNonNumberToNumber, context, input);
    Goto(&loop);
  }

  BIND(&done);
  return var_result.value();
}
13185
// Converts an arbitrary JS value to raw float16 bits for storing into a
// Float16 typed array, calling NonNumberToNumber (which may run user code)
// until the input is a number-like value.
template <>
TNode<Float16RawBitsT>
CodeStubAssembler::PrepareValueForWriteToTypedArray<Float16RawBitsT>(
    TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
  DCHECK(IsTypedArrayElementsKind(elements_kind));
  CHECK_EQ(elements_kind, FLOAT16_ELEMENTS);

  TVARIABLE(Float16RawBitsT, var_result);
  TVARIABLE(Object, var_input, input);
  Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
      convert(this), loop(this, &var_input);
  Goto(&loop);
  BIND(&loop);
  GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
  // We can handle both HeapNumber and Oddball here, since Oddball has the
  // same layout as the HeapNumber for the HeapNumber::value field. This
  // way we can also properly optimize stores of oddballs to typed arrays.
  TNode<HeapObject> heap_object = CAST(var_input.value());
  GotoIf(IsHeapNumber(heap_object), &if_heapnumber_or_oddball);
  STATIC_ASSERT_FIELD_OFFSETS_EQUAL(offsetof(HeapNumber, value_),
                                    offsetof(Oddball, to_number_raw_));
  Branch(HasInstanceType(heap_object, ODDBALL_TYPE), &if_heapnumber_or_oddball,
         &convert);

  BIND(&if_heapnumber_or_oddball);
  {
    TNode<Float64T> value =
        LoadObjectField<Float64T>(heap_object, offsetof(HeapNumber, value_));
    var_result = TruncateFloat64ToFloat16(value);
    Goto(&done);
  }

  BIND(&if_smi);
  {
    TNode<Int32T> value = SmiToInt32(CAST(var_input.value()));
    var_result = RoundInt32ToFloat16(value);
    Goto(&done);
  }

  BIND(&convert);
  {
    // May invoke arbitrary user code (valueOf/toString), then retry.
    var_input = CallBuiltin(Builtin::kNonNumberToNumber, context, input);
    Goto(&loop);
  }

  BIND(&done);
  return var_result.value();
}
13234
// Float32 typed-array store: converts {input} to a Float32 value. Smis and
// HeapNumbers (plus Oddballs, which share the HeapNumber value-field layout)
// are converted directly; other values are first routed through the
// NonNumberToNumber builtin (which may execute arbitrary JS) and retried.
template <>
TNode<Float32T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Float32T>(
    TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
  DCHECK(IsTypedArrayElementsKind(elements_kind));
  CHECK_EQ(elements_kind, FLOAT32_ELEMENTS);

  TVARIABLE(Float32T, var_result);
  TVARIABLE(Object, var_input, input);
  Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
      convert(this), loop(this, &var_input);
  Goto(&loop);
  BIND(&loop);
  GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
  // We can handle both HeapNumber and Oddball here, since Oddball has the
  // same layout as the HeapNumber for the HeapNumber::value field. This
  // way we can also properly optimize stores of oddballs to typed arrays.
  TNode<HeapObject> heap_object = CAST(var_input.value());
  GotoIf(IsHeapNumber(heap_object), &if_heapnumber_or_oddball);
  STATIC_ASSERT_FIELD_OFFSETS_EQUAL(offsetof(HeapNumber, value_),
                                    offsetof(Oddball, to_number_raw_));
  Branch(HasInstanceType(heap_object, ODDBALL_TYPE), &if_heapnumber_or_oddball,
         &convert);

  BIND(&if_heapnumber_or_oddball);
  {
    TNode<Float64T> value =
        LoadObjectField<Float64T>(heap_object, offsetof(HeapNumber, value_));
    var_result = TruncateFloat64ToFloat32(value);
    Goto(&done);
  }

  BIND(&if_smi);
  {
    TNode<Int32T> value = SmiToInt32(CAST(var_input.value()));
    var_result = RoundInt32ToFloat32(value);
    Goto(&done);
  }

  BIND(&convert);
  {
    // Non-number: NonNumberToNumber yields a Smi or HeapNumber, so this
    // label is reached at most once per store.
    var_input = CallBuiltin(Builtin::kNonNumberToNumber, context, input);
    Goto(&loop);
  }

  BIND(&done);
  return var_result.value();
}
13282
// Float64 typed-array store: converts {input} to a Float64 value. HeapNumber
// (and Oddball) values are loaded directly from the value field; Smis are
// widened; anything else is converted via NonNumberToNumber (which may call
// back into JS) and the dispatch is retried.
template <>
TNode<Float64T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Float64T>(
    TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
  DCHECK(IsTypedArrayElementsKind(elements_kind));
  CHECK_EQ(elements_kind, FLOAT64_ELEMENTS);

  TVARIABLE(Float64T, var_result);
  TVARIABLE(Object, var_input, input);
  Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this),
      convert(this), loop(this, &var_input);
  Goto(&loop);
  BIND(&loop);
  GotoIf(TaggedIsSmi(var_input.value()), &if_smi);
  // We can handle both HeapNumber and Oddball here, since Oddball has the
  // same layout as the HeapNumber for the HeapNumber::value field. This
  // way we can also properly optimize stores of oddballs to typed arrays.
  TNode<HeapObject> heap_object = CAST(var_input.value());
  GotoIf(IsHeapNumber(heap_object), &if_heapnumber_or_oddball);
  STATIC_ASSERT_FIELD_OFFSETS_EQUAL(offsetof(HeapNumber, value_),
                                    offsetof(Oddball, to_number_raw_));
  Branch(HasInstanceType(heap_object, ODDBALL_TYPE), &if_heapnumber_or_oddball,
         &convert);

  BIND(&if_heapnumber_or_oddball);
  {
    // No truncation needed: the stored field is already a float64.
    var_result =
        LoadObjectField<Float64T>(heap_object, offsetof(HeapNumber, value_));
    Goto(&done);
  }

  BIND(&if_smi);
  {
    TNode<Int32T> value = SmiToInt32(CAST(var_input.value()));
    var_result = ChangeInt32ToFloat64(value);
    Goto(&done);
  }

  BIND(&convert);
  {
    // Non-number: NonNumberToNumber yields a Smi or HeapNumber, so this
    // label is reached at most once per store.
    var_input = CallBuiltin(Builtin::kNonNumberToNumber, context, input);
    Goto(&loop);
  }

  BIND(&done);
  return var_result.value();
}
13329
13330template <>
13331TNode<BigInt> CodeStubAssembler::PrepareValueForWriteToTypedArray<BigInt>(
13332 TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) {
13333 DCHECK(elements_kind == BIGINT64_ELEMENTS ||
13334 elements_kind == BIGUINT64_ELEMENTS);
13335 return ToBigInt(context, input);
13336}
13337
13338#if V8_ENABLE_WEBASSEMBLY
13339TorqueStructInt64AsInt32Pair CodeStubAssembler::BigIntToRawBytes(
13340 TNode<BigInt> value) {
13341 TVARIABLE(UintPtrT, var_low);
13342 // Only used on 32-bit platforms.
13343 TVARIABLE(UintPtrT, var_high);
13344 BigIntToRawBytes(value, &var_low, &var_high);
13345 return {var_low.value(), var_high.value()};
13346}
13347
13348TNode<RawPtrT> CodeStubAssembler::AllocateBuffer(TNode<IntPtrT> size) {
13349 TNode<ExternalReference> function =
13350 ExternalConstant(ExternalReference::allocate_buffer());
13351 return UncheckedCast<RawPtrT>(CallCFunction(
13352 function, MachineType::UintPtr(),
13353 std::make_pair(MachineType::Pointer(),
13354 IsolateField(IsolateFieldId::kIsolateAddress)),
13355 std::make_pair(MachineType::IntPtr(), size)));
13356}
13357#endif // V8_ENABLE_WEBASSEMBLY
13358
// Extracts the least-significant 64 bits of {bigint} into {var_low} (and, on
// 32-bit platforms, {var_high}) as a two's-complement representation.
// Zero-length BigInts yield 0; negative BigInts have their magnitude negated
// digit-wise to simulate two's complement.
void CodeStubAssembler::BigIntToRawBytes(TNode<BigInt> bigint,
                                         TVariable<UintPtrT>* var_low,
                                         TVariable<UintPtrT>* var_high) {
  Label done(this);
  *var_low = Unsigned(IntPtrConstant(0));
  *var_high = Unsigned(IntPtrConstant(0));
  TNode<Word32T> bitfield = LoadBigIntBitfield(bigint);
  TNode<Uint32T> length = DecodeWord32<BigIntBase::LengthBits>(bitfield);
  TNode<Uint32T> sign = DecodeWord32<BigIntBase::SignBits>(bitfield);
  // Zero-length BigInt (i.e. the value 0): both output words stay 0.
  GotoIf(Word32Equal(length, Int32Constant(0)), &done);
  *var_low = LoadBigIntDigit(bigint, 0);
  if (!Is64()) {
    // On 32-bit platforms the 64-bit payload spans up to two digits.
    Label load_done(this);
    GotoIf(Word32Equal(length, Int32Constant(1)), &load_done);
    *var_high = LoadBigIntDigit(bigint, 1);
    Goto(&load_done);
    BIND(&load_done);
  }
  GotoIf(Word32Equal(sign, Int32Constant(0)), &done);
  // Negative value. Simulate two's complement.
  if (!Is64()) {
    *var_high = Unsigned(IntPtrSub(IntPtrConstant(0), var_high->value()));
    Label no_carry(this);
    // Negating the low word borrows from the high word unless low == 0.
    GotoIf(IntPtrEqual(var_low->value(), IntPtrConstant(0)), &no_carry);
    *var_high = Unsigned(IntPtrSub(var_high->value(), IntPtrConstant(1)));
    Goto(&no_carry);
    BIND(&no_carry);
  }
  *var_low = Unsigned(IntPtrSub(IntPtrConstant(0), var_low->value()));
  Goto(&done);
  BIND(&done);
}
13391
// Integer-elements variant: re-tags the already-converted Word32 value so
// that a bailout to the runtime can reuse it without re-running ToNumber
// (which could re-execute user-visible side effects).
template <>
void CodeStubAssembler::EmitElementStoreTypedArrayUpdateValue(
    TNode<Object> value, ElementsKind elements_kind,
    TNode<Word32T> converted_value, TVariable<Object>* maybe_converted_value) {
  switch (elements_kind) {
    case UINT8_ELEMENTS:
    case INT8_ELEMENTS:
    case UINT16_ELEMENTS:
    case INT16_ELEMENTS:
    case UINT8_CLAMPED_ELEMENTS:
      // Values of these kinds always fit in a Smi.
      *maybe_converted_value =
          SmiFromInt32(UncheckedCast<Int32T>(converted_value));
      break;
    case UINT32_ELEMENTS:
      // May not fit in a Smi; ChangeUint32ToTagged allocates a HeapNumber
      // when needed.
      *maybe_converted_value =
          ChangeUint32ToTagged(UncheckedCast<Uint32T>(converted_value));
      break;
    case INT32_ELEMENTS:
      *maybe_converted_value =
          ChangeInt32ToTagged(UncheckedCast<Int32T>(converted_value));
      break;
    default:
      UNREACHABLE();
  }
}
13417
// Float16 variant: exports the converted value as a tagged number for the
// runtime. If the original {value} was already a Smi or HeapNumber it is
// passed through unchanged; otherwise a fresh HeapNumber is allocated from
// the float16 bits.
template <>
void CodeStubAssembler::EmitElementStoreTypedArrayUpdateValue(
    TNode<Object> value, ElementsKind elements_kind,
    TNode<Float16RawBitsT> converted_value,
    TVariable<Object>* maybe_converted_value) {
  Label dont_allocate_heap_number(this), end(this);
  GotoIf(TaggedIsSmi(value), &dont_allocate_heap_number);
  GotoIf(IsHeapNumber(CAST(value)), &dont_allocate_heap_number);
  {
    *maybe_converted_value =
        AllocateHeapNumberWithValue(ChangeFloat16ToFloat64(converted_value));
    Goto(&end);
  }
  BIND(&dont_allocate_heap_number);
  {
    *maybe_converted_value = value;
    Goto(&end);
  }
  BIND(&end);
}
13438
// Float32 variant: exports the converted value as a tagged number for the
// runtime. If the original {value} was already a Smi or HeapNumber it is
// passed through unchanged; otherwise a fresh HeapNumber is allocated from
// the widened float32.
template <>
void CodeStubAssembler::EmitElementStoreTypedArrayUpdateValue(
    TNode<Object> value, ElementsKind elements_kind,
    TNode<Float32T> converted_value, TVariable<Object>* maybe_converted_value) {
  Label dont_allocate_heap_number(this), end(this);
  GotoIf(TaggedIsSmi(value), &dont_allocate_heap_number);
  GotoIf(IsHeapNumber(CAST(value)), &dont_allocate_heap_number);
  {
    *maybe_converted_value =
        AllocateHeapNumberWithValue(ChangeFloat32ToFloat64(converted_value));
    Goto(&end);
  }
  BIND(&dont_allocate_heap_number);
  {
    *maybe_converted_value = value;
    Goto(&end);
  }
  BIND(&end);
}
13458
// Float64 variant: exports the converted value as a tagged number for the
// runtime. If the original {value} was already a Smi or HeapNumber it is
// passed through unchanged; otherwise a fresh HeapNumber is allocated.
template <>
void CodeStubAssembler::EmitElementStoreTypedArrayUpdateValue(
    TNode<Object> value, ElementsKind elements_kind,
    TNode<Float64T> converted_value, TVariable<Object>* maybe_converted_value) {
  Label dont_allocate_heap_number(this), end(this);
  GotoIf(TaggedIsSmi(value), &dont_allocate_heap_number);
  GotoIf(IsHeapNumber(CAST(value)), &dont_allocate_heap_number);
  {
    *maybe_converted_value = AllocateHeapNumberWithValue(converted_value);
    Goto(&end);
  }
  BIND(&dont_allocate_heap_number);
  {
    *maybe_converted_value = value;
    Goto(&end);
  }
  BIND(&end);
}
13477
// BigInt variant: the converted BigInt is already a tagged heap value, so it
// can be handed to the runtime as-is — no re-tagging or allocation needed.
template <>
void CodeStubAssembler::EmitElementStoreTypedArrayUpdateValue(
    TNode<Object> value, ElementsKind elements_kind,
    TNode<BigInt> converted_value, TVariable<Object>* maybe_converted_value) {
  *maybe_converted_value = converted_value;
}
13484
// Shared typed-array store path: converts {value} to the element
// representation TValue, then (re-)validates the backing buffer and bounds —
// the conversion may have run arbitrary JS that detached or resized the
// buffer — and finally writes into the backing store. On failure jumps to
// {bailout}, optionally exporting the already-converted value through
// {maybe_converted_value} so the runtime does not re-run the conversion.
template <typename TValue>
void CodeStubAssembler::EmitElementStoreTypedArray(
    ElementsKind elements_kind, KeyedAccessStoreMode store_mode, Label* bailout,
    TNode<Context> context, TVariable<Object>* maybe_converted_value) {
  Label done(this), update_value_and_bailout(this, Label::kDeferred);

  bool is_rab_gsab = false;
  if (IsRabGsabTypedArrayElementsKind(elements_kind)) {
    is_rab_gsab = true;
    // For the rest of the function, use the corresponding non-RAB/GSAB
    // ElementsKind.
    elements_kind = GetCorrespondingNonRabGsabElementsKind(elements_kind);
  }

  // May call into JS (ToNumber / ToBigInt) and trigger arbitrary side
  // effects, including buffer detach/resize.
  TNode<TValue> converted_value =
      PrepareValueForWriteToTypedArray<TValue>(value, elements_kind, context);

  // There must be no allocations between the buffer load and
  // and the actual store to backing store, because GC may decide that
  // the buffer is not alive or move the elements.
  // TODO(ishell): introduce DisallowGarbageCollectionCode scope here.

  // Check if buffer has been detached. (For RAB / GSAB this is part of loading
  // the length, so no additional check is needed.)
  TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(typed_array);
  if (!is_rab_gsab) {
    GotoIf(IsDetachedBuffer(buffer), &update_value_and_bailout);
  }

  // Bounds check.
  if (is_rab_gsab) {
    length = LoadVariableLengthJSTypedArrayLength(
        typed_array, buffer,
        StoreModeIgnoresTypeArrayOOB(store_mode) ? &done
                                                 : &update_value_and_bailout);
  } else {
    length = LoadJSTypedArrayLength(typed_array);
  }

  if (StoreModeIgnoresTypeArrayOOB(store_mode)) {
    // Skip the store if we write beyond the length or
    // to a property with a negative integer index.
    GotoIfNot(UintPtrLessThan(key, length), &done);
  } else {
    DCHECK(StoreModeIsInBounds(store_mode));
    GotoIfNot(UintPtrLessThan(key, length), &update_value_and_bailout);
  }

  TNode<RawPtrT> data_ptr = LoadJSTypedArrayDataPtr(typed_array);
  StoreElement(data_ptr, elements_kind, key, converted_value);
  Goto(&done);

  // The bailout label is only reachable when some check above can jump to it.
  if (!is_rab_gsab || !StoreModeIgnoresTypeArrayOOB(store_mode)) {
    BIND(&update_value_and_bailout);
    // We already prepared the incoming value for storing into a typed array.
    // This might involve calling ToNumber in some cases. We shouldn't call
    // ToNumber again in the runtime so pass the converted value to the runtime.
    // The prepared value is an untagged value. Convert it to a tagged value
    // to pass it to runtime. It is not possible to do the detached buffer check
    // before we prepare the value, since ToNumber can detach the ArrayBuffer.
    // The spec specifies the order of these operations.
    if (maybe_converted_value != nullptr) {
      EmitElementStoreTypedArrayUpdateValue(
          value, elements_kind, converted_value, maybe_converted_value);
    }
    Goto(bailout);
  }

  BIND(&done);
}
13557
// Emits a keyed element store for {elements_kind}: dispatches typed-array
// kinds to EmitElementStoreTypedArray, and handles fast (Smi/object/double),
// sealed, non-extensible, and shared-array kinds inline. Bails out to
// {bailout} for anything it cannot handle (non-int keys, wrong value type,
// COW arrays when the store mode forbids them, out-of-bounds without grow
// support, holes in sealed arrays).
void CodeStubAssembler::EmitElementStore(
    ElementsKind elements_kind, KeyedAccessStoreMode store_mode, Label* bailout,
    TNode<Context> context, TVariable<Object>* maybe_converted_value) {
  CSA_DCHECK(this, Word32BinaryNot(IsJSProxy(object)));

  TNode<FixedArrayBase> elements = LoadElements(object);
  if (!(IsSmiOrObjectElementsKind(elements_kind) ||
        IsSealedElementsKind(elements_kind) ||
        IsNonextensibleElementsKind(elements_kind))) {
    CSA_DCHECK(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
  } else if (!StoreModeHandlesCOW(store_mode)) {
    GotoIf(IsFixedCOWArrayMap(LoadMap(elements)), bailout);
  }

  // TODO(ishell): introduce TryToIntPtrOrSmi() and use BInt.
  TNode<IntPtrT> intptr_key = TryToIntptr(key, bailout);

  // TODO(rmcilroy): TNodify the converted value once this function and
  // StoreElement are templated based on the type elements_kind type.
    // Typed-array path: dispatch on the element kind to the appropriately
    // typed EmitElementStoreTypedArray instantiation.
    TNode<JSTypedArray> typed_array = CAST(object);
    switch (elements_kind) {
      case UINT8_ELEMENTS:
      case INT8_ELEMENTS:
      case UINT16_ELEMENTS:
      case INT16_ELEMENTS:
      case UINT32_ELEMENTS:
      case INT32_ELEMENTS:
      case UINT8_CLAMPED_ELEMENTS:
      case RAB_GSAB_UINT8_ELEMENTS:
      case RAB_GSAB_INT8_ELEMENTS:
      case RAB_GSAB_UINT16_ELEMENTS:
      case RAB_GSAB_INT16_ELEMENTS:
      case RAB_GSAB_UINT32_ELEMENTS:
      case RAB_GSAB_INT32_ELEMENTS:
      case RAB_GSAB_UINT8_CLAMPED_ELEMENTS:
        EmitElementStoreTypedArray<Word32T>(typed_array, intptr_key, value,
                                            elements_kind, store_mode, bailout,
                                            context, maybe_converted_value);
        break;
      case FLOAT32_ELEMENTS:
      case RAB_GSAB_FLOAT32_ELEMENTS:
        EmitElementStoreTypedArray<Float32T>(typed_array, intptr_key, value,
                                             elements_kind, store_mode, bailout,
                                             context, maybe_converted_value);
        break;
      case FLOAT64_ELEMENTS:
      case RAB_GSAB_FLOAT64_ELEMENTS:
        EmitElementStoreTypedArray<Float64T>(typed_array, intptr_key, value,
                                             elements_kind, store_mode, bailout,
                                             context, maybe_converted_value);
        break;
      case BIGINT64_ELEMENTS:
      case BIGUINT64_ELEMENTS:
      case RAB_GSAB_BIGINT64_ELEMENTS:
      case RAB_GSAB_BIGUINT64_ELEMENTS:
        EmitElementStoreTypedArray<BigInt>(typed_array, intptr_key, value,
                                           elements_kind, store_mode, bailout,
                                           context, maybe_converted_value);
        break;
      case FLOAT16_ELEMENTS:
      case RAB_GSAB_FLOAT16_ELEMENTS:
        EmitElementStoreTypedArray<Float16RawBitsT>(
            typed_array, intptr_key, value, elements_kind, store_mode, bailout,
            context, maybe_converted_value);
        break;
      default:
        UNREACHABLE();
    }
    return;
  }
  DCHECK(IsFastElementsKind(elements_kind) ||
         IsSealedElementsKind(elements_kind) ||
         IsNonextensibleElementsKind(elements_kind));

  // In case value is stored into a fast smi array, assure that the value is
  // a smi before manipulating the backing store. Otherwise the backing store
  // may be left in an invalid state.
  std::optional<TNode<Float64T>> float_value;
  if (IsSmiElementsKind(elements_kind)) {
    GotoIfNot(TaggedIsSmi(value), bailout);
  } else if (IsDoubleElementsKind(elements_kind)) {
#ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
    // With undefined-double support, undefined is stored as a dedicated NaN
    // pattern instead of bailing out.
    Label float_done(this), is_undefined(this);
    TVARIABLE(Float64T, float_var);
    float_var = TryTaggedToFloat64(value, &is_undefined, bailout);
    Goto(&float_done);

    BIND(&is_undefined);
    {
      float_var = Float64Constant(UndefinedNan());
      Goto(&float_done);
    }

    BIND(&float_done);
    float_value = float_var.value();
#else
    float_value = TryTaggedToFloat64(value, bailout);
#endif  // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
  }

  TNode<Smi> smi_length = Select<Smi>(
      IsJSArray(object),
      [=, this]() {
        // This is casting Number -> Smi which may not actually be safe.
        return CAST(LoadJSArrayLength(CAST(object)));
      },
      [=, this]() { return LoadFixedArrayBaseLength(elements); });

  TNode<UintPtrT> length = Unsigned(PositiveSmiUntag(smi_length));
  if (StoreModeCanGrow(store_mode) &&
      !(IsSealedElementsKind(elements_kind) ||
        IsNonextensibleElementsKind(elements_kind))) {
    elements = CheckForCapacityGrow(object, elements, elements_kind, length,
                                    intptr_key, bailout);
  } else {
    GotoIfNot(UintPtrLessThan(Unsigned(intptr_key), length), bailout);
  }

  // Cannot store to a hole in holey sealed elements so bailout.
  if (elements_kind == HOLEY_SEALED_ELEMENTS ||
      elements_kind == HOLEY_NONEXTENSIBLE_ELEMENTS) {
    TNode<Object> target_value =
        LoadFixedArrayElement(CAST(elements), intptr_key);
    GotoIf(IsTheHole(target_value), bailout);
  }

  // If we didn't grow {elements}, it might still be COW, in which case we
  // copy it now.
  if (!(IsSmiOrObjectElementsKind(elements_kind) ||
        IsSealedElementsKind(elements_kind) ||
        IsNonextensibleElementsKind(elements_kind))) {
    CSA_DCHECK(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
  } else if (StoreModeHandlesCOW(store_mode)) {
    elements = CopyElementsOnWrite(object, elements, elements_kind,
                                   Signed(length), bailout);
  }

  CSA_DCHECK(this, Word32BinaryNot(IsFixedCOWArrayMap(LoadMap(elements))));
  if (float_value) {
    StoreElement(elements, elements_kind, intptr_key, float_value.value());
  } else {
    if (elements_kind == SHARED_ARRAY_ELEMENTS) {
      // Shared arrays require the value to be shareable across isolates.
      TVARIABLE(Object, shared_value, value);
      SharedValueBarrier(context, &shared_value);
      StoreElement(elements, elements_kind, intptr_key, shared_value.value());
    } else {
      StoreElement(elements, elements_kind, intptr_key, value);
    }
  }
}
13710
// Ensures {elements} has capacity for a store at {key}, growing it (inline
// or via Runtime::kGrowArrayElements) when the key is at/past the current
// length. For JSArrays the length field is bumped to key + 1 after a grow.
// Returns the (possibly reallocated) backing store; jumps to {bailout} for
// negative keys, failed growth, or out-of-bounds non-growing stores.
TNode<FixedArrayBase> CodeStubAssembler::CheckForCapacityGrow(
    TNode<UintPtrT> length, TNode<IntPtrT> key, Label* bailout) {
  TVARIABLE(FixedArrayBase, checked_elements);
  Label grow_case(this), no_grow_case(this), done(this),
      grow_bailout(this, Label::kDeferred);

    condition = UintPtrGreaterThanOrEqual(key, length);
  } else {
    // We don't support growing here unless the value is being appended.
    condition = WordEqual(key, length);
  }
  Branch(condition, &grow_case, &no_grow_case);

  BIND(&grow_case);
  {
    TNode<IntPtrT> current_capacity =
        LoadAndUntagFixedArrayBaseLength(elements);
    checked_elements = elements;
    Label fits_capacity(this);
    // If key is negative, we will notice in Runtime::kGrowArrayElements.
    GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);

    {
      TNode<FixedArrayBase> new_elements = TryGrowElementsCapacity(
          object, elements, kind, key, current_capacity, &grow_bailout);
      checked_elements = new_elements;
      Goto(&fits_capacity);
    }

    BIND(&grow_bailout);
    {
      // Slow path: grow through the runtime. A Smi return value signals
      // failure.
      GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);
      TNode<Number> tagged_key = ChangeUintPtrToTagged(Unsigned(key));
      TNode<Object> maybe_elements = CallRuntime(
          Runtime::kGrowArrayElements, NoContextConstant(), object, tagged_key);
      GotoIf(TaggedIsSmi(maybe_elements), bailout);
      TNode<FixedArrayBase> new_elements = CAST(maybe_elements);
      CSA_DCHECK(this, IsFixedArrayWithKind(new_elements, kind));
      checked_elements = new_elements;
      Goto(&fits_capacity);
    }

    BIND(&fits_capacity);
    GotoIfNot(IsJSArray(object), &done);

    // Keep the JSArray length in sync with the store position.
    TNode<IntPtrT> new_length = IntPtrAdd(key, IntPtrConstant(1));
    StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
                                   SmiTag(new_length));
    Goto(&done);
  }

  BIND(&no_grow_case);
  {
    GotoIfNot(UintPtrLessThan(key, length), bailout);
    checked_elements = elements;
    Goto(&done);
  }

  BIND(&done);
  return checked_elements.value();
}
13776
// If {elements} is a copy-on-write FixedArray, replaces it with a writable
// copy (via GrowElementsCapacity at the same capacity) and returns the copy;
// otherwise returns {elements} unchanged. Jumps to {bailout} if the copy
// cannot be allocated.
TNode<FixedArrayBase> CodeStubAssembler::CopyElementsOnWrite(
    TNode<IntPtrT> length, Label* bailout) {
  TVARIABLE(FixedArrayBase, new_elements_var, elements);
  Label done(this);

  GotoIfNot(IsFixedCOWArrayMap(LoadMap(elements)), &done);
  {
    TNode<IntPtrT> capacity = LoadAndUntagFixedArrayBaseLength(elements);
    TNode<FixedArrayBase> new_elements = GrowElementsCapacity(
        object, elements, kind, kind, length, capacity, bailout);
    new_elements_var = new_elements;
    Goto(&done);
  }

  BIND(&done);
  return new_elements_var.value();
}
13795
// Transitions {object} from {from_kind} to {to_kind}, installing {map}.
// Traps to {bailout} when an AllocationMemento must be honored, and for
// non-simple transitions reallocates the backing store in the new
// representation before switching the map.
void CodeStubAssembler::TransitionElementsKind(TNode<JSObject> object,
                                               TNode<Map> map,
                                               ElementsKind from_kind,
                                               ElementsKind to_kind,
                                               Label* bailout) {
  DCHECK(!IsHoleyElementsKind(from_kind) || IsHoleyElementsKind(to_kind));
  if (AllocationSite::ShouldTrack(from_kind, to_kind)) {
    TrapAllocationMemento(object, bailout);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    Comment("Non-simple map transition");
    TNode<FixedArrayBase> elements = LoadElements(object);

    Label done(this);
    // Empty elements need no conversion; just swap the map below.
    GotoIf(TaggedEqual(elements, EmptyFixedArrayConstant()), &done);

    // TODO(ishell): Use BInt for elements_length and array_length.
    TNode<IntPtrT> elements_length = LoadAndUntagFixedArrayBaseLength(elements);
    TNode<IntPtrT> array_length = Select<IntPtrT>(
        IsJSArray(object),
        [=, this]() {
          CSA_DCHECK(this, IsFastElementsKind(LoadElementsKind(object)));
          return PositiveSmiUntag(LoadFastJSArrayLength(CAST(object)));
        },
        [=]() { return elements_length; });

    CSA_DCHECK(this, WordNotEqual(elements_length, IntPtrConstant(0)));

    // Reallocate and convert the elements into the target representation.
    GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
                         elements_length, bailout);
    Goto(&done);
    BIND(&done);
  }

  // The map change itself is the last step, after the elements are valid
  // for the new kind.
  StoreMap(object, map);
}
13833
// Jumps to {memento_found} if an AllocationMemento immediately follows
// {object}; falls through otherwise. Only young-generation, non-large pages
// can carry mementos, and a candidate memento is only inspected when the
// memory after the object is safely readable (same page, and below the
// current new-space top when on the allocation page).
void CodeStubAssembler::TrapAllocationMemento(TNode<JSObject> object,
                                              Label* memento_found) {
  Comment("[ TrapAllocationMemento");
  Label no_memento_found(this);
  Label top_check(this), map_check(this);

  TNode<ExternalReference> new_space_top_address = ExternalConstant(
      ExternalReference::new_space_allocation_top_address(isolate()));
  // The memento, if present, sits directly after the JSArray header.
  const int kMementoMapOffset =
      ALIGN_TO_ALLOCATION_ALIGNMENT(JSArray::kHeaderSize);
  const int kMementoLastWordOffset =
      kMementoMapOffset + sizeof(AllocationMemento) - kTaggedSize;

  // Bail out if the object is not in new space.
  TNode<IntPtrT> object_word = BitcastTaggedToWord(object);
  // TODO(v8:11641): Skip TrapAllocationMemento when allocation-site
  // tracking is disabled.
  TNode<IntPtrT> object_page_header = MemoryChunkFromAddress(object_word);
  {
    TNode<IntPtrT> page_flags = Load<IntPtrT>(
        object_page_header, IntPtrConstant(MemoryChunk::FlagsOffset()));
    if (v8_flags.sticky_mark_bits) {
      // Pages with only old objects contain no mementos.
      GotoIfNot(
          WordEqual(WordAnd(page_flags,
                            IntPtrConstant(MemoryChunk::CONTAINS_ONLY_OLD)),
                    IntPtrConstant(0)),
          &no_memento_found);
    } else {
      GotoIf(WordEqual(
                 WordAnd(page_flags,
                         IntPtrConstant(MemoryChunk::kIsInYoungGenerationMask)),
                 IntPtrConstant(0)),
             &no_memento_found);
    }
    // TODO(v8:11799): Support allocation memento for a large object by
    // allocating additional word for the memento after the large object.
    GotoIf(WordNotEqual(WordAnd(page_flags,
                                IntPtrConstant(MemoryChunk::kIsLargePageMask)),
                        IntPtrConstant(0)),
           &no_memento_found);
  }

  TNode<IntPtrT> memento_last_word = IntPtrAdd(
      object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
  TNode<IntPtrT> memento_last_word_page_header =
      MemoryChunkFromAddress(memento_last_word);

  TNode<IntPtrT> new_space_top = Load<IntPtrT>(new_space_top_address);
  TNode<IntPtrT> new_space_top_page_header =
      MemoryChunkFromAddress(new_space_top);

  // If the object is in new space, we need to check whether respective
  // potential memento object is on the same page as the current top.
  GotoIf(WordEqual(memento_last_word_page_header, new_space_top_page_header),
         &top_check);

  // The object is on a different page than allocation top. Bail out if the
  // object sits on the page boundary as no memento can follow and we cannot
  // touch the memory following it.
  Branch(WordEqual(object_page_header, memento_last_word_page_header),
         &map_check, &no_memento_found);

  // If top is on the same page as the current object, we need to check whether
  // we are below top.
  BIND(&top_check);
  {
    Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
           &no_memento_found, &map_check);
  }

  // Memento map check.
  BIND(&map_check);
  {
    // A memento exists iff the word after the object is the
    // AllocationMemento map.
    TNode<AnyTaggedT> maybe_mapword =
        LoadObjectField(object, kMementoMapOffset);
    TNode<AnyTaggedT> memento_mapword =
        LoadRootMapWord(RootIndex::kAllocationMementoMap);
    Branch(TaggedEqual(maybe_mapword, memento_mapword), memento_found,
           &no_memento_found);
  }
  BIND(&no_memento_found);
  Comment("] TrapAllocationMemento");
}
13919
13920TNode<IntPtrT> CodeStubAssembler::MemoryChunkFromAddress(
13921 TNode<IntPtrT> address) {
13922 return WordAnd(address,
13923 IntPtrConstant(~MemoryChunk::GetAlignmentMaskForAssembler()));
13924}
13925
// Returns the metadata pointer for the MemoryChunk starting at {address}.
// With the sandbox enabled the metadata is looked up through the external
// metadata pointer table and validated; otherwise it is read directly from
// the chunk header.
TNode<IntPtrT> CodeStubAssembler::PageMetadataFromMemoryChunk(
    TNode<IntPtrT> address) {
#ifdef V8_ENABLE_SANDBOX
  TNode<RawPtrT> table = ExternalConstant(
      ExternalReference::memory_chunk_metadata_table_address());
  TNode<Uint32T> index = Load<Uint32T>(
      address, IntPtrConstant(MemoryChunk::MetadataIndexOffset()));
  // Mask the index so even a corrupted value stays within the table.
  index = Word32And(index,
                    UniqueUint32Constant(
                        MemoryChunkConstants::kMetadataPointerTableSizeMask));
  TNode<IntPtrT> offset = ChangeInt32ToIntPtr(
      Word32Shl(index, UniqueUint32Constant(kSystemPointerSizeLog2)));
  TNode<IntPtrT> metadata = Load<IntPtrT>(table, offset);
  // Check that the Metadata belongs to this Chunk, since an attacker with write
  // inside the sandbox could've swapped the index.
  TNode<IntPtrT> metadata_chunk = MemoryChunkFromAddress(Load<IntPtrT>(
      metadata, IntPtrConstant(MemoryChunkMetadata::AreaStartOffset())));
  CSA_CHECK(this, WordEqual(metadata_chunk, address));
  return metadata;
#else
  return Load<IntPtrT>(address, IntPtrConstant(MemoryChunk::MetadataOffset()));
#endif
}
13949
13950TNode<IntPtrT> CodeStubAssembler::PageMetadataFromAddress(
13951 TNode<IntPtrT> address) {
13952 return PageMetadataFromMemoryChunk(MemoryChunkFromAddress(address));
13953}
13954
// Allocates and initializes a pretenured AllocationSiteWithWeakNext, links
// it to the front of the isolate's allocation-site list, and stores it into
// {slot} of {feedback_vector}. Field initialization mirrors
// AllocationSite::Initialize.
TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
    TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot) {
  TNode<IntPtrT> size = IntPtrConstant(sizeof(AllocationSiteWithWeakNext));
  TNode<HeapObject> site = Allocate(size, AllocationFlag::kPretenured);
  StoreMapNoWriteBarrier(site, RootIndex::kAllocationSiteWithWeakNextMap);
  // Should match AllocationSite::Initialize.
  TNode<WordT> field = UpdateWord<AllocationSite::ElementsKindBits>(
      IntPtrConstant(0), UintPtrConstant(GetInitialFastElementsKind()));
  StoreObjectFieldNoWriteBarrier(
      site, offsetof(AllocationSite, transition_info_or_boilerplate_),
      SmiTag(Signed(field)));

  // Unlike literals, constructed arrays don't have nested sites
  TNode<Smi> zero = SmiConstant(0);
  StoreObjectFieldNoWriteBarrier(site, offsetof(AllocationSite, nested_site_),
                                 zero);

  // Pretenuring calculation field.
  StoreObjectFieldNoWriteBarrier(
      site, offsetof(AllocationSite, pretenure_data_), Int32Constant(0));

  // Pretenuring memento creation count field.
  StoreObjectFieldNoWriteBarrier(
      site, offsetof(AllocationSite, pretenure_create_count_),
      Int32Constant(0));

  // Store an empty fixed array for the code dependency.
  StoreObjectFieldRoot(site, offsetof(AllocationSite, dependent_code_),
                       DependentCode::kEmptyDependentCode);

  // Link the object to the allocation site list
  TNode<ExternalReference> site_list = ExternalConstant(
      ExternalReference::allocation_sites_list_address(isolate()));
  TNode<Object> next_site =
      LoadBufferObject(ReinterpretCast<RawPtrT>(site_list), 0);

  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  StoreObjectField(site, offsetof(AllocationSiteWithWeakNext, weak_next_),
                   next_site);
  StoreFullTaggedNoWriteBarrier(site_list, site);

  StoreFeedbackVectorSlot(feedback_vector, slot, site);
  return CAST(site);
}
14003
14004TNode<MaybeObject> CodeStubAssembler::StoreWeakReferenceInFeedbackVector(
14005 TNode<FeedbackVector> feedback_vector, TNode<UintPtrT> slot,
14006 TNode<HeapObject> value, int additional_offset) {
14007 TNode<HeapObjectReference> weak_value = MakeWeak(value);
14008 StoreFeedbackVectorSlot(feedback_vector, slot, weak_value,
14009 UPDATE_WRITE_BARRIER, additional_offset);
14010 return weak_value;
14011}
14012
14013TNode<BoolT> CodeStubAssembler::HasBoilerplate(
14014 TNode<Object> maybe_literal_site) {
14015 return TaggedIsNotSmi(maybe_literal_site);
14016}
14017
14018TNode<Smi> CodeStubAssembler::LoadTransitionInfo(
14019 TNode<AllocationSite> allocation_site) {
14020 TNode<Smi> transition_info = CAST(LoadObjectField(
14021 allocation_site,
14022 offsetof(AllocationSite, transition_info_or_boilerplate_)));
14023 return transition_info;
14024}
14025
14026TNode<JSObject> CodeStubAssembler::LoadBoilerplate(
14027 TNode<AllocationSite> allocation_site) {
14028 TNode<JSObject> boilerplate = CAST(LoadObjectField(
14029 allocation_site,
14030 offsetof(AllocationSite, transition_info_or_boilerplate_)));
14031 return boilerplate;
14032}
14033
14034TNode<Int32T> CodeStubAssembler::LoadElementsKind(
14035 TNode<AllocationSite> allocation_site) {
14036 TNode<Smi> transition_info = LoadTransitionInfo(allocation_site);
14037 TNode<Int32T> elements_kind =
14038 Signed(DecodeWord32<AllocationSite::ElementsKindBits>(
14039 SmiToInt32(transition_info)));
14040 CSA_DCHECK(this, IsFastElementsKind(elements_kind));
14041 return elements_kind;
14042}
14043
14044TNode<Object> CodeStubAssembler::LoadNestedAllocationSite(
14045 TNode<AllocationSite> allocation_site) {
14046 return LoadObjectField(allocation_site,
14047 offsetof(AllocationSite, nested_site_));
14048}
14049
// Emits a counted loop over [start_index, end_index), invoking the loop body
// once per step and advancing the index either before (kPre) or after (kPost)
// each body invocation. With LoopUnrollingMode::kYes the body is emitted
// twice per iteration, with a fix-up tail for an odd iteration count.
// {vars} lists additional CSA variables that are live across loop back-edges.
// NOTE(review): the loop-body callback and the per-step increment parameters
// are declared on a line not visible in this excerpt (see the int-increment
// overload below for their types).
template <typename TIndex>
void CodeStubAssembler::BuildFastLoop(
    const VariableList& vars, TVariable<TIndex>& var_index,
    TNode<TIndex> start_index, TNode<TIndex> end_index,
    LoopUnrollingMode unrolling_mode, IndexAdvanceMode advance_mode,
    IndexAdvanceDirection advance_direction) {
  // Update the index comparisons below in case we'd ever want to use Smi
  // indexes.
  static_assert(
      !std::is_same_v<TIndex, Smi>,
      "Smi indices are currently not supported because it's not clear whether "
      "the use case allows unsigned comparisons or not");
  var_index = start_index;
  VariableList vars_copy(vars.begin(), vars.end(), zone());
  vars_copy.push_back(&var_index);
  Label loop(this, vars_copy);
  Label after_loop(this), done(this);

  // Emits one logical iteration: advance the index before or after the body
  // depending on advance_mode.
  auto loop_body = [&]() {
    if (advance_mode == IndexAdvanceMode::kPre) {
      var_index = IntPtrOrSmiAdd(var_index.value(), increment);
    }
    body(var_index.value());
    if (advance_mode == IndexAdvanceMode::kPost) {
      var_index = IntPtrOrSmiAdd(var_index.value(), increment);
    }
  };
  // The loops below are generated using the following trick:
  // Introduce an explicit second check of the termination condition before
  // the loop that helps turbofan generate better code. If there's only a
  // single check, then the CodeStubAssembler forces it to be at the beginning
  // of the loop requiring a backwards branch at the end of the loop (it's not
  // possible to force the loop header check at the end of the loop and branch
  // forward to it from the pre-header). The extra branch is slower in the
  // case that the loop actually iterates.
  if (unrolling_mode == LoopUnrollingMode::kNo) {
    TNode<BoolT> first_check = UintPtrOrSmiEqual(var_index.value(), end_index);
    int32_t first_check_val;
    if (TryToInt32Constant(first_check, &first_check_val)) {
      // Statically-known bound: skip the loop entirely or enter it without
      // an extra branch.
      if (first_check_val) return;
      Goto(&loop);
    } else {
      Branch(first_check, &done, &loop);
    }

    BIND(&loop);
    {
      loop_body();
      // The index must never step past end_index in the advance direction.
      CSA_DCHECK(
          this,
          advance_direction == IndexAdvanceDirection::kUp
              ? UintPtrOrSmiLessThanOrEqual(var_index.value(), end_index)
              : UintPtrOrSmiLessThanOrEqual(end_index, var_index.value()));
      Branch(UintPtrOrSmiNotEqual(var_index.value(), end_index), &loop, &done);
    }
    BIND(&done);
  } else {
    // Check if there are at least two elements between start_index and
    // end_index.
    DCHECK_EQ(unrolling_mode, LoopUnrollingMode::kYes);
    switch (advance_direction) {
      case IndexAdvanceDirection::kUp:
        CSA_DCHECK(this, UintPtrOrSmiLessThanOrEqual(start_index, end_index));
        GotoIfNot(UintPtrOrSmiLessThanOrEqual(
                      IntPtrOrSmiAdd(start_index, increment), end_index),
                  &done);
        break;
      case IndexAdvanceDirection::kDown:

        CSA_DCHECK(this, UintPtrOrSmiLessThanOrEqual(end_index, start_index));
        GotoIfNot(UintPtrOrSmiLessThanOrEqual(
                      IntPtrOrSmiSub(end_index, increment), start_index),
                  &done);
        break;
    }

    // last_index is the final index reached by a loop doing whole (unrolled)
    // double-iterations; a trailing single iteration handles odd counts.
    TNode<TIndex> last_index = IntPtrOrSmiSub(end_index, increment);
    TNode<BoolT> first_check =
        advance_direction == IndexAdvanceDirection::kUp
            ? UintPtrOrSmiLessThan(start_index, last_index)
            : UintPtrOrSmiGreaterThan(start_index, last_index);
    int32_t first_check_val;
    if (TryToInt32Constant(first_check, &first_check_val)) {
      if (first_check_val) {
        Goto(&loop);
      } else {
        Goto(&after_loop);
      }
    } else {
      Branch(first_check, &loop, &after_loop);
    }

    BIND(&loop);
    {
      Comment("Unrolled Loop");
      loop_body();
      loop_body();
      TNode<BoolT> loop_check =
          advance_direction == IndexAdvanceDirection::kUp
              ? UintPtrOrSmiLessThan(var_index.value(), last_index)
              : UintPtrOrSmiGreaterThan(var_index.value(), last_index);
      Branch(loop_check, &loop, &after_loop);
    }
    BIND(&after_loop);
    {
      GotoIfNot(UintPtrOrSmiEqual(var_index.value(), last_index), &done);
      // Iteration count is odd.
      loop_body();
      Goto(&done);
    }
    BIND(&done);
  }
}
14164
14165template <typename TIndex>
14166void CodeStubAssembler::BuildFastLoop(
14167 const VariableList& vars, TVariable<TIndex>& var_index,
14168 TNode<TIndex> start_index, TNode<TIndex> end_index,
14169 const FastLoopBody<TIndex>& body, int increment,
14170 LoopUnrollingMode unrolling_mode, IndexAdvanceMode advance_mode) {
14171 DCHECK_NE(increment, 0);
14172 BuildFastLoop(vars, var_index, start_index, end_index, body,
14173 IntPtrOrSmiConstant<TIndex>(increment), unrolling_mode,
14174 advance_mode,
14175 increment > 0 ? IndexAdvanceDirection::kUp
14176 : IndexAdvanceDirection::kDown);
14177}
14178
// Explicitly instantiate the int-increment BuildFastLoop overload for the
// index types used by callers in other translation units: IntPtrT, UintPtrT
// and RawPtrT.
template V8_EXPORT_PRIVATE void CodeStubAssembler::BuildFastLoop<IntPtrT>(
    const VariableList& vars, TVariable<IntPtrT>& var_index,
    TNode<IntPtrT> start_index, TNode<IntPtrT> end_index,
    const FastLoopBody<IntPtrT>& body, int increment,
    LoopUnrollingMode unrolling_mode, IndexAdvanceMode advance_mode);
template V8_EXPORT_PRIVATE void CodeStubAssembler::BuildFastLoop<UintPtrT>(
    const VariableList& vars, TVariable<UintPtrT>& var_index,
    TNode<UintPtrT> start_index, TNode<UintPtrT> end_index,
    const FastLoopBody<UintPtrT>& body, int increment,
    LoopUnrollingMode unrolling_mode, IndexAdvanceMode advance_mode);
template V8_EXPORT_PRIVATE void CodeStubAssembler::BuildFastLoop<RawPtrT>(
    const VariableList& vars, TVariable<RawPtrT>& var_index,
    TNode<RawPtrT> start_index, TNode<RawPtrT> end_index,
    const FastLoopBody<RawPtrT>& body, int increment,
    LoopUnrollingMode unrolling_mode, IndexAdvanceMode advance_mode);
14195
// Iterates over the elements [first_element_inclusive,
// last_element_exclusive) of a FixedArray-like object (FixedArray with the
// given kind, or a PropertyArray), invoking {body} with the array and the
// byte offset of each element. When both bounds are compile-time constants
// and the range is at most kElementLoopUnrollThreshold, the loop is fully
// unrolled at assembly time.
// NOTE(review): several continuation lines of this function (the array
// parameter and some call arguments) are not visible in this excerpt.
template <typename TIndex>
void CodeStubAssembler::BuildFastArrayForEach(
    ElementsKind kind, TNode<TIndex> first_element_inclusive,
    TNode<TIndex> last_element_exclusive, const FastArrayForEachBody& body,
    LoopUnrollingMode loop_unrolling_mode, ForEachDirection direction) {
  static_assert(OFFSET_OF_DATA_START(FixedArray) ==
  CSA_SLOW_DCHECK(this, Word32Or(IsFixedArrayWithKind(array, kind),
                                 IsPropertyArray(array)));

  intptr_t first_val;
  bool constant_first =
      TryToIntPtrConstant(first_element_inclusive, &first_val);
  intptr_t last_val;
  // NOTE(review): "constent_last" looks like a typo for "constant_last".
  bool constent_last = TryToIntPtrConstant(last_element_exclusive, &last_val);
  if (constant_first && constent_last) {
    intptr_t delta = last_val - first_val;
    DCHECK_GE(delta, 0);
    if (delta <= kElementLoopUnrollThreshold) {
      // Small statically-known range: emit the body once per element instead
      // of generating a loop.
      if (direction == ForEachDirection::kForward) {
        for (intptr_t i = first_val; i < last_val; ++i) {
          TNode<IntPtrT> index = IntPtrConstant(i);
          TNode<IntPtrT> offset = ElementOffsetFromIndex(
          body(array, offset);
        }
      } else {
        for (intptr_t i = last_val - 1; i >= first_val; --i) {
          TNode<IntPtrT> index = IntPtrConstant(i);
          TNode<IntPtrT> offset = ElementOffsetFromIndex(
          body(array, offset);
        }
      }
      return;
    }
  }

  // General case: translate the element indices into byte offsets and drive
  // a BuildFastLoop over them, swapping the bounds for reverse iteration.
      ElementOffsetFromIndex(first_element_inclusive, kind,
  TNode<IntPtrT> limit =
      ElementOffsetFromIndex(last_element_exclusive, kind,
  if (direction == ForEachDirection::kReverse) std::swap(start, limit);

  BuildFastLoop<IntPtrT>(
      start, limit, [&](TNode<IntPtrT> offset) { body(array, offset); },
      direction == ForEachDirection::kReverse ? -increment : increment,
      loop_unrolling_mode,
      direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
                                              : IndexAdvanceMode::kPost);
}
14251
14252template <typename TIndex>
14253void CodeStubAssembler::GotoIfFixedArraySizeDoesntFitInNewSpace(
14254 TNode<TIndex> element_count, Label* doesnt_fit, int base_size) {
14255 GotoIf(FixedArraySizeDoesntFitInNewSpace(element_count, base_size),
14256 doesnt_fit);
14257}
14258
14259void CodeStubAssembler::InitializeFieldsWithRoot(TNode<HeapObject> object,
14260 TNode<IntPtrT> start_offset,
14261 TNode<IntPtrT> end_offset,
14262 RootIndex root_index) {
14263 CSA_SLOW_DCHECK(this, TaggedIsNotSmi(object));
14264 start_offset = IntPtrAdd(start_offset, IntPtrConstant(-kHeapObjectTag));
14265 end_offset = IntPtrAdd(end_offset, IntPtrConstant(-kHeapObjectTag));
14266 TNode<AnyTaggedT> root_value;
14267 if (root_index == RootIndex::kOnePointerFillerMap) {
14268 root_value = LoadRootMapWord(root_index);
14269 } else {
14270 root_value = LoadRoot(root_index);
14271 }
14272 BuildFastLoop<IntPtrT>(
14273 end_offset, start_offset,
14274 [=, this](TNode<IntPtrT> current) {
14275 StoreNoWriteBarrier(MachineRepresentation::kTagged, object, current,
14276 root_value);
14277 },
14278 -kTaggedSize, LoopUnrollingMode::kYes, IndexAdvanceMode::kPre);
14279}
14280
// Compares two Numbers (Smi or HeapNumber) according to {op} and branches to
// {if_true} / {if_false}. The pure Smi/Smi case is compared directly; any
// mix involving a HeapNumber is widened to Float64 and compared in the
// shared do_float_comparison block.
void CodeStubAssembler::BranchIfNumberRelationalComparison(Operation op,
                                                           TNode<Number> left,
                                                           TNode<Number> right,
                                                           Label* if_true,
                                                           Label* if_false) {
  Label do_float_comparison(this);
  TVARIABLE(Float64T, var_left_float);
  TVARIABLE(Float64T, var_right_float);

  Branch(
      TaggedIsSmi(left),
      [&] {
        TNode<Smi> smi_left = CAST(left);

        Branch(
            TaggedIsSmi(right),
            [&] {
              TNode<Smi> smi_right = CAST(right);

              // Both {left} and {right} are Smi, so just perform a fast
              // Smi comparison.
              switch (op) {
                case Operation::kEqual:
                  BranchIfSmiEqual(smi_left, smi_right, if_true, if_false);
                  break;
                case Operation::kLessThan:
                  BranchIfSmiLessThan(smi_left, smi_right, if_true, if_false);
                  break;
                case Operation::kLessThanOrEqual:
                  BranchIfSmiLessThanOrEqual(smi_left, smi_right, if_true,
                                             if_false);
                  break;
                case Operation::kGreaterThan:
                  // a > b  <=>  b < a, so reuse the less-than helper with
                  // swapped operands.
                  BranchIfSmiLessThan(smi_right, smi_left, if_true, if_false);
                  break;
                case Operation::kGreaterThanOrEqual:
                  BranchIfSmiLessThanOrEqual(smi_right, smi_left, if_true,
                                             if_false);
                  break;
                default:
                  UNREACHABLE();
              }
            },
            [&] {
              var_left_float = SmiToFloat64(smi_left);
              var_right_float = LoadHeapNumberValue(CAST(right));
              Goto(&do_float_comparison);
            });
      },
      [&] {
        var_left_float = LoadHeapNumberValue(CAST(left));

        Branch(
            TaggedIsSmi(right),
            [&] {
              var_right_float = SmiToFloat64(CAST(right));
              Goto(&do_float_comparison);
            },
            [&] {
              var_right_float = LoadHeapNumberValue(CAST(right));
              Goto(&do_float_comparison);
            });
      });

  BIND(&do_float_comparison);
  {
    switch (op) {
      case Operation::kEqual:
        Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
               if_true, if_false);
        break;
      case Operation::kLessThan:
        Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
               if_true, if_false);
        break;
      case Operation::kLessThanOrEqual:
        Branch(Float64LessThanOrEqual(var_left_float.value(),
                                      var_right_float.value()),
               if_true, if_false);
        break;
      case Operation::kGreaterThan:
        Branch(
            Float64GreaterThan(var_left_float.value(), var_right_float.value()),
            if_true, if_false);
        break;
      case Operation::kGreaterThanOrEqual:
        Branch(Float64GreaterThanOrEqual(var_left_float.value(),
                                         var_right_float.value()),
               if_true, if_false);
        break;
      default:
        UNREACHABLE();
    }
  }
}
14376
14377void CodeStubAssembler::GotoIfNumberGreaterThanOrEqual(TNode<Number> left,
14378 TNode<Number> right,
14379 Label* if_true) {
14380 Label if_false(this);
14381 BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, left,
14382 right, if_true, &if_false);
14383 BIND(&if_false);
14384}
14385
14386namespace {
14387Operation Reverse(Operation op) {
14388 switch (op) {
14389 case Operation::kLessThan:
14390 return Operation::kGreaterThan;
14391 case Operation::kLessThanOrEqual:
14392 return Operation::kGreaterThanOrEqual;
14393 case Operation::kGreaterThan:
14394 return Operation::kLessThan;
14395 case Operation::kGreaterThanOrEqual:
14396 return Operation::kLessThanOrEqual;
14397 default:
14398 break;
14399 }
14400 UNREACHABLE();
14401}
14402} // anonymous namespace
14403
// Walks up the context chain for {depth} steps (depth must be non-zero),
// jumping to {target} as soon as a context with a non-undefined extension
// slot is found. Returns the context reached after walking the full depth.
TNode<Context> CodeStubAssembler::GotoIfHasContextExtensionUpToDepth(
    TNode<Context> context, TNode<Uint32T> depth, Label* target) {
  TVARIABLE(Context, cur_context, context);
  TVARIABLE(Uint32T, cur_depth, depth);

  Label context_search(this, {&cur_depth, &cur_context});
  Label exit_loop(this);
  Label no_extension(this);

  // Loop until the depth is 0.
  CSA_DCHECK(this, Word32NotEqual(cur_depth.value(), Int32Constant(0)));
  Goto(&context_search);
  BIND(&context_search);
  {
#if DEBUG
    // Const tracking let data is stored in the extension slot of a
    // ScriptContext - however, it's unrelated to the sloppy eval variable
    // extension. We should never iterate through a ScriptContext here.
    auto scope_info = LoadScopeInfo(cur_context.value());
    TNode<Uint32T> flags =
        LoadObjectField<Uint32T>(scope_info, ScopeInfo::kFlagsOffset);
    auto scope_type = DecodeWord32<ScopeInfo::ScopeTypeBits>(flags);
    CSA_DCHECK(this, Word32NotEqual(scope_type,
                                    Int32Constant(ScopeType::SCRIPT_SCOPE)));
    CSA_DCHECK(this, Word32NotEqual(scope_type,
                                    Int32Constant(ScopeType::REPL_MODE_SCOPE)));
#endif

    // Check if context has an extension slot.
    TNode<BoolT> has_extension =
        LoadScopeInfoHasExtensionField(LoadScopeInfo(cur_context.value()));
    GotoIfNot(has_extension, &no_extension);

    // Jump to the target if the extension slot is not an undefined value.
    TNode<Object> extension_slot =
        LoadContextElement(cur_context.value(), Context::EXTENSION_INDEX);
    Branch(TaggedNotEqual(extension_slot, UndefinedConstant()), target,
           &no_extension);

    BIND(&no_extension);
    {
      // No extension here: step one level up the context chain.
      cur_depth = Unsigned(Int32Sub(cur_depth.value(), Int32Constant(1)));
      cur_context = CAST(
          LoadContextElement(cur_context.value(), Context::PREVIOUS_INDEX));

      Branch(Word32NotEqual(cur_depth.value(), Int32Constant(0)),
             &context_search, &exit_loop);
    }
  }
  BIND(&exit_loop);
  return cur_context.value();
}
14456
// Compares two BigInts that fit in 64 bits by comparing their raw digit
// values, branching to {return_true} / {return_false}. Callers guard large
// BigInts via GotoIfLargeBigInt before reaching here (see
// RelationalComparison).
// NOTE(review): the declaration of {condition} (TNode<BoolT>) is on a line
// not visible in this excerpt.
void CodeStubAssembler::BigInt64Comparison(Operation op, TNode<Object>& left,
                                           TNode<Object>& right,
                                           Label* return_true,
                                           Label* return_false) {
  TVARIABLE(UintPtrT, left_raw);
  TVARIABLE(UintPtrT, right_raw);
  // The same variable receives both output words -- presumably only the low
  // word is needed for one-digit BigInts; confirm against BigIntToRawBytes.
  BigIntToRawBytes(CAST(left), &left_raw, &left_raw);
  BigIntToRawBytes(CAST(right), &right_raw, &right_raw);
  TNode<WordT> left_raw_value = left_raw.value();
  TNode<WordT> right_raw_value = right_raw.value();

  switch (op) {
    case Operation::kEqual:
    case Operation::kStrictEqual:
      condition = WordEqual(left_raw_value, right_raw_value);
      break;
    case Operation::kLessThan:
      condition = IntPtrLessThan(left_raw_value, right_raw_value);
      break;
    case Operation::kLessThanOrEqual:
      condition = IntPtrLessThanOrEqual(left_raw_value, right_raw_value);
      break;
    case Operation::kGreaterThan:
      condition = IntPtrGreaterThan(left_raw_value, right_raw_value);
      break;
    case Operation::kGreaterThanOrEqual:
      condition = IntPtrGreaterThanOrEqual(left_raw_value, right_raw_value);
      break;
    default:
      UNREACHABLE();
  }
  Branch(condition, return_true, return_false);
}
14491
// Implements the abstract relational comparison for arbitrary JS values
// (spec: "IsLessThan" and its derived forms), returning True/False as a
// Boolean. When {var_type_feedback} is non-null, type feedback is collected
// along the way. The outer loop re-enters after ToPrimitive / ToNumeric
// conversions replace an operand.
// NOTE(review): the declaration of the local {builtin} used in the
// string-comparison switch is on a line not visible in this excerpt.
TNode<Boolean> CodeStubAssembler::RelationalComparison(
    Operation op, TNode<Object> left, TNode<Object> right,
    const LazyNode<Context>& context, TVariable<Smi>* var_type_feedback) {
  Label return_true(this), return_false(this), do_float_comparison(this),
      end(this);
  TVARIABLE(Boolean, var_result);
  TVARIABLE(Float64T, var_left_float);
  TVARIABLE(Float64T, var_right_float);

  // We might need to loop several times due to ToPrimitive and/or ToNumeric
  // conversions.
  TVARIABLE(Object, var_left, left);
  TVARIABLE(Object, var_right, right);
  VariableList loop_variable_list({&var_left, &var_right}, zone());
  if (var_type_feedback != nullptr) {
    // Initialize the type feedback to None. The current feedback is combined
    // with the previous feedback.
    *var_type_feedback = SmiConstant(CompareOperationFeedback::kNone);
    loop_variable_list.push_back(var_type_feedback);
  }
  Label loop(this, loop_variable_list);
  Goto(&loop);
  BIND(&loop);
  {
    left = var_left.value();
    right = var_right.value();

    Label if_left_smi(this), if_left_not_smi(this);
    Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);

    BIND(&if_left_smi);
    {
      TNode<Smi> smi_left = CAST(left);
      Label if_right_smi(this), if_right_heapnumber(this),
          if_right_bigint(this, Label::kDeferred),
          if_right_not_numeric(this, Label::kDeferred);
      GotoIf(TaggedIsSmi(right), &if_right_smi);
      TNode<Map> right_map = LoadMap(CAST(right));
      GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
      TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
      Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
             &if_right_not_numeric);

      BIND(&if_right_smi);
      {
        // Fast path: both operands are Smis.
        TNode<Smi> smi_right = CAST(right);
        CombineFeedback(var_type_feedback,
                        CompareOperationFeedback::kSignedSmall);
        switch (op) {
          case Operation::kLessThan:
            BranchIfSmiLessThan(smi_left, smi_right, &return_true,
                                &return_false);
            break;
          case Operation::kLessThanOrEqual:
            BranchIfSmiLessThanOrEqual(smi_left, smi_right, &return_true,
                                       &return_false);
            break;
          case Operation::kGreaterThan:
            BranchIfSmiLessThan(smi_right, smi_left, &return_true,
                                &return_false);
            break;
          case Operation::kGreaterThanOrEqual:
            BranchIfSmiLessThanOrEqual(smi_right, smi_left, &return_true,
                                       &return_false);
            break;
          default:
            UNREACHABLE();
        }
      }

      BIND(&if_right_heapnumber);
      {
        CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
        var_left_float = SmiToFloat64(smi_left);
        var_right_float = LoadHeapNumberValue(CAST(right));
        Goto(&do_float_comparison);
      }

      BIND(&if_right_bigint);
      {
        // BigInt vs. Number: delegate to the runtime with operands swapped
        // (hence Reverse(op)).
        OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
        var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
                                      NoContextConstant(),
                                      SmiConstant(Reverse(op)), right, left));
        Goto(&end);
      }

      BIND(&if_right_not_numeric);
      {
        OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
        // Convert {right} to a Numeric; we don't need to perform the
        // dedicated ToPrimitive(right, hint Number) operation, as the
        // ToNumeric(right) will by itself already invoke ToPrimitive with
        // a Number hint.
        var_right = CallBuiltin(Builtin::kNonNumberToNumeric, context(), right);
        Goto(&loop);
      }
    }

    BIND(&if_left_not_smi);
    {
      TNode<Map> left_map = LoadMap(CAST(left));

      Label if_right_smi(this), if_right_not_smi(this);
      Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);

      BIND(&if_right_smi);
      {
        Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
            if_left_not_numeric(this, Label::kDeferred);
        GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
        TNode<Uint16T> left_instance_type = LoadMapInstanceType(left_map);
        Branch(IsBigIntInstanceType(left_instance_type), &if_left_bigint,
               &if_left_not_numeric);

        BIND(&if_left_heapnumber);
        {
          CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
          var_left_float = LoadHeapNumberValue(CAST(left));
          var_right_float = SmiToFloat64(CAST(right));
          Goto(&do_float_comparison);
        }

        BIND(&if_left_bigint);
        {
          OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
          var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
                                        NoContextConstant(), SmiConstant(op),
                                        left, right));
          Goto(&end);
        }

        BIND(&if_left_not_numeric);
        {
          OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
          // Convert {left} to a Numeric; we don't need to perform the
          // dedicated ToPrimitive(left, hint Number) operation, as the
          // ToNumeric(left) will by itself already invoke ToPrimitive with
          // a Number hint.
          var_left = CallBuiltin(Builtin::kNonNumberToNumeric, context(), left);
          Goto(&loop);
        }
      }

      BIND(&if_right_not_smi);
      {
        TNode<Map> right_map = LoadMap(CAST(right));

        Label if_left_heapnumber(this), if_left_bigint(this, Label::kDeferred),
            if_left_string(this, Label::kDeferred),
            if_left_other(this, Label::kDeferred);
        GotoIf(IsHeapNumberMap(left_map), &if_left_heapnumber);
        TNode<Uint16T> left_instance_type = LoadMapInstanceType(left_map);
        GotoIf(IsBigIntInstanceType(left_instance_type), &if_left_bigint);
        Branch(IsStringInstanceType(left_instance_type), &if_left_string,
               &if_left_other);

        BIND(&if_left_heapnumber);
        {
          Label if_right_heapnumber(this),
              if_right_bigint(this, Label::kDeferred),
              if_right_not_numeric(this, Label::kDeferred);
          GotoIf(TaggedEqual(right_map, left_map), &if_right_heapnumber);
          TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
          Branch(IsBigIntInstanceType(right_instance_type), &if_right_bigint,
                 &if_right_not_numeric);

          BIND(&if_right_heapnumber);
          {
            CombineFeedback(var_type_feedback,
                            CompareOperationFeedback::kNumber);
            var_left_float = LoadHeapNumberValue(CAST(left));
            var_right_float = LoadHeapNumberValue(CAST(right));
            Goto(&do_float_comparison);
          }

          BIND(&if_right_bigint);
          {
            OverwriteFeedback(var_type_feedback,
                              CompareOperationFeedback::kAny);
            var_result = CAST(CallRuntime(
                Runtime::kBigIntCompareToNumber, NoContextConstant(),
                SmiConstant(Reverse(op)), right, left));
            Goto(&end);
          }

          BIND(&if_right_not_numeric);
          {
            OverwriteFeedback(var_type_feedback,
                              CompareOperationFeedback::kAny);
            // Convert {right} to a Numeric; we don't need to perform
            // dedicated ToPrimitive(right, hint Number) operation, as the
            // ToNumeric(right) will by itself already invoke ToPrimitive with
            // a Number hint.
            var_right =
                CallBuiltin(Builtin::kNonNumberToNumeric, context(), right);
            Goto(&loop);
          }
        }

        BIND(&if_left_bigint);
        {
          Label if_right_heapnumber(this), if_right_bigint(this),
              if_right_string(this), if_right_other(this);
          GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
          TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
          GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
          Branch(IsStringInstanceType(right_instance_type), &if_right_string,
                 &if_right_other);

          BIND(&if_right_heapnumber);
          {
            OverwriteFeedback(var_type_feedback,
                              CompareOperationFeedback::kAny);
            var_result = CAST(CallRuntime(Runtime::kBigIntCompareToNumber,
                                          NoContextConstant(), SmiConstant(op),
                                          left, right));
            Goto(&end);
          }

          BIND(&if_right_bigint);
          {
            if (Is64()) {
              // On 64-bit platforms, compare small (one-digit) BigInts
              // directly; fall through to the generic builtin otherwise.
              Label if_both_bigint(this);
              GotoIfLargeBigInt(CAST(left), &if_both_bigint);
              GotoIfLargeBigInt(CAST(right), &if_both_bigint);

              CombineFeedback(var_type_feedback,
                              CompareOperationFeedback::kBigInt64);
              BigInt64Comparison(op, left, right, &return_true, &return_false);
              BIND(&if_both_bigint);
            }

            CombineFeedback(var_type_feedback,
                            CompareOperationFeedback::kBigInt);
            var_result = CAST(CallBuiltin(BigIntComparisonBuiltinOf(op),
                                          NoContextConstant(), left, right));
            Goto(&end);
          }

          BIND(&if_right_string);
          {
            OverwriteFeedback(var_type_feedback,
                              CompareOperationFeedback::kAny);
            var_result = CAST(CallRuntime(Runtime::kBigIntCompareToString,
                                          NoContextConstant(), SmiConstant(op),
                                          left, right));
            Goto(&end);
          }

          // {right} is not a Number, BigInt, or String.
          BIND(&if_right_other);
          {
            OverwriteFeedback(var_type_feedback,
                              CompareOperationFeedback::kAny);
            // Convert {right} to a Numeric; we don't need to perform
            // dedicated ToPrimitive(right, hint Number) operation, as the
            // ToNumeric(right) will by itself already invoke ToPrimitive with
            // a Number hint.
            var_right =
                CallBuiltin(Builtin::kNonNumberToNumeric, context(), right);
            Goto(&loop);
          }
        }

        BIND(&if_left_string);
        {
          TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);

          Label if_right_not_string(this, Label::kDeferred);
          GotoIfNot(IsStringInstanceType(right_instance_type),
                    &if_right_not_string);

          // Both {left} and {right} are strings.
          CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
          switch (op) {
            case Operation::kLessThan:
              builtin = Builtin::kStringLessThan;
              break;
            case Operation::kLessThanOrEqual:
              builtin = Builtin::kStringLessThanOrEqual;
              break;
            case Operation::kGreaterThan:
              builtin = Builtin::kStringGreaterThan;
              break;
            case Operation::kGreaterThanOrEqual:
              builtin = Builtin::kStringGreaterThanOrEqual;
              break;
            default:
              UNREACHABLE();
          }
          var_result = CAST(CallBuiltin(builtin, TNode<Object>(), left, right));
          Goto(&end);

          BIND(&if_right_not_string);
          {
            OverwriteFeedback(var_type_feedback,
                              CompareOperationFeedback::kAny);
            // {left} is a String, while {right} isn't. Check if {right} is
            // a BigInt, otherwise call ToPrimitive(right, hint Number) if
            // {right} is a receiver, or ToNumeric(left) and then
            // ToNumeric(right) in the other cases.
            static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
            Label if_right_bigint(this),
                if_right_receiver(this, Label::kDeferred);
            GotoIf(IsBigIntInstanceType(right_instance_type), &if_right_bigint);
            GotoIf(IsJSReceiverInstanceType(right_instance_type),
                   &if_right_receiver);

            var_left =
                CallBuiltin(Builtin::kNonNumberToNumeric, context(), left);
            var_right = CallBuiltin(Builtin::kToNumeric, context(), right);
            Goto(&loop);

            BIND(&if_right_bigint);
            {
              var_result = CAST(CallRuntime(
                  Runtime::kBigIntCompareToString, NoContextConstant(),
                  SmiConstant(Reverse(op)), right, left));
              Goto(&end);
            }

            BIND(&if_right_receiver);
            {
              var_right = CallBuiltin(
                  Builtins::NonPrimitiveToPrimitive(ToPrimitiveHint::kNumber),
                  context(), right);
              Goto(&loop);
            }
          }
        }

        BIND(&if_left_other);
        {
          // {left} is neither a Numeric nor a String, and {right} is not a Smi.
          if (var_type_feedback != nullptr) {
            // Collect NumberOrOddball feedback if {left} is an Oddball
            // and {right} is either a HeapNumber or Oddball. Otherwise collect
            // Any feedback.
            Label collect_any_feedback(this), collect_oddball_feedback(this),
                collect_feedback_done(this);
            GotoIfNot(InstanceTypeEqual(left_instance_type, ODDBALL_TYPE),
                      &collect_any_feedback);

            GotoIf(IsHeapNumberMap(right_map), &collect_oddball_feedback);
            TNode<Uint16T> right_instance_type = LoadMapInstanceType(right_map);
            Branch(InstanceTypeEqual(right_instance_type, ODDBALL_TYPE),
                   &collect_oddball_feedback, &collect_any_feedback);

            BIND(&collect_oddball_feedback);
            {
              CombineFeedback(var_type_feedback,
                              CompareOperationFeedback::kNumberOrOddball);
              Goto(&collect_feedback_done);
            }

            BIND(&collect_any_feedback);
            {
              OverwriteFeedback(var_type_feedback,
                                CompareOperationFeedback::kAny);
              Goto(&collect_feedback_done);
            }

            BIND(&collect_feedback_done);
          }

          // If {left} is a receiver, call ToPrimitive(left, hint Number).
          // Otherwise call ToNumeric(right) and then ToNumeric(left), the
          // order here is important as it's observable by user code.
          static_assert(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
          Label if_left_receiver(this, Label::kDeferred);
          GotoIf(IsJSReceiverInstanceType(left_instance_type),
                 &if_left_receiver);

          var_right = CallBuiltin(Builtin::kToNumeric, context(), right);
          var_left = CallBuiltin(Builtin::kNonNumberToNumeric, context(), left);
          Goto(&loop);

          BIND(&if_left_receiver);
          {
            Builtin builtin =
                Builtins::NonPrimitiveToPrimitive(ToPrimitiveHint::kNumber);
            var_left = CallBuiltin(builtin, context(), left);
            Goto(&loop);
          }
        }
      }
    }
  }

  BIND(&do_float_comparison);
  {
    switch (op) {
      case Operation::kLessThan:
        Branch(Float64LessThan(var_left_float.value(), var_right_float.value()),
               &return_true, &return_false);
        break;
      case Operation::kLessThanOrEqual:
        Branch(Float64LessThanOrEqual(var_left_float.value(),
                                      var_right_float.value()),
               &return_true, &return_false);
        break;
      case Operation::kGreaterThan:
        Branch(
            Float64GreaterThan(var_left_float.value(), var_right_float.value()),
            &return_true, &return_false);
        break;
      case Operation::kGreaterThanOrEqual:
        Branch(Float64GreaterThanOrEqual(var_left_float.value(),
                                         var_right_float.value()),
               &return_true, &return_false);
        break;
      default:
        UNREACHABLE();
    }
  }

  BIND(&return_true);
  {
    var_result = TrueConstant();
    Goto(&end);
  }

  BIND(&return_false);
  {
    var_result = FalseConstant();
    Goto(&end);
  }

  BIND(&end);
  return var_result.value();
}
14925
14926TNode<Smi> CodeStubAssembler::CollectFeedbackForString(
14927 TNode<Int32T> instance_type) {
14928 TNode<Smi> feedback = SelectSmiConstant(
14929 Word32Equal(
14930 Word32And(instance_type, Int32Constant(kIsNotInternalizedMask)),
14931 Int32Constant(kInternalizedTag)),
14932 CompareOperationFeedback::kInternalizedString,
14933 CompareOperationFeedback::kString);
14934 return feedback;
14935}
14936
// Emits the equality check for the case where both operands are the very
// same tagged value. The result is always "equal" except for a HeapNumber
// holding NaN, which compares unequal to itself.
//
// value:             the operand appearing on both sides of the comparison.
// if_equal:          jump target for the "equal" outcome.
// if_notequal:       jump target for the "not equal" outcome (NaN only).
// var_type_feedback: optional feedback accumulator; when non-null, the type
//                    feedback for {value} is combined into it before jumping.
void CodeStubAssembler::GenerateEqual_Same(TNode<Object> value, Label* if_equal,
                                           Label* if_notequal,
                                           TVariable<Smi>* var_type_feedback) {
  // In case of abstract or strict equality checks, we need additional checks
  // for NaN values because they are not considered equal, even if both the
  // left and the right hand side reference exactly the same value.

  Label if_smi(this), if_heapnumber(this);
  GotoIf(TaggedIsSmi(value), &if_smi);

  TNode<HeapObject> value_heapobject = CAST(value);
  TNode<Map> value_map = LoadMap(value_heapobject);
  GotoIf(IsHeapNumberMap(value_map), &if_heapnumber);

  // For non-HeapNumbers, all we do is collect type feedback.
  if (var_type_feedback != nullptr) {
    TNode<Uint16T> instance_type = LoadMapInstanceType(value_map);

    // Dispatch on the instance type purely to record the right feedback kind;
    // every branch below ends in Goto(if_equal).
    Label if_string(this), if_receiver(this), if_oddball(this), if_symbol(this),
        if_bigint(this);
    GotoIf(IsStringInstanceType(instance_type), &if_string);
    GotoIf(IsJSReceiverInstanceType(instance_type), &if_receiver);
    GotoIf(IsOddballInstanceType(instance_type), &if_oddball);
    Branch(IsBigIntInstanceType(instance_type), &if_bigint, &if_symbol);

    BIND(&if_string);
    {
      CSA_DCHECK(this, IsString(value_heapobject));
      CombineFeedback(var_type_feedback,
                      CollectFeedbackForString(instance_type));
      Goto(if_equal);
    }

    BIND(&if_symbol);
    {
      CSA_DCHECK(this, IsSymbol(value_heapobject));
      CombineFeedback(var_type_feedback, CompareOperationFeedback::kSymbol);
      Goto(if_equal);
    }

    BIND(&if_receiver);
    {
      CSA_DCHECK(this, IsJSReceiver(value_heapobject));
      CombineFeedback(var_type_feedback, CompareOperationFeedback::kReceiver);
      Goto(if_equal);
    }

    BIND(&if_bigint);
    {
      CSA_DCHECK(this, IsBigInt(value_heapobject));

      if (Is64()) {
        // On 64-bit targets, small BigInts get the more precise kBigInt64
        // feedback; large ones fall through to the generic kBigInt feedback.
        Label if_large_bigint(this);
        GotoIfLargeBigInt(CAST(value_heapobject), &if_large_bigint);
        CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt64);
        Goto(if_equal);
        BIND(&if_large_bigint);
      }
      CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
      Goto(if_equal);
    }

    BIND(&if_oddball);
    {
      CSA_DCHECK(this, IsOddball(value_heapobject));
      Label if_boolean(this), if_not_boolean(this);
      Branch(IsBooleanMap(value_map), &if_boolean, &if_not_boolean);

      BIND(&if_boolean);
      {
        CombineFeedback(var_type_feedback, CompareOperationFeedback::kBoolean);
        Goto(if_equal);
      }

      BIND(&if_not_boolean);
      {
        // Non-boolean oddballs reaching here can only be Null or Undefined.
        CSA_DCHECK(this, IsNullOrUndefined(value_heapobject));
        CombineFeedback(var_type_feedback,
                        CompareOperationFeedback::kReceiverOrNullOrUndefined);
        Goto(if_equal);
      }
    }
  } else {
    Goto(if_equal);
  }

  BIND(&if_heapnumber);
  {
    CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
    TNode<Float64T> number_value = LoadHeapNumberValue(value_heapobject);
    BranchIfFloat64IsNaN(number_value, if_notequal, if_equal);
  }

  BIND(&if_smi);
  {
    CombineFeedback(var_type_feedback, CompareOperationFeedback::kSignedSmall);
    Goto(if_equal);
  }
}
15036
15037// ES6 section 7.2.12 Abstract Equality Comparison
15038TNode<Boolean> CodeStubAssembler::Equal(TNode<Object> left, TNode<Object> right,
15039 const LazyNode<Context>& context,
15040 TVariable<Smi>* var_type_feedback) {
15041 // This is a slightly optimized version of Object::Equals. Whenever you
15042 // change something functionality wise in here, remember to update the
15043 // Object::Equals method as well.
15044
15045 Label if_equal(this), if_notequal(this), do_float_comparison(this),
15046 do_right_stringtonumber(this, Label::kDeferred), end(this);
15048 TVARIABLE(Float64T, var_left_float);
15049 TVARIABLE(Float64T, var_right_float);
15050
15051 // We can avoid code duplication by exploiting the fact that abstract equality
15052 // is symmetric.
15053 Label use_symmetry(this);
15054
15055 // We might need to loop several times due to ToPrimitive and/or ToNumber
15056 // conversions.
15057 TVARIABLE(Object, var_left, left);
15058 TVARIABLE(Object, var_right, right);
15059 VariableList loop_variable_list({&var_left, &var_right}, zone());
15060 if (var_type_feedback != nullptr) {
15061 // Initialize the type feedback to None. The current feedback will be
15062 // combined with the previous feedback.
15063 OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
15064 loop_variable_list.push_back(var_type_feedback);
15065 }
15066 Label loop(this, loop_variable_list);
15067 Goto(&loop);
15068 BIND(&loop);
15069 {
15070 left = var_left.value();
15071 right = var_right.value();
15072
15073 Label if_notsame(this);
15074 GotoIf(TaggedNotEqual(left, right), &if_notsame);
15075 {
15076 // {left} and {right} reference the exact same value, yet we need special
15077 // treatment for HeapNumber, as NaN is not equal to NaN.
15078 GenerateEqual_Same(left, &if_equal, &if_notequal, var_type_feedback);
15079 }
15080
15081 BIND(&if_notsame);
15082 Label if_left_smi(this), if_left_not_smi(this);
15083 Branch(TaggedIsSmi(left), &if_left_smi, &if_left_not_smi);
15084
15085 BIND(&if_left_smi);
15086 {
15087 Label if_right_smi(this), if_right_not_smi(this);
15088 CombineFeedback(var_type_feedback,
15089 CompareOperationFeedback::kSignedSmall);
15090 Branch(TaggedIsSmi(right), &if_right_smi, &if_right_not_smi);
15091
15092 BIND(&if_right_smi);
15093 {
15094 // We have already checked for {left} and {right} being the same value,
15095 // so when we get here they must be different Smis.
15096 Goto(&if_notequal);
15097 }
15098
15099 BIND(&if_right_not_smi);
15100 {
15101 TNode<Map> right_map = LoadMap(CAST(right));
15102 Label if_right_heapnumber(this), if_right_oddball(this),
15103 if_right_bigint(this, Label::kDeferred),
15104 if_right_receiver(this, Label::kDeferred);
15105 GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
15106
15107 // {left} is Smi and {right} is not HeapNumber or Smi.
15108 TNode<Uint16T> right_type = LoadMapInstanceType(right_map);
15109 GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
15110 GotoIf(IsOddballInstanceType(right_type), &if_right_oddball);
15111 GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
15112 GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
15113 CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
15114 Goto(&if_notequal);
15115
15116 BIND(&if_right_heapnumber);
15117 {
15118 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
15119 var_left_float = SmiToFloat64(CAST(left));
15120 var_right_float = LoadHeapNumberValue(CAST(right));
15121 Goto(&do_float_comparison);
15122 }
15123
15124 BIND(&if_right_oddball);
15125 {
15126 Label if_right_boolean(this);
15127 GotoIf(IsBooleanMap(right_map), &if_right_boolean);
15128 CombineFeedback(var_type_feedback,
15129 CompareOperationFeedback::kOddball);
15130 Goto(&if_notequal);
15131
15132 BIND(&if_right_boolean);
15133 {
15134 CombineFeedback(var_type_feedback,
15135 CompareOperationFeedback::kBoolean);
15136 var_right =
15137 LoadObjectField(CAST(right), offsetof(Oddball, to_number_));
15138 Goto(&loop);
15139 }
15140 }
15141
15142 BIND(&if_right_bigint);
15143 {
15144 CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
15145 result = CAST(CallRuntime(Runtime::kBigIntEqualToNumber,
15146 NoContextConstant(), right, left));
15147 Goto(&end);
15148 }
15149
15150 BIND(&if_right_receiver);
15151 {
15152 CombineFeedback(var_type_feedback,
15153 CompareOperationFeedback::kReceiver);
15154 var_right = CallBuiltin(Builtins::NonPrimitiveToPrimitive(),
15155 context(), right);
15156 Goto(&loop);
15157 }
15158 }
15159 }
15160
15161 BIND(&if_left_not_smi);
15162 {
15163 GotoIf(TaggedIsSmi(right), &use_symmetry);
15164
15165 Label if_left_symbol(this), if_left_number(this),
15166 if_left_string(this, Label::kDeferred),
15167 if_left_bigint(this, Label::kDeferred), if_left_oddball(this),
15168 if_left_receiver(this);
15169
15170 TNode<Map> left_map = LoadMap(CAST(left));
15171 TNode<Map> right_map = LoadMap(CAST(right));
15172 TNode<Uint16T> left_type = LoadMapInstanceType(left_map);
15173 TNode<Uint16T> right_type = LoadMapInstanceType(right_map);
15174
15175 GotoIf(IsStringInstanceType(left_type), &if_left_string);
15176 GotoIf(IsSymbolInstanceType(left_type), &if_left_symbol);
15177 GotoIf(IsHeapNumberInstanceType(left_type), &if_left_number);
15178 GotoIf(IsOddballInstanceType(left_type), &if_left_oddball);
15179 Branch(IsBigIntInstanceType(left_type), &if_left_bigint,
15180 &if_left_receiver);
15181
15182 BIND(&if_left_string);
15183 {
15184 GotoIfNot(IsStringInstanceType(right_type), &use_symmetry);
15185 Label combine_feedback(this);
15186 BranchIfStringEqual(CAST(left), CAST(right), &combine_feedback,
15187 &combine_feedback, &result);
15188 BIND(&combine_feedback);
15189 {
15190 CombineFeedback(var_type_feedback,
15191 SmiOr(CollectFeedbackForString(left_type),
15192 CollectFeedbackForString(right_type)));
15193 Goto(&end);
15194 }
15195 }
15196
15197 BIND(&if_left_number);
15198 {
15199 Label if_right_not_number(this);
15200
15201 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
15202 GotoIf(Word32NotEqual(left_type, right_type), &if_right_not_number);
15203
15204 var_left_float = LoadHeapNumberValue(CAST(left));
15205 var_right_float = LoadHeapNumberValue(CAST(right));
15206 Goto(&do_float_comparison);
15207
15208 BIND(&if_right_not_number);
15209 {
15210 Label if_right_oddball(this);
15211
15212 GotoIf(IsStringInstanceType(right_type), &do_right_stringtonumber);
15213 GotoIf(IsOddballInstanceType(right_type), &if_right_oddball);
15214 GotoIf(IsBigIntInstanceType(right_type), &use_symmetry);
15215 GotoIf(IsJSReceiverInstanceType(right_type), &use_symmetry);
15216 CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
15217 Goto(&if_notequal);
15218
15219 BIND(&if_right_oddball);
15220 {
15221 Label if_right_boolean(this);
15222 GotoIf(IsBooleanMap(right_map), &if_right_boolean);
15223 CombineFeedback(var_type_feedback,
15224 CompareOperationFeedback::kOddball);
15225 Goto(&if_notequal);
15226
15227 BIND(&if_right_boolean);
15228 {
15229 CombineFeedback(var_type_feedback,
15230 CompareOperationFeedback::kBoolean);
15231 var_right =
15232 LoadObjectField(CAST(right), offsetof(Oddball, to_number_));
15233 Goto(&loop);
15234 }
15235 }
15236 }
15237 }
15238
15239 BIND(&if_left_bigint);
15240 {
15241 Label if_right_heapnumber(this), if_right_bigint(this),
15242 if_right_string(this), if_right_boolean(this);
15243 CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
15244
15245 GotoIf(IsHeapNumberMap(right_map), &if_right_heapnumber);
15246 GotoIf(IsBigIntInstanceType(right_type), &if_right_bigint);
15247 GotoIf(IsStringInstanceType(right_type), &if_right_string);
15248 GotoIf(IsBooleanMap(right_map), &if_right_boolean);
15249 Branch(IsJSReceiverInstanceType(right_type), &use_symmetry,
15250 &if_notequal);
15251
15252 BIND(&if_right_heapnumber);
15253 {
15254 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
15255 result = CAST(CallRuntime(Runtime::kBigIntEqualToNumber,
15256 NoContextConstant(), left, right));
15257 Goto(&end);
15258 }
15259
15260 BIND(&if_right_bigint);
15261 {
15262 if (Is64()) {
15263 Label if_both_bigint(this);
15264 GotoIfLargeBigInt(CAST(left), &if_both_bigint);
15265 GotoIfLargeBigInt(CAST(right), &if_both_bigint);
15266
15267 OverwriteFeedback(var_type_feedback,
15268 CompareOperationFeedback::kBigInt64);
15269 BigInt64Comparison(Operation::kEqual, left, right, &if_equal,
15270 &if_notequal);
15271 BIND(&if_both_bigint);
15272 }
15273
15274 CombineFeedback(var_type_feedback, CompareOperationFeedback::kBigInt);
15275 result = CAST(CallBuiltin(Builtin::kBigIntEqual, NoContextConstant(),
15276 left, right));
15277 Goto(&end);
15278 }
15279
15280 BIND(&if_right_string);
15281 {
15282 CombineFeedback(var_type_feedback, CompareOperationFeedback::kString);
15283 result = CAST(CallRuntime(Runtime::kBigIntEqualToString,
15284 NoContextConstant(), left, right));
15285 Goto(&end);
15286 }
15287
15288 BIND(&if_right_boolean);
15289 {
15290 CombineFeedback(var_type_feedback,
15291 CompareOperationFeedback::kBoolean);
15292 var_right =
15293 LoadObjectField(CAST(right), offsetof(Oddball, to_number_));
15294 Goto(&loop);
15295 }
15296 }
15297
15298 BIND(&if_left_oddball);
15299 {
15300 Label if_left_boolean(this), if_left_not_boolean(this);
15301 GotoIf(IsBooleanMap(left_map), &if_left_boolean);
15302 if (var_type_feedback != nullptr) {
15303 CombineFeedback(var_type_feedback,
15304 CompareOperationFeedback::kNullOrUndefined);
15305 GotoIf(IsUndetectableMap(left_map), &if_left_not_boolean);
15306 }
15307 Goto(&if_left_not_boolean);
15308
15309 BIND(&if_left_not_boolean);
15310 {
15311 // {left} is either Null or Undefined. Check if {right} is
15312 // undetectable (which includes Null and Undefined).
15313 Label if_right_undetectable(this), if_right_number(this),
15314 if_right_oddball(this),
15315 if_right_not_number_or_oddball_or_undetectable(this);
15316 GotoIf(IsUndetectableMap(right_map), &if_right_undetectable);
15317 GotoIf(IsHeapNumberInstanceType(right_type), &if_right_number);
15318 GotoIf(IsOddballInstanceType(right_type), &if_right_oddball);
15319 Goto(&if_right_not_number_or_oddball_or_undetectable);
15320
15321 BIND(&if_right_undetectable);
15322 {
15323 // If {right} is undetectable, it must be either also
15324 // Null or Undefined, or a Receiver (aka document.all).
15325 CombineFeedback(
15326 var_type_feedback,
15327 CompareOperationFeedback::kReceiverOrNullOrUndefined);
15328 Goto(&if_equal);
15329 }
15330
15331 BIND(&if_right_number);
15332 {
15333 CombineFeedback(var_type_feedback,
15334 CompareOperationFeedback::kNumber);
15335 Goto(&if_notequal);
15336 }
15337
15338 BIND(&if_right_oddball);
15339 {
15340 CombineFeedback(var_type_feedback,
15341 CompareOperationFeedback::kOddball);
15342 Goto(&if_notequal);
15343 }
15344
15345 BIND(&if_right_not_number_or_oddball_or_undetectable);
15346 {
15347 if (var_type_feedback != nullptr) {
15348 // Track whether {right} is Null, Undefined or Receiver.
15349 CombineFeedback(
15350 var_type_feedback,
15351 CompareOperationFeedback::kReceiverOrNullOrUndefined);
15352 GotoIf(IsJSReceiverInstanceType(right_type), &if_notequal);
15353 CombineFeedback(var_type_feedback,
15354 CompareOperationFeedback::kAny);
15355 }
15356 Goto(&if_notequal);
15357 }
15358 }
15359
15360 BIND(&if_left_boolean);
15361 {
15362 CombineFeedback(var_type_feedback,
15363 CompareOperationFeedback::kBoolean);
15364
15365 // If {right} is a Boolean too, it must be a different Boolean.
15366 GotoIf(TaggedEqual(right_map, left_map), &if_notequal);
15367
15368 // Otherwise, convert {left} to number and try again.
15369 var_left = LoadObjectField(CAST(left), offsetof(Oddball, to_number_));
15370 Goto(&loop);
15371 }
15372 }
15373
15374 BIND(&if_left_symbol);
15375 {
15376 Label if_right_receiver(this);
15377 GotoIf(IsJSReceiverInstanceType(right_type), &if_right_receiver);
15378 // {right} is not a JSReceiver and also not the same Symbol as {left},
15379 // so the result is "not equal".
15380 if (var_type_feedback != nullptr) {
15381 Label if_right_symbol(this);
15382 GotoIf(IsSymbolInstanceType(right_type), &if_right_symbol);
15383 *var_type_feedback = SmiConstant(CompareOperationFeedback::kAny);
15384 Goto(&if_notequal);
15385
15386 BIND(&if_right_symbol);
15387 {
15388 CombineFeedback(var_type_feedback,
15389 CompareOperationFeedback::kSymbol);
15390 Goto(&if_notequal);
15391 }
15392 } else {
15393 Goto(&if_notequal);
15394 }
15395
15396 BIND(&if_right_receiver);
15397 {
15398 // {left} is a Primitive and {right} is a JSReceiver, so swapping
15399 // the order is not observable.
15400 if (var_type_feedback != nullptr) {
15401 *var_type_feedback = SmiConstant(CompareOperationFeedback::kAny);
15402 }
15403 Goto(&use_symmetry);
15404 }
15405 }
15406
15407 BIND(&if_left_receiver);
15408 {
15409 CSA_DCHECK(this, IsJSReceiverInstanceType(left_type));
15410 Label if_right_receiver(this), if_right_not_receiver(this);
15411 Branch(IsJSReceiverInstanceType(right_type), &if_right_receiver,
15412 &if_right_not_receiver);
15413
15414 BIND(&if_right_receiver);
15415 {
15416 // {left} and {right} are different JSReceiver references.
15417 CombineFeedback(var_type_feedback,
15418 CompareOperationFeedback::kReceiver);
15419 Goto(&if_notequal);
15420 }
15421
15422 BIND(&if_right_not_receiver);
15423 {
15424 // Check if {right} is undetectable, which means it must be Null
15425 // or Undefined, since we already ruled out Receiver for {right}.
15426 Label if_right_undetectable(this),
15427 if_right_not_undetectable(this, Label::kDeferred);
15428 Branch(IsUndetectableMap(right_map), &if_right_undetectable,
15429 &if_right_not_undetectable);
15430
15431 BIND(&if_right_undetectable);
15432 {
15433 // When we get here, {right} must be either Null or Undefined.
15434 CSA_DCHECK(this, IsNullOrUndefined(right));
15435 if (var_type_feedback != nullptr) {
15436 *var_type_feedback = SmiConstant(
15437 CompareOperationFeedback::kReceiverOrNullOrUndefined);
15438 }
15439 Branch(IsUndetectableMap(left_map), &if_equal, &if_notequal);
15440 }
15441
15442 BIND(&if_right_not_undetectable);
15443 {
15444 // {right} is a Primitive, and neither Null or Undefined;
15445 // convert {left} to Primitive too.
15446 CombineFeedback(var_type_feedback, CompareOperationFeedback::kAny);
15447 var_left = CallBuiltin(Builtins::NonPrimitiveToPrimitive(),
15448 context(), left);
15449 Goto(&loop);
15450 }
15451 }
15452 }
15453 }
15454
15455 BIND(&do_right_stringtonumber);
15456 {
15457 if (var_type_feedback != nullptr) {
15458 TNode<Map> right_map = LoadMap(CAST(right));
15459 TNode<Uint16T> right_type = LoadMapInstanceType(right_map);
15460 CombineFeedback(var_type_feedback,
15461 CollectFeedbackForString(right_type));
15462 }
15463 var_right = CallBuiltin(Builtin::kStringToNumber, context(), right);
15464 Goto(&loop);
15465 }
15466
15467 BIND(&use_symmetry);
15468 {
15469 var_left = right;
15470 var_right = left;
15471 Goto(&loop);
15472 }
15473 }
15474
15475 BIND(&do_float_comparison);
15476 {
15477 Branch(Float64Equal(var_left_float.value(), var_right_float.value()),
15478 &if_equal, &if_notequal);
15479 }
15480
15481 BIND(&if_equal);
15482 {
15483 result = TrueConstant();
15484 Goto(&end);
15485 }
15486
15487 BIND(&if_notequal);
15488 {
15489 result = FalseConstant();
15490 Goto(&end);
15491 }
15492
15493 BIND(&end);
15494 return result.value();
15495}
15496
15497TNode<Boolean> CodeStubAssembler::StrictEqual(
15498 TNode<Object> lhs, TNode<Object> rhs, TVariable<Smi>* var_type_feedback) {
15499 // Pseudo-code for the algorithm below:
15500 //
15501 // if (lhs == rhs) {
15502 // if (lhs->IsHeapNumber()) return Cast<HeapNumber>(lhs)->value() != NaN;
15503 // return true;
15504 // }
15505 // if (!IsSmi(lhs)) {
15506 // if (lhs->IsHeapNumber()) {
15507 // if (IsSmi(rhs)) {
15508 // return Smi::ToInt(rhs) == Cast<HeapNumber>(lhs)->value();
15509 // } else if (rhs->IsHeapNumber()) {
15510 // return Cast<HeapNumber>(rhs)->value() ==
15511 // Cast<HeapNumber>(lhs)->value();
15512 // } else {
15513 // return false;
15514 // }
15515 // } else {
15516 // if (IsSmi(rhs)) {
15517 // return false;
15518 // } else {
15519 // if (lhs->IsString()) {
15520 // if (rhs->IsString()) {
15521 // return %StringEqual(lhs, rhs);
15522 // } else {
15523 // return false;
15524 // }
15525 // } else if (lhs->IsBigInt()) {
15526 // if (rhs->IsBigInt()) {
15527 // return %BigIntEqualToBigInt(lhs, rhs);
15528 // } else {
15529 // return false;
15530 // }
15531 // } else {
15532 // return false;
15533 // }
15534 // }
15535 // }
15536 // } else {
15537 // if (IsSmi(rhs)) {
15538 // return false;
15539 // } else {
15540 // if (rhs->IsHeapNumber()) {
15541 // return Smi::ToInt(lhs) == Cast<HeapNumber>(rhs)->value();
15542 // } else {
15543 // return false;
15544 // }
15545 // }
15546 // }
15547
15548 Label if_equal(this), if_notequal(this), if_not_equivalent_types(this),
15549 end(this);
15551
15552 OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kNone);
15553
15554 // Check if {lhs} and {rhs} refer to the same object.
15555 Label if_same(this), if_notsame(this);
15556 Branch(TaggedEqual(lhs, rhs), &if_same, &if_notsame);
15557
15558 BIND(&if_same);
15559 {
15560 // The {lhs} and {rhs} reference the exact same value, yet we need special
15561 // treatment for HeapNumber, as NaN is not equal to NaN.
15562 GenerateEqual_Same(lhs, &if_equal, &if_notequal, var_type_feedback);
15563 }
15564
15565 BIND(&if_notsame);
15566 {
15567 // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
15568 // BigInt and String they can still be considered equal.
15569
15570 // Check if {lhs} is a Smi or a HeapObject.
15571 Label if_lhsissmi(this), if_lhsisnotsmi(this);
15572 Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
15573
15574 BIND(&if_lhsisnotsmi);
15575 {
15576 // Load the map of {lhs}.
15577 TNode<Map> lhs_map = LoadMap(CAST(lhs));
15578
15579 // Check if {lhs} is a HeapNumber.
15580 Label if_lhsisnumber(this), if_lhsisnotnumber(this);
15581 Branch(IsHeapNumberMap(lhs_map), &if_lhsisnumber, &if_lhsisnotnumber);
15582
15583 BIND(&if_lhsisnumber);
15584 {
15585 // Check if {rhs} is a Smi or a HeapObject.
15586 Label if_rhsissmi(this), if_rhsisnotsmi(this);
15587 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
15588
15589 BIND(&if_rhsissmi);
15590 {
15591 // Convert {lhs} and {rhs} to floating point values.
15592 TNode<Float64T> lhs_value = LoadHeapNumberValue(CAST(lhs));
15593 TNode<Float64T> rhs_value = SmiToFloat64(CAST(rhs));
15594
15595 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
15596
15597 // Perform a floating point comparison of {lhs} and {rhs}.
15598 Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
15599 }
15600
15601 BIND(&if_rhsisnotsmi);
15602 {
15603 TNode<HeapObject> rhs_ho = CAST(rhs);
15604 // Load the map of {rhs}.
15605 TNode<Map> rhs_map = LoadMap(rhs_ho);
15606
15607 // Check if {rhs} is also a HeapNumber.
15608 Label if_rhsisnumber(this), if_rhsisnotnumber(this);
15609 Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
15610
15611 BIND(&if_rhsisnumber);
15612 {
15613 // Convert {lhs} and {rhs} to floating point values.
15614 TNode<Float64T> lhs_value = LoadHeapNumberValue(CAST(lhs));
15615 TNode<Float64T> rhs_value = LoadHeapNumberValue(CAST(rhs));
15616
15617 CombineFeedback(var_type_feedback,
15618 CompareOperationFeedback::kNumber);
15619
15620 // Perform a floating point comparison of {lhs} and {rhs}.
15621 Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
15622 }
15623
15624 BIND(&if_rhsisnotnumber);
15625 Goto(&if_not_equivalent_types);
15626 }
15627 }
15628
15629 BIND(&if_lhsisnotnumber);
15630 {
15631 // Check if {rhs} is a Smi or a HeapObject.
15632 Label if_rhsissmi(this), if_rhsisnotsmi(this);
15633 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
15634
15635 BIND(&if_rhsissmi);
15636 Goto(&if_not_equivalent_types);
15637
15638 BIND(&if_rhsisnotsmi);
15639 {
15640 // Load the instance type of {lhs}.
15641 TNode<Uint16T> lhs_instance_type = LoadMapInstanceType(lhs_map);
15642
15643 // Check if {lhs} is a String.
15644 Label if_lhsisstring(this, Label::kDeferred), if_lhsisnotstring(this);
15645 Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
15646 &if_lhsisnotstring);
15647
15648 BIND(&if_lhsisstring);
15649 {
15650 // Load the instance type of {rhs}.
15651 TNode<Uint16T> rhs_instance_type = LoadInstanceType(CAST(rhs));
15652
15653 // Check if {rhs} is also a String.
15654 Label if_rhsisstring(this, Label::kDeferred),
15655 if_rhsisnotstring(this);
15656 Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
15657 &if_rhsisnotstring);
15658
15659 BIND(&if_rhsisstring);
15660 {
15661 if (var_type_feedback != nullptr) {
15662 TNode<Smi> lhs_feedback =
15663 CollectFeedbackForString(lhs_instance_type);
15664 TNode<Smi> rhs_feedback =
15665 CollectFeedbackForString(rhs_instance_type);
15666 *var_type_feedback = SmiOr(lhs_feedback, rhs_feedback);
15667 }
15668 BranchIfStringEqual(CAST(lhs), CAST(rhs), &end, &end, &result);
15669 }
15670
15671 BIND(&if_rhsisnotstring);
15672 Goto(&if_not_equivalent_types);
15673 }
15674
15675 BIND(&if_lhsisnotstring);
15676 {
15677 // Check if {lhs} is a BigInt.
15678 Label if_lhsisbigint(this), if_lhsisnotbigint(this);
15679 Branch(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint,
15680 &if_lhsisnotbigint);
15681
15682 BIND(&if_lhsisbigint);
15683 {
15684 // Load the instance type of {rhs}.
15685 TNode<Uint16T> rhs_instance_type = LoadInstanceType(CAST(rhs));
15686
15687 // Check if {rhs} is also a BigInt.
15688 Label if_rhsisbigint(this, Label::kDeferred),
15689 if_rhsisnotbigint(this);
15690 Branch(IsBigIntInstanceType(rhs_instance_type), &if_rhsisbigint,
15691 &if_rhsisnotbigint);
15692
15693 BIND(&if_rhsisbigint);
15694 {
15695 if (Is64()) {
15696 Label if_both_bigint(this);
15697 GotoIfLargeBigInt(CAST(lhs), &if_both_bigint);
15698 GotoIfLargeBigInt(CAST(rhs), &if_both_bigint);
15699
15700 OverwriteFeedback(var_type_feedback,
15701 CompareOperationFeedback::kBigInt64);
15702 BigInt64Comparison(Operation::kStrictEqual, lhs, rhs,
15703 &if_equal, &if_notequal);
15704 BIND(&if_both_bigint);
15705 }
15706
15707 CombineFeedback(var_type_feedback,
15708 CompareOperationFeedback::kBigInt);
15709 result = CAST(CallBuiltin(Builtin::kBigIntEqual,
15710 NoContextConstant(), lhs, rhs));
15711 Goto(&end);
15712 }
15713
15714 BIND(&if_rhsisnotbigint);
15715 Goto(&if_not_equivalent_types);
15716 }
15717
15718 BIND(&if_lhsisnotbigint);
15719 if (var_type_feedback != nullptr) {
15720 // Load the instance type of {rhs}.
15721 TNode<Map> rhs_map = LoadMap(CAST(rhs));
15722 TNode<Uint16T> rhs_instance_type = LoadMapInstanceType(rhs_map);
15723
15724 Label if_lhsissymbol(this), if_lhsisreceiver(this),
15725 if_lhsisoddball(this);
15726 GotoIf(IsJSReceiverInstanceType(lhs_instance_type),
15727 &if_lhsisreceiver);
15728 GotoIf(IsBooleanMap(lhs_map), &if_not_equivalent_types);
15729 GotoIf(IsOddballInstanceType(lhs_instance_type),
15730 &if_lhsisoddball);
15731 Branch(IsSymbolInstanceType(lhs_instance_type), &if_lhsissymbol,
15732 &if_not_equivalent_types);
15733
15734 BIND(&if_lhsisreceiver);
15735 {
15736 GotoIf(IsBooleanMap(rhs_map), &if_not_equivalent_types);
15737 OverwriteFeedback(var_type_feedback,
15738 CompareOperationFeedback::kReceiver);
15739 GotoIf(IsJSReceiverInstanceType(rhs_instance_type),
15740 &if_notequal);
15741 OverwriteFeedback(
15742 var_type_feedback,
15743 CompareOperationFeedback::kReceiverOrNullOrUndefined);
15744 GotoIf(IsOddballInstanceType(rhs_instance_type), &if_notequal);
15745 Goto(&if_not_equivalent_types);
15746 }
15747
15748 BIND(&if_lhsisoddball);
15749 {
15750 static_assert(LAST_PRIMITIVE_HEAP_OBJECT_TYPE ==
15751 ODDBALL_TYPE);
15752 GotoIf(Int32LessThan(rhs_instance_type,
15753 Int32Constant(ODDBALL_TYPE)),
15754 &if_not_equivalent_types);
15755
15756 // TODO(marja): This is wrong, since null == true will be
15757 // detected as ReceiverOrNullOrUndefined, but true is not
15758 // receiver or null or undefined.
15759 OverwriteFeedback(
15760 var_type_feedback,
15761 CompareOperationFeedback::kReceiverOrNullOrUndefined);
15762 Goto(&if_notequal);
15763 }
15764
15765 BIND(&if_lhsissymbol);
15766 {
15767 GotoIfNot(IsSymbolInstanceType(rhs_instance_type),
15768 &if_not_equivalent_types);
15769 OverwriteFeedback(var_type_feedback,
15770 CompareOperationFeedback::kSymbol);
15771 Goto(&if_notequal);
15772 }
15773 } else {
15774 Goto(&if_notequal);
15775 }
15776 }
15777 }
15778 }
15779 }
15780
15781 BIND(&if_lhsissmi);
15782 {
15783 // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
15784 // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
15785 // HeapNumber with an equal floating point value.
15786
15787 // Check if {rhs} is a Smi or a HeapObject.
15788 Label if_rhsissmi(this), if_rhsisnotsmi(this);
15789 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
15790
15791 BIND(&if_rhsissmi);
15792 CombineFeedback(var_type_feedback,
15793 CompareOperationFeedback::kSignedSmall);
15794 Goto(&if_notequal);
15795
15796 BIND(&if_rhsisnotsmi);
15797 {
15798 // Load the map of the {rhs}.
15799 TNode<Map> rhs_map = LoadMap(CAST(rhs));
15800
15801 // The {rhs} could be a HeapNumber with the same value as {lhs}.
15802 GotoIfNot(IsHeapNumberMap(rhs_map), &if_not_equivalent_types);
15803
15804 // Convert {lhs} and {rhs} to floating point values.
15805 TNode<Float64T> lhs_value = SmiToFloat64(CAST(lhs));
15806 TNode<Float64T> rhs_value = LoadHeapNumberValue(CAST(rhs));
15807
15808 CombineFeedback(var_type_feedback, CompareOperationFeedback::kNumber);
15809
15810 // Perform a floating point comparison of {lhs} and {rhs}.
15811 Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
15812 }
15813 }
15814 }
15815
15816 BIND(&if_equal);
15817 {
15818 result = TrueConstant();
15819 Goto(&end);
15820 }
15821
15822 BIND(&if_not_equivalent_types);
15823 {
15824 OverwriteFeedback(var_type_feedback, CompareOperationFeedback::kAny);
15825 Goto(&if_notequal);
15826 }
15827
15828 BIND(&if_notequal);
15829 {
15830 result = FalseConstant();
15831 Goto(&end);
15832 }
15833
15834 BIND(&end);
15835 return result.value();
15836}
15837
15838void CodeStubAssembler::BranchIfStringEqual(TNode<String> lhs,
15839 TNode<IntPtrT> lhs_length,
15840 TNode<String> rhs,
15841 TNode<IntPtrT> rhs_length,
15842 Label* if_true, Label* if_false,
15844 // Callers must handle the case where {lhs} and {rhs} refer to the same
15845 // String object.
15846 CSA_DCHECK(this, TaggedNotEqual(lhs, rhs));
15847
15848 Label length_equal(this), length_not_equal(this);
15849 Branch(IntPtrEqual(lhs_length, rhs_length), &length_equal, &length_not_equal);
15850
15851 BIND(&length_not_equal);
15852 {
15853 if (result != nullptr) *result = FalseConstant();
15854 Goto(if_false);
15855 }
15856
15857 BIND(&length_equal);
15858 {
15859 TNode<Boolean> value = CAST(CallBuiltin(
15860 Builtin::kStringEqual, NoContextConstant(), lhs, rhs, lhs_length));
15861 if (result != nullptr) {
15862 *result = value;
15863 }
15864 if (if_true == if_false) {
15865 Goto(if_true);
15866 } else {
15867 Branch(TaggedEqual(value, TrueConstant()), if_true, if_false);
15868 }
15869 }
15870}
15871
15872// ECMA#sec-samevalue
15873// This algorithm differs from the Strict Equality Comparison Algorithm in its
15874// treatment of signed zeroes and NaNs.
15875void CodeStubAssembler::BranchIfSameValue(TNode<Object> lhs, TNode<Object> rhs,
15876 Label* if_true, Label* if_false,
15877 SameValueMode mode) {
15878 TVARIABLE(Float64T, var_lhs_value);
15879 TVARIABLE(Float64T, var_rhs_value);
15880 Label do_fcmp(this);
15881
15882 // Immediately jump to {if_true} if {lhs} == {rhs}, because - unlike
15883 // StrictEqual - SameValue considers two NaNs to be equal.
15884 GotoIf(TaggedEqual(lhs, rhs), if_true);
15885
15886 // Check if the {lhs} is a Smi.
15887 Label if_lhsissmi(this), if_lhsisheapobject(this);
15888 Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisheapobject);
15889
15890 BIND(&if_lhsissmi);
15891 {
15892 // Since {lhs} is a Smi, the comparison can only yield true
15893 // iff the {rhs} is a HeapNumber with the same float64 value.
15894 Branch(TaggedIsSmi(rhs), if_false, [&] {
15895 GotoIfNot(IsHeapNumber(CAST(rhs)), if_false);
15896 var_lhs_value = SmiToFloat64(CAST(lhs));
15897 var_rhs_value = LoadHeapNumberValue(CAST(rhs));
15898 Goto(&do_fcmp);
15899 });
15900 }
15901
15902 BIND(&if_lhsisheapobject);
15903 {
15904 // Check if the {rhs} is a Smi.
15905 Branch(
15906 TaggedIsSmi(rhs),
15907 [&] {
15908 // Since {rhs} is a Smi, the comparison can only yield true
15909 // iff the {lhs} is a HeapNumber with the same float64 value.
15910 GotoIfNot(IsHeapNumber(CAST(lhs)), if_false);
15911 var_lhs_value = LoadHeapNumberValue(CAST(lhs));
15912 var_rhs_value = SmiToFloat64(CAST(rhs));
15913 Goto(&do_fcmp);
15914 },
15915 [&] {
15916 // Now this can only yield true if either both {lhs} and {rhs} are
15917 // HeapNumbers with the same value, or both are Strings with the
15918 // same character sequence, or both are BigInts with the same
15919 // value.
15920 Label if_lhsisheapnumber(this), if_lhsisstring(this),
15921 if_lhsisbigint(this);
15922 const TNode<Map> lhs_map = LoadMap(CAST(lhs));
15923 GotoIf(IsHeapNumberMap(lhs_map), &if_lhsisheapnumber);
15924 if (mode != SameValueMode::kNumbersOnly) {
15925 const TNode<Uint16T> lhs_instance_type =
15926 LoadMapInstanceType(lhs_map);
15927 GotoIf(IsStringInstanceType(lhs_instance_type), &if_lhsisstring);
15928 GotoIf(IsBigIntInstanceType(lhs_instance_type), &if_lhsisbigint);
15929 }
15930 Goto(if_false);
15931
15932 BIND(&if_lhsisheapnumber);
15933 {
15934 GotoIfNot(IsHeapNumber(CAST(rhs)), if_false);
15935 var_lhs_value = LoadHeapNumberValue(CAST(lhs));
15936 var_rhs_value = LoadHeapNumberValue(CAST(rhs));
15937 Goto(&do_fcmp);
15938 }
15939
15940 if (mode != SameValueMode::kNumbersOnly) {
15941 BIND(&if_lhsisstring);
15942 {
15943 // Now we can only yield true if {rhs} is also a String
15944 // with the same sequence of characters.
15945 GotoIfNot(IsString(CAST(rhs)), if_false);
15946 BranchIfStringEqual(CAST(lhs), CAST(rhs), if_true, if_false);
15947 }
15948
15949 BIND(&if_lhsisbigint);
15950 {
15951 GotoIfNot(IsBigInt(CAST(rhs)), if_false);
15953 Runtime::kBigIntEqualToBigInt, NoContextConstant(), lhs, rhs);
15954 Branch(IsTrue(result), if_true, if_false);
15955 }
15956 }
15957 });
15958 }
15959
15960 BIND(&do_fcmp);
15961 {
15962 TNode<Float64T> lhs_value = UncheckedCast<Float64T>(var_lhs_value.value());
15963 TNode<Float64T> rhs_value = UncheckedCast<Float64T>(var_rhs_value.value());
15964 BranchIfSameNumberValue(lhs_value, rhs_value, if_true, if_false);
15965 }
15966}
15967
15968void CodeStubAssembler::BranchIfSameNumberValue(TNode<Float64T> lhs_value,
15969 TNode<Float64T> rhs_value,
15970 Label* if_true,
15971 Label* if_false) {
15972 Label if_equal(this), if_notequal(this);
15973 Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
15974
15975 BIND(&if_equal);
15976 {
15977 // We still need to handle the case when {lhs} and {rhs} are -0.0 and
15978 // 0.0 (or vice versa). Compare the high word to
15979 // distinguish between the two.
15980 const TNode<Uint32T> lhs_hi_word = Float64ExtractHighWord32(lhs_value);
15981 const TNode<Uint32T> rhs_hi_word = Float64ExtractHighWord32(rhs_value);
15982
15983 // If x is +0 and y is -0, return false.
15984 // If x is -0 and y is +0, return false.
15985 Branch(Word32Equal(lhs_hi_word, rhs_hi_word), if_true, if_false);
15986 }
15987
15988 BIND(&if_notequal);
15989 {
15990 // Return true iff both {rhs} and {lhs} are NaN.
15991 GotoIf(Float64Equal(lhs_value, lhs_value), if_false);
15992 Branch(Float64Equal(rhs_value, rhs_value), if_false, if_true);
15993 }
15994}
15995
15996TNode<Boolean> CodeStubAssembler::HasProperty(TNode<Context> context,
15997 TNode<JSAny> object,
15999 HasPropertyLookupMode mode) {
16000 Label call_runtime(this, Label::kDeferred), return_true(this),
16001 return_false(this), end(this), if_proxy(this, Label::kDeferred);
16002
16003 CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
16004 [this, &return_true](
16006 TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
16007 TNode<Name> unique_name, Label* next_holder, Label* if_bailout) {
16008 TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
16009 &return_true, next_holder, if_bailout);
16010 };
16011
16012 CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
16013 [this, &return_true, &return_false](
16015 TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
16016 TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
16017 TryLookupElement(holder, holder_map, holder_instance_type, index,
16018 &return_true, &return_false, next_holder, if_bailout);
16019 };
16020
16021 const bool kHandlePrivateNames = mode == HasPropertyLookupMode::kHasProperty;
16022 TryPrototypeChainLookup(object, object, key, lookup_property_in_holder,
16023 lookup_element_in_holder, &return_false,
16024 &call_runtime, &if_proxy, kHandlePrivateNames);
16025
16027
16028 BIND(&if_proxy);
16029 {
16030 TNode<Name> name = CAST(CallBuiltin(Builtin::kToName, context, key));
16031 switch (mode) {
16032 case kHasProperty:
16033 GotoIf(IsPrivateSymbol(name), &call_runtime);
16034
16035 result = CAST(
16036 CallBuiltin(Builtin::kProxyHasProperty, context, object, name));
16037 Goto(&end);
16038 break;
16039 case kForInHasProperty:
16040 Goto(&call_runtime);
16041 break;
16042 }
16043 }
16044
16045 BIND(&return_true);
16046 {
16047 result = TrueConstant();
16048 Goto(&end);
16049 }
16050
16051 BIND(&return_false);
16052 {
16053 result = FalseConstant();
16054 Goto(&end);
16055 }
16056
16057 BIND(&call_runtime);
16058 {
16059 Runtime::FunctionId fallback_runtime_function_id;
16060 switch (mode) {
16061 case kHasProperty:
16062 fallback_runtime_function_id = Runtime::kHasProperty;
16063 break;
16064 case kForInHasProperty:
16065 fallback_runtime_function_id = Runtime::kForInHasProperty;
16066 break;
16067 }
16068
16069 result =
16070 CAST(CallRuntime(fallback_runtime_function_id, context, object, key));
16071 Goto(&end);
16072 }
16073
16074 BIND(&end);
16075 CSA_DCHECK(this, IsBoolean(result.value()));
16076 return result.value();
16077}
16078
// Prepares for-in iteration state. {enumerator} is either a Map (fast path:
// use its descriptor array's enum cache) or a FixedArray of keys (slow path).
// Writes the key array and its length through {cache_array_out} /
// {cache_length_out} and records ForIn feedback at {slot} in
// {maybe_feedback_vector} according to {update_feedback_mode}.
void CodeStubAssembler::ForInPrepare(TNode<HeapObject> enumerator,
                                     TNode<UintPtrT> slot,
                                     TNode<HeapObject> maybe_feedback_vector,
                                     TNode<FixedArray>* cache_array_out,
                                     TNode<Smi>* cache_length_out,
                                     UpdateFeedbackMode update_feedback_mode) {
  // Check if we're using an enum cache.
  TVARIABLE(FixedArray, cache_array);
  TVARIABLE(Smi, cache_length);
  Label if_fast(this), if_slow(this, Label::kDeferred), out(this);
  Branch(IsMap(enumerator), &if_fast, &if_slow);

  BIND(&if_fast);
  {
    // Load the enumeration length and cache from the {enumerator}.
    TNode<Map> map_enumerator = CAST(enumerator);
    TNode<Uint32T> enum_length = LoadMapEnumLength(map_enumerator);
    CSA_DCHECK(this, Word32NotEqual(enum_length,
                                    Uint32Constant(kInvalidEnumCacheSentinel)));
    TNode<DescriptorArray> descriptors = LoadMapDescriptors(map_enumerator);
    TNode<EnumCache> enum_cache = LoadObjectField<EnumCache>(
        descriptors, DescriptorArray::kEnumCacheOffset);
    TNode<FixedArray> enum_keys =
        LoadObjectField<FixedArray>(enum_cache, EnumCache::kKeysOffset);

    // Check if we have enum indices available.
    // Feedback distinguishes whether the indices array covers all enumerated
    // keys (kEnumCacheKeysAndIndices) or only the keys are usable.
    TNode<FixedArray> enum_indices =
        LoadObjectField<FixedArray>(enum_cache, EnumCache::kIndicesOffset);
    TNode<Uint32T> enum_indices_length =
        LoadAndUntagFixedArrayBaseLengthAsUint32(enum_indices);
    TNode<Smi> feedback = SelectSmiConstant(
        Uint32LessThanOrEqual(enum_length, enum_indices_length),
        static_cast<int>(ForInFeedback::kEnumCacheKeysAndIndices),
        static_cast<int>(ForInFeedback::kEnumCacheKeys));
    UpdateFeedback(feedback, maybe_feedback_vector, slot, update_feedback_mode);

    cache_array = enum_keys;
    cache_length = SmiFromUint32(enum_length);
    Goto(&out);
  }

  BIND(&if_slow);
  {
    // The {enumerator} is a FixedArray with all the keys to iterate.
    TNode<FixedArray> array_enumerator = CAST(enumerator);

    // Record the fact that we hit the for-in slow-path.
    UpdateFeedback(SmiConstant(ForInFeedback::kAny), maybe_feedback_vector,
                   slot, update_feedback_mode);

    cache_array = array_enumerator;
    cache_length = LoadFixedArrayBaseLength(array_enumerator);
    Goto(&out);
  }

  BIND(&out);
  *cache_array_out = cache_array.value();
  *cache_length_out = cache_length.value();
}
16138
// Implements the typeof operator for {value}, returning the interned result
// string. When {slot_id} and {maybe_feedback_vector} are both provided,
// TypeOf feedback is recorded as a side effect.
TNode<String> CodeStubAssembler::Typeof(
    TNode<Object> value, std::optional<TNode<UintPtrT>> slot_id,
    std::optional<TNode<HeapObject>> maybe_feedback_vector) {
  TVARIABLE(String, result_var);

  Label return_number(this, Label::kDeferred), if_oddball(this),
      return_function(this), return_undefined(this), return_object(this),
      return_string(this), return_bigint(this), return_symbol(this),
      return_result(this);

  // Smis are numbers.
  GotoIf(TaggedIsSmi(value), &return_number);

  TNode<HeapObject> value_heap_object = CAST(value);
  TNode<Map> map = LoadMap(value_heap_object);

  GotoIf(IsHeapNumberMap(map), &return_number);

  TNode<Uint16T> instance_type = LoadMapInstanceType(map);

  // Oddballs store their typeof string in the type_of_ field (see below).
  GotoIf(InstanceTypeEqual(instance_type, ODDBALL_TYPE), &if_oddball);

  // Classify via the map's callable/undetectable bits: callable-only maps are
  // "function"; any map with the undetectable bit set is "undefined".
  TNode<Int32T> callable_or_undetectable_mask =
      Word32And(LoadMapBitField(map),
                Int32Constant(Map::Bits1::IsCallableBit::kMask |
                              Map::Bits1::IsUndetectableBit::kMask));

  GotoIf(Word32Equal(callable_or_undetectable_mask,
                     Int32Constant(Map::Bits1::IsCallableBit::kMask)),
         &return_function);

  GotoIfNot(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
            &return_undefined);

  GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);

  GotoIf(IsStringInstanceType(instance_type), &return_string);

  GotoIf(IsBigIntInstanceType(instance_type), &return_bigint);

  GotoIf(IsSymbolInstanceType(instance_type), &return_symbol);

  // All instance types must have been classified above.
  Abort(AbortReason::kUnexpectedInstanceType);

  // Local helper: records feedback only when a feedback vector was supplied.
  auto UpdateFeedback = [&](TypeOfFeedback::Result feedback) {
    if (maybe_feedback_vector.has_value()) {
      MaybeUpdateFeedback(SmiConstant(feedback), *maybe_feedback_vector,
                          *slot_id);
    }
  };
  BIND(&return_number);
  {
    result_var = HeapConstantNoHole(isolate()->factory()->number_string());
    UpdateFeedback(TypeOfFeedback::kNumber);
    Goto(&return_result);
  }

  BIND(&if_oddball);
  {
    TNode<String> type =
        CAST(LoadObjectField(value_heap_object, offsetof(Oddball, type_of_)));
    result_var = type;
    UpdateFeedback(TypeOfFeedback::kAny);
    Goto(&return_result);
  }

  BIND(&return_function);
  {
    result_var = HeapConstantNoHole(isolate()->factory()->function_string());
    UpdateFeedback(TypeOfFeedback::kFunction);
    Goto(&return_result);
  }

  BIND(&return_undefined);
  {
    result_var = HeapConstantNoHole(isolate()->factory()->undefined_string());
    UpdateFeedback(TypeOfFeedback::kAny);
    Goto(&return_result);
  }

  BIND(&return_object);
  {
    result_var = HeapConstantNoHole(isolate()->factory()->object_string());
    UpdateFeedback(TypeOfFeedback::kAny);
    Goto(&return_result);
  }

  BIND(&return_string);
  {
    result_var = HeapConstantNoHole(isolate()->factory()->string_string());
    UpdateFeedback(TypeOfFeedback::kString);
    Goto(&return_result);
  }

  BIND(&return_bigint);
  {
    result_var = HeapConstantNoHole(isolate()->factory()->bigint_string());
    UpdateFeedback(TypeOfFeedback::kAny);
    Goto(&return_result);
  }

  BIND(&return_symbol);
  {
    result_var = HeapConstantNoHole(isolate()->factory()->symbol_string());
    UpdateFeedback(TypeOfFeedback::kAny);
    Goto(&return_result);
  }

  BIND(&return_result);
  return result_var.value();
}
16249
16250TNode<HeapObject> CodeStubAssembler::GetSuperConstructor(
16251 TNode<JSFunction> active_function) {
16252 TNode<Map> map = LoadMap(active_function);
16253 return LoadMapPrototype(map);
16254}
16255
// Walks the super-constructor chain of {this_function}, skipping default
// derived constructors. Jumps to {found_default_base_ctor} when the walk
// ends at a default base constructor, or to {found_something_else} whenever
// the generic path is required (debugger active, invalid array-iterator
// protector, class fields, private methods, or a non-default constructor).
// On exit, {constructor} holds the last constructor visited.
void CodeStubAssembler::FindNonDefaultConstructor(
    TNode<JSFunction> this_function, TVariable<Object>& constructor,
    Label* found_default_base_ctor, Label* found_something_else) {
  Label loop(this, &constructor);

  constructor = GetSuperConstructor(this_function);

  // Disable the optimization if the debugger is active, so that we can still
  // put breakpoints into default constructors.
  GotoIf(IsDebugActive(), found_something_else);

  // Disable the optimization if the array iterator has been changed. V8 uses
  // the array iterator for the spread in default ctors, even though it
  // shouldn't, according to the spec. This ensures that omitting default ctors
  // doesn't change the behavior. See crbug.com/v8/13249.
  GotoIf(IsArrayIteratorProtectorCellInvalid(), found_something_else);

  Goto(&loop);

  BIND(&loop);
  {
    // We know constructor can't be a SMI, since it's a prototype. If it's not a
    // JSFunction, the error will be thrown by the ThrowIfNotSuperConstructor
    // which follows this bytecode.
    GotoIfNot(IsJSFunction(CAST(constructor.value())), found_something_else);

    // If there are class fields, bail out. TODO(v8:13091): Handle them here.
    const TNode<SharedFunctionInfo> shared_function_info =
        LoadObjectField<SharedFunctionInfo>(
            CAST(constructor.value()), JSFunction::kSharedFunctionInfoOffset);
    const TNode<Uint32T> has_class_fields =
        DecodeWord32<SharedFunctionInfo::RequiresInstanceMembersInitializerBit>(
            LoadObjectField<Uint32T>(shared_function_info,
                                     SharedFunctionInfo::kFlagsOffset));

    GotoIf(Word32NotEqual(has_class_fields, Int32Constant(0)),
           found_something_else);

    // If there are private methods, bail out. TODO(v8:13091): Handle them here.
    TNode<Context> function_context =
        LoadJSFunctionContext(CAST(constructor.value()));
    TNode<ScopeInfo> scope_info = LoadScopeInfo(function_context);
    GotoIf(LoadScopeInfoClassScopeHasPrivateBrand(scope_info),
           found_something_else);

    const TNode<Uint32T> function_kind =
        LoadFunctionKind(CAST(constructor.value()));
    // A default base ctor -> stop the search.
    GotoIf(Word32Equal(
               function_kind,
               static_cast<uint32_t>(FunctionKind::kDefaultBaseConstructor)),
           found_default_base_ctor);

    // Something else than a default derived ctor (e.g., a non-default base
    // ctor, a non-default derived ctor, or a normal function) -> stop the
    // search.
    GotoIfNot(Word32Equal(function_kind,
                          static_cast<uint32_t>(
                              FunctionKind::kDefaultDerivedConstructor)),
              found_something_else);

    // A default derived ctor: keep climbing the super chain.
    constructor = GetSuperConstructor(CAST(constructor.value()));

    Goto(&loop);
  }
  // We don't need to re-check the protector, since the loop cannot call into
  // user code. Even if GetSuperConstructor returns a Proxy, we will throw since
  // it's not a constructor, and not invoke [[GetPrototypeOf]] on it.
  // TODO(v8:13091): make sure this is still valid after we handle class fields.
}
16326
// ES#sec-speciesconstructor: determines the constructor to be used for
// derived objects created from {object}, falling back to
// {default_constructor} when "constructor" is undefined or @@species is
// null/undefined. Throws a TypeError for non-object constructors or
// non-constructor species values.
TNode<JSReceiver> CodeStubAssembler::SpeciesConstructor(
    TNode<Context> context, TNode<JSAny> object,
    TNode<JSReceiver> default_constructor) {
  Isolate* isolate = this->isolate();
  TVARIABLE(JSReceiver, var_result, default_constructor);

  // 2. Let C be ? Get(O, "constructor").
  TNode<JSAny> constructor =
      GetProperty(context, object, isolate->factory()->constructor_string());

  // 3. If C is undefined, return defaultConstructor.
  Label out(this);
  GotoIf(IsUndefined(constructor), &out);

  // 4. If Type(C) is not Object, throw a TypeError exception.
  ThrowIfNotJSReceiver(context, constructor,
                       MessageTemplate::kConstructorNotReceiver, "");

  // 5. Let S be ? Get(C, @@species).
  TNode<Object> species =
      GetProperty(context, constructor, isolate->factory()->species_symbol());

  // 6. If S is either undefined or null, return defaultConstructor.
  GotoIf(IsNullOrUndefined(species), &out);

  // 7. If IsConstructor(S) is true, return S.
  Label throw_error(this);
  GotoIf(TaggedIsSmi(species), &throw_error);
  GotoIfNot(IsConstructorMap(LoadMap(CAST(species))), &throw_error);
  var_result = CAST(species);
  Goto(&out);

  // 8. Throw a TypeError exception.
  BIND(&throw_error);
  ThrowTypeError(context, MessageTemplate::kSpeciesNotConstructor);

  BIND(&out);
  return var_result.value();
}
16366
16367TNode<Boolean> CodeStubAssembler::InstanceOf(TNode<Object> object,
16368 TNode<JSAny> callable,
16369 TNode<Context> context) {
16370 TVARIABLE(Boolean, var_result);
16371 Label if_notcallable(this, Label::kDeferred),
16372 if_notreceiver(this, Label::kDeferred), if_otherhandler(this),
16373 if_nohandler(this, Label::kDeferred), return_true(this),
16374 return_false(this), return_result(this, &var_result);
16375
16376 // Ensure that the {callable} is actually a JSReceiver.
16377 GotoIf(TaggedIsSmi(callable), &if_notreceiver);
16378 GotoIfNot(IsJSReceiver(CAST(callable)), &if_notreceiver);
16379
16380 // Load the @@hasInstance property from {callable}.
16381 TNode<Object> inst_of_handler =
16382 GetProperty(context, callable, HasInstanceSymbolConstant());
16383
16384 // Optimize for the likely case where {inst_of_handler} is the builtin
16385 // Function.prototype[@@hasInstance] method, and emit a direct call in
16386 // that case without any additional checking.
16387 TNode<NativeContext> native_context = LoadNativeContext(context);
16388 TNode<JSFunction> function_has_instance = CAST(
16389 LoadContextElement(native_context, Context::FUNCTION_HAS_INSTANCE_INDEX));
16390 GotoIfNot(TaggedEqual(inst_of_handler, function_has_instance),
16391 &if_otherhandler);
16392 {
16393 // Call to Function.prototype[@@hasInstance] directly without using the
16394 // Builtins::Call().
16395 var_result = CAST(CallJSBuiltin(Builtin::kFunctionPrototypeHasInstance,
16396 context, inst_of_handler,
16397 UndefinedConstant(), // new_target
16398 callable, object));
16399 Goto(&return_result);
16400 }
16401
16402 BIND(&if_otherhandler);
16403 {
16404 // Check if there's actually an {inst_of_handler}.
16405 GotoIf(IsNull(inst_of_handler), &if_nohandler);
16406 GotoIf(IsUndefined(inst_of_handler), &if_nohandler);
16407
16408 // Call the {inst_of_handler} for {callable} and {object}.
16410 Call(context, inst_of_handler, ConvertReceiverMode::kNotNullOrUndefined,
16411 callable, object);
16412
16413 // Convert the {result} to a Boolean.
16414 BranchIfToBooleanIsTrue(result, &return_true, &return_false);
16415 }
16416
16417 BIND(&if_nohandler);
16418 {
16419 // Ensure that the {callable} is actually Callable.
16420 GotoIfNot(IsCallable(CAST(callable)), &if_notcallable);
16421
16422 // Use the OrdinaryHasInstance algorithm.
16423 var_result = CAST(
16424 CallBuiltin(Builtin::kOrdinaryHasInstance, context, callable, object));
16425 Goto(&return_result);
16426 }
16427
16428 BIND(&if_notcallable);
16429 { ThrowTypeError(context, MessageTemplate::kNonCallableInInstanceOfCheck); }
16430
16431 BIND(&if_notreceiver);
16432 { ThrowTypeError(context, MessageTemplate::kNonObjectInInstanceOfCheck); }
16433
16434 BIND(&return_true);
16435 var_result = TrueConstant();
16436 Goto(&return_result);
16437
16438 BIND(&return_false);
16439 var_result = FalseConstant();
16440 Goto(&return_result);
16441
16442 BIND(&return_result);
16443 return var_result.value();
16444}
16445
16446TNode<Number> CodeStubAssembler::NumberInc(TNode<Number> value) {
16447 TVARIABLE(Number, var_result);
16448 TVARIABLE(Float64T, var_finc_value);
16449 Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
16450 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
16451
16452 BIND(&if_issmi);
16453 {
16454 Label if_overflow(this);
16455 TNode<Smi> smi_value = CAST(value);
16456 TNode<Smi> one = SmiConstant(1);
16457 var_result = TrySmiAdd(smi_value, one, &if_overflow);
16458 Goto(&end);
16459
16460 BIND(&if_overflow);
16461 {
16462 var_finc_value = SmiToFloat64(smi_value);
16463 Goto(&do_finc);
16464 }
16465 }
16466
16467 BIND(&if_isnotsmi);
16468 {
16469 TNode<HeapNumber> heap_number_value = CAST(value);
16470
16471 // Load the HeapNumber value.
16472 var_finc_value = LoadHeapNumberValue(heap_number_value);
16473 Goto(&do_finc);
16474 }
16475
16476 BIND(&do_finc);
16477 {
16478 TNode<Float64T> finc_value = var_finc_value.value();
16479 TNode<Float64T> one = Float64Constant(1.0);
16480 TNode<Float64T> finc_result = Float64Add(finc_value, one);
16481 var_result = AllocateHeapNumberWithValue(finc_result);
16482 Goto(&end);
16483 }
16484
16485 BIND(&end);
16486 return var_result.value();
16487}
16488
16489TNode<Number> CodeStubAssembler::NumberDec(TNode<Number> value) {
16490 TVARIABLE(Number, var_result);
16491 TVARIABLE(Float64T, var_fdec_value);
16492 Label if_issmi(this), if_isnotsmi(this), do_fdec(this), end(this);
16493 Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);
16494
16495 BIND(&if_issmi);
16496 {
16497 TNode<Smi> smi_value = CAST(value);
16498 TNode<Smi> one = SmiConstant(1);
16499 Label if_overflow(this);
16500 var_result = TrySmiSub(smi_value, one, &if_overflow);
16501 Goto(&end);
16502
16503 BIND(&if_overflow);
16504 {
16505 var_fdec_value = SmiToFloat64(smi_value);
16506 Goto(&do_fdec);
16507 }
16508 }
16509
16510 BIND(&if_isnotsmi);
16511 {
16512 TNode<HeapNumber> heap_number_value = CAST(value);
16513
16514 // Load the HeapNumber value.
16515 var_fdec_value = LoadHeapNumberValue(heap_number_value);
16516 Goto(&do_fdec);
16517 }
16518
16519 BIND(&do_fdec);
16520 {
16521 TNode<Float64T> fdec_value = var_fdec_value.value();
16522 TNode<Float64T> minus_one = Float64Constant(-1.0);
16523 TNode<Float64T> fdec_result = Float64Add(fdec_value, minus_one);
16524 var_result = AllocateHeapNumberWithValue(fdec_result);
16525 Goto(&end);
16526 }
16527
16528 BIND(&end);
16529 return var_result.value();
16530}
16531
16532TNode<Number> CodeStubAssembler::NumberAdd(TNode<Number> a, TNode<Number> b) {
16533 TVARIABLE(Number, var_result);
16534 Label float_add(this, Label::kDeferred), end(this);
16535 GotoIf(TaggedIsNotSmi(a), &float_add);
16536 GotoIf(TaggedIsNotSmi(b), &float_add);
16537
16538 // Try fast Smi addition first.
16539 var_result = TrySmiAdd(CAST(a), CAST(b), &float_add);
16540 Goto(&end);
16541
16542 BIND(&float_add);
16543 {
16544 var_result = ChangeFloat64ToTagged(
16545 Float64Add(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
16546 Goto(&end);
16547 }
16548
16549 BIND(&end);
16550 return var_result.value();
16551}
16552
16553TNode<Number> CodeStubAssembler::NumberSub(TNode<Number> a, TNode<Number> b) {
16554 TVARIABLE(Number, var_result);
16555 Label float_sub(this, Label::kDeferred), end(this);
16556 GotoIf(TaggedIsNotSmi(a), &float_sub);
16557 GotoIf(TaggedIsNotSmi(b), &float_sub);
16558
16559 // Try fast Smi subtraction first.
16560 var_result = TrySmiSub(CAST(a), CAST(b), &float_sub);
16561 Goto(&end);
16562
16563 BIND(&float_sub);
16564 {
16565 var_result = ChangeFloat64ToTagged(
16566 Float64Sub(ChangeNumberToFloat64(a), ChangeNumberToFloat64(b)));
16567 Goto(&end);
16568 }
16569
16570 BIND(&end);
16571 return var_result.value();
16572}
16573
16574void CodeStubAssembler::GotoIfNotNumber(TNode<Object> input,
16575 Label* is_not_number) {
16576 Label is_number(this);
16577 GotoIf(TaggedIsSmi(input), &is_number);
16578 Branch(IsHeapNumber(CAST(input)), &is_number, is_not_number);
16579 BIND(&is_number);
16580}
16581
// Jumps to {is_number} when {input} is a Smi or a HeapNumber; otherwise
// falls through. The Smi check must precede the CAST to HeapObject.
void CodeStubAssembler::GotoIfNumber(TNode<Object> input, Label* is_number) {
  GotoIf(TaggedIsSmi(input), is_number);
  GotoIf(IsHeapNumber(CAST(input)), is_number);
}
16586
16587TNode<Word32T> CodeStubAssembler::NormalizeShift32OperandIfNecessary(
16588 TNode<Word32T> right32) {
16589 TVARIABLE(Word32T, result, right32);
16590 Label done(this);
16591 // Use UniqueInt32Constant instead of BoolConstant here in order to ensure
16592 // that the graph structure does not depend on the value of the predicate
16593 // (BoolConstant uses cached nodes).
16594 GotoIf(UniqueInt32Constant(Word32ShiftIsSafe()), &done);
16595 {
16596 result = Word32And(right32, Int32Constant(0x1F));
16597 Goto(&done);
16598 }
16599 BIND(&done);
16600 return result.value();
16601}
16602
// Applies the 32-bit bitwise/shift operation {bitwise_op} to {left32} and
// {right32} and returns the result as a tagged Number. Shift counts are
// normalized (masked to 5 bits) where the architecture requires it; the
// logical right shift tags an unsigned result, everything else a signed one.
TNode<Number> CodeStubAssembler::BitwiseOp(TNode<Word32T> left32,
                                           TNode<Word32T> right32,
                                           Operation bitwise_op) {
  switch (bitwise_op) {
    case Operation::kBitwiseAnd:
      return ChangeInt32ToTagged(Signed(Word32And(left32, right32)));
    case Operation::kBitwiseOr:
      return ChangeInt32ToTagged(Signed(Word32Or(left32, right32)));
    case Operation::kBitwiseXor:
      return ChangeInt32ToTagged(Signed(Word32Xor(left32, right32)));
    case Operation::kShiftLeft:
      right32 = NormalizeShift32OperandIfNecessary(right32);
      return ChangeInt32ToTagged(Signed(Word32Shl(left32, right32)));
    case Operation::kShiftRight:
      right32 = NormalizeShift32OperandIfNecessary(right32);
      return ChangeInt32ToTagged(Signed(Word32Sar(left32, right32)));
    case Operation::kShiftRightLogical:
      right32 = NormalizeShift32OperandIfNecessary(right32);
      return ChangeUint32ToTagged(Unsigned(Word32Shr(left32, right32)));
    default:
      break;
  }
  UNREACHABLE();
}
16627
// Applies {bitwise_op} to two Smi operands, staying in Smi representation
// where the result provably fits and delegating to the int32 path otherwise.
TNode<Number> CodeStubAssembler::BitwiseSmiOp(TNode<Smi> left, TNode<Smi> right,
                                              Operation bitwise_op) {
  switch (bitwise_op) {
    case Operation::kBitwiseAnd:
      return SmiAnd(left, right);
    case Operation::kBitwiseOr:
      return SmiOr(left, right);
    case Operation::kBitwiseXor:
      return SmiXor(left, right);
    // Smi shift left and logical shift right can have (Heap)Number output, so
    // perform int32 operation.
    case Operation::kShiftLeft:
    case Operation::kShiftRightLogical:
      return BitwiseOp(SmiToInt32(left), SmiToInt32(right), bitwise_op);
    // Arithmetic shift right of a Smi can't overflow to the heap number, so
    // perform int32 operation but don't check for overflow.
    case Operation::kShiftRight: {
      TNode<Int32T> left32 = SmiToInt32(left);
      TNode<Int32T> right32 =
          Signed(NormalizeShift32OperandIfNecessary(SmiToInt32(right)));
      return ChangeInt32ToTaggedNoOverflow(Word32Sar(left32, right32));
    }
    default:
      break;
  }
  UNREACHABLE();
}
16655
// Allocates a fresh JSIteratorResult {value, done} object using the native
// context's iterator result map. {done} must be a Boolean (checked).
// Write barriers are skipped because the object is newly allocated.
TNode<JSObject> CodeStubAssembler::AllocateJSIteratorResult(
    TNode<Context> context, TNode<Object> value, TNode<Boolean> done) {
  CSA_DCHECK(this, IsBoolean(done));
  TNode<NativeContext> native_context = LoadNativeContext(context);
  TNode<Map> map = CAST(
      LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX));
  TNode<HeapObject> result = Allocate(JSIteratorResult::kSize);
  StoreMapNoWriteBarrier(result, map);
  StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, value);
  StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kDoneOffset, done);
  return CAST(result);
}
16672
16673TNode<JSObject> CodeStubAssembler::AllocateJSIteratorResultForEntry(
16675 TNode<NativeContext> native_context = LoadNativeContext(context);
16676 TNode<Smi> length = SmiConstant(2);
16677 int const elements_size = FixedArray::SizeFor(2);
16678 TNode<FixedArray> elements =
16679 UncheckedCast<FixedArray>(Allocate(elements_size));
16680 StoreObjectFieldRoot(elements, offsetof(FixedArray, map_),
16681 RootIndex::kFixedArrayMap);
16682 StoreObjectFieldNoWriteBarrier(elements, offsetof(FixedArray, length_),
16683 length);
16684 StoreFixedArrayElement(elements, 0, key);
16685 StoreFixedArrayElement(elements, 1, value);
16686 TNode<Map> array_map = CAST(LoadContextElement(
16687 native_context, Context::JS_ARRAY_PACKED_ELEMENTS_MAP_INDEX));
16688 TNode<HeapObject> array =
16689 Allocate(ALIGN_TO_ALLOCATION_ALIGNMENT(JSArray::kHeaderSize));
16690 StoreMapNoWriteBarrier(array, array_map);
16691 StoreObjectFieldRoot(array, JSArray::kPropertiesOrHashOffset,
16692 RootIndex::kEmptyFixedArray);
16693 StoreObjectFieldNoWriteBarrier(array, JSArray::kElementsOffset, elements);
16694 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
16695 TNode<Map> iterator_map = CAST(
16696 LoadContextElement(native_context, Context::ITERATOR_RESULT_MAP_INDEX));
16697 TNode<HeapObject> result = Allocate(JSIteratorResult::kSize);
16698 StoreMapNoWriteBarrier(result, iterator_map);
16699 StoreObjectFieldRoot(result, JSIteratorResult::kPropertiesOrHashOffset,
16700 RootIndex::kEmptyFixedArray);
16701 StoreObjectFieldRoot(result, JSIteratorResult::kElementsOffset,
16702 RootIndex::kEmptyFixedArray);
16703 StoreObjectFieldNoWriteBarrier(result, JSIteratorResult::kValueOffset, array);
16704 StoreObjectFieldRoot(result, JSIteratorResult::kDoneOffset,
16705 RootIndex::kFalseValue);
16706 return CAST(result);
16707}
16708
// Allocates the {promise, resolve, reject} result object used by
// Promise.withResolvers, using the native context's dedicated map.
// Write barriers are skipped because the object is newly allocated.
TNode<JSObject> CodeStubAssembler::AllocatePromiseWithResolversResult(
    TNode<Context> context, TNode<Object> promise, TNode<Object> resolve,
    TNode<Object> reject) {
  TNode<NativeContext> native_context = LoadNativeContext(context);
  TNode<Map> map = CAST(LoadContextElement(
      native_context, Context::PROMISE_WITHRESOLVERS_RESULT_MAP_INDEX));
  TNode<HeapObject> result = Allocate(JSPromiseWithResolversResult::kSize);
  StoreMapNoWriteBarrier(result, map);
  StoreObjectFieldRoot(result,
                       JSPromiseWithResolversResult::kPropertiesOrHashOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldRoot(result, JSPromiseWithResolversResult::kElementsOffset,
                       RootIndex::kEmptyFixedArray);
  StoreObjectFieldNoWriteBarrier(
      result, JSPromiseWithResolversResult::kPromiseOffset, promise);
  StoreObjectFieldNoWriteBarrier(
      result, JSPromiseWithResolversResult::kResolveOffset, resolve);
  StoreObjectFieldNoWriteBarrier(
      result, JSPromiseWithResolversResult::kRejectOffset, reject);
  return CAST(result);
}
16730
16731TNode<JSReceiver> CodeStubAssembler::ArraySpeciesCreate(TNode<Context> context,
16732 TNode<Object> o,
16733 TNode<Number> len) {
16734 TNode<JSReceiver> constructor =
16735 CAST(CallRuntime(Runtime::kArraySpeciesConstructor, context, o));
16736 return Construct(context, constructor, len);
16737}
16738
16739void CodeStubAssembler::ThrowIfArrayBufferIsDetached(
16740 TNode<Context> context, TNode<JSArrayBuffer> array_buffer,
16741 const char* method_name) {
16742 Label if_detached(this, Label::kDeferred), if_not_detached(this);
16743 Branch(IsDetachedBuffer(array_buffer), &if_detached, &if_not_detached);
16744 BIND(&if_detached);
16745 ThrowTypeError(context, MessageTemplate::kDetachedOperation, method_name);
16746 BIND(&if_not_detached);
16747}
16748
16749void CodeStubAssembler::ThrowIfArrayBufferViewBufferIsDetached(
16750 TNode<Context> context, TNode<JSArrayBufferView> array_buffer_view,
16751 const char* method_name) {
16752 TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array_buffer_view);
16753 ThrowIfArrayBufferIsDetached(context, buffer, method_name);
16754}
16755
// Loads {array_buffer}'s byte length from its bounded-size-encoded field.
TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferByteLength(
    TNode<JSArrayBuffer> array_buffer) {
  return LoadBoundedSizeFromObject(array_buffer,
                                   JSArrayBuffer::kRawByteLengthOffset);
}
16761
// Loads {array_buffer}'s maximum byte length from its bounded-size field.
TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferMaxByteLength(
    TNode<JSArrayBuffer> array_buffer) {
  return LoadBoundedSizeFromObject(array_buffer,
                                   JSArrayBuffer::kRawMaxByteLengthOffset);
}
16767
// Loads {array_buffer}'s backing store pointer (stored as a sandboxed
// pointer).
TNode<RawPtrT> CodeStubAssembler::LoadJSArrayBufferBackingStorePtr(
    TNode<JSArrayBuffer> array_buffer) {
  return LoadSandboxedPointerFromObject(array_buffer,
                                        JSArrayBuffer::kBackingStoreOffset);
}
16773
// Loads the JSArrayBuffer backing {array_buffer_view}.
TNode<JSArrayBuffer> CodeStubAssembler::LoadJSArrayBufferViewBuffer(
    TNode<JSArrayBufferView> array_buffer_view) {
  return LoadObjectField<JSArrayBuffer>(array_buffer_view,
                                        JSArrayBufferView::kBufferOffset);
}
16779
16780TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteLength(
16781 TNode<JSArrayBufferView> array_buffer_view) {
16782 return LoadBoundedSizeFromObject(array_buffer_view,
16783 JSArrayBufferView::kRawByteLengthOffset);
16784}
16785
16786void CodeStubAssembler::StoreJSArrayBufferViewByteLength(
16787 TNode<JSArrayBufferView> array_buffer_view, TNode<UintPtrT> value) {
16788 StoreBoundedSizeToObject(array_buffer_view,
16789 JSArrayBufferView::kRawByteLengthOffset, value);
16790}
16791
16792TNode<UintPtrT> CodeStubAssembler::LoadJSArrayBufferViewByteOffset(
16793 TNode<JSArrayBufferView> array_buffer_view) {
16794 return LoadBoundedSizeFromObject(array_buffer_view,
16795 JSArrayBufferView::kRawByteOffsetOffset);
16796}
16797
16798void CodeStubAssembler::StoreJSArrayBufferViewByteOffset(
16799 TNode<JSArrayBufferView> array_buffer_view, TNode<UintPtrT> value) {
16800 StoreBoundedSizeToObject(array_buffer_view,
16801 JSArrayBufferView::kRawByteOffsetOffset, value);
16802}
16803
16804TNode<UintPtrT> CodeStubAssembler::LoadJSTypedArrayLength(
16805 TNode<JSTypedArray> typed_array) {
16806 return LoadBoundedSizeFromObject(typed_array, JSTypedArray::kRawLengthOffset);
16807}
16808
16809void CodeStubAssembler::StoreJSTypedArrayLength(TNode<JSTypedArray> typed_array,
16810 TNode<UintPtrT> value) {
16811 StoreBoundedSizeToObject(typed_array, JSTypedArray::kRawLengthOffset, value);
16812}
16813
// Returns the typed array's current element count; jumps to {detached} when
// the backing buffer is detached (and, for variable-length views, when the
// view is out of bounds — see LoadVariableLengthJSTypedArrayLength).
TNode<UintPtrT> CodeStubAssembler::LoadJSTypedArrayLengthAndCheckDetached(
    TNode<JSTypedArray> typed_array, Label* detached) {
  TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(typed_array);

  Label variable_length(this), fixed_length(this), end(this);
  Branch(IsVariableLengthJSArrayBufferView(typed_array), &variable_length,
         &fixed_length);
  BIND(&variable_length);
  {
    // RAB/GSAB-backed views: compute the length from the current buffer
    // state; this helper also performs the detached/out-of-bounds check.
    result =
        LoadVariableLengthJSTypedArrayLength(typed_array, buffer, detached);
    Goto(&end);
  }

  BIND(&fixed_length);
  {
    // Fixed-length views: only the detached check is needed before reading
    // the constant length field.
    Label not_detached(this);
    Branch(IsDetachedBuffer(buffer), detached, &not_detached);
    BIND(&not_detached);
    result = LoadJSTypedArrayLength(typed_array);
    Goto(&end);
  }
  BIND(&end);
  return result.value();
}
16840
// ES #sec-integerindexedobjectlength
// Computes the current element count of a RAB/GSAB-backed typed array from
// its current byte length; jumps to {detached_or_out_of_bounds} when the
// view can no longer be accessed.
TNode<UintPtrT> CodeStubAssembler::LoadVariableLengthJSTypedArrayLength(
    Label* detached_or_out_of_bounds) {
  // byte_length already takes array's offset into account.
  TNode<UintPtrT> byte_length = LoadVariableLengthJSArrayBufferViewByteLength(
      array, buffer, detached_or_out_of_bounds);
  TNode<IntPtrT> element_size =
      RabGsabElementsKindToElementByteSize(LoadElementsKind(array));
  // Element count = byte length / element size. The signed round-trip is OK
  // here since byte lengths are bounded (cf. the matching comment in
  // LoadVariableLengthJSTypedArrayByteLength).
  return Unsigned(IntPtrDiv(Signed(byte_length), element_size));
}
16852
// Computes the current byte length of a variable-length (RAB/GSAB-backed)
// ArrayBufferView, taking the view's byte offset into account; jumps to
// {detached_or_out_of_bounds} when the view can no longer be accessed.
CodeStubAssembler::LoadVariableLengthJSArrayBufferViewByteLength(
    Label* detached_or_out_of_bounds) {
  Label is_gsab(this), is_rab(this), end(this);
  TNode<UintPtrT> array_byte_offset = LoadJSArrayBufferViewByteOffset(array);

  Branch(IsSharedArrayBuffer(buffer), &is_gsab, &is_rab);
  BIND(&is_gsab);
  {
    // Non-length-tracking GSAB-backed ArrayBufferViews shouldn't end up here.
    CSA_DCHECK(this, IsLengthTrackingJSArrayBufferView(array));
    // Read the byte length from the BackingStore.
    const TNode<ExternalReference> byte_length_function =
        ExternalConstant(ExternalReference::gsab_byte_length());
    TNode<ExternalReference> isolate_ptr =
        ExternalConstant(ExternalReference::isolate_address());
    TNode<UintPtrT> buffer_byte_length = UncheckedCast<UintPtrT>(
        CallCFunction(byte_length_function, MachineType::UintPtr(),
                      std::make_pair(MachineType::Pointer(), isolate_ptr),
                      std::make_pair(MachineType::AnyTagged(), buffer)));
    // Since the SharedArrayBuffer can't shrink, and we've managed to create
    // this JSArrayBufferDataView without throwing an exception, we know that
    // buffer_byte_length >= array_byte_offset.
    CSA_CHECK(this,
              UintPtrGreaterThanOrEqual(buffer_byte_length, array_byte_offset));
    result = UintPtrSub(buffer_byte_length, array_byte_offset);
    Goto(&end);
  }

  BIND(&is_rab);
  {
    GotoIf(IsDetachedBuffer(buffer), detached_or_out_of_bounds);

    TNode<UintPtrT> buffer_byte_length = LoadJSArrayBufferByteLength(buffer);

    Label is_length_tracking(this), not_length_tracking(this);
    Branch(IsLengthTrackingJSArrayBufferView(array), &is_length_tracking,
           &not_length_tracking);

    BIND(&is_length_tracking);
    {
      // The backing RAB might have been shrunk so that the start of the
      // TypedArray is already out of bounds.
      GotoIfNot(UintPtrLessThanOrEqual(array_byte_offset, buffer_byte_length),
                detached_or_out_of_bounds);
      // Length-tracking views span the remainder of the buffer.
      result = UintPtrSub(buffer_byte_length, array_byte_offset);
      Goto(&end);
    }

    BIND(&not_length_tracking);
    {
      // Check if the backing RAB has shrunk so that the buffer is out of
      // bounds.
      TNode<UintPtrT> array_byte_length =
          LoadJSArrayBufferViewByteLength(array);
      GotoIfNot(UintPtrGreaterThanOrEqual(
                    buffer_byte_length,
                    UintPtrAdd(array_byte_offset, array_byte_length)),
                detached_or_out_of_bounds);
      result = array_byte_length;
      Goto(&end);
    }
  }
  BIND(&end);
  return result.value();
}
16921
16922void CodeStubAssembler::IsJSArrayBufferViewDetachedOrOutOfBounds(
16923 TNode<JSArrayBufferView> array_buffer_view, Label* detached_or_oob,
16924 Label* not_detached_nor_oob) {
16925 TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array_buffer_view);
16926
16927 GotoIf(IsDetachedBuffer(buffer), detached_or_oob);
16928 GotoIfNot(IsVariableLengthJSArrayBufferView(array_buffer_view),
16929 not_detached_nor_oob);
16930 GotoIf(IsSharedArrayBuffer(buffer), not_detached_nor_oob);
16931
16932 {
16933 TNode<UintPtrT> buffer_byte_length = LoadJSArrayBufferByteLength(buffer);
16934 TNode<UintPtrT> array_byte_offset =
16935 LoadJSArrayBufferViewByteOffset(array_buffer_view);
16936
16937 Label length_tracking(this), not_length_tracking(this);
16938 Branch(IsLengthTrackingJSArrayBufferView(array_buffer_view),
16939 &length_tracking, &not_length_tracking);
16940
16941 BIND(&length_tracking);
16942 {
16943 // The backing RAB might have been shrunk so that the start of the
16944 // TypedArray is already out of bounds.
16945 Branch(UintPtrLessThanOrEqual(array_byte_offset, buffer_byte_length),
16946 not_detached_nor_oob, detached_or_oob);
16947 }
16948
16949 BIND(&not_length_tracking);
16950 {
16951 // Check if the backing RAB has shrunk so that the buffer is out of
16952 // bounds.
16953 TNode<UintPtrT> array_byte_length =
16954 LoadJSArrayBufferViewByteLength(array_buffer_view);
16955 Branch(UintPtrGreaterThanOrEqual(
16956 buffer_byte_length,
16957 UintPtrAdd(array_byte_offset, array_byte_length)),
16958 not_detached_nor_oob, detached_or_oob);
16959 }
16960 }
16961}
16962
// Boolean-returning wrapper around IsJSArrayBufferViewDetachedOrOutOfBounds.
TNode<BoolT> CodeStubAssembler::IsJSArrayBufferViewDetachedOrOutOfBoundsBoolean(
    TNode<JSArrayBufferView> array_buffer_view) {
  Label is_detached_or_out_of_bounds(this),
      not_detached_nor_out_of_bounds(this), end(this);

  IsJSArrayBufferViewDetachedOrOutOfBounds(array_buffer_view,
                                           &is_detached_or_out_of_bounds,
                                           &not_detached_nor_out_of_bounds);
  BIND(&is_detached_or_out_of_bounds);
  {
    result = BoolConstant(true);
    Goto(&end);
  }
  BIND(&not_detached_nor_out_of_bounds);
  {
    result = BoolConstant(false);
    Goto(&end);
  }
  BIND(&end);
  return result.value();
}
16985
16986void CodeStubAssembler::CheckJSTypedArrayIndex(
16987 TNode<JSTypedArray> typed_array, TNode<UintPtrT> index,
16988 Label* detached_or_out_of_bounds) {
16989 TNode<UintPtrT> len = LoadJSTypedArrayLengthAndCheckDetached(
16990 typed_array, detached_or_out_of_bounds);
16991
16992 GotoIf(UintPtrGreaterThanOrEqual(index, len), detached_or_out_of_bounds);
16993}
16994
// ES #sec-integerindexedobjectbytelength
// Computes the current byte length of a RAB/GSAB-backed typed array.
// Returns 0 (the {miss} path) when the view is detached or out of bounds,
// rather than jumping to a caller-supplied label.
TNode<UintPtrT> CodeStubAssembler::LoadVariableLengthJSTypedArrayByteLength(
    TNode<Context> context, TNode<JSTypedArray> array,
    TNode<JSArrayBuffer> buffer) {
  Label miss(this), end(this);

  TNode<UintPtrT> length =
      LoadVariableLengthJSTypedArrayLength(array, buffer, &miss);
  TNode<IntPtrT> element_size =
      RabGsabElementsKindToElementByteSize(LoadElementsKind(array));
  // Conversion to signed is OK since length < JSArrayBuffer::kMaxByteLength.
  TNode<IntPtrT> byte_length = IntPtrMul(Signed(length), element_size);
  result = Unsigned(byte_length);
  Goto(&end);
  BIND(&miss);
  {
    result = UintPtrConstant(0);
    Goto(&end);
  }
  BIND(&end);
  return result.value();
}
17018
// Maps a RAB/GSAB elements kind to its element width in bytes (1/2/4/8).
// {elements_kind} must be one of the RAB_GSAB_* kinds; anything else hits
// the Unreachable() default case.
TNode<IntPtrT> CodeStubAssembler::RabGsabElementsKindToElementByteSize(
    TNode<Int32T> elements_kind) {
  Label elements_8(this), elements_16(this), elements_32(this),
      elements_64(this), not_found(this), end(this);
  // Kinds grouped by element width; each row of labels below corresponds to
  // the same row of kinds here.
  int32_t elements_kinds[] = {
      RAB_GSAB_UINT8_ELEMENTS, RAB_GSAB_UINT8_CLAMPED_ELEMENTS,
      RAB_GSAB_INT8_ELEMENTS, RAB_GSAB_UINT16_ELEMENTS,
      RAB_GSAB_INT16_ELEMENTS, RAB_GSAB_FLOAT16_ELEMENTS,
      RAB_GSAB_UINT32_ELEMENTS, RAB_GSAB_INT32_ELEMENTS,
      RAB_GSAB_FLOAT32_ELEMENTS, RAB_GSAB_FLOAT64_ELEMENTS,
      RAB_GSAB_BIGINT64_ELEMENTS, RAB_GSAB_BIGUINT64_ELEMENTS};
  Label* elements_kind_labels[] = {&elements_8, &elements_8, &elements_8,
                                   &elements_16, &elements_16, &elements_16,
                                   &elements_32, &elements_32, &elements_32,
                                   &elements_64, &elements_64, &elements_64};
  const size_t kTypedElementsKindCount =
  DCHECK_EQ(kTypedElementsKindCount, arraysize(elements_kinds));
  DCHECK_EQ(kTypedElementsKindCount, arraysize(elements_kind_labels));
  Switch(elements_kind, &not_found, elements_kinds, elements_kind_labels,
         kTypedElementsKindCount);
  BIND(&elements_8);
  {
    result = IntPtrConstant(1);
    Goto(&end);
  }
  BIND(&elements_16);
  {
    result = IntPtrConstant(2);
    Goto(&end);
  }
  BIND(&elements_32);
  {
    result = IntPtrConstant(4);
    Goto(&end);
  }
  BIND(&elements_64);
  {
    result = IntPtrConstant(8);
    Goto(&end);
  }
  BIND(&not_found);
  { Unreachable(); }
  BIND(&end);
  return result.value();
}
17067
17068TNode<JSArrayBuffer> CodeStubAssembler::GetTypedArrayBuffer(
17069 TNode<Context> context, TNode<JSTypedArray> array) {
17070 Label call_runtime(this), done(this);
17071 TVARIABLE(Object, var_result);
17072
17073 GotoIf(IsOnHeapTypedArray(array), &call_runtime);
17074
17075 TNode<JSArrayBuffer> buffer = LoadJSArrayBufferViewBuffer(array);
17076 GotoIf(IsDetachedBuffer(buffer), &call_runtime);
17077 var_result = buffer;
17078 Goto(&done);
17079
17080 BIND(&call_runtime);
17081 {
17082 var_result = CallRuntime(Runtime::kTypedArrayGetBuffer, context, array);
17083 Goto(&done);
17084 }
17085
17086 BIND(&done);
17087 return CAST(var_result.value());
17088}
17089
// Captures the stack arguments of the current stub frame. When no frame
// pointer is supplied, the assembler's current frame pointer is used.
CodeStubArguments::CodeStubArguments(CodeStubAssembler* assembler,
    : assembler_(assembler),
      argc_(argc),
      base_(),
      fp_(fp != nullptr ? fp : assembler_->LoadFramePointer()) {
  // base_ points to the first argument, not the receiver
  // whether present or not.
}
17104
17106 // If we're using a dynamic parameter count, then there may be additional
17107 // padding arguments on the stack pushed by the caller.
17109}
17110
17116
17122
17128
17130 CSA_DCHECK(assembler_, assembler_->UintPtrOrSmiLessThan(
17131 index, GetLengthWithoutReceiver()));
17132 return assembler_->CAST(assembler_->LoadFullTagged(AtIndexPtr(index)));
17133}
17134
17136 return AtIndex(assembler_->IntPtrConstant(index));
17137}
17138
17143
17147
17149 TNode<IntPtrT> index, TNode<JSAny> default_value) {
17151 CodeStubAssembler::Label argument_missing(assembler_),
17152 argument_done(assembler_, &result);
17153
17155 assembler_->UintPtrGreaterThanOrEqual(index, GetLengthWithoutReceiver()),
17156 &argument_missing);
17157 result = AtIndex(index);
17158 assembler_->Goto(&argument_done);
17159
17160 assembler_->BIND(&argument_missing);
17161 result = default_value;
17162 assembler_->Goto(&argument_done);
17163
17164 assembler_->BIND(&argument_done);
17165 return result.value();
17166}
17167
17173
17177 TNode<IntPtrT> last) const {
17178 assembler_->Comment("CodeStubArguments::ForEach");
17179 if (first == nullptr) {
17180 first = assembler_->IntPtrConstant(0);
17181 }
17182 if (last == nullptr) {
17183 last = GetLengthWithoutReceiver();
17184 }
17187 const int increment = kSystemPointerSize;
17189 vars, start, end,
17190 [&](TNode<RawPtrT> current) {
17191 TNode<JSAny> arg =
17192 assembler_->CAST(assembler_->LoadFullTagged(current));
17193 body(arg);
17194 },
17197}
17198
17200 TNode<IntPtrT> argument_count = GetLengthWithReceiver();
17202 // If there may be padding arguments, we need to remove the maximum of the
17203 // parameter count and the actual argument count.
17204 // TODO(saelo): it would probably be nicer to have this logic in the
17205 // low-level assembler instead, where we also keep the parameter count
17206 // value. It's not even clear why we need this PopAndReturn method at all
17207 // in the higher-level CodeStubAssembler class, as the lower-level
17208 // assemblers should have all the necessary information.
17211 CodeStubAssembler::Label pop_parameter_count(assembler_),
17212 pop_argument_count(assembler_);
17214 assembler_->IntPtrLessThan(argument_count, parameter_count),
17215 &pop_parameter_count, &pop_argument_count);
17216 assembler_->BIND(&pop_parameter_count);
17218 assembler_->BIND(&pop_argument_count);
17219 assembler_->PopAndReturn(argument_count, value);
17220 } else {
17221 assembler_->PopAndReturn(argument_count, value);
17222 }
17223}
17224
17226 TNode<Int32T> elements_kind) {
17228 return Uint32LessThanOrEqual(elements_kind,
17230}
17231
17233 TNode<Int32T> elements_kind) {
17235 // ElementsKind values that are even are packed. See
17236 // internal::IsFastPackedElementsKind.
17237 static_assert((~PACKED_SMI_ELEMENTS & 1) == 1);
17238 static_assert((~PACKED_ELEMENTS & 1) == 1);
17239 static_assert((~PACKED_DOUBLE_ELEMENTS & 1) == 1);
17240 return Word32And(IsNotSetWord32(elements_kind, 1),
17241 IsFastElementsKind(elements_kind));
17242}
17243
17245 TNode<Int32T> elements_kind) {
17248 static_assert(PACKED_NONEXTENSIBLE_ELEMENTS + 1 ==
17251 static_assert(PACKED_SEALED_ELEMENTS + 1 == HOLEY_SEALED_ELEMENTS);
17252 return Uint32LessThanOrEqual(elements_kind,
17254}
17255
17257 TNode<Int32T> elements_kind) {
17259 static_assert((PACKED_DOUBLE_ELEMENTS & 1) == 0);
17260 static_assert(PACKED_DOUBLE_ELEMENTS + 1 == HOLEY_DOUBLE_ELEMENTS);
17261 return Word32Equal(Word32Shr(elements_kind, Int32Constant(1)),
17263}
17264
17266 TNode<Int32T> elements_kind) {
17270 return Uint32LessThanOrEqual(elements_kind,
17272}
17273
17275 TNode<Int32T> elements_kind) {
17276 return Uint32LessThanOrEqual(elements_kind,
17278}
17279
17281 TNode<Int32T> elements_kind) {
17282 CSA_DCHECK(this, IsFastElementsKind(elements_kind));
17283
17284 static_assert(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1));
17285 static_assert(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1));
17286 static_assert(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1));
17287 return IsSetWord32(elements_kind, 1);
17288}
17289
17291 TNode<Int32T> elements_kind) {
17292 CSA_DCHECK(this, Uint32LessThanOrEqual(
17293 elements_kind,
17295
17296 static_assert(HOLEY_SMI_ELEMENTS == (PACKED_SMI_ELEMENTS | 1));
17297 static_assert(HOLEY_ELEMENTS == (PACKED_ELEMENTS | 1));
17298 static_assert(HOLEY_DOUBLE_ELEMENTS == (PACKED_DOUBLE_ELEMENTS | 1));
17299 static_assert(HOLEY_NONEXTENSIBLE_ELEMENTS ==
17301 static_assert(HOLEY_SEALED_ELEMENTS == (PACKED_SEALED_ELEMENTS | 1));
17302 static_assert(HOLEY_FROZEN_ELEMENTS == (PACKED_FROZEN_ELEMENTS | 1));
17303 return IsSetWord32(elements_kind, 1);
17304}
17305
17307 TNode<Int32T> target_kind, ElementsKind reference_kind) {
17308 return Int32GreaterThan(target_kind, Int32Constant(reference_kind));
17309}
17310
17312 TNode<Int32T> target_kind, ElementsKind reference_kind) {
17313 return Int32GreaterThanOrEqual(target_kind, Int32Constant(reference_kind));
17314}
17315
17317 TNode<Int32T> target_kind, ElementsKind reference_kind) {
17318 return Int32LessThanOrEqual(target_kind, Int32Constant(reference_kind));
17319}
17320
17322 TNode<Int32T> elements_kind) {
17323 Label is_rab_gsab(this), end(this);
17325 result = elements_kind;
17326 Branch(Int32GreaterThanOrEqual(elements_kind,
17327 Int32Constant(RAB_GSAB_UINT8_ELEMENTS)),
17328 &is_rab_gsab, &end);
17329 BIND(&is_rab_gsab);
17330 result = Int32Sub(elements_kind,
17331 Int32Constant(RAB_GSAB_UINT8_ELEMENTS - UINT8_ELEMENTS));
17332 Goto(&end);
17333 BIND(&end);
17334 return result.value();
17335}
17336
17338 TNode<Uint8T> is_debug_active = Load<Uint8T>(
17339 ExternalConstant(ExternalReference::debug_is_active_address(isolate())));
17340 return Word32NotEqual(is_debug_active, Int32Constant(0));
17341}
17342
17344 const TNode<RawPtrT> async_event_delegate = Load<RawPtrT>(ExternalConstant(
17345 ExternalReference::async_event_delegate_address(isolate())));
17346 return WordNotEqual(async_event_delegate, IntPtrConstant(0));
17347}
17348
17351 ExternalReference::promise_hook_flags_address(isolate())));
17352}
17353
17359
17364
17365#ifdef V8_ENABLE_JAVASCRIPT_PROMISE_HOOKS
17366TNode<BoolT> CodeStubAssembler::IsContextPromiseHookEnabled(
17367 TNode<Uint32T> flags) {
17369}
17370#endif
17371
17372TNode<BoolT>
17379
17388
17392
17394 CSA_DCHECK(this, SmiBelow(builtin_id, SmiConstant(Builtins::kBuiltinCount)));
17395
17397 ElementOffsetFromIndex(SmiToBInt(builtin_id), SYSTEM_POINTER_ELEMENTS);
17398
17399 TNode<ExternalReference> table = IsolateField(IsolateFieldId::kBuiltinTable);
17400
17401 return CAST(BitcastWordToTagged(Load<RawPtrT>(table, offset)));
17402}
17403
17404#ifdef V8_ENABLE_LEAPTIERING
17405
17406TNode<JSDispatchHandleT> CodeStubAssembler::LoadBuiltinDispatchHandle(
17407 RootIndex idx) {
17408#if V8_STATIC_DISPATCH_HANDLES_BOOL
17409 return LoadBuiltinDispatchHandle(JSBuiltinDispatchHandleRoot::to_idx(idx));
17410#else
17412 IntPtrConstant(JSBuiltinDispatchHandleRoot::to_idx(idx)),
17413 UINT32_ELEMENTS);
17415 IsolateField(IsolateFieldId::kBuiltinDispatchTable);
17416 return Load<JSDispatchHandleT>(table, offset);
17417#endif // V8_STATIC_DISPATCH_HANDLES_BOOL
17418}
17419
17420#if V8_STATIC_DISPATCH_HANDLES_BOOL
17421TNode<JSDispatchHandleT> CodeStubAssembler::LoadBuiltinDispatchHandle(
17422 JSBuiltinDispatchHandleRoot::Idx dispatch_root_idx) {
17423 DCHECK_LT(dispatch_root_idx, JSBuiltinDispatchHandleRoot::Idx::kCount);
17425 isolate()->builtin_dispatch_handle(dispatch_root_idx).value()));
17426}
17427#endif // V8_STATIC_DISPATCH_HANDLES_BOOL
17428
17429#endif // V8_ENABLE_LEAPTIERING
17430
17432 TNode<SharedFunctionInfo> shared_info, TVariable<Uint16T>* data_type_out,
17433 Label* if_compile_lazy) {
17434
17435 Label done(this);
17436 Label use_untrusted_data(this);
17437 Label unknown_data(this);
17438 TVARIABLE(Code, sfi_code);
17439
17440 TNode<Object> sfi_data = LoadSharedFunctionInfoTrustedData(shared_info);
17441 GotoIf(TaggedEqual(sfi_data, SmiConstant(0)), &use_untrusted_data);
17442 {
17443 TNode<Uint16T> data_type = LoadInstanceType(CAST(sfi_data));
17444 if (data_type_out) {
17445 *data_type_out = data_type;
17446 }
17447
17448 int32_t case_values[] = {
17449 BYTECODE_ARRAY_TYPE,
17450 CODE_TYPE,
17451 INTERPRETER_DATA_TYPE,
17452 UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_TYPE,
17453 UNCOMPILED_DATA_WITH_PREPARSE_DATA_TYPE,
17454 UNCOMPILED_DATA_WITHOUT_PREPARSE_DATA_WITH_JOB_TYPE,
17455 UNCOMPILED_DATA_WITH_PREPARSE_DATA_AND_JOB_TYPE,
17456#if V8_ENABLE_WEBASSEMBLY
17457 WASM_CAPI_FUNCTION_DATA_TYPE,
17458 WASM_EXPORTED_FUNCTION_DATA_TYPE,
17459 WASM_JS_FUNCTION_DATA_TYPE,
17460#endif // V8_ENABLE_WEBASSEMBLY
17461 };
17462 Label check_is_bytecode_array(this);
17463 Label check_is_baseline_data(this);
17464 Label check_is_interpreter_data(this);
17465 Label check_is_uncompiled_data(this);
17466 Label check_is_wasm_function_data(this);
17467 Label* case_labels[] = {
17468 &check_is_bytecode_array, &check_is_baseline_data,
17469 &check_is_interpreter_data, &check_is_uncompiled_data,
17470 &check_is_uncompiled_data, &check_is_uncompiled_data,
17471 &check_is_uncompiled_data,
17472#if V8_ENABLE_WEBASSEMBLY
17473 &check_is_wasm_function_data, &check_is_wasm_function_data,
17474 &check_is_wasm_function_data,
17475#endif // V8_ENABLE_WEBASSEMBLY
17476 };
17477 static_assert(arraysize(case_values) == arraysize(case_labels));
17478 Switch(data_type, &unknown_data, case_values, case_labels,
17479 arraysize(case_labels));
17480
17481 // IsBytecodeArray: Interpret bytecode
17482 BIND(&check_is_bytecode_array);
17483 sfi_code =
17484 HeapConstantNoHole(BUILTIN_CODE(isolate(), InterpreterEntryTrampoline));
17485 Goto(&done);
17486
17487 // IsBaselineData: Execute baseline code
17488 BIND(&check_is_baseline_data);
17489 {
17490 TNode<Code> baseline_code = CAST(sfi_data);
17491 sfi_code = baseline_code;
17492 Goto(&done);
17493 }
17494
17495 // IsInterpreterData: Interpret bytecode
17496 BIND(&check_is_interpreter_data);
17497 {
17499 CAST(sfi_data), InterpreterData::kInterpreterTrampolineOffset));
17500 sfi_code = trampoline;
17501 }
17502 Goto(&done);
17503
17504 // IsUncompiledDataWithPreparseData | IsUncompiledDataWithoutPreparseData:
17505 // Compile lazy
17506 BIND(&check_is_uncompiled_data);
17507 sfi_code = HeapConstantNoHole(BUILTIN_CODE(isolate(), CompileLazy));
17508 Goto(if_compile_lazy ? if_compile_lazy : &done);
17509
17510#if V8_ENABLE_WEBASSEMBLY
17511 // IsWasmFunctionData: Use the wrapper code
17512 BIND(&check_is_wasm_function_data);
17513 sfi_code = CAST(LoadObjectField(
17514 CAST(sfi_data), WasmExportedFunctionData::kWrapperCodeOffset));
17515 Goto(&done);
17516#endif // V8_ENABLE_WEBASSEMBLY
17517 }
17518
17519 BIND(&use_untrusted_data);
17520 {
17521 sfi_data = LoadSharedFunctionInfoUntrustedData(shared_info);
17522 Label check_instance_type(this);
17523
17524 // IsSmi: Is builtin
17525 GotoIf(TaggedIsNotSmi(sfi_data), &check_instance_type);
17526 if (data_type_out) {
17527 *data_type_out = Uint16Constant(0);
17528 }
17529 if (if_compile_lazy) {
17530 GotoIf(SmiEqual(CAST(sfi_data), SmiConstant(Builtin::kCompileLazy)),
17531 if_compile_lazy);
17532 }
17533 sfi_code = LoadBuiltin(CAST(sfi_data));
17534 Goto(&done);
17535
17536 // Switch on data's instance type.
17537 BIND(&check_instance_type);
17538 TNode<Uint16T> data_type = LoadInstanceType(CAST(sfi_data));
17539 if (data_type_out) {
17540 *data_type_out = data_type;
17541 }
17542
17543 int32_t case_values[] = {
17544 FUNCTION_TEMPLATE_INFO_TYPE,
17545#if V8_ENABLE_WEBASSEMBLY
17546 ASM_WASM_DATA_TYPE,
17547 WASM_RESUME_DATA_TYPE,
17548#endif // V8_ENABLE_WEBASSEMBLY
17549 };
17550 Label check_is_function_template_info(this);
17551 Label check_is_asm_wasm_data(this);
17552 Label check_is_wasm_resume(this);
17553 Label* case_labels[] = {
17554 &check_is_function_template_info,
17555#if V8_ENABLE_WEBASSEMBLY
17556 &check_is_asm_wasm_data,
17557 &check_is_wasm_resume,
17558#endif // V8_ENABLE_WEBASSEMBLY
17559 };
17560 static_assert(arraysize(case_values) == arraysize(case_labels));
17561 Switch(data_type, &unknown_data, case_values, case_labels,
17562 arraysize(case_labels));
17563
17564 // IsFunctionTemplateInfo: API call
17565 BIND(&check_is_function_template_info);
17566 sfi_code =
17567 HeapConstantNoHole(BUILTIN_CODE(isolate(), HandleApiCallOrConstruct));
17568 Goto(&done);
17569
17570#if V8_ENABLE_WEBASSEMBLY
17571 // IsAsmWasmData: Instantiate using AsmWasmData
17572 BIND(&check_is_asm_wasm_data);
17573 sfi_code = HeapConstantNoHole(BUILTIN_CODE(isolate(), InstantiateAsmJs));
17574 Goto(&done);
17575
17576 // IsWasmResumeData: Resume the suspended wasm continuation.
17577 BIND(&check_is_wasm_resume);
17578 sfi_code = HeapConstantNoHole(BUILTIN_CODE(isolate(), WasmResume));
17579 Goto(&done);
17580#endif // V8_ENABLE_WEBASSEMBLY
17581 }
17582
17583 BIND(&unknown_data);
17584 Unreachable();
17585
17586 BIND(&done);
17587 return sfi_code.value();
17588}
17589
17591 TNode<Code> code, CodeEntrypointTag tag) {
17592#ifdef V8_ENABLE_SANDBOX
17593 // In this case, the entrypoint is stored in the code pointer table entry
17594 // referenced via the Code object's 'self' indirect pointer.
17595 return LoadCodeEntrypointViaCodePointerField(
17596 code, Code::kSelfIndirectPointerOffset, tag);
17597#else
17598 return LoadObjectField<RawPtrT>(code, Code::kInstructionStartOffset);
17599#endif
17600}
17601
17603 static_assert(FIELD_SIZE(Code::kFlagsOffset) * kBitsPerByte == 32);
17605 LoadObjectField<Int32T>(code, Code::kFlagsOffset));
17606}
17607
17609 RootIndex function, TNode<Context> context,
17610 std::optional<TNode<NativeContext>> maybe_native_context) {
17613 DCHECK(v8::internal::IsSharedFunctionInfo(
17614 isolate()->root(function).GetHeapObject()));
17616 isolate()->root(function).GetHeapObject());
17617 const TNode<SharedFunctionInfo> sfi_obj =
17620 maybe_native_context ? *maybe_native_context : LoadNativeContext(context);
17621 const TNode<Map> map = CAST(LoadContextElement(
17622 native_context, Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX));
17624 static_assert(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
17625 StoreMapNoWriteBarrier(fun, map);
17626 StoreObjectFieldRoot(fun, JSObject::kPropertiesOrHashOffset,
17627 RootIndex::kEmptyFixedArray);
17628 StoreObjectFieldRoot(fun, JSObject::kElementsOffset,
17629 RootIndex::kEmptyFixedArray);
17630 StoreObjectFieldRoot(fun, JSFunction::kFeedbackCellOffset,
17631 RootIndex::kManyClosuresCell);
17632 StoreObjectFieldNoWriteBarrier(fun, JSFunction::kSharedFunctionInfoOffset,
17633 sfi_obj);
17634 StoreObjectFieldNoWriteBarrier(fun, JSFunction::kContextOffset, context);
17635 // For the native closures that are initialized here we statically know their
17636 // builtin id, so there's no need to use
17637 // CodeStubAssembler::GetSharedFunctionInfoCode().
17638 DCHECK(sfi->HasBuiltinId());
17639#ifdef V8_ENABLE_LEAPTIERING
17640 const TNode<JSDispatchHandleT> dispatch_handle =
17641 LoadBuiltinDispatchHandle(function);
17642 CSA_DCHECK(this,
17643 TaggedEqual(LoadBuiltin(SmiConstant(sfi->builtin_id())),
17644 LoadCodeObjectFromJSDispatchTable(dispatch_handle)));
17645 StoreObjectFieldNoWriteBarrier(fun, JSFunction::kDispatchHandleOffset,
17646 dispatch_handle);
17647 USE(sfi);
17648#else
17649 const TNode<Code> code = LoadBuiltin(SmiConstant(sfi->builtin_id()));
17650 StoreCodePointerFieldNoWriteBarrier(fun, JSFunction::kCodeOffset, code);
17651#endif // V8_ENABLE_LEAPTIERING
17652
17653 return CAST(fun);
17654}
17655
17657 TNode<Map> receiver_map,
17658 Label* if_fast,
17659 Label* if_slow) {
17660 TVARIABLE(JSReceiver, var_object, receiver);
17661 TVARIABLE(Map, object_map, receiver_map);
17662
17663 Label loop(this, {&var_object, &object_map}), done_loop(this);
17664 Goto(&loop);
17665 BIND(&loop);
17666 {
17667 // Check that there are no elements on the current {var_object}.
17668 Label if_no_elements(this);
17669
17670 // The following relies on the elements only aliasing with JSProxy::target,
17671 // which is a JavaScript value and hence cannot be confused with an elements
17672 // backing store.
17673 static_assert(static_cast<int>(JSObject::kElementsOffset) ==
17674 static_cast<int>(JSProxy::kTargetOffset));
17675 TNode<Object> object_elements =
17676 LoadObjectField(var_object.value(), JSObject::kElementsOffset);
17677 GotoIf(IsEmptyFixedArray(object_elements), &if_no_elements);
17678 GotoIf(IsEmptySlowElementDictionary(object_elements), &if_no_elements);
17679
17680 // It might still be an empty JSArray.
17681 GotoIfNot(IsJSArrayMap(object_map.value()), if_slow);
17682 TNode<Number> object_length = LoadJSArrayLength(CAST(var_object.value()));
17683 Branch(TaggedEqual(object_length, SmiConstant(0)), &if_no_elements,
17684 if_slow);
17685
17686 // Continue with {var_object}'s prototype.
17687 BIND(&if_no_elements);
17688 TNode<HeapObject> object = LoadMapPrototype(object_map.value());
17689 GotoIf(IsNull(object), if_fast);
17690
17691 // For all {object}s but the {receiver}, check that the cache is empty.
17692 var_object = CAST(object);
17693 object_map = LoadMap(object);
17694 TNode<Uint32T> object_enum_length = LoadMapEnumLength(object_map.value());
17695 Branch(Word32Equal(object_enum_length, Uint32Constant(0)), &loop, if_slow);
17696 }
17697}
17698
17700 Label* if_empty,
17701 Label* if_runtime) {
17702 Label if_fast(this), if_cache(this), if_no_cache(this, Label::kDeferred);
17703 TNode<Map> receiver_map = LoadMap(receiver);
17704
17705 // Check if the enum length field of the {receiver} is properly initialized,
17706 // indicating that there is an enum cache.
17707 TNode<Uint32T> receiver_enum_length = LoadMapEnumLength(receiver_map);
17708 Branch(Word32Equal(receiver_enum_length,
17710 &if_no_cache, &if_cache);
17711
17712 BIND(&if_no_cache);
17713 {
17714 // Avoid runtime-call for empty dictionary receivers.
17715 GotoIfNot(IsDictionaryMap(receiver_map), if_runtime);
17718
17719 // g++ version 8 has a bug when using `if constexpr(false)` with a lambda:
17720 // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=85149
17721 // TODO(miladfarca): Use `if constexpr` once all compilers handle this
17722 // properly.
17723 CSA_DCHECK(this, Word32Or(IsPropertyDictionary(properties),
17724 IsGlobalDictionary(properties)));
17726 length = Select<Smi>(
17727 IsPropertyDictionary(properties),
17728 [=, this] {
17729 return GetNumberOfElements(
17731 },
17732 [=, this] {
17733 return GetNumberOfElements(
17735 });
17736
17737 } else {
17738 static_assert(static_cast<int>(NameDictionary::kNumberOfElementsIndex) ==
17739 static_cast<int>(GlobalDictionary::kNumberOfElementsIndex));
17741 }
17742
17743 GotoIfNot(TaggedEqual(length, SmiConstant(0)), if_runtime);
17744 // Check that there are no elements on the {receiver} and its prototype
17745 // chain. Given that we do not create an EnumCache for dict-mode objects,
17746 // directly jump to {if_empty} if there are no elements and no properties
17747 // on the {receiver}.
17748 CheckPrototypeEnumCache(receiver, receiver_map, if_empty, if_runtime);
17749 }
17750
17751 // Check that there are no elements on the fast {receiver} and its
17752 // prototype chain.
17753 BIND(&if_cache);
17754 CheckPrototypeEnumCache(receiver, receiver_map, &if_fast, if_runtime);
17755
17756 BIND(&if_fast);
17757 return receiver_map;
17758}
17759
17761 TNode<IntPtrT> index) {
17762 return CodeStubArguments(this, args).GetOptionalArgumentValue(index);
17763}
17764
17766 TNode<IntPtrT> index,
17767 TNode<JSAny> value) {
17768 CodeStubArguments(this, args).SetArgumentValue(index, value);
17769}
17770
17772 TNode<RawPtrT> frame, TNode<IntPtrT> argc,
17773 FrameArgumentsArgcType argc_type) {
17776 }
17777 return CodeStubArguments(this, argc, frame).GetTorqueArguments();
17778}
17779
17780void CodeStubAssembler::Print(const char* s) {
17781 PrintToStream(s, fileno(stdout));
17782}
17783
17784void CodeStubAssembler::PrintErr(const char* s) {
17785 PrintToStream(s, fileno(stderr));
17786}
17787
17788void CodeStubAssembler::PrintToStream(const char* s, int stream) {
17789 std::string formatted(s);
17790 formatted += "\n";
17791 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
17792 StringConstant(formatted.c_str()), SmiConstant(stream));
17793}
17794
17795void CodeStubAssembler::Print(const char* prefix,
17796 TNode<MaybeObject> tagged_value) {
17797 PrintToStream(prefix, tagged_value, fileno(stdout));
17798}
17799
17800void CodeStubAssembler::Print(const char* prefix, TNode<UintPtrT> value) {
17801 PrintToStream(prefix, value, fileno(stdout));
17802}
17803
17804void CodeStubAssembler::Print(const char* prefix, TNode<Float64T> value) {
17805 PrintToStream(prefix, value, fileno(stdout));
17806}
17807
17808void CodeStubAssembler::PrintErr(const char* prefix,
17809 TNode<MaybeObject> tagged_value) {
17810 PrintToStream(prefix, tagged_value, fileno(stderr));
17811}
17812
// Prints an optional "|prefix|: " header followed by a runtime DebugPrint of
// |tagged_value| to the descriptor |stream|.
// NOTE(review): line 17820 (the initializer producing |string| from the
// formatted prefix) is elided in this extraction.
 17813 void CodeStubAssembler::PrintToStream(const char* prefix,
 17814 TNode<MaybeObject> tagged_value,
 17815 int stream) {
 17816 if (prefix != nullptr) {
 17817 std::string formatted(prefix);
 17818 formatted += ": ";
 17819 Handle<String> string =
 17821 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
 17822 HeapConstantNoHole(string), SmiConstant(stream));
 17823 }
 17824 // CallRuntime only accepts Objects, so do an UncheckedCast to object.
 17825 // DebugPrint explicitly checks whether the tagged value is a
 17826 // Tagged<MaybeObject>.
 17827 TNode<Object> arg = UncheckedCast<Object>(tagged_value);
 17828 CallRuntime(Runtime::kDebugPrint, NoContextConstant(), arg,
 17829 SmiConstant(stream));
 17830}
17831
// Word-sized variant: prints the optional prefix, then splits the word value
// into four 16-bit Smi chunks and passes them to Runtime::kDebugPrintWord.
// NOTE(review): the signature's first line (17832) and some body lines
// (17838, 17846-17847) are elided in this extraction; the loop body that
// extracts the low 16 bits of |value| into chunks[i] is on an elided line.
 17833 int stream) {
 17834 if (prefix != nullptr) {
 17835 std::string formatted(prefix);
 17836 formatted += ": ";
 17837 Handle<String> string =
 17839 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
 17840 HeapConstantNoHole(string), SmiConstant(stream));
 17841 }
 17842
 17843 // We use 16 bit per chunk.
 17844 TNode<Smi> chunks[4];
 17845 for (int i = 0; i < 4; ++i) {
 17848 value = WordShr(value, IntPtrConstant(16));
 17849 }
 17850
 17851 // Args are: <bits 63-48>, <bits 47-32>, <bits 31-16>, <bits 15-0>, stream.
 17852 CallRuntime(Runtime::kDebugPrintWord, NoContextConstant(), chunks[3],
 17853 chunks[2], chunks[1], chunks[0], SmiConstant(stream));
 17854}
17855
// Float64 variant: prints the optional prefix, then splits the float's bit
// pattern into four 16-bit Smi chunks for Runtime::kDebugPrintFloat.
// NOTE(review): the signature's first line (17856) and a few chunk lines
// (17875, 17878) are elided in this extraction.
 17857 int stream) {
 17858 if (prefix != nullptr) {
 17859 std::string formatted(prefix);
 17860 formatted += ": ";
 17861 Handle<String> string =
 17863 CallRuntime(Runtime::kGlobalPrint, NoContextConstant(),
 17864 HeapConstantNoHole(string), SmiConstant(stream));
 17865 }
 17866
 17867 // We use word32 extraction instead of `BitcastFloat64ToInt64` to support 32
 17868 // bit architectures, too.
 17869 TNode<Uint32T> high = Float64ExtractHighWord32(value);
 17870 TNode<Uint32T> low = Float64ExtractLowWord32(value);
 17871
 17872 // We use 16 bit per chunk.
 17873 TNode<Smi> chunks[4];
 17874 chunks[0] = SmiFromUint32(ReinterpretCast<Uint32T>(Word32And(low, 0xFFFF)));
 17876 Word32And(Word32Shr(low, Int32Constant(16)), 0xFFFF)));
 17877 chunks[2] = SmiFromUint32(ReinterpretCast<Uint32T>(Word32And(high, 0xFFFF)));
 17879 Word32And(Word32Shr(high, Int32Constant(16)), 0xFFFF)));
 17880
 17881 // Args are: <bits 63-48>, <bits 47-32>, <bits 31-16>, <bits 15-0>, stream.
 17882 CallRuntime(Runtime::kDebugPrintFloat, NoContextConstant(), chunks[3],
 17883 chunks[2], chunks[1], chunks[0], SmiConstant(stream));
 17884}
17885
// Compile-time IntegerLiteral arithmetic helpers. The function names are on
// elided lines (17886, 17890, 17894); the bodies show addition, left shift,
// and bitwise-or over IntegerLiteral operands respectively.
 17887 const IntegerLiteral& lhs, const IntegerLiteral& rhs) {
 17888 return lhs + rhs;
 17889}
 17891 const IntegerLiteral& lhs, const IntegerLiteral& rhs) {
 17892 return lhs << rhs;
 17893}
 17895 const IntegerLiteral& lhs, const IntegerLiteral& rhs) {
 17896 return lhs | rhs;
 17897}
17898
// Stack-guard check: compares the stack pointer against the isolate's
// jslimit and calls Runtime::kStackGuard on the (deferred) slow path.
// NOTE(review): the signature line (17899) and the load producing
// |stack_limit| (17902-17903) are elided in this extraction.
 17900 Label ok(this), stack_check_interrupt(this, Label::kDeferred);
 17901
 17904 ExternalConstant(ExternalReference::address_of_jslimit(isolate()))));
 17905 TNode<BoolT> sp_within_limit = StackPointerGreaterThan(stack_limit);
 17906
 17907 Branch(sp_within_limit, &ok, &stack_check_interrupt);
 17908
 17909 BIND(&stack_check_interrupt);
 17910 CallRuntime(Runtime::kStackGuard, context);
 17911 Goto(&ok);
 17912
 17913 BIND(&ok);
 17914}
17915
// Call / tail-call wrappers around Runtime::kNewArray. Signature lines
// (17916-17917, 17926-17927, 17929) are elided in this extraction.
 17918 TNode<Object> new_target, TNode<Object> allocation_site) {
 17919 // Runtime_NewArray receives arguments in the JS order (to avoid unnecessary
 17920 // copy). Except the last two (new_target and allocation_site) which are add
 17921 // on top of the stack later.
 17922 return CallRuntime(Runtime::kNewArray, context, length, receiver, new_target,
 17923 allocation_site);
 17924}
 17925
// Tail-calling variant of the wrapper above.
 17928 TNode<Object> length,
 17930 TNode<Object> allocation_site) {
 17931 // Runtime_NewArray receives arguments in the JS order (to avoid unnecessary
 17932 // copy). Except the last two (new_target and allocation_site) which are add
 17933 // on top of the stack later.
 17934 return TailCallRuntime(Runtime::kNewArray, context, length, receiver,
 17935 new_target, allocation_site);
 17936}
17937
// Creates a JSArray of |length|: fast path allocates a PACKED_SMI_ELEMENTS
// array inline; lengths >= |limit| (or non-Smi lengths) fall back to
// Runtime::kNewArray via CallRuntimeNewArray.
// NOTE(review): the signature line (17938) and the lines defining |limit|
// and |native_context| (17944, 17946, 17958) are elided in this extraction.
 17939 TNode<Number> length) {
 17940 TVARIABLE(JSArray, array);
 17941 Label allocate_js_array(this);
 17942
 17943 Label done(this), next(this), runtime(this, Label::kDeferred);
 17945 CSA_DCHECK_BRANCH(this, ([=, this](Label* ok, Label* not_ok) {
 17947 Operation::kGreaterThanOrEqual, length,
 17948 SmiConstant(0), ok, not_ok);
 17949 }));
 17950 // This check also transitively covers the case where length is too big
 17951 // to be representable by a SMI and so is not usable with
 17952 // AllocateJSArray.
 17953 BranchIfNumberRelationalComparison(Operation::kGreaterThanOrEqual, length,
 17954 limit, &runtime, &next);
 17955
 17956 BIND(&runtime);
 17957 {
 17959 TNode<JSFunction> array_function =
 17960 CAST(LoadContextElement(native_context, Context::ARRAY_FUNCTION_INDEX));
 17961 array = CAST(CallRuntimeNewArray(context, array_function, length,
 17962 array_function, UndefinedConstant()));
 17963 Goto(&done);
 17964 }
 17965
 17966 BIND(&next);
 17967 TNode<Smi> length_smi = CAST(length);
 17968
 17969 TNode<Map> array_map = CAST(LoadContextElement(
 17970 context, Context::JS_ARRAY_PACKED_SMI_ELEMENTS_MAP_INDEX));
 17971
 17972 // TODO(delphick): Consider using
 17973 // AllocateUninitializedJSArrayWithElements to avoid initializing an
 17974 // array and then writing over it.
 17975 array = AllocateJSArray(PACKED_SMI_ELEMENTS, array_map, length_smi,
 17976 SmiConstant(0));
 17977 Goto(&done);
 17978
 17979 BIND(&done);
 17980 return array.value();
 17981}
17982
// Sets the "length" property of |array| to |length| via a strict-mode
// SetProperty. Signature line (17983) is elided in this extraction.
 17984 TNode<JSAny> array,
 17985 TNode<Number> length) {
 17986 SetPropertyStrict(context, array, CodeStubAssembler::LengthStringConstant(),
 17987 length);
 17988}
17989
// Refills the exhausted Math.random() cache by calling the C function
// ExternalReference::refill_math_random(); returns the new index.
// NOTE(review): the signature (17990-17991) and the line computing
// |isolate_ptr| (17996) are elided in this extraction.
 17992 // Cache exhausted, populate the cache. Return value is the new index.
 17993 const TNode<ExternalReference> refill_math_random =
 17994 ExternalConstant(ExternalReference::refill_math_random());
 17995 const TNode<ExternalReference> isolate_ptr =
 17997 MachineType type_tagged = MachineType::AnyTagged();
 17998 MachineType type_ptr = MachineType::Pointer();
 17999
 18000 return CAST(CallCFunction(refill_math_random, type_tagged,
 18001 std::make_pair(type_ptr, isolate_ptr),
 18002 std::make_pair(type_tagged, native_context)));
 18003}
18004
// Ensures |value| is a direct string with accessible character data, jumping
// to |fail| otherwise; returns |value| CAST to the target type. Signature
// line (18005) is elided in this extraction.
 18006 Label* fail) {
 18007 ToDirectStringAssembler to_direct(state(), CAST(value));
 18008 to_direct.TryToDirect(fail);
 18009 to_direct.PointerToData(fail);
 18010 return CAST(value);
 18011}
18012
// PrototypeCheckAssembler constructor: stores flags, the initial prototype
// map, and the property descriptors to validate. NOTE(review): lines 18013,
// 18016 and 18019 (incl. the native_context_ initializer) are elided here.
 18014 compiler::CodeAssemblerState* state, Flags flags,
 18015 TNode<NativeContext> native_context, TNode<Map> initial_prototype_map,
 18017 : CodeStubAssembler(state),
 18018 flags_(flags),
 18020 initial_prototype_map_(initial_prototype_map),
 18021 properties_(properties) {}
18022
// Branches to |if_unmodified| when |prototype| still looks pristine, else to
// |if_modified|. Fast path: map identity plus const-ness of the tracked
// descriptors; slow (deferred) path: per-property identity check against
// expected values from the native context.
// NOTE(review): the signature line (18023) and several guard lines (18034,
// 18038, 18043, 18076, 18084, 18109) are elided in this extraction, so the
// exact flag tests and the constness-decode expression are not visible.
 18024 Label* if_unmodified,
 18025 Label* if_modified) {
 18026 TNode<Map> prototype_map = LoadMap(prototype);
 18027 TNode<DescriptorArray> descriptors = LoadMapDescriptors(prototype_map);
 18028
 18029 // The continuation of a failed fast check: if property identity checks are
 18030 // enabled, we continue there (since they may still classify the prototype as
 18031 // fast), otherwise we bail out.
 18032 Label property_identity_check(this, Label::kDeferred);
 18033 Label* if_fast_check_failed =
 18035 ? if_modified
 18036 : &property_identity_check;
 18037
 18039 // A simple prototype map identity check. Note that map identity does not
 18040 // guarantee unmodified properties. It does guarantee that no new properties
 18041 // have been added, or old properties deleted.
 18042
 18044 if_fast_check_failed);
 18045
 18046 // We need to make sure that relevant properties in the prototype have
 18047 // not been tampered with. We do this by checking that their slots
 18048 // in the prototype's descriptor array are still marked as const.
 18049
 18050 TNode<Uint32T> combined_details;
 18051 for (int i = 0; i < properties_.length(); i++) {
 18052 // Assert the descriptor index is in-bounds.
 18053 int descriptor = properties_[i].descriptor_index;
 18054 CSA_DCHECK(this, Int32LessThan(Int32Constant(descriptor),
 18055 LoadNumberOfDescriptors(descriptors)));
 18056
 18057 // Assert that the name is correct. This essentially checks that
 18058 // the descriptor index corresponds to the insertion order in
 18059 // the bootstrapper.
 18060 CSA_DCHECK(
 18061 this,
 18062 TaggedEqual(LoadKeyByDescriptorEntry(descriptors, descriptor),
 18063 CodeAssembler::LoadRoot(properties_[i].name_root_index)));
 18064
 18065 TNode<Uint32T> details =
 18066 DescriptorArrayGetDetails(descriptors, Uint32Constant(descriptor));
 18067
 18068 if (i == 0) {
 18069 combined_details = details;
 18070 } else {
 18071 combined_details = Word32And(combined_details, details);
 18072 }
 18073 }
 18074
 18075 TNode<Uint32T> constness =
 18077
 18078 Branch(
 18079 Word32Equal(constness,
 18080 Int32Constant(static_cast<int>(PropertyConstness::kConst))),
 18081 if_unmodified, if_fast_check_failed);
 18082 }
 18083
 18085 // The above checks have failed, for whatever reason (maybe the prototype
 18086 // map has changed, or a property is no longer const). This block implements
 18087 // a more thorough check that can also accept maps which 1. do not have the
 18088 // initial map, 2. have mutable relevant properties, but 3. still match the
 18089 // expected value for all relevant properties.
 18090
 18091 BIND(&property_identity_check);
 18092
 18093 int max_descriptor_index = -1;
 18094 for (int i = 0; i < properties_.length(); i++) {
 18095 max_descriptor_index =
 18096 std::max(max_descriptor_index, properties_[i].descriptor_index);
 18097 }
 18098
 18099 // If the greatest descriptor index is out of bounds, the map cannot be
 18100 // fast.
 18101 GotoIfNot(Int32LessThan(Int32Constant(max_descriptor_index),
 18102 LoadNumberOfDescriptors(descriptors)),
 18103 if_modified);
 18104
 18105 // Logic below only handles maps with fast properties.
 18106 GotoIfMapHasSlowProperties(prototype_map, if_modified);
 18107
 18108 for (int i = 0; i < properties_.length(); i++) {
 18110 const int descriptor = p.descriptor_index;
 18111
 18112 // Check if the name is correct. This essentially checks that
 18113 // the descriptor index corresponds to the insertion order in
 18114 // the bootstrapper.
 18115 GotoIfNot(TaggedEqual(LoadKeyByDescriptorEntry(descriptors, descriptor),
 18116 CodeAssembler::LoadRoot(p.name_root_index)),
 18117 if_modified);
 18118
 18119 // Finally, check whether the actual value equals the expected value.
 18120 TNode<Uint32T> details =
 18121 DescriptorArrayGetDetails(descriptors, Uint32Constant(descriptor));
 18122 TVARIABLE(Uint32T, var_details, details);
 18123 TVARIABLE(Object, var_value);
 18124
 18125 const int key_index = DescriptorArray::ToKeyIndex(descriptor);
 18126 LoadPropertyFromFastObject(prototype, prototype_map, descriptors,
 18127 IntPtrConstant(key_index), &var_details,
 18128 &var_value);
 18129
 18130 TNode<Object> actual_value = var_value.value();
 18131 TNode<Object> expected_value =
 18132 LoadContextElement(native_context_, p.expected_value_context_index);
 18133 GotoIfNot(TaggedEqual(actual_value, expected_value), if_modified);
 18134 }
 18135
 18136 Goto(if_unmodified);
 18137 }
 18138}
18139
18140//
18141// Begin of SwissNameDictionary macros
18142//
18143
// File-local helpers for SwissNameDictionary meta-table access. The meta
// table stores each entry in 1, 2 or 4 bytes depending on capacity;
// MetaTableAccessor hides that width behind logical indices, and
// GenerateMetaTableAccess emits a 3-way branch over the possible widths.
// NOTE(review): a few lines are elided in this extraction (18179: write
// barrier mode for StoreToObject; 18192: the offset_to_data_minus_tag
// initializer; 18243/18248: the capacity thresholds for the small/medium
// branches).
 18144namespace {
 18145
 18146// Provides load and store functions that abstract over the details of accessing
 18147// the meta table in memory. Instead they allow using logical indices that are
 18148// independent from the underlying entry size in the meta table of a
 18149// SwissNameDictionary.
 18150class MetaTableAccessor {
 18151 public:
 18152 MetaTableAccessor(CodeStubAssembler& csa, MachineType mt)
 18153 : csa{csa}, mt{mt} {}
 18154
// Loads the logical entry at |index|, zero-extended to Uint32.
 18155 TNode<Uint32T> Load(TNode<ByteArray> meta_table, TNode<IntPtrT> index) {
 18156 TNode<IntPtrT> offset = OverallOffset(meta_table, index);
 18157
 18158 return csa.UncheckedCast<Uint32T>(
 18159 csa.LoadFromObject(mt, meta_table, offset));
 18160 }
 18161
 18162 TNode<Uint32T> Load(TNode<ByteArray> meta_table, int index) {
 18163 return Load(meta_table, csa.IntPtrConstant(index));
 18164 }
 18165
// Stores |data| into the logical entry at |index|; in debug builds checks
// that |data| fits the entry width.
 18166 void Store(TNode<ByteArray> meta_table, TNode<IntPtrT> index,
 18167 TNode<Uint32T> data) {
 18168 TNode<IntPtrT> offset = OverallOffset(meta_table, index);
 18169
 18170#ifdef DEBUG
 18171 int bits = mt.MemSize() * 8;
 18172 TNode<UintPtrT> max_value = csa.UintPtrConstant((1ULL << bits) - 1);
 18173
 18174 CSA_DCHECK(&csa, csa.UintPtrLessThanOrEqual(csa.ChangeUint32ToWord(data),
 18175 max_value));
 18176#endif
 18177
 18178 csa.StoreToObject(mt.representation(), meta_table, offset, data,
 18180 }
 18181
 18182 void Store(TNode<ByteArray> meta_table, int index, TNode<Uint32T> data) {
 18183 Store(meta_table, csa.IntPtrConstant(index), data);
 18184 }
 18185
 18186 private:
// Translates a logical index into a byte offset (minus tag) into the
// ByteArray, folding to a constant when |index| is known at compile time.
 18187 TNode<IntPtrT> OverallOffset(TNode<ByteArray> meta_table,
 18188 TNode<IntPtrT> index) {
 18189 // TODO(v8:11330): consider using ElementOffsetFromIndex().
 18190
 18191 int offset_to_data_minus_tag =
 18193
 18194 TNode<IntPtrT> overall_offset;
 18195 int size = mt.MemSize();
 18196 intptr_t constant;
 18197 if (csa.TryToIntPtrConstant(index, &constant)) {
 18198 intptr_t index_offset = constant * size;
 18199 overall_offset =
 18200 csa.IntPtrConstant(offset_to_data_minus_tag + index_offset);
 18201 } else {
 18202 TNode<IntPtrT> index_offset =
 18203 csa.IntPtrMul(index, csa.IntPtrConstant(size));
 18204 overall_offset = csa.IntPtrAdd(
 18205 csa.IntPtrConstant(offset_to_data_minus_tag), index_offset);
 18206 }
 18207
 18208#ifdef DEBUG
 18209 TNode<IntPtrT> byte_array_data_bytes =
 18210 csa.SmiToIntPtr(csa.LoadFixedArrayBaseLength(meta_table));
 18211 TNode<IntPtrT> max_allowed_offset = csa.IntPtrAdd(
 18212 byte_array_data_bytes, csa.IntPtrConstant(offset_to_data_minus_tag));
 18213 CSA_DCHECK(&csa, csa.UintPtrLessThan(overall_offset, max_allowed_offset));
 18214#endif
 18215
 18216 return overall_offset;
 18217 }
 18218
 18219 CodeStubAssembler& csa;
 18220 MachineType mt;
 18221};
 18222
 18223// Type of functions that given a MetaTableAccessor, use its load and store
 18224// functions to generate code for operating on the meta table.
 18225using MetaTableAccessFunction = std::function<void(MetaTableAccessor&)>;
 18226
 18227// Helper function for macros operating on the meta table of a
 18228// SwissNameDictionary. Given a MetaTableAccessFunction, generates branching
 18229// code and uses the builder to generate code for each of the three possible
 18230// sizes per entry a meta table can have.
 18231void GenerateMetaTableAccess(CodeStubAssembler* csa, TNode<IntPtrT> capacity,
 18232 MetaTableAccessFunction builder) {
 18233 MetaTableAccessor mta8 = MetaTableAccessor(*csa, MachineType::Uint8());
 18234 MetaTableAccessor mta16 = MetaTableAccessor(*csa, MachineType::Uint16());
 18235 MetaTableAccessor mta32 = MetaTableAccessor(*csa, MachineType::Uint32());
 18236
 18237 using Label = compiler::CodeAssemblerLabel;
 18238 Label small(csa), medium(csa), done(csa);
 18239
 18240 csa->GotoIf(
 18241 csa->IntPtrLessThanOrEqual(
 18242 capacity,
 18244 &small);
 18245 csa->GotoIf(
 18246 csa->IntPtrLessThanOrEqual(
 18247 capacity,
 18249 &medium);
 18250
 18251 builder(mta32);
 18252 csa->Goto(&done);
 18253
 18254 csa->Bind(&medium);
 18255 builder(mta16);
 18256 csa->Goto(&done);
 18257
 18258 csa->Bind(&small);
 18259 builder(mta8);
 18260 csa->Goto(&done);
 18261 csa->Bind(&done);
 18262}
 18263
 18264} // namespace
18265
// Reads the number-of-elements field from the dictionary's meta table.
// NOTE(review): the signature (18266-18267), the TVARIABLE declaring |nof|
// (18270), and the meta-table index constant (18273) are elided here.
 18268 TNode<ByteArray> meta_table = LoadSwissNameDictionaryMetaTable(table);
 18269
 18271 MetaTableAccessFunction builder = [&](MetaTableAccessor& mta) {
 18272 nof = mta.Load(meta_table,
 18274 };
 18275
 18276 GenerateMetaTableAccess(this, capacity, builder);
 18277 return ChangeInt32ToIntPtr(nof.value());
 18278}
18279
// Reads the number-of-deleted-elements field from the meta table.
// NOTE(review): signature and the |nod| TVARIABLE / index constant lines
// are elided in this extraction.
 18283 TNode<ByteArray> meta_table = LoadSwissNameDictionaryMetaTable(table);
 18284
 18286 MetaTableAccessFunction builder = [&](MetaTableAccessor& mta) {
 18287 nod =
 18288 mta.Load(meta_table,
 18290 };
 18291
 18292 GenerateMetaTableAccess(this, capacity, builder);
 18293 return ChangeInt32ToIntPtr(nod.value());
 18294}
18295
// Writes |entry| into the enum-order-to-entry mapping of the meta table at
// |enum_index|. NOTE(review): the signature (18296-18297) and the base
// offset added to |enum_index| (18301) are elided in this extraction.
 18298 TNode<IntPtrT> enum_index, TNode<Int32T> entry) {
 18299 TNode<ByteArray> meta_table = LoadSwissNameDictionaryMetaTable(table);
 18300 TNode<IntPtrT> meta_table_index = IntPtrAdd(
 18302 enum_index);
 18303
 18304 MetaTableAccessFunction builder = [&](MetaTableAccessor& mta) {
 18305 mta.Store(meta_table, meta_table_index, Unsigned(entry));
 18306 };
 18307
 18308 GenerateMetaTableAccess(this, capacity, builder);
 18309}
18310
// Increments the element count in the meta table, or jumps to |bailout| when
// nof + nod would reach |max_usable_capacity|. Returns the old used count.
// NOTE(review): the signature line (18311-18312) and the meta-table index
// constants (18319, 18322, 18326) are elided in this extraction.
 18313 TNode<ByteArray> meta_table, TNode<IntPtrT> capacity,
 18314 TNode<Uint32T> max_usable_capacity, Label* bailout) {
 18315 TVARIABLE(Uint32T, used_var, Uint32Constant(0));
 18316
 18317 MetaTableAccessFunction builder = [&](MetaTableAccessor& mta) {
 18318 TNode<Uint32T> nof = mta.Load(
 18320 TNode<Uint32T> nod =
 18321 mta.Load(meta_table,
 18323 TNode<Uint32T> used = Uint32Add(nof, nod);
 18324 GotoIf(Uint32GreaterThanOrEqual(used, max_usable_capacity), bailout);
 18325 TNode<Uint32T> inc_nof = Uint32Add(nof, Uint32Constant(1));
 18327 inc_nof);
 18328 used_var = used;
 18329 };
 18330
 18331 GenerateMetaTableAccess(this, capacity, builder);
 18332 return used_var.value();
 18333}
18334
// On deletion: decrements the element count, increments the deleted count in
// the meta table, and returns the new element count.
// NOTE(review): the signature line (18335) and the meta-table index
// constants (18341, 18344, 18349, 18352) are elided in this extraction.
 18336 TNode<ByteArray> meta_table, TNode<IntPtrT> capacity) {
 18337 TVARIABLE(Uint32T, new_nof_var, Uint32Constant(0));
 18338
 18339 MetaTableAccessFunction builder = [&](MetaTableAccessor& mta) {
 18340 TNode<Uint32T> nof = mta.Load(
 18342 TNode<Uint32T> nod =
 18343 mta.Load(meta_table,
 18345
 18346 TNode<Uint32T> new_nof = Uint32Sub(nof, Uint32Constant(1));
 18347 TNode<Uint32T> new_nod = Uint32Add(nod, Uint32Constant(1));
 18348
 18350 new_nof);
 18351 mta.Store(meta_table,
 18353 new_nod);
 18354
 18355 new_nof_var = new_nof;
 18356 };
 18357
 18358 GenerateMetaTableAccess(this, capacity, builder);
 18359 return new_nof_var.value();
 18360}
18361
// Allocates a SwissNameDictionary sized for |at_least_space_for| entries.
// NOTE(review): signature lines (18362, 18374) and the IntPtrMax/return
// lines (18368, 18371) are elided; 18371 presumably tail-calls
// AllocateSwissNameDictionaryWithCapacity — TODO confirm.
 18363 TNode<IntPtrT> at_least_space_for) {
 18364 // Note that as AllocateNameDictionary, we return a table with initial
 18365 // (non-zero) capacity even if |at_least_space_for| is 0.
 18366
 18367 TNode<IntPtrT> capacity =
 18369 SwissNameDictionaryCapacityFor(at_least_space_for));
 18370
 18372}
 18373
// Convenience overload taking a compile-time int.
 18375 int at_least_space_for) {
 18376 return AllocateSwissNameDictionary(IntPtrConstant(at_least_space_for));
 18377}
18378
// Allocates and fully initializes a SwissNameDictionary of exactly
// |capacity| buckets: size check, meta-table ByteArray allocation (done
// first — see comment below re GC), header init, ctrl-table fill with
// kEmpty, and data-table fill with the-hole.
// NOTE(review): several lines are elided in this extraction, including the
// signature (18378-18380), parts of the initial CSA_DCHECKs (18386,
// 18388-18389), the out-of-memory threshold (18398) and runtime args
// (18403), the Allocate() producing |table| (18425-18426), the hash
// initializer (18435-18436), the meta-table index constants (18442, 18445),
// the ctrl-table size expression (18457), and the BuildFastLoop store
// (18477, 18480, 18483) and data-table entry size (18492).
 18381 TNode<IntPtrT> capacity) {
 18382 Comment("[ AllocateSwissNameDictionaryWithCapacity");
 18383 CSA_DCHECK(this, WordIsPowerOfTwo(capacity));
 18384 CSA_DCHECK(this, UintPtrGreaterThanOrEqual(
 18385 capacity,
 18387 CSA_DCHECK(this,
 18390
 18391 Comment("Size check.");
 18392 intptr_t capacity_constant;
 18393 if (ToParameterConstant(capacity, &capacity_constant)) {
 18394 CHECK_LE(capacity_constant, SwissNameDictionary::MaxCapacity());
 18395 } else {
 18396 Label if_out_of_memory(this, Label::kDeferred), next(this);
 18397 Branch(UintPtrGreaterThan(
 18399 &if_out_of_memory, &next);
 18400
 18401 BIND(&if_out_of_memory);
 18402 CallRuntime(Runtime::kFatalProcessOutOfMemoryInAllocateRaw,
 18404 Unreachable();
 18405
 18406 BIND(&next);
 18407 }
 18408
 18409 // TODO(v8:11330) Consider adding dedicated handling for constant capacties,
 18410 // similar to AllocateOrderedHashTableWithCapacity.
 18411
 18412 // We must allocate the ByteArray first. Otherwise, allocating the ByteArray
 18413 // may trigger GC, which may try to verify the un-initialized
 18414 // SwissNameDictionary.
 18415 Comment("Meta table allocation.");
 18416 TNode<IntPtrT> meta_table_payload_size =
 18417 SwissNameDictionaryMetaTableSizeFor(capacity);
 18418
 18419 TNode<ByteArray> meta_table =
 18420 AllocateNonEmptyByteArray(Unsigned(meta_table_payload_size));
 18421
 18422 Comment("SwissNameDictionary allocation.");
 18423 TNode<IntPtrT> total_size = SwissNameDictionarySizeFor(capacity);
 18424
 18427
 18428 StoreMapNoWriteBarrier(table, RootIndex::kSwissNameDictionaryMap);
 18429
 18430 Comment(
 18431 "Initialize the hash, capacity, meta table pointer, and number of "
 18432 "(deleted) elements.");
 18433
 18434 StoreSwissNameDictionaryHash(table,
 18437 StoreSwissNameDictionaryMetaTable(table, meta_table);
 18438
 18439 // Set present and deleted element count without doing branching needed for
 18440 // meta table access twice.
 18441 MetaTableAccessFunction builder = [&](MetaTableAccessor& mta) {
 18443 Uint32Constant(0));
 18444 mta.Store(meta_table,
 18446 Uint32Constant(0));
 18447 };
 18448 GenerateMetaTableAccess(this, capacity, builder);
 18449
 18450 Comment("Initialize the ctrl table.");
 18451
 18452 TNode<IntPtrT> ctrl_table_start_offset_minus_tag =
 18453 SwissNameDictionaryCtrlTableStartOffsetMT(capacity);
 18454
 18455 TNode<IntPtrT> table_address_with_tag = BitcastTaggedToWord(table);
 18456 TNode<IntPtrT> ctrl_table_size_bytes =
 18458 TNode<IntPtrT> ctrl_table_start_ptr =
 18459 IntPtrAdd(table_address_with_tag, ctrl_table_start_offset_minus_tag);
 18460 TNode<IntPtrT> ctrl_table_end_ptr =
 18461 IntPtrAdd(ctrl_table_start_ptr, ctrl_table_size_bytes);
 18462
 18463 // |ctrl_table_size_bytes| (= capacity + kGroupWidth) is divisible by four:
 18464 static_assert(SwissNameDictionary::kGroupWidth % 4 == 0);
 18465 static_assert(SwissNameDictionary::kInitialCapacity % 4 == 0);
 18466
 18467 // TODO(v8:11330) For all capacities except 4, we know that
 18468 // |ctrl_table_size_bytes| is divisible by 8. Consider initializing the ctrl
 18469 // table with WordTs in those cases. Alternatively, always initialize as many
 18470 // bytes as possbible with WordT and then, if necessary, the remaining 4 bytes
 18471 // with Word32T.
 18472
 18473 constexpr uint8_t kEmpty = swiss_table::Ctrl::kEmpty;
 18474 constexpr uint32_t kEmpty32 =
 18475 (kEmpty << 24) | (kEmpty << 16) | (kEmpty << 8) | kEmpty;
 18476 TNode<Int32T> empty32 = Int32Constant(kEmpty32);
 18478 ctrl_table_start_ptr, ctrl_table_end_ptr,
 18479 [=, this](TNode<IntPtrT> current) {
 18481 empty32);
 18482 },
 18484
 18485 Comment("Initialize the data table.");
 18486
 18487 TNode<IntPtrT> data_table_start_offset_minus_tag =
 18488 SwissNameDictionaryDataTableStartOffsetMT();
 18489 TNode<IntPtrT> data_table_ptr =
 18490 IntPtrAdd(table_address_with_tag, data_table_start_offset_minus_tag);
 18491 TNode<IntPtrT> data_table_size = IntPtrMul(
 18493 capacity);
 18494
 18495 StoreFieldsNoWriteBarrier(data_table_ptr,
 18496 IntPtrAdd(data_table_ptr, data_table_size),
 18497 TheHoleConstant());
 18498
 18499 Comment("AllocateSwissNameDictionaryWithCapacity ]");
 18500
 18501 return table;
 18502}
18503
// Deep-copies |original| into a freshly allocated SwissNameDictionary of the
// same capacity: header fields, ctrl table and meta table via libc memcpy,
// data table via tagged field copies (write-barrier safe), and the
// PropertyDetails bytes only for buckets whose ctrl byte marks a full entry
// (empty/deleted buckets may have uninitialized details — see TODO below).
// NOTE(review): elided lines include the signature (18504), the Allocate()
// producing |table| (18524-18525), the capacity store (18532), the memcpy
// ExternalConstant declaration (18537), the ctrl-table size expression
// (18547), loop/memcpy boilerplate (18556, 18566, 18568, 18571, 18577,
// 18592, 18595, 18597), and inside the details loop the ctrl-byte test
// (18632), the store (18637, 18639), and the stride constant (18645, 18647).
 18505 TNode<SwissNameDictionary> original) {
 18506 Comment("[ CopySwissNameDictionary");
 18507
 18508 TNode<IntPtrT> capacity =
 18509 Signed(ChangeUint32ToWord(LoadSwissNameDictionaryCapacity(original)));
 18510
 18511 // We must allocate the ByteArray first. Otherwise, allocating the ByteArray
 18512 // may trigger GC, which may try to verify the un-initialized
 18513 // SwissNameDictionary.
 18514 Comment("Meta table allocation.");
 18515 TNode<IntPtrT> meta_table_payload_size =
 18516 SwissNameDictionaryMetaTableSizeFor(capacity);
 18517
 18518 TNode<ByteArray> meta_table =
 18519 AllocateNonEmptyByteArray(Unsigned(meta_table_payload_size));
 18520
 18521 Comment("SwissNameDictionary allocation.");
 18522 TNode<IntPtrT> total_size = SwissNameDictionarySizeFor(capacity);
 18523
 18526
 18527 StoreMapNoWriteBarrier(table, RootIndex::kSwissNameDictionaryMap);
 18528
 18529 Comment("Copy the hash and capacity.");
 18530
 18531 StoreSwissNameDictionaryHash(table, LoadSwissNameDictionaryHash(original));
 18533 StoreSwissNameDictionaryMetaTable(table, meta_table);
 18534 // Not setting up number of (deleted elements), copying whole meta table
 18535 // instead.
 18536
 18538 ExternalConstant(ExternalReference::libc_memcpy_function());
 18539
 18540 TNode<IntPtrT> old_table_address_with_tag = BitcastTaggedToWord(original);
 18541 TNode<IntPtrT> new_table_address_with_tag = BitcastTaggedToWord(table);
 18542
 18543 TNode<IntPtrT> ctrl_table_start_offset_minus_tag =
 18544 SwissNameDictionaryCtrlTableStartOffsetMT(capacity);
 18545
 18546 TNode<IntPtrT> ctrl_table_size_bytes =
 18548
 18549 Comment("Copy the ctrl table.");
 18550 {
 18551 TNode<IntPtrT> old_ctrl_table_start_ptr = IntPtrAdd(
 18552 old_table_address_with_tag, ctrl_table_start_offset_minus_tag);
 18553 TNode<IntPtrT> new_ctrl_table_start_ptr = IntPtrAdd(
 18554 new_table_address_with_tag, ctrl_table_start_offset_minus_tag);
 18555
 18557 memcpy, MachineType::Pointer(),
 18558 std::make_pair(MachineType::Pointer(), new_ctrl_table_start_ptr),
 18559 std::make_pair(MachineType::Pointer(), old_ctrl_table_start_ptr),
 18560 std::make_pair(MachineType::UintPtr(), ctrl_table_size_bytes));
 18561 }
 18562
 18563 Comment("Copy the data table.");
 18564 {
 18565 TNode<IntPtrT> start_offset =
 18567 TNode<IntPtrT> data_table_size = IntPtrMul(
 18569 capacity);
 18570
 18572 start_offset, IntPtrAdd(start_offset, data_table_size),
 18573 [=, this](TNode<IntPtrT> offset) {
 18574 TNode<Object> table_field = LoadObjectField(original, offset);
 18575 StoreObjectField(table, offset, table_field);
 18576 },
 18578 }
 18579
 18580 Comment("Copy the meta table");
 18581 {
 18582 TNode<IntPtrT> old_meta_table_address_with_tag =
 18583 BitcastTaggedToWord(LoadSwissNameDictionaryMetaTable(original));
 18584 TNode<IntPtrT> new_meta_table_address_with_tag =
 18585 BitcastTaggedToWord(meta_table);
 18586
 18587 TNode<IntPtrT> meta_table_size =
 18588 SwissNameDictionaryMetaTableSizeFor(capacity);
 18589
 18590 TNode<IntPtrT> old_data_start = IntPtrAdd(
 18591 old_meta_table_address_with_tag,
 18593 TNode<IntPtrT> new_data_start = IntPtrAdd(
 18594 new_meta_table_address_with_tag,
 18596
 18598 std::make_pair(MachineType::Pointer(), new_data_start),
 18599 std::make_pair(MachineType::Pointer(), old_data_start),
 18600 std::make_pair(MachineType::UintPtr(), meta_table_size));
 18601 }
 18602
 18603 Comment("Copy the PropertyDetails table");
 18604 {
 18605 TNode<IntPtrT> property_details_start_offset_minus_tag =
 18607 IntPtrConstant(0));
 18608
 18609 // Offset to property details entry
 18610 TVARIABLE(IntPtrT, details_table_offset_minus_tag,
 18611 property_details_start_offset_minus_tag);
 18612
 18613 TNode<IntPtrT> start = ctrl_table_start_offset_minus_tag;
 18614
 18615 VariableList in_loop_variables({&details_table_offset_minus_tag}, zone());
 18617 in_loop_variables, start, IntPtrAdd(start, ctrl_table_size_bytes),
 18618 [&](TNode<IntPtrT> ctrl_table_offset) {
 18619 TNode<Uint8T> ctrl = Load<Uint8T>(original, ctrl_table_offset);
 18620
 18621 // TODO(v8:11330) Entries in the PropertyDetails table may be
 18622 // uninitialized if the corresponding buckets in the data/ctrl table
 18623 // are empty. Therefore, to avoid accessing un-initialized memory
 18624 // here, we need to check the ctrl table to determine whether we
 18625 // should copy a certain PropertyDetails entry or not.
 18626 // TODO(v8:11330) If this function becomes performance-critical, we
 18627 // may consider always initializing the PropertyDetails table entirely
 18628 // during allocation, to avoid the branching during copying.
 18629 Label done(this);
 18630 // |kNotFullMask| catches kEmpty and kDeleted, both of which indicate
 18631 // entries that we don't want to copy the PropertyDetails for.
 18633
 18634 TNode<Uint8T> details =
 18635 Load<Uint8T>(original, details_table_offset_minus_tag.value());
 18636
 18638 details_table_offset_minus_tag.value(), details,
 18640 Goto(&done);
 18641 BIND(&done);
 18642
 18643 details_table_offset_minus_tag =
 18644 IntPtrAdd(details_table_offset_minus_tag.value(),
 18646 },
 18648 }
 18649
 18650 Comment("CopySwissNameDictionary ]");
 18651
 18652 return table;
 18653}
18654
// Computes the untagged byte offset of data-table slot |index| (plus
// |field_index| tagged fields, e.g. 0 = key, 1 = value) within |dict|.
// NOTE(review): the signature first line (18655) and the per-entry size
// multiplier (18661) are elided in this extraction.
 18656 TNode<SwissNameDictionary> dict, TNode<IntPtrT> index, int field_index) {
 18657 TNode<IntPtrT> data_table_start = SwissNameDictionaryDataTableStartOffsetMT();
 18658
 18659 TNode<IntPtrT> offset_within_data_table = IntPtrMul(
 18660 index,
 18662
 18663 if (field_index != 0) {
 18664 offset_within_data_table = IntPtrAdd(
 18665 offset_within_data_table, IntPtrConstant(field_index * kTaggedSize));
 18666 }
 18667
 18668 return IntPtrAdd(data_table_start, offset_within_data_table);
 18669}
18670
// Computes the untagged byte offset of the PropertyDetails byte for |index|:
// details table start = data table start + data table size + ctrl table
// size (|gw|, defined on an elided line). Debug-checked against the Torque
// field slice. NOTE(review): elided lines include the signature
// (18671-18673), the |gw| group-width definition (18681), part of the size
// expression (18684-18685), and the tag adjustment constant (18697).
 18674 TNode<IntPtrT> index) {
 18675 CSA_DCHECK(this,
 18676 WordEqual(capacity, ChangeUint32ToWord(
 18677 LoadSwissNameDictionaryCapacity(dict))));
 18678
 18679 TNode<IntPtrT> data_table_start = SwissNameDictionaryDataTableStartOffsetMT();
 18680
 18682 TNode<IntPtrT> data_and_ctrl_table_size = IntPtrAdd(
 18683 IntPtrMul(capacity,
 18686 kTaggedSize)),
 18687 gw);
 18688
 18689 TNode<IntPtrT> property_details_table_start =
 18690 IntPtrAdd(data_table_start, data_and_ctrl_table_size);
 18691
 18692 CSA_DCHECK(
 18693 this,
 18694 WordEqual(FieldSliceSwissNameDictionaryPropertyDetailsTable(dict).offset,
 18695 // Our calculation subtracted the tag, Torque's offset didn't.
 18696 IntPtrAdd(property_details_table_start,
 18698
 18699 TNode<IntPtrT> offset_within_details_table = index;
 18700 return IntPtrAdd(property_details_table_start, offset_within_details_table);
 18701}
18702
// NOTE(review): this definition is almost entirely elided in this
// extraction; only its tail survives. It loads a tagged value from |dict|
// at a precomputed untagged offset (|offset_minus_tag| is computed on an
// elided line) — presumably a key or value accessor; verify in full source.
 18708
 18713
 18714 // TODO(v8:11330) Consider using LoadObjectField here.
 18715 return CAST(Load<Object>(dict, offset_minus_tag));
 18716}
18717
// Loads the one-byte PropertyDetails for |entry|. The offset computation
// call (lines 18719, 18722) is partially elided in this extraction.
 18720 TNode<IntPtrT> entry) {
 18721 TNode<IntPtrT> offset_minus_tag =
 18723 entry);
 18724 // TODO(v8:11330) Consider using LoadObjectField here.
 18725 return Load<Uint8T>(table, offset_minus_tag);
 18726}
18727
// Stores the one-byte PropertyDetails |details| for |entry|. The offset
// computation and the StoreToObject write-barrier-mode argument (lines
// 18732, 18737) are elided in this extraction.
 18730 TNode<IntPtrT> entry, TNode<Uint8T> details) {
 18731 TNode<IntPtrT> offset_minus_tag =
 18733 entry);
 18734
 18735 // TODO(v8:11330) Consider using StoreObjectField here.
 18736 StoreToObject(MachineRepresentation::kWord8, table, offset_minus_tag, details,
 18738}
18739
// Stores |key| and |value| into adjacent tagged data-table slots (value
// lives kTaggedSize bytes after key). NOTE(review): the signature
// (18740-18741), a CSA_DCHECK (18743-18744), the key offset computation
// (18748-18749) and the write-barrier-mode arguments (18751, 18756) are
// elided in this extraction.
 18742 TNode<Object> value) {
 18745
 18746 // TODO(v8:11330) Consider using StoreObjectField here.
 18747 TNode<IntPtrT> key_offset_minus_tag =
 18750 StoreToObject(MachineRepresentation::kTagged, dict, key_offset_minus_tag, key,
 18752
 18753 TNode<IntPtrT> value_offset_minus_tag =
 18754 IntPtrAdd(key_offset_minus_tag, IntPtrConstant(kTaggedSize));
 18755 StoreToObject(MachineRepresentation::kTagged, dict, value_offset_minus_tag,
 18757}
18758
// Loads one 64-bit group of ctrl bytes from |address|; on big-endian targets
// the bytes are reversed in software so callers see little-endian layout.
// NOTE(review): the signature (18759), the load producing |data|
// (18761-18762) and the |result| accumulator declaration (18773) are elided
// in this extraction.
 18760 TNode<IntPtrT> address) {
 18763
 18764#ifdef V8_TARGET_LITTLE_ENDIAN
 18765 return data;
 18766#else
 18767 // Reverse byte order.
 18768 // TODO(v8:11330) Doing this without using dedicated instructions (which we
 18769 // don't have access to here) will destroy any performance benefit Swiss
 18770 // Tables have. So we just support this so that we don't have to disable the
 18771 // test suite for SwissNameDictionary on big endian platforms.
 18772
 18774 constexpr int count = sizeof(uint64_t);
 18775 for (int i = 0; i < count; ++i) {
 18776 int src_offset = i * 8;
 18777 int dest_offset = (count - i - 1) * 8;
 18778
 18779 TNode<Uint64T> mask = Uint64Constant(0xffULL << src_offset);
 18780 TNode<Uint64T> src_data = Word64And(data, mask);
 18781
 18782 TNode<Uint64T> shifted =
 18783 src_offset < dest_offset
 18784 ? Word64Shl(src_data, Uint64Constant(dest_offset - src_offset))
 18785 : Word64Shr(src_data, Uint64Constant(src_offset - dest_offset));
 18786 result = Unsigned(Word64Or(result, shifted));
 18787 }
 18788 return result;
 18789#endif
 18790}
18791
// Writes ctrl byte |ctrl| for |entry| and mirrors it into the cloned group
// region at the end of the ctrl table, so that out-of-bounds group reads
// wrap correctly (see SwissNameDictionary::SetCtrl).
// NOTE(review): elided lines include the signature (18792-18793), the |one|
// and |group_width| constants (18800, 18812), and the write-barrier-mode
// arguments of both StoreToObject calls (18809, 18834).
 18794 TNode<IntPtrT> entry, TNode<Uint8T> ctrl) {
 18795 CSA_DCHECK(this,
 18796 WordEqual(capacity, ChangeUint32ToWord(
 18797 LoadSwissNameDictionaryCapacity(table))));
 18798 CSA_DCHECK(this, UintPtrLessThan(entry, capacity));
 18799
 18801 TNode<IntPtrT> offset = SwissNameDictionaryCtrlTableStartOffsetMT(capacity);
 18802
 18803 CSA_DCHECK(this,
 18804 WordEqual(FieldSliceSwissNameDictionaryCtrlTable(table).offset,
 18805 IntPtrAdd(offset, one)));
 18806
 18807 TNode<IntPtrT> offset_entry = IntPtrAdd(offset, entry);
 18808 StoreToObject(MachineRepresentation::kWord8, table, offset_entry, ctrl,
 18810
 18811 TNode<IntPtrT> mask = IntPtrSub(capacity, one);
 18813
 18814 // See SwissNameDictionary::SetCtrl for description of what's going on here.
 18815
 18816 // ((entry - Group::kWidth) & mask) + 1
 18817 TNode<IntPtrT> copy_entry_lhs =
 18818 IntPtrAdd(WordAnd(IntPtrSub(entry, group_width), mask), one);
 18819 // ((Group::kWidth - 1) & mask)
 18820 TNode<IntPtrT> copy_entry_rhs = WordAnd(IntPtrSub(group_width, one), mask);
 18821 TNode<IntPtrT> copy_entry = IntPtrAdd(copy_entry_lhs, copy_entry_rhs);
 18822 TNode<IntPtrT> offset_copy_entry = IntPtrAdd(offset, copy_entry);
 18823
 18824 // |entry| < |kGroupWidth| implies |copy_entry| == |capacity| + |entry|
 18825 CSA_DCHECK(this, Word32Or(UintPtrGreaterThanOrEqual(entry, group_width),
 18826 WordEqual(copy_entry, IntPtrAdd(capacity, entry))));
 18827
 18828 // |entry| >= |kGroupWidth| implies |copy_entry| == |entry|
 18829 CSA_DCHECK(this, Word32Or(UintPtrLessThan(entry, group_width),
 18830 WordEqual(copy_entry, entry)));
 18831
 18832 // TODO(v8:11330): consider using StoreObjectFieldNoWriteBarrier here.
 18833 StoreToObject(MachineRepresentation::kWord8, table, offset_copy_entry, ctrl,
 18835}
18836
18839 TVariable<IntPtrT>* var_found_entry, Label* not_found) {
18841 SwissNameDictionaryFindEntrySIMD(table, key, found, var_found_entry,
18842 not_found);
18843 } else {
18844 SwissNameDictionaryFindEntryPortable(table, key, found, var_found_entry,
18845 not_found);
18846 }
18847}
18848
18851 TNode<Object> value,
18852 TNode<Uint8T> property_details,
18853 Label* needs_resize) {
18855 SwissNameDictionaryAddSIMD(table, key, value, property_details,
18856 needs_resize);
18857 } else {
18858 SwissNameDictionaryAddPortable(table, key, value, property_details,
18859 needs_resize);
18860 }
18861}
18862
18864 TNode<Context> context, TVariable<Object>* var_shared_value) {
18865 // The barrier ensures that the value can be shared across Isolates.
18866 // The fast paths should be kept in sync with Object::Share.
18867
18868 TNode<Object> value = var_shared_value->value();
18869 Label check_in_shared_heap(this), slow(this), skip_barrier(this), done(this);
18870
18871 // Fast path: Smis are trivially shared.
18872 GotoIf(TaggedIsSmi(value), &done);
18873 TNode<IntPtrT> page_flags = LoadMemoryChunkFlags(CAST(value));
18876 IntPtrConstant(0)),
18877 &skip_barrier);
18878
18879 // Fast path: Check if the HeapObject is already shared.
18880 TNode<Uint16T> value_instance_type =
18882 GotoIf(IsSharedStringInstanceType(value_instance_type), &skip_barrier);
18884 &skip_barrier);
18885 GotoIf(IsHeapNumberInstanceType(value_instance_type), &check_in_shared_heap);
18886 Goto(&slow);
18887
18888 BIND(&check_in_shared_heap);
18889 {
18891 WordAnd(page_flags,
18893 IntPtrConstant(0)),
18894 &skip_barrier, &slow);
18895 }
18896
18897 // Slow path: Call out to runtime to share primitives and to throw on
18898 // non-shared JS objects.
18899 BIND(&slow);
18900 {
18901 *var_shared_value =
18902 CallRuntime(Runtime::kSharedValueBarrierSlow, context, value);
18903 Goto(&skip_barrier);
18904 }
18905
18906 BIND(&skip_barrier);
18907 {
18908 CSA_DCHECK(
18909 this,
18911 WordAnd(LoadMemoryChunkFlags(CAST(var_shared_value->value())),
18914 IntPtrConstant(0)));
18915 Goto(&done);
18916 }
18917
18918 BIND(&done);
18919}
18920
18923 Label empty(this), nonempty(this), done(this);
18924
18925 Branch(SmiEqual(capacity, SmiConstant(0)), &empty, &nonempty);
18926
18927 BIND(&nonempty);
18928 {
18929 CSA_DCHECK(this, SmiGreaterThan(capacity, SmiConstant(0)));
18930
18931 intptr_t capacity_constant;
18932 if (ToParameterConstant(capacity, &capacity_constant)) {
18933 CHECK_LE(capacity_constant, ArrayList::kMaxCapacity);
18934 } else {
18935 Label if_out_of_memory(this, Label::kDeferred), next(this);
18936 Branch(SmiGreaterThan(capacity, SmiConstant(ArrayList::kMaxCapacity)),
18937 &if_out_of_memory, &next);
18938
18939 BIND(&if_out_of_memory);
18940 CallRuntime(Runtime::kFatalProcessOutOfMemoryInvalidArrayLength,
18942 Unreachable();
18943
18944 BIND(&next);
18945 }
18946
18949 TNode<HeapObject> array = Allocate(total_size);
18950 RootIndex map_index = RootIndex::kArrayListMap;
18952 StoreMapNoWriteBarrier(array, map_index);
18953 StoreObjectFieldNoWriteBarrier(array, offsetof(ArrayList, capacity_),
18954 capacity);
18956 SmiConstant(0));
18957
18958 TNode<IntPtrT> offset_of_first_element =
18961 IntPtrConstant(0), SmiUntag(capacity),
18962 [=, this](TNode<IntPtrT> index) {
18964 IntPtrAdd(TimesTaggedSize(index), offset_of_first_element);
18965 StoreObjectFieldNoWriteBarrier(array, offset, UndefinedConstant());
18966 },
18968
18970
18971 Goto(&done);
18972 }
18973
18974 BIND(&empty);
18975 {
18976 result = EmptyArrayListConstant();
18977 Goto(&done);
18978 }
18979
18980 BIND(&done);
18981 return result.value();
18982}
18983
18985 TNode<Smi> length) {
18987 TNode<Smi> capacity = LoadFixedArrayBaseLength(array);
18988 TNode<Smi> requested_capacity = length;
18989
18990 Label done(this);
18991 TVARIABLE(ArrayList, result_array, array);
18992
18993 GotoIf(SmiGreaterThanOrEqual(capacity, requested_capacity), &done);
18994
18995 // new_capacity = new_length;
18996 // new_capacity = capacity + max(capacity / 2, 2);
18997 //
18998 // Ensure calculation matches ArrayList::EnsureSpace.
18999 TNode<Smi> new_capacity = TrySmiAdd(
19000 requested_capacity, SmiMax(SmiShr(requested_capacity, 1), SmiConstant(2)),
19001 &overflow);
19002 TNode<ArrayList> new_array = AllocateArrayList(new_capacity);
19003 TNode<Smi> array_length = ArrayListGetLength(array);
19004 result_array = new_array;
19005 GotoIf(SmiEqual(array_length, SmiConstant(0)), &done);
19006 StoreObjectFieldNoWriteBarrier(new_array, offsetof(ArrayList, length_),
19007 array_length);
19008 CopyRange(new_array, ArrayList::OffsetOfElementAt(0), array,
19009 ArrayList::OffsetOfElementAt(0), SmiUntag(array_length));
19010 Goto(&done);
19011
19012 BIND(&overflow);
19013 CallRuntime(Runtime::kFatalInvalidSize, NoContextConstant());
19014 Unreachable();
19015
19016 BIND(&done);
19017 return result_array.value();
19018}
19019
19021 TNode<Object> object) {
19022 TNode<Smi> length = ArrayListGetLength(array);
19023 TNode<Smi> new_length = SmiAdd(length, SmiConstant(1));
19024 TNode<ArrayList> array_with_space = ArrayListEnsureSpace(array, new_length);
19025
19026 CSA_DCHECK(this, SmiEqual(ArrayListGetLength(array_with_space), length));
19027
19028 ArrayListSet(array_with_space, length, object);
19029 ArrayListSetLength(array_with_space, new_length);
19030
19031 return array_with_space;
19032}
19033
19035 TNode<Object> object) {
19036 UnsafeStoreArrayElement(array, index, object);
19037}
19038
19042
19044 TNode<Smi> length) {
19045 StoreObjectField(array, offsetof(ArrayList, length_), length);
19046}
19047
19056
19058 TNode<IntPtrT> cell;
19060 GetMarkBit(BitcastTaggedToWordForTagAndSmiBits(object), &cell, &mask);
19061 // Marked only requires checking a single bit here.
19063}
19064
19068 TNode<IntPtrT> bitmap = IntPtrAdd(
19070
19071 {
19072 // Temp variable to calculate cell offset in bitmap.
19073 TNode<WordT> r0;
19076 r0 = WordShr(object, IntPtrConstant(shift));
19077 r0 = WordAnd(
19078 r0,
19081 *cell = IntPtrAdd(bitmap, Signed(r0));
19082 }
19083 {
19084 // Temp variable to calculate bit offset in cell.
19085 TNode<WordT> r1;
19086 r1 = WordShr(object, IntPtrConstant(kTaggedSizeLog2));
19087 r1 =
19089 // It seems that LSB(e.g. cl) is automatically used, so no manual masking
19090 // is needed. Uncomment the following line otherwise.
19091 // WordAnd(r1, IntPtrConstant((1 << kBitsPerByte) - 1)));
19092 *mask = WordShl(IntPtrConstant(1), r1);
19093 }
19094}
19095
19096#undef CSA_DCHECK_BRANCH
19097
19099
19100} // namespace internal
19101} // namespace v8
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype)
#define one
#define BIND(label)
#define TVARIABLE(...)
#define CSA_SLOW_DCHECK(csa,...)
#define CSA_SBXCHECK(csa,...)
#define CSA_DCHECK(csa,...)
#define CSA_CHECK(csa, x)
int16_t parameter_count
Definition builtins.cc:67
union v8::internal::@341::BuiltinMetadata::KindSpecificData data
Builtins::Kind kind
Definition builtins.cc:40
#define BUILTIN_CODE(isolate, name)
Definition builtins.h:45
PropertyT * setter
PropertyT * getter
static constexpr U kMax
Definition bit-field.h:44
static constexpr U encode(T value)
Definition bit-field.h:55
static constexpr U kMask
Definition bit-field.h:41
static constexpr int kShift
Definition bit-field.h:39
constexpr UnderlyingType & value() &
constexpr T * begin() const
Definition vector.h:96
static const uint32_t kMaxLength
Definition bigint.h:106
static constexpr Builtin OrdinaryToPrimitive(OrdinaryToPrimitiveHint hint)
static constexpr int kBuiltinCount
Definition builtins.h:105
static constexpr Builtin NonPrimitiveToPrimitive(ToPrimitiveHint hint=ToPrimitiveHint::kDefault)
TNode< IntPtrT > GetLengthWithoutReceiver() const
TNode< JSAny > GetOptionalArgumentValue(TNode< IntPtrT > index, TNode< JSAny > default_value)
TNode< JSAny > AtIndex(TNode< IntPtrT > index) const
void SetReceiver(TNode< JSAny > object) const
TNode< IntPtrT > GetLengthWithReceiver() const
std::function< void(TNode< JSAny > arg)> ForEachBodyFunction
void ForEach(const ForEachBodyFunction &body, TNode< IntPtrT > first={}, TNode< IntPtrT > last={}) const
void PopAndReturn(TNode< JSAny > value)
void SetArgumentValue(TNode< IntPtrT > index, TNode< JSAny > value)
TNode< RawPtrT > AtIndexPtr(TNode< IntPtrT > index) const
void GotoIfMapHasSlowProperties(TNode< Map > map, Label *if_slow)
TNode< CollectionType > AllocateOrderedHashTable(TNode< IntPtrT > capacity)
TNode< String > AllocateSlicedOneByteString(TNode< Uint32T > length, TNode< String > parent, TNode< Smi > offset)
TNode< IntPtrT > GetFixedArrayAllocationSize(TNode< TIndex > element_count, ElementsKind kind)
void StoreIndirectPointerFieldNoWriteBarrier(TNode< HeapObject > object, int offset, IndirectPointerTag tag, TNode< ExposedTrustedObject > value)
TNode< Object > LoadProtectedPointerField(TNode< TrustedObject > object, TNode< IntPtrT > offset)
TNode< BoolT > IsFunctionInstanceType(TNode< Int32T > instance_type)
TNode< IntPtrT > IntPtrRoundUpToPowerOfTwo32(TNode< IntPtrT > value)
std::function< TNode< T >()> NodeGenerator
TNode< IntPtrT > MemoryChunkFromAddress(TNode< IntPtrT > address)
TNode< Float64T > Float64RoundToEven(TNode< Float64T > x)
TNode< Number > SmiMod(TNode< Smi > a, TNode< Smi > b)
TNode< BoolT > IsFixedArrayWithKindOrEmpty(TNode< FixedArrayBase > object, ElementsKind kind)
void GotoIfNumberGreaterThanOrEqual(TNode< Number > left, TNode< Number > right, Label *if_false)
TNode< BoolT > IsUniqueNameNoIndex(TNode< HeapObject > object)
TNode< BoolT > IsJSArrayInstanceType(TNode< Int32T > instance_type)
TNode< BoolT > IsJSObject(TNode< HeapObject > object)
TNode< BoolT > IsSequentialStringMap(TNode< Map > map)
void TaggedToWord32OrBigIntWithFeedback(TNode< Context > context, TNode< Object > value, Label *if_number, TVariable< Word32T > *var_word32, Label *if_bigint, Label *if_bigint64, TVariable< BigInt > *var_maybe_bigint, const FeedbackValues &feedback)
TNode< String > AllocateSeqOneByteString(uint32_t length, AllocationFlags flags=AllocationFlag::kNone)
TNode< JSArray > AllocateJSArray(ElementsKind kind, TNode< Map > array_map, TNode< IntPtrT > capacity, TNode< Smi > length, std::optional< TNode< AllocationSite > > allocation_site, AllocationFlags allocation_flags=AllocationFlag::kNone)
TNode< Uint32T > SwissNameDictionaryIncreaseElementCountOrBailout(TNode< ByteArray > meta_table, TNode< IntPtrT > capacity, TNode< Uint32T > max_usable_capacity, Label *bailout)
TNode< BoolT > IsElementsKindInRange(TNode< Int32T > target_kind, ElementsKind lower_reference_kind, ElementsKind higher_reference_kind)
TNode< UintPtrT > UintPtrMin(TNode< UintPtrT > left, TNode< UintPtrT > right)
TNode< BoolT > IsUndetectableMap(TNode< Map > map)
TNode< Int32T > TryFloat64ToInt32(TNode< Float64T > number, Label *if_failed)
TNode< BoolT > IsJSFunctionWithPrototypeSlot(TNode< HeapObject > object)
TNode< Int32T > LoadNumberOfOwnDescriptors(TNode< Map > map)
TNode< BoolT > IsBoolean(TNode< HeapObject > object)
TNode< Word32T > IsStringWrapperElementsKind(TNode< Map > map)
TNode< MaybeObject > LoadFieldTypeByDescriptorEntry(TNode< DescriptorArray > descriptors, TNode< IntPtrT > descriptor)
TNode< WordT > TimesTaggedSize(TNode< WordT > value)
void FailAssert(const char *message, const std::vector< FileAndLine > &files_and_lines, std::initializer_list< ExtraNode > extra_nodes={}, const SourceLocation &loc=SourceLocation::Current())
TNode< Smi > TryHeapNumberToSmi(TNode< HeapNumber > number, Label *not_smi)
TNode< IntPtrT > LoadAndUntagFixedArrayBaseLength(TNode< FixedArrayBase > array)
void BuildFastArrayForEach(TNode< UnionOf< FixedArray, PropertyArray, HeapObject > > array, ElementsKind kind, TNode< TIndex > first_element_inclusive, TNode< TIndex > last_element_exclusive, const FastArrayForEachBody &body, LoopUnrollingMode loop_unrolling_mode, ForEachDirection direction=ForEachDirection::kReverse)
TNode< Smi > ArrayListGetLength(TNode< ArrayList > array)
TNode< Smi > SmiFromInt32(TNode< Int32T > value)
TNode< IntPtrT > SwissNameDictionaryOffsetIntoPropertyDetailsTableMT(TNode< SwissNameDictionary > dict, TNode< IntPtrT > capacity, TNode< IntPtrT > index)
void TryLookupPropertyInSimpleObject(TNode< JSObject > object, TNode< Map > map, TNode< Name > unique_name, Label *if_found_fast, Label *if_found_dict, TVariable< HeapObject > *var_meta_storage, TVariable< IntPtrT > *var_name_index, Label *if_not_found, Label *bailout)
TNode< BoolT > IsGeneratorFunction(TNode< JSFunction > function)
TNode< BoolT > IsInternalizedStringInstanceType(TNode< Int32T > instance_type)
void SetSupportsDynamicParameterCount(TNode< JSFunction > callee, TNode< JSDispatchHandleT > dispatch_handle)
TNode< Object > LoadPropertyArrayElement(TNode< PropertyArray > object, TNode< IntPtrT > index)
void BranchIfJSReceiver(TNode< Object > object, Label *if_true, Label *if_false)
TNode< BoolT > IsNullOrUndefined(TNode< Object > object)
void NameDictionaryLookupWithForwardIndex(TNode< Dictionary > dictionary, TNode< Name > unique_name, Label *if_found, TVariable< IntPtrT > *var_name_index, Label *if_not_found, LookupMode mode=kFindExisting)
TNode< Name > LoadKeyByDescriptorEntry(TNode< DescriptorArray > descriptors, TNode< IntPtrT > descriptor)
TNode< FixedArrayBase > AllocateFixedArray(ElementsKind kind, TNode< TIndex > capacity, AllocationFlags flags=AllocationFlag::kNone, std::optional< TNode< Map > > fixed_array_map=std::nullopt)
TNode< Object > GetCoverageInfo(TNode< SharedFunctionInfo > sfi)
TNode< BoolT > IsJSGlobalProxy(TNode< HeapObject > object)
TNode< Number > ChangeInt32ToTagged(TNode< Int32T > value)
void InitializeAllocationMemento(TNode< HeapObject > base, TNode< IntPtrT > base_allocation_size, TNode< AllocationSite > allocation_site)
TNode< BoolT > IsJSTypedArrayMap(TNode< Map > map)
TNode< Smi > TryFloat32ToSmi(TNode< Float32T > number, Label *not_smi)
TNode< Uint32T > LoadNameHash(TNode< Name > name, Label *if_hash_not_computed=nullptr)
TNode< BoolT > IsConstructor(TNode< HeapObject > object)
TNode< BoolT > IsNumberNonNegativeSafeInteger(TNode< Number > number)
TNode< Number > ChangeUintPtrToTagged(TNode< UintPtrT > value)
TNode< Int32T > LoadNumberOfDescriptors(TNode< DescriptorArray > array)
TNode< BoolT > IsJSSharedStruct(TNode< HeapObject > object)
TNode< Number > NonNumberToNumber(TNode< Context > context, TNode< HeapObject > input, BigIntHandling bigint_handling=BigIntHandling::kThrow)
TNode< IntPtrT > TryToIntptr(TNode< Object > key, Label *if_not_intptr, TVariable< Int32T > *var_instance_type=nullptr)
TNode< Int32T > LoadMapBitField2(TNode< Map > map)
void Check(const BranchGenerator &branch, const char *message, const char *file, int line, std::initializer_list< ExtraNode > extra_nodes={}, const SourceLocation &loc=SourceLocation::Current())
TNode< BoolT > IsCallableMap(TNode< Map > map)
TNode< Smi > GetNumberOfElements(TNode< Dictionary > dictionary)
TNode< BoolT > IsPrimitiveInstanceType(TNode< Int32T > instance_type)
TNode< BoolT > IsPropertyArray(TNode< HeapObject > object)
TNode< IntPtrT > DescriptorEntryToIndex(TNode< IntPtrT > descriptor)
TNode< BoolT > IsPromiseSpeciesProtectorCellInvalid()
TNode< BoolT > IsJSObjectMap(TNode< Map > map)
void NameDictionaryLookup(TNode< Dictionary > dictionary, TNode< Name > unique_name, Label *if_found, TVariable< IntPtrT > *var_name_index, Label *if_not_found, LookupMode mode=kFindExisting)
TNode< Float16RawBitsT > TruncateFloat64ToFloat16(TNode< Float64T > value)
TNode< FixedArrayBase > GrowElementsCapacity(TNode< HeapObject > object, TNode< FixedArrayBase > elements, ElementsKind from_kind, ElementsKind to_kind, TNode< TIndex > capacity, TNode< TIndex > new_capacity, Label *bailout)
TNode< BoolT > IsNumberStringNotRegexpLikeProtectorCellInvalid()
TNode< BoolT > IsJSApiObjectInstanceType(TNode< Int32T > instance_type)
TNode< Float64T > TryTaggedToFloat64(TNode< Object > value, Label *if_valueisnotnumber)
void ClearTrustedPointerField(TNode< HeapObject > object, int offset)
TNode< BoolT > IsIsolatePromiseHookEnabledOrHasAsyncEventDelegate()
TNode< BoolT > IsEqualInWord32(TNode< Word32T > word32, typename BitField::FieldType value)
TNode< Float64T > Float64Round(TNode< Float64T > x)
TNode< SwissNameDictionary > CopySwissNameDictionary(TNode< SwissNameDictionary > original)
TNode< IntPtrT > TryIntPtrDiv(TNode< IntPtrT > a, TNode< IntPtrT > b, Label *if_div_zero)
TNode< Numeric > LoadFixedTypedArrayElementAsTagged(TNode< RawPtrT > data_pointer, TNode< UintPtrT > index, ElementsKind elements_kind)
void InsertEntry(TNode< Dictionary > dictionary, TNode< Name > key, TNode< Object > value, TNode< IntPtrT > index, TNode< Smi > enum_index)
TNode< WordT > UpdateWord(TNode< WordT > word, TNode< UintPtrT > value, bool starts_as_zero=false)
TNode< IntPtrT > LoadAndUntagPositiveSmiObjectField(TNode< HeapObject > object, int offset)
void FillPropertyArrayWithUndefined(TNode< PropertyArray > array, TNode< IntPtrT > from_index, TNode< IntPtrT > to_index)
TNode< BoolT > IsJSBoundFunction(TNode< HeapObject > object)
void StoreSandboxedPointerToObject(TNode< HeapObject > object, int offset, TNode< RawPtrT > pointer)
TNode< BigInt > ToBigIntConvertNumber(TNode< Context > context, TNode< Object > input)
TNode< Int32T > LoadBytecodeArrayParameterCount(TNode< BytecodeArray > bytecode_array)
TNode< BoolT > IsSpecialReceiverMap(TNode< Map > map)
TNode< BoolT > IsStrong(TNode< MaybeObject > value)
TNode< Int32T > LoadHeapInt32Value(TNode< HeapObject > object)
TNode< Int32T > TruncateIntPtrToInt32(TNode< IntPtrT > value)
void SetPendingMessage(TNode< Union< Hole, JSMessageObject > > message)
TNode< Smi > LoadFixedArrayBaseLength(TNode< FixedArrayBase > array)
std::function< void( TNode< JSAnyNotSmi > receiver, TNode< JSAnyNotSmi > holder, TNode< Map > map, TNode< Int32T > instance_type, TNode< IntPtrT > key, Label *next_holder, Label *if_bailout)> LookupElementInHolder
TNode< BoolT > IsJSReceiverMap(TNode< Map > map)
void CopyPropertyArrayValues(TNode< HeapObject > from_array, TNode< PropertyArray > to_array, TNode< IntPtrT > length, WriteBarrierMode barrier_mode, DestroySource destroy_source)
TNode< Uint32T > LoadDetailsByKeyIndex(TNode< ContainerType > container, TNode< IntPtrT > key_index)
TNode< IntPtrT > LoadMapInobjectPropertiesStartInWords(TNode< Map > map)
TNode< IntPtrT > LoadFeedbackVectorLength(TNode< FeedbackVector >)
TNode< BoolT > IsEphemeronHashTable(TNode< HeapObject > object)
TNode< Map > LoadSlowObjectWithNullPrototypeMap(TNode< NativeContext > native_context)
TNode< BoolT > IsOddballInstanceType(TNode< Int32T > instance_type)
TNode< JSDispatchHandleT > InvalidDispatchHandleConstant()
TNode< ArrayList > ArrayListEnsureSpace(TNode< ArrayList > array, TNode< Smi > length)
void StoreCellValue(TNode< Cell > cell, TNode< Object > value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
TNode< JSAny > BasicLoadNumberDictionaryElement(TNode< NumberDictionary > dictionary, TNode< IntPtrT > intptr_index, Label *not_data, Label *if_hole)
std::function< void(TNode< TIndex > index)> FastLoopBody
TNode< IntPtrT > ParameterToIntPtr(TNode< Smi > value)
TNode< Object > LoadValueByDescriptorEntry(TNode< DescriptorArray > descriptors, TNode< IntPtrT > descriptor)
void Dcheck(const BranchGenerator &branch, const char *message, const char *file, int line, std::initializer_list< ExtraNode > extra_nodes={}, const SourceLocation &loc=SourceLocation::Current())
TNode< Numeric > NonNumberToNumeric(TNode< Context > context, TNode< HeapObject > input)
TNode< IntPtrT > EntryIndexToIndex(TNode< Uint32T > entry_index)
TNode< BoolT > IsUncachedExternalStringMap(TNode< Map > map)
void ThrowIfNotInstanceType(TNode< Context > context, TNode< Object > value, InstanceType instance_type, char const *method_name)
void ThrowIfNotCallable(TNode< Context > context, TNode< Object > value, const char *method_name)
TNode< BoolT > IsPrototypeInitialArrayPrototype(TNode< Context > context, TNode< Map > map)
TNode< BigInt > AllocateBigInt(TNode< IntPtrT > length)
void FindInsertionEntry(TNode< Dictionary > dictionary, TNode< Name > key, TVariable< IntPtrT > *var_key_index)
TNode< A > SelectConstant(TNode< BoolT > condition, TNode< A > true_value, TNode< A > false_value)
void FillEntireFixedArrayWithSmiZero(ElementsKind kind, TNode< FixedArray > array, TNode< IntPtrT > length)
TNode< Smi > TaggedIndexToSmi(TNode< TaggedIndex > value)
TNode< BoolT > IsPrivateName(TNode< Symbol > symbol)
TNode< BoolT > InstanceTypeEqual(TNode< Int32T > instance_type, int type)
void StoreFieldsNoWriteBarrier(TNode< IntPtrT > start_address, TNode< IntPtrT > end_address, TNode< Object > value)
TNode< Int32T > LoadAndUntagToWord32ArrayElement(TNode< Array > array, int array_header_size, TNode< IntPtrT > index, int additional_offset=0)
TNode< BoolT > IsFastElementsKind(TNode< Int32T > elements_kind)
TNode< RawPtrT > EmptyBackingStoreBufferConstant()
TNode< BoolT > IsExtensibleMap(TNode< Map > map)
void SwissNameDictionarySetCtrl(TNode< SwissNameDictionary > table, TNode< IntPtrT > capacity, TNode< IntPtrT > entry, TNode< Uint8T > ctrl)
TNode< BoolT > IsSequentialString(TNode< HeapObject > object)
TNode< RawPtrT > LoadJSTypedArrayExternalPointerPtr(TNode< JSTypedArray > holder)
void FillFixedDoubleArrayWithZero(TNode< FixedDoubleArray > array, TNode< IntPtrT > start, TNode< IntPtrT > length)
TNode< BoolT > IsFastSmiOrTaggedElementsKind(TNode< Int32T > elements_kind)
TNode< BoolT > IsSymbolInstanceType(TNode< Int32T > instance_type)
TNode< Uint32T > PositiveSmiToUint32(TNode< Smi > value)
TNode< Smi > LoadSharedFunctionInfoBuiltinId(TNode< SharedFunctionInfo > sfi)
void StoreFixedArrayElement(TNode< FixedArray > object, int index, TNode< Object > value, WriteBarrierMode barrier_mode=UPDATE_WRITE_BARRIER, CheckBounds check_bounds=CheckBounds::kAlways)
TNode< Name > LoadSwissNameDictionaryKey(TNode< SwissNameDictionary > dict, TNode< IntPtrT > entry)
TNode< DescriptorArray > LoadMapDescriptors(TNode< Map > map)
TNode< BoolT > IsIndirectStringInstanceType(TNode< Int32T > instance_type)
TNode< HeapObject > LoadSlowProperties(TNode< JSReceiver > object)
TNode< Object > UnsafeLoadFixedArrayElement(TNode< FixedArray > object, TNode< IntPtrT > index, int additional_offset=0)
TNode< BoolT > IsIsConcatSpreadableProtectorCellInvalid()
TNode< HeapObject > AllocateInNewSpace(TNode< IntPtrT > size, AllocationFlags flags=AllocationFlag::kNone)
TNode< BoolT > IsFastOrNonExtensibleOrSealedElementsKind(TNode< Int32T > elements_kind)
TNode< BoolT > IsSeqOneByteStringMap(TNode< Map > map)
TNode< TrustedObject > LoadTrustedPointerFromObject(TNode< HeapObject > object, int offset, IndirectPointerTag tag)
TNode< JSArray > ExtractFastJSArray(TNode< Context > context, TNode< JSArray > array, TNode< BInt > begin, TNode< BInt > count)
TNode< BoolT > IsJSReceiver(TNode< HeapObject > object)
TNode< Smi > RefillMathRandom(TNode< NativeContext > native_context)
TNode< Numeric > ToNumberOrNumeric(LazyNode< Context > context, TNode< Object > input, TVariable< Smi > *var_type_feedback, Object::Conversion mode, BigIntHandling bigint_handling=BigIntHandling::kThrow)
TNode< Float16RawBitsT > RoundInt32ToFloat16(TNode< Int32T > value)
TNode< Float64T > Float64Floor(TNode< Float64T > x)
TNode< SwissNameDictionary > AllocateSwissNameDictionary(TNode< IntPtrT > at_least_space_for)
TNode< BoolT > IsRegularHeapObjectSize(TNode< IntPtrT > size)
TNode< Int32T > CountLeadingZeros32(TNode< Word32T > value)
TNode< Object > LoadMapConstructor(TNode< Map > map)
void FillEntireFixedDoubleArrayWithZero(TNode< FixedDoubleArray > array, TNode< IntPtrT > length)
TNode< Number > ChangeFloat64ToTagged(TNode< Float64T > value)
TNode< Smi > GetNameDictionaryFlags(TNode< Dictionary > dictionary)
TNode< Smi > SmiFromIntPtr(TNode< IntPtrT > value)
void StoreFixedArrayOrPropertyArrayElement(TNode< UnionOf< FixedArray, PropertyArray > > array, TNode< TIndex > index, TNode< Object > value, WriteBarrierMode barrier_mode=UPDATE_WRITE_BARRIER, int additional_offset=0)
TNode< Smi > TrySmiSub(TNode< Smi > a, TNode< Smi > b, Label *if_overflow)
TNode< BoolT > DoesntHaveInstanceType(TNode< HeapObject > object, InstanceType type)
void UnsafeStoreObjectFieldNoWriteBarrier(TNode< HeapObject > object, int offset, TNode< Object > value)
TNode< IntPtrT > MapUsedInObjectProperties(TNode< Map > map)
void InitializeFieldsWithRoot(TNode< HeapObject > object, TNode< IntPtrT > start_offset, TNode< IntPtrT > end_offset, RootIndex root)
TNode< BoolT > IsUncachedExternalStringInstanceType(TNode< Int32T > instance_type)
void ThrowRangeError(TNode< Context > context, MessageTemplate message, std::optional< TNode< Object > > arg0=std::nullopt, std::optional< TNode< Object > > arg1=std::nullopt, std::optional< TNode< Object > > arg2=std::nullopt)
TNode< BoolT > FixedArraySizeDoesntFitInNewSpace(TNode< TIndex > element_count, int base_size)
void ThrowTypeError(TNode< Context > context, MessageTemplate message, char const *arg0=nullptr, char const *arg1=nullptr)
TNode< BoolT > IsFunctionWithPrototypeSlotMap(TNode< Map > map)
TNode< BoolT > IsMarked(TNode< Object > object)
TNode< BoolT > IsSloppyArgumentsMap(TNode< Context > context, TNode< Map > map)
void DecrementCounter(StatsCounter *counter, int delta)
void TrySkipWriteBarrier(TNode< Object > object, Label *if_needs_write_barrier)
TNode< JSReceiver > ToObject_Inline(TNode< Context > context, TNode< Object > input)
TNode< Context > LoadModuleContext(TNode< Context > context)
TNode< UintPtrT > DecodeWord(TNode< WordT > word)
TNode< BoolT > LoadScopeInfoHasExtensionField(TNode< ScopeInfo > scope_info)
TNode< Object > LoadMapBackPointer(TNode< Map > map)
void StringWriteToFlatTwoByte(TNode< String > source, TNode< RawPtrT > sink, TNode< Int32T > start, TNode< Int32T > length)
bool TryGetIntPtrOrSmiConstantValue(TNode< Smi > maybe_constant, int *value)
TNode< IntPtrT > TryIntPtrMod(TNode< IntPtrT > a, TNode< IntPtrT > b, Label *if_div_zero)
TNode< BoolT > TaggedIsCode(TNode< Object > object)
TNode< String > StringFromSingleCharCode(TNode< Int32T > code)
TNode< Smi > SmiTag(TNode< IntPtrT > value)
TNode< Number > ChangeInt32ToTaggedNoOverflow(TNode< Int32T > value)
void Increment(TVariable< TIndex > *variable, int value=1)
void SetNameDictionaryFlags(TNode< Dictionary >, TNode< Smi > flags)
TNode< BoolT > IsWeakReferenceTo(TNode< MaybeObject > maybe_object, TNode< HeapObject > heap_object)
TNode< JSReceiver > ToObject(TNode< Context > context, TNode< Object > input)
TNode< Smi > UnsignedSmiShl(TNode< Smi > a, int shift)
TNode< Smi > NormalizeSmiIndex(TNode< Smi > smi_index)
void SetNextEnumerationIndex(TNode< Dictionary > dictionary, TNode< Smi > next_enum_index_smi)
TNode< Float32T > ChangeFloat16ToFloat32(TNode< Float16RawBitsT > value)
TNode< BoolT > IsJSPrimitiveWrapperMap(TNode< Map > map)
TNode< Smi > SmiFromUint32(TNode< Uint32T > value)
TNode< BoolT > IsSafeInteger(TNode< Object > number)
TNode< Word32T > UpdateWord32(TNode< Word32T > word, TNode< Uint32T > value, bool starts_as_zero=false)
TNode< BoolT > WordIsAligned(TNode< WordT > word, size_t alignment)
TNode< Object > LoadSharedFunctionInfoTrustedData(TNode< SharedFunctionInfo > sfi)
TNode< Smi > LoadSmiArrayLength(TNode< Array > array)
void PrintToStream(const char *s, int stream)
TNode< Smi > SmiLexicographicCompare(TNode< Smi > x, TNode< Smi > y)
TNode< Code > LoadCodePointerFromObject(TNode< HeapObject > object, int offset)
TNode< BoolT > IsNumberPositive(TNode< Number > number)
TNode< BoolT > TaggedEqual(TNode< AnyTaggedT > a, TNode< AnyTaggedT > b)
TNode< HeapObject > Allocate(TNode< IntPtrT > size, AllocationFlags flags=AllocationFlag::kNone)
void StoreObjectFieldRoot(TNode< HeapObject > object, int offset, RootIndex root)
void CombineFeedback(TVariable< Smi > *existing_feedback, int feedback)
TNode< FixedArrayBase > LoadElements(TNode< JSObject > object)
void StoreExternalPointerToObject(TNode< HeapObject > object, int offset, TNode< RawPtrT > pointer, ExternalPointerTag tag)
TNode< T > LoadObjectField(TNode< HeapObject > object, int offset)
TNode< Int64T > PopulationCount64(TNode< Word64T > value)
TNode< NameDictionary > AllocateNameDictionary(int at_least_space_for)
TNode< ScopeInfo > LoadScopeInfo(TNode< Context > context)
TNode< IntPtrT > LoadSwissNameDictionaryNumberOfDeletedElements(TNode< SwissNameDictionary > table, TNode< IntPtrT > capacity)
TNode< BoolT > IsJSSharedArrayInstanceType(TNode< Int32T > instance_type)
TNode< HeapObject > AllocateRawUnaligned(TNode< IntPtrT > size_in_bytes, AllocationFlags flags, TNode< RawPtrT > top_address, TNode< RawPtrT > limit_address)
TNode< BoolT > IsSeqOneByteStringInstanceType(TNode< Int32T > instance_type)
TNode< BoolT > IsElementsKindGreaterThanOrEqual(TNode< Int32T > target_kind, ElementsKind reference_kind)
TNode< BoolT > IsNotSetWord32(TNode< Word32T > word32)
TNode< BoolT > IsJSFunction(TNode< HeapObject > object)
TNode< Object > GetContinuationPreservedEmbedderData()
TNode< BoolT > TaggedNotEqual(TNode< AnyTaggedT > a, TNode< AnyTaggedT > b)
TNode< UintPtrT > LoadBigIntDigit(TNode< BigInt > bigint, intptr_t digit_index)
void TaggedPointerToWord32OrBigIntWithFeedback(TNode< Context > context, TNode< HeapObject > pointer, Label *if_number, TVariable< Word32T > *var_word32, Label *if_bigint, Label *if_bigint64, TVariable< BigInt > *var_maybe_bigint, const FeedbackValues &feedback)
TNode< IntPtrT > IntPtrMin(TNode< IntPtrT > left, TNode< IntPtrT > right)
TNode< Int32T > ChangeBoolToInt32(TNode< BoolT > b)
TNode< Uint32T > LoadMapBitField3(TNode< Map > map)
TNode< BoolT > TaggedIsNotSmi(TNode< MaybeObject > a)
TNode< Uint32T > LoadAndUntagBytecodeArrayLength(TNode< BytecodeArray > array)
TNode< HeapNumber > AllocateHeapInt32WithValue(TNode< Int32T > value)
TNode< BoolT > IsStrictArgumentsMap(TNode< Context > context, TNode< Map > map)
TNode< IntPtrT > GetArrayAllocationSize(TNode< TIndex > element_count, ElementsKind kind, int header_size)
TNode< BoolT > TaggedIsCallable(TNode< Object > object)
TNode< String > ToString_Inline(TNode< Context > context, TNode< Object > input)
void CopyRange(TNode< HeapObject > dst_object, int dst_offset, TNode< HeapObject > src_object, int src_offset, TNode< IntPtrT > length_in_tagged, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
TNode< BoolT > IsDoubleElementsKind(TNode< Int32T > elements_kind)
void FixedArrayBoundsCheck(TNode< FixedArrayBase > array, TNode< Smi > index, int additional_offset)
TNode< JSFunction > AllocateRootFunctionWithContext(RootIndex function, TNode< Context > context, std::optional< TNode< NativeContext > > maybe_native_context)
void TryToName(TNode< Object > key, Label *if_keyisindex, TVariable< IntPtrT > *var_index, Label *if_keyisunique, TVariable< Name > *var_unique, Label *if_bailout, Label *if_notinternalized=nullptr)
TNode< UintPtrT > LoadBoundedSizeFromObject(TNode< HeapObject > object, int offset)
TNode< BoolT > TaggedIsPositiveSmi(TNode< Object > a)
TNode< IntPtrT > IntPtrMax(TNode< IntPtrT > left, TNode< IntPtrT > right)
void StoreFeedbackVectorSlot(TNode< FeedbackVector > feedback_vector, TNode< UintPtrT > slot, TNode< AnyTaggedT > value, WriteBarrierMode barrier_mode=UPDATE_WRITE_BARRIER, int additional_offset=0)
TNode< String > ToThisString(TNode< Context > context, TNode< Object > value, TNode< String > method_name)
void GotoIfPrototypeRequiresRuntimeLookup(TNode< JSFunction > function, TNode< Map > map, Label *runtime)
bool ToParameterConstant(TNode< Smi > node, intptr_t *out)
TNode< BoolT > IsJSRegExpStringIterator(TNode< HeapObject > object)
TNode< Smi > LoadStringLengthAsSmi(TNode< String > string)
CodeStubAssembler(compiler::CodeAssemblerState *state)
TNode< IntPtrT > ToKeyIndex(TNode< Uint32T > entry_index)
void StoreFixedDoubleArrayElement(TNode< FixedDoubleArray > object, TNode< TIndex > index, TNode< Float64T > value, CheckBounds check_bounds=CheckBounds::kAlways)
TNode< IntPtrT > ChangePositiveInt32ToIntPtr(TNode< Int32T > input)
TNode< BoolT > WordIsPowerOfTwo(TNode< IntPtrT > value)
TNode< Name > GetKey(TNode< Array > array, TNode< Uint32T > entry_index)
TNode< Number > ChangeUint32ToTagged(TNode< Uint32T > value)
TNode< BoolT > IsSharedStringInstanceType(TNode< Int32T > instance_type)
TNode< IntPtrT > TryIntPtrSub(TNode< IntPtrT > a, TNode< IntPtrT > b, Label *if_overflow)
TNode< Uint32T > GetSortedKeyIndex(TNode< Array > descriptors, TNode< Uint32T > entry_index)
void UnsafeStoreArrayElement(TNode< Array > object, int index, TNode< typename Array::Shape::ElementT > value, WriteBarrierMode barrier_mode=UPDATE_WRITE_BARRIER)
TNode< Number > StringToNumber(TNode< String > input)
TNode< Cell > AllocateCellWithValue(TNode< Object > value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
TNode< JSArray > AllocateUninitializedJSArray(TNode< Map > array_map, TNode< Smi > length, std::optional< TNode< AllocationSite > > allocation_site, TNode< IntPtrT > size_in_bytes)
TNode< Smi > GetNumberOfDeletedElements(TNode< Dictionary > dictionary)
TNode< Code > GetSharedFunctionInfoCode(TNode< SharedFunctionInfo > shared_info, TVariable< Uint16T > *data_type_out=nullptr, Label *if_compile_lazy=nullptr)
TNode< Uint32T > DescriptorArrayGetDetails(TNode< DescriptorArray > descriptors, TNode< Uint32T > descriptor_number)
TNode< Object > CallRuntimeNewArray(TNode< Context > context, TNode< JSAny > receiver, TNode< Object > length, TNode< Object > new_target, TNode< Object > allocation_site)
TNode< BigInt > BigIntFromUint64(TNode< UintPtrT > value)
TNode< IntPtrT > LoadMemoryChunkFlags(TNode< HeapObject > object)
TNode< BoolT > IsUniqueName(TNode< HeapObject > object)
void TryPlainPrimitiveNonNumberToNumber(TNode< HeapObject > input, TVariable< Number > *var_result, Label *if_bailout)
TNode< IntPtrT > LoadSwissNameDictionaryNumberOfElements(TNode< SwissNameDictionary > table, TNode< IntPtrT > capacity)
TNode< Code > LoadBuiltin(TNode< Smi > builtin_id)
TNode< BoolT > IsZeroOrContext(TNode< Object > object)
std::pair< TNode< JSArray >, TNode< FixedArrayBase > > AllocateUninitializedJSArrayWithElements(ElementsKind kind, TNode< Map > array_map, TNode< Smi > length, std::optional< TNode< AllocationSite > > allocation_site, TNode< IntPtrT > capacity, AllocationFlags allocation_flags=AllocationFlag::kNone, int array_header_size=JSArray::kHeaderSize)
TNode< FixedArray > ArrayListElements(TNode< ArrayList > array)
TNode< Uint32T > LoadDetailsByDescriptorEntry(TNode< DescriptorArray > descriptors, TNode< IntPtrT > descriptor)
TNode< JSAny > GetArgumentValue(TorqueStructArguments args, TNode< IntPtrT > index)
TNode< AdditiveSafeIntegerT > TryFloat64ToAdditiveSafeInteger(TNode< Float64T > number, Label *if_failed)
TNode< Uint32T > BitcastFloat16ToUint32(TNode< Float16RawBitsT > value)
TNode< Number > NumberMin(TNode< Number > left, TNode< Number > right)
TNode< RawPtrT > LoadExternalPointerFromObject(TNode< HeapObject > object, int offset, ExternalPointerTagRange tag_range)
void LookupLinear(TNode< Name > unique_name, TNode< Array > array, TNode< Uint32T > number_of_valid_entries, Label *if_found, TVariable< IntPtrT > *var_name_index, Label *if_not_found)
TNode< BoolT > IsWeakOrCleared(TNode< MaybeObject > value)
void DcheckHasValidMap(TNode< HeapObject > object)
TNode< BoolT > IsTemporalInstantInstanceType(TNode< Int32T > instance_type)
void StoreValueByKeyIndex(TNode< ContainerType > container, TNode< IntPtrT > key_index, TNode< Object > value, WriteBarrierMode write_barrier=UPDATE_WRITE_BARRIER)
TNode< Number > ToNumber_Inline(TNode< Context > context, TNode< Object > input)
void TryInternalizeString(TNode< String > string, Label *if_index, TVariable< IntPtrT > *var_index, Label *if_internalized, TVariable< Name > *var_internalized, Label *if_not_internalized, Label *if_bailout)
TNode< Uint16T > StringCharCodeAt(TNode< String > string, TNode< UintPtrT > index)
TNode< BoolT > IsFastPackedElementsKind(TNode< Int32T > elements_kind)
TNode< BoolT > IsJSDataView(TNode< HeapObject > object)
TNode< Smi > LoadWeakFixedArrayLength(TNode< WeakFixedArray > array)
TNode< Smi > TrySmiAbs(TNode< Smi > a, Label *if_overflow)
void BranchIfNumberRelationalComparison(Operation op, TNode< Number > left, TNode< Number > right, Label *if_true, Label *if_false)
TNode< JSPrototype > LoadMapPrototype(TNode< Map > map)
TNode< IntPtrT > LoadPropertyArrayLength(TNode< PropertyArray > object)
void SetNumberOfElements(TNode< Dictionary > dictionary, TNode< Smi > num_elements_smi)
TNode< BoolT > IsNameInstanceType(TNode< Int32T > instance_type)
TNode< BoolT > IsSetWord32(TNode< Word32T > word32)
TNode< Int32T > ChangeTaggedNonSmiToInt32(TNode< Context > context, TNode< HeapObject > input)
TNode< ArrayList > ArrayListAdd(TNode< ArrayList > array, TNode< Object > object)
TNode< BoolT > IsJSApiObjectMap(TNode< Map > map)
TNode< BoolT > IsHeapNumberInstanceType(TNode< Int32T > instance_type)
void SwissNameDictionaryFindEntry(TNode< SwissNameDictionary > table, TNode< Name > key, Label *found, TVariable< IntPtrT > *var_found_entry, Label *not_found)
TNode< RawPtr< Uint8T > > ExternalOneByteStringGetChars(TNode< ExternalOneByteString > string)
TNode< Uint32T > LoadJSReceiverIdentityHash(TNode< JSReceiver > receiver, Label *if_no_hash=nullptr)
TNode< HeapObject > LoadFastProperties(TNode< JSReceiver > object, bool skip_empty_check=false)
TNode< BInt > BIntConstant(int value)
void StoreCodePointerFieldNoWriteBarrier(TNode< HeapObject > object, int offset, TNode< Code > value)
TNode< IntPtrT > SmiUntag(TNode< Smi > value)
void GetMarkBit(TNode< IntPtrT > object, TNode< IntPtrT > *cell, TNode< IntPtrT > *mask)
TNode< Map > LoadJSArrayElementsMap(ElementsKind kind, TNode< NativeContext > native_context)
TNode< BoolT > IsDeprecatedMap(TNode< Map > map)
TNode< BoolT > IsJSGeneratorObject(TNode< HeapObject > object)
void TailCallRuntimeNewArray(TNode< Context > context, TNode< JSAny > receiver, TNode< Object > length, TNode< Object > new_target, TNode< Object > allocation_site)
TNode< BoolT > IsIsolatePromiseHookEnabledOrDebugIsActiveOrHasAsyncEventDelegate()
TNode< BoolT > IsNotCleared(TNode< MaybeObject > value)
TNode< BoolT > IsAllSetWord32(TNode< Word32T > word32, uint32_t mask)
void CopyFixedArrayElements(ElementsKind kind, TNode< FixedArrayBase > from_array, TNode< FixedArrayBase > to_array, TNode< TIndex > length, WriteBarrierMode barrier_mode=UPDATE_WRITE_BARRIER)
TNode< BoolT > IsJSFunctionMap(TNode< Map > map)
TNode< BoolT > IsJSPrimitiveWrapperInstanceType(TNode< Int32T > instance_type)
TNode< RawPtrT > LoadJSTypedArrayDataPtr(TNode< JSTypedArray > typed_array)
TNode< BytecodeArray > LoadSharedFunctionInfoBytecodeArray(TNode< SharedFunctionInfo > sfi)
TNode< BoolT > IsHoleInstanceType(TNode< Int32T > instance_type)
TNode< BoolT > IsName(TNode< HeapObject > object)
void InitializeJSObjectBodyWithSlackTracking(TNode< HeapObject > object, TNode< Map > map, TNode< IntPtrT > instance_size)
TNode< BoolT > IsGlobalDictionary(TNode< HeapObject > object)
TNode< BigInt > BigIntFromInt64(TNode< IntPtrT > value)
TNode< Uint16T > Uint16Constant(uint16_t t)
void StoreHeapInt32Value(TNode< HeapObject > object, TNode< Int32T > value)
TNode< BoolT > IsJSApiObject(TNode< HeapObject > object)
TNode< BoolT > IsJSGlobalProxyMap(TNode< Map > map)
void FillFixedArrayWithSmiZero(ElementsKind kind, TNode< FixedArray > array, TNode< IntPtrT > start, TNode< IntPtrT > length)
TNode< Int32T > SelectInt32Constant(TNode< BoolT > condition, int true_value, int false_value)
IntegerLiteral ConstexprIntegerLiteralBitwiseOr(const IntegerLiteral &lhs, const IntegerLiteral &rhs)
TNode< TIndex > TaggedToParameter(TNode< Smi > value)
TNode< BoolT > IsHeapNumberPositive(TNode< HeapNumber > number)
TNode< BoolT > IsPrototypeTypedArrayPrototype(TNode< Context > context, TNode< Map > map)
TNode< BoolT > IsElementsKindLessThanOrEqual(TNode< Int32T > target_kind, ElementsKind reference_kind)
TNode< Number > ToNumber(TNode< Context > context, TNode< Object > input, BigIntHandling bigint_handling=BigIntHandling::kThrow)
TNode< BoolT > IsCode(TNode< HeapObject > object)
TNode< Uint8T > LoadUint8Ptr(TNode< RawPtrT > ptr, TNode< IntPtrT > offset)
TNode< Word32T > TruncateWord64ToWord32(TNode< Word64T > value)
TNode< BoolT > JSAnyIsNotPrimitiveMap(TNode< Map > map)
TNode< Object > LoadFixedArrayBaseElementAsTagged(TNode< FixedArrayBase > elements, TNode< IntPtrT > index, TNode< Int32T > elements_kind, Label *if_accessor, Label *if_hole)
void UpdateFeedback(TNode< Smi > feedback, TNode< HeapObject > maybe_feedback_vector, TNode< UintPtrT > slot_id, UpdateFeedbackMode mode)
TNode< BoolT > IsDoubleHole(TNode< Object > base, TNode< IntPtrT > offset)
TNode< BoolT > IsJSArray(TNode< HeapObject > object)
TNode< FixedArrayBase > ExtractFixedArray(TNode< FixedArrayBase > source, std::optional< TNode< TIndex > > first, std::optional< TNode< TIndex > > count=std::nullopt, std::optional< TNode< TIndex > > capacity=std::nullopt, ExtractFixedArrayFlags extract_flags=ExtractFixedArrayFlag::kAllFixedArrays, TVariable< BoolT > *var_holes_converted=nullptr, std::optional< TNode< Int32T > > source_elements_kind=std::nullopt)
TNode< BoolT > IsJSStringIterator(TNode< HeapObject > object)
void InitializeJSObjectBodyNoSlackTracking(TNode< HeapObject > object, TNode< Map > map, TNode< IntPtrT > instance_size, int start_offset=JSObject::kHeaderSize)
TNode< Object > LoadRoot(RootIndex root_index)
TNode< OrderedNameDictionary > AllocateOrderedNameDictionary(TNode< IntPtrT > capacity)
TNode< Int32T > TruncateNumberToWord32(TNode< Number > value)
TNode< BoolT > IsPromiseReactionJobTask(TNode< HeapObject > object)
TNode< BoolT > IsHoleyFastElementsKindForRead(TNode< Int32T > elements_kind)
TNode< BoolT > IsPromiseResolveProtectorCellInvalid()
void StoreIndirectPointerField(TNode< HeapObject > object, int offset, IndirectPointerTag tag, TNode< ExposedTrustedObject > value)
void StoreMap(TNode< HeapObject > object, TNode< Map > map)
TNode< Uint32T > DecodeWord32(TNode< Word32T > word32)
TNode< Uint32T > LoadFunctionKind(TNode< JSFunction > function)
TNode< JSArray > ArrayCreate(TNode< Context > context, TNode< Number > length)
TNode< Smi > WordOrSmiShr(TNode< Smi > a, int shift)
TNode< BoolT > IsFastSmiElementsKind(TNode< Int32T > elements_kind)
void StoreElement(TNode< RawPtrT > elements, ElementsKind kind, TNode< TIndex > index, TNode< TValue > value)
TNode< Number > ChangeFloat32ToTagged(TNode< Float32T > value)
TNode< Uint32T > LoadAndUntagFixedArrayBaseLengthAsUint32(TNode< FixedArrayBase > array)
void DispatchMaybeObject(TNode< MaybeObject > maybe_object, Label *if_smi, Label *if_cleared, Label *if_weak, Label *if_strong, TVariable< Object > *extracted)
TNode< BoolT > IsNumberNormalized(TNode< Number > number)
TNode< Object > LoadValueByKeyIndex(TNode< ContainerType > container, TNode< IntPtrT > key_index)
TNode< Uint32T > LoadNameRawHash(TNode< Name > name)
void BuildFastLoop(const VariableList &vars, TVariable< TIndex > &var_index, TNode< TIndex > start_index, TNode< TIndex > end_index, const FastLoopBody< TIndex > &body, TNode< TIndex > increment, LoopUnrollingMode unrolling_mode, IndexAdvanceMode advance_mode, IndexAdvanceDirection advance_direction)
TNode< RawPtr< Uint16T > > ExternalTwoByteStringGetChars(TNode< ExternalTwoByteString > string)
TNode< BoolT > IsHoleyFastElementsKind(TNode< Int32T > elements_kind)
TNode< Map > LoadCachedMap(TNode< NativeContext > native_context, TNode< IntPtrT > number_of_properties, Label *runtime)
TNode< IntPtrT > SelectIntPtrConstant(TNode< BoolT > condition, int true_value, int false_value)
TNode< BoolT > IsSequentialStringInstanceType(TNode< Int32T > instance_type)
TNode< IntPtrT > PageMetadataFromAddress(TNode< IntPtrT > address)
TNode< BoolT > IsCustomElementsReceiverInstanceType(TNode< Int32T > instance_type)
TNode< TResult > LoadElementAndPrepareForStore(TNode< FixedArrayBase > array, TNode< IntPtrT > offset, ElementsKind from_kind, ElementsKind to_kind, Label *if_hole)
TNode< BoolT > IsWeakReferenceToObject(TNode< MaybeObject > maybe_object, TNode< Object > object)
std::function< void( TNode< JSAnyNotSmi > receiver, TNode< JSAnyNotSmi > holder, TNode< Map > map, TNode< Int32T > instance_type, TNode< Name > key, Label *next_holder, Label *if_bailout)> LookupPropertyInHolder
TNode< ArrayList > AllocateArrayList(TNode< Smi > size)
TNode< RawPtrT > LoadSandboxedPointerFromObject(TNode< HeapObject > object, int offset)
void StoreSwissNameDictionaryCapacity(TNode< SwissNameDictionary > table, TNode< Int32T > capacity)
TNode< Float64T > SmiToFloat64(TNode< Smi > value)
TNode< BoolT > IsCleared(TNode< MaybeObject > value)
TNode< Int32T > LoadElementsKind(TNode< HeapObject > object)
TNode< BigInt > BigIntFromInt32Pair(TNode< IntPtrT > low, TNode< IntPtrT > high)
TNode< BoolT > IsExtensibleNonPrototypeMap(TNode< Map > map)
TNode< BoolT > IsInteger(TNode< Object > number)
TNode< JSArray > CloneFastJSArray(TNode< Context > context, TNode< JSArray > array, std::optional< TNode< AllocationSite > > allocation_site=std::nullopt, HoleConversionMode convert_holes=HoleConversionMode::kDontConvert)
void MoveElements(ElementsKind kind, TNode< FixedArrayBase > elements, TNode< IntPtrT > dst_index, TNode< IntPtrT > src_index, TNode< IntPtrT > length)
TNode< BoolT > IsBigIntInstanceType(TNode< Int32T > instance_type)
TNode< NameDictionary > AllocateNameDictionaryWithCapacity(TNode< IntPtrT > capacity, AllocationFlags=AllocationFlag::kNone)
void StoreDetailsByKeyIndex(TNode< ContainerType > container, TNode< IntPtrT > key_index, TNode< Smi > details)
std::function< void(Label *, Label *)> BranchGenerator
TorqueStructArguments GetFrameArguments(TNode< RawPtrT > frame, TNode< IntPtrT > argc, FrameArgumentsArgcType argc_type=FrameArgumentsArgcType::kCountExcludesReceiver)
TNode< BoolT > IsFastAliasedArgumentsMap(TNode< Context > context, TNode< Map > map)
TNode< Int32T > LoadAndUntagToWord32ObjectField(TNode< HeapObject > object, int offset)
TNode< IntPtrT > TryIntPtrMul(TNode< IntPtrT > a, TNode< IntPtrT > b, Label *if_overflow)
TNode< Map > CheckEnumCache(TNode< JSReceiver > receiver, Label *if_empty, Label *if_runtime)
std::function< TNode< T >()> LazyNode
TNode< Word32T > TruncateTaggedToWord32(TNode< Context > context, TNode< Object > value)
void StoreSwissNameDictionaryEnumToEntryMapping(TNode< SwissNameDictionary > table, TNode< IntPtrT > capacity, TNode< IntPtrT > enum_index, TNode< Int32T > entry)
TNode< Smi > ParameterToTagged(TNode< Smi > value)
std::function< void(TNode< HeapObject > array, TNode< IntPtrT > offset)> FastArrayForEachBody
TNode< BoolT > IsOneByteStringMap(TNode< Map > map)
TNode< BoolT > IsJSRabGsabDataView(TNode< HeapObject > object)
TNode< BoolT > IsElementsKindGreaterThan(TNode< Int32T > target_kind, ElementsKind reference_kind)
TNode< IntPtrT > SmiToIntPtr(TNode< Smi > value)
TNode< BoolT > IsClearWord32(TNode< Word32T > word32)
TNode< Uint64T > LoadSwissNameDictionaryCtrlTableGroup(TNode< IntPtrT > address)
TNode< RawPtr< Uint8T > > IntlAsciiCollationWeightsL1()
void SetArgumentValue(TorqueStructArguments args, TNode< IntPtrT > index, TNode< JSAny > value)
TNode< Object > GetImportMetaObject(TNode< Context > context)
void StoreDoubleHole(TNode< HeapObject > object, TNode< IntPtrT > offset)
TNode< BoolT > IsJSObjectInstanceType(TNode< Int32T > instance_type)
void StoreObjectFieldNoWriteBarrier(TNode< HeapObject > object, TNode< IntPtrT > offset, TNode< T > value)
TNode< Object > LoadFixedArrayElement(TNode< FixedArray > object, TNode< TIndex > index, int additional_offset=0, CheckBounds check_bounds=CheckBounds::kAlways)
TNode< BoolT > IsExternalStringMap(TNode< Map > map)
TNode< Number > NumberMax(TNode< Number > left, TNode< Number > right)
void IncrementCounter(StatsCounter *counter, int delta)
TNode< Smi > BuildAppendJSArray(ElementsKind kind, TNode< JSArray > array, CodeStubArguments *args, TVariable< IntPtrT > *arg_index, Label *bailout)
void InitializePropertyArrayLength(TNode< PropertyArray > property_array, TNode< IntPtrT > length)
void SetContinuationPreservedEmbedderData(TNode< Object > value)
TNode< BoolT > IsSeqOneByteString(TNode< HeapObject > object)
void SetCounter(StatsCounter *counter, int value)
TNode< HeapObject > LoadName(TNode< HeapObject > key)
TNode< Word32T > ComputeSeededHash(TNode< IntPtrT > key)
TNode< IntPtrT > SwissNameDictionaryOffsetIntoDataTableMT(TNode< SwissNameDictionary > dict, TNode< IntPtrT > index, int field_index)
TNode< IntPtrT > EntryToIndex(TNode< IntPtrT > entry, int field_index)
TNode< BoolT > IsJSReceiverInstanceType(TNode< Int32T > instance_type)
IntegerLiteral ConstexprIntegerLiteralLeftShift(const IntegerLiteral &lhs, const IntegerLiteral &rhs)
TNode< Object > LoadFromParentFrame(int offset)
TNode< Uint16T > LoadMapInstanceType(TNode< Map > map)
TNode< BoolT > TaggedDoesntHaveInstanceType(TNode< HeapObject > any_tagged, InstanceType type)
TNode< BoolT > IsJSArrayMap(TNode< Map > map)
TNode< SwissNameDictionary > AllocateSwissNameDictionaryWithCapacity(TNode< IntPtrT > capacity)
TNode< FixedArray > HeapObjectToFixedArray(TNode< HeapObject > base, Label *cast_fail)
TNode< BoolT > HasInstanceType(TNode< HeapObject > object, InstanceType type)
TNode< Int32T > CountTrailingZeros32(TNode< Word32T > value)
TNode< BoolT > IsJSWrappedFunction(TNode< HeapObject > object)
TNode< Float64T > LoadDoubleWithHoleCheck(TNode< FixedDoubleArray > array, TNode< IntPtrT > index, Label *if_hole=nullptr)
TNode< IntPtrT > ElementOffsetFromIndex(TNode< TIndex > index, ElementsKind kind, int base_size=0)
IntegerLiteral ConstexprIntegerLiteralAdd(const IntegerLiteral &lhs, const IntegerLiteral &rhs)
TNode< BigInt > LoadFixedBigInt64ArrayElementAsTagged(TNode< RawPtrT > data_pointer, TNode< IntPtrT > offset)
TNode< BoolT > IsPageFlagSet(TNode< IntPtrT > object, int mask)
void UnsafeStoreFixedArrayElement(TNode< FixedArray > object, int index, TNode< Object > value, WriteBarrierMode barrier_mode=UPDATE_WRITE_BARRIER)
TNode< BoolT > IsFixedArraySubclass(TNode< HeapObject > object)
TNode< BoolT > IsSlowAliasedArgumentsMap(TNode< Context > context, TNode< Map > map)
TNode< Name > LoadKeyByKeyIndex(TNode< DescriptorArray > container, TNode< IntPtrT > key_index)
TNode< IntPtrT > LoadStringLengthAsWord(TNode< String > string)
TNode< Smi > SmiShr(TNode< Smi > a, int shift)
void SharedValueBarrier(TNode< Context > context, TVariable< Object > *var_shared_value)
TNode< HeapNumber > AllocateHeapNumberWithValue(TNode< Float64T > value)
TNode< BoolT > IsNotAnyHole(TNode< Object > object)
TNode< BoolT > IsJSArrayIterator(TNode< HeapObject > object)
TNode< Word32T > IsStringWrapper(TNode< HeapObject > object)
TNode< BoolT > IsPropertyDictionary(TNode< HeapObject > object)
void StoreSharedObjectField(TNode< HeapObject > object, TNode< IntPtrT > offset, TNode< Object > value)
void StoreObjectByteNoWriteBarrier(TNode< HeapObject > object, int offset, TNode< Word32T > value)
void ArrayListSet(TNode< ArrayList > array, TNode< Smi > index, TNode< Object > object)
void PerformStackCheck(TNode< Context > context)
TNode< BoolT > IsNumeric(TNode< Object > object)
void StoreBigIntDigit(TNode< BigInt > bigint, intptr_t digit_index, TNode< UintPtrT > digit)
void BranchIfHasPrototypeProperty(TNode< JSFunction > function, TNode< Int32T > function_map_bit_field, Label *if_true, Label *if_false)
TNode< BoolT > IsJSFinalizationRegistry(TNode< HeapObject > object)
TNode< HeapObject > AllocateRaw(TNode< IntPtrT > size_in_bytes, AllocationFlags flags, TNode< RawPtrT > top_address, TNode< RawPtrT > limit_address)
TNode< Code > LoadJSFunctionCode(TNode< JSFunction > function)
TNode< Object > OrdinaryToPrimitive(TNode< Context > context, TNode< Object > input, OrdinaryToPrimitiveHint hint)
TNode< Smi > TryFloat64ToSmi(TNode< Float64T > number, Label *not_smi)
TNode< Float64T > Float64Trunc(TNode< Float64T > x)
TNode< String > AllocateSeqTwoByteString(uint32_t length, AllocationFlags flags=AllocationFlag::kNone)
TNode< Uint16T > LoadInstanceType(TNode< HeapObject > object)
void ArrayListSetLength(TNode< ArrayList > array, TNode< Smi > length)
TNode< IntPtrT > GetPropertyArrayAllocationSize(TNode< IntPtrT > element_count)
void StoreTrustedPointerField(TNode< HeapObject > object, int offset, IndirectPointerTag tag, TNode< ExposedTrustedObject > value)
TNode< IntPtrT > TryTaggedToInt32AsIntPtr(TNode< Object > value, Label *if_not_possible)
TNode< BoolT > IsConstructorMap(TNode< Map > map)
TNode< BoolT > IsJSSharedArrayMap(TNode< Map > map)
TNode< BoolT > IsIsolatePromiseHookEnabled(TNode< Uint32T > flags)
TNode< NativeContext > LoadNativeContext(TNode< Context > context)
TNode< JSAny > ToThisValue(TNode< Context > context, TNode< JSAny > value, PrimitiveType primitive_type, char const *method_name)
void StoreBigIntBitfield(TNode< BigInt > bigint, TNode< Word32T > bitfield)
void TerminateExecution(TNode< Context > context)
TNode< BoolT > IsNumberDictionary(TNode< HeapObject > object)
void LookupBinary(TNode< Name > unique_name, TNode< Array > array, TNode< Uint32T > number_of_valid_entries, Label *if_found, TVariable< IntPtrT > *var_name_index, Label *if_not_found)
void GotoIfNotNumber(TNode< Object > value, Label *is_not_number)
TNode< String > AllocateSlicedTwoByteString(TNode< Uint32T > length, TNode< String > parent, TNode< Smi > offset)
TNode< IntPtrT > LoadAndUntagWeakFixedArrayLength(TNode< WeakFixedArray > array)
void StringWriteToFlatOneByte(TNode< String > source, TNode< RawPtrT > sink, TNode< Int32T > start, TNode< Int32T > length)
void TryStoreArrayElement(ElementsKind kind, Label *bailout, TNode< FixedArrayBase > elements, TNode< BInt > index, TNode< Object > value)
constexpr int MaxNumberOfEntries()
void CheckPrototypeEnumCache(TNode< JSReceiver > receiver, TNode< Map > receiver_map, Label *if_fast, Label *if_slow)
TNode< BoolT > LoadScopeInfoClassScopeHasPrivateBrand(TNode< ScopeInfo > scope_info)
TNode< Int32T > LoadAndUntagToWord32FixedArrayElement(TNode< FixedArray > object, TNode< IntPtrT > index, int additional_offset=0)
TNode< MaybeObject > LoadWeakFixedArrayElement(TNode< WeakFixedArray > object, TNode< IntPtrT > index, int additional_offset=0)
TNode< Int32T > EnsureArrayPushable(TNode< Context > context, TNode< Map > map, Label *bailout)
TNode< BoolT > IsJSGlobalProxyInstanceType(TNode< Int32T > instance_type)
TNode< OrderedHashSet > AllocateOrderedHashSet()
TNode< T > Select(TNode< BoolT > condition, const NodeGenerator< T > &true_body, const NodeGenerator< T > &false_body, BranchHint branch_hint=BranchHint::kNone)
TNode< RawPtrT > ExternalPointerTableAddress(ExternalPointerTagRange tag_range)
TNode< Uint32T > SwissNameDictionaryUpdateCountsForDeletion(TNode< ByteArray > meta_table, TNode< IntPtrT > capacity)
TNode< Uint8T > LoadSwissNameDictionaryPropertyDetails(TNode< SwissNameDictionary > table, TNode< IntPtrT > capacity, TNode< IntPtrT > entry)
TNode< FixedArrayBase > ExtractFixedDoubleArrayFillingHoles(TNode< FixedArrayBase > source, TNode< TIndex > first, TNode< TIndex > count, TNode< TIndex > capacity, TNode< Map > source_map, TVariable< BoolT > *var_holes_converted, AllocationFlags allocation_flags, ExtractFixedArrayFlags extract_flags)
TNode< BoolT > TaggedIsSmi(TNode< MaybeObject > a)
void StoreContextElementNoWriteBarrier(TNode< Context > context, int slot_index, TNode< Object > value)
TNode< FixedArrayBase > TryGrowElementsCapacity(TNode< HeapObject > object, TNode< FixedArrayBase > elements, ElementsKind kind, TNode< Smi > key, Label *bailout)
TNode< Smi > LoadFastJSArrayLength(TNode< JSArray > array)
TNode< BigInt > AllocateRawBigInt(TNode< IntPtrT > length)
TNode< BoolT > IsSpecialReceiverInstanceType(TNode< Int32T > instance_type)
TNode< IntPtrT > HashTableComputeCapacity(TNode< IntPtrT > at_least_space_for)
TNode< BoolT > IsJSShadowRealm(TNode< HeapObject > object)
TNode< String > NumberToString(TNode< Number > input)
TNode< BoolT > IsJSPromiseMap(TNode< Map > map)
TNode< BoolT > IsJSRegExp(TNode< HeapObject > object)
TNode< BoolT > IsJSFinalizationRegistryMap(TNode< Map > map)
void ThrowIfNotJSReceiver(TNode< Context > context, TNode< Object > value, MessageTemplate msg_template, const char *method_name)
TNode< PropertyArray > AllocatePropertyArray(TNode< IntPtrT > capacity)
TNode< BoolT > IsAdditiveSafeInteger(TNode< Float64T > number)
TNode< BoolT > IsConsStringInstanceType(TNode< Int32T > instance_type)
TNode< Int32T > LoadMapBitField(TNode< Map > map)
TNode< String > TaggedToDirectString(TNode< Object > value, Label *fail)
void StoreSwissNameDictionaryPropertyDetails(TNode< SwissNameDictionary > table, TNode< IntPtrT > capacity, TNode< IntPtrT > entry, TNode< Uint8T > details)
TNode< BoolT > IsJSSharedStructMap(TNode< Map > map)
TNode< BoolT > IsOneByteStringInstanceType(TNode< Int32T > instance_type)
void TaggedToWord32OrBigIntImpl(TNode< Context > context, TNode< Object > value, Label *if_number, TVariable< Word32T > *var_word32, IsKnownTaggedPointer is_known_tagged_pointer, const FeedbackValues &feedback, Label *if_bigint=nullptr, Label *if_bigint64=nullptr, TVariable< BigInt > *var_maybe_bigint=nullptr)
TNode< Int32T > TryInt32Mul(TNode< Int32T > a, TNode< Int32T > b, Label *if_overflow)
TNode< Float64T > Float64Ceil(TNode< Float64T > x)
TNode< BoolT > IsValidPositiveSmi(TNode< IntPtrT > value)
TNode< Float64T > LoadHeapNumberValue(TNode< HeapObject > object)
TNode< BoolT > IsJSSharedStructInstanceType(TNode< Int32T > instance_type)
TNode< TaggedIndex > SmiToTaggedIndex(TNode< Smi > value)
TNode< BoolT > IsMap(TNode< HeapObject > object)
void StoreBoundedSizeToObject(TNode< HeapObject > object, int offset, TNode< UintPtrT > value)
TNode< BoolT > IsDictionaryElementsKind(TNode< Int32T > elements_kind)
TNode< HeapObject > LoadJSFunctionPrototype(TNode< JSFunction > function, Label *if_bailout)
TNode< HeapObjectReference > MakeWeak(TNode< HeapObject > value)
TNode< BigInt > ToBigInt(TNode< Context > context, TNode< Object > input)
TNode< IntPtrT > NameToIndexHashTableLookup(TNode< NameToIndexHashTable > table, TNode< Name > name, Label *not_found)
void OverwriteFeedback(TVariable< Smi > *existing_feedback, int new_feedback)
TNode< BoolT > IsMarkedForDeoptimization(TNode< Code > code)
TNode< Object > SetPropertyStrict(TNode< Context > context, TNode< JSAny > receiver, TNode< Object > key, TNode< Object > value)
TNode< OrderedHashMap > AllocateOrderedHashMap()
TNode< BoolT > IsOffsetInBounds(TNode< IntPtrT > offset, TNode< IntPtrT > length, int header_size, ElementsKind kind=HOLEY_ELEMENTS)
TNode< Float64T > LoadFixedDoubleArrayElement(TNode< FixedDoubleArray > object, TNode< IntPtrT > index, Label *if_hole=nullptr, MachineType machine_type=MachineType::Float64())
void StoreSwissNameDictionaryKeyAndValue(TNode< SwissNameDictionary > dict, TNode< IntPtrT > entry, TNode< Object > key, TNode< Object > value)
TNode< Object > LoadJSPrimitiveWrapperValue(TNode< JSPrimitiveWrapper > object)
TNode< Word32T > LoadBigIntBitfield(TNode< BigInt > bigint)
TNode< Float64T > TruncateTaggedToFloat64(TNode< Context > context, TNode< Object > value)
TNode< Map > LoadMap(TNode< HeapObject > object)
TNode< IntPtrT > MapUsedInstanceSizeInWords(TNode< Map > map)
TNode< FixedArray > ExtractToFixedArray(TNode< FixedArrayBase > source, TNode< TIndex > first, TNode< TIndex > count, TNode< TIndex > capacity, TNode< Map > source_map, ElementsKind from_kind, AllocationFlags allocation_flags, ExtractFixedArrayFlags extract_flags, HoleConversionMode convert_holes, TVariable< BoolT > *var_holes_converted=nullptr, std::optional< TNode< Int32T > > source_runtime_kind=std::nullopt)
TNode< Int32T > LoadBytecodeArrayParameterCountWithoutReceiver(TNode< BytecodeArray > bytecode_array)
TNode< BoolT > SharedFunctionInfoHasBaselineCode(TNode< SharedFunctionInfo > sfi)
TNode< ByteArray > AllocateNonEmptyByteArray(TNode< UintPtrT > length, AllocationFlags flags=AllocationFlag::kNone)
void SwissNameDictionaryAdd(TNode< SwissNameDictionary > table, TNode< Name > key, TNode< Object > value, TNode< Uint8T > property_details, Label *needs_resize)
void GotoIfLargeBigInt(TNode< BigInt > bigint, Label *true_label)
TNode< BoolT > IsString(TNode< HeapObject > object)
TNode< Numeric > NonNumberToNumberOrNumeric(TNode< Context > context, TNode< HeapObject > input, Object::Conversion mode, BigIntHandling bigint_handling=BigIntHandling::kThrow)
TNode< BoolT > IsDictionaryMap(TNode< Map > map)
TNode< IntPtrT > PopulationCountFallback(TNode< UintPtrT > value)
TNode< Int32T > SmiToInt32(TNode< Smi > value)
TNode< Uint32T > LoadStringLengthAsWord32(TNode< String > string)
TNode< Number > ToLength_Inline(TNode< Context > context, TNode< Object > input)
TNode< Number > PlainPrimitiveToNumber(TNode< Object > input)
TNode< Int32T > LoadMapElementsKind(TNode< Map > map)
TNode< IntPtrT > TaggedIndexToIntPtr(TNode< TaggedIndex > value)
TNode< BoolT > IsJSProxy(TNode< HeapObject > object)
TNode< Float64T > ChangeNumberToFloat64(TNode< Number > value)
TNode< BoolT > IsOddball(TNode< HeapObject > object)
TNode< JSObject > AllocateJSObjectFromMap(TNode< Map > map, std::optional< TNode< HeapObject > > properties=std::nullopt, std::optional< TNode< FixedArray > > elements=std::nullopt, AllocationFlags flags=AllocationFlag::kNone, SlackTrackingMode slack_tracking_mode=kNoSlackTracking)
TNode< Object > LoadCellValue(TNode< Cell > cell)
TNode< CollectionType > AllocateOrderedHashTableWithCapacity(TNode< IntPtrT > capacity)
TNode< Union< Hole, JSMessageObject > > GetPendingMessage()
TNode< Number > ToUint32(TNode< Context > context, TNode< Object > input)
TNode< BoolT > IsNotWeakFixedArraySubclass(TNode< HeapObject > object)
void MakeFixedArrayCOW(TNode< FixedArray > array)
TNode< Uint32T > LoadAndUntagWeakFixedArrayLengthAsUint32(TNode< WeakFixedArray > array)
TNode< Float64T > ChangeFloat16ToFloat64(TNode< Float16RawBitsT > value)
TNode< MaybeObject > LoadFieldTypeByKeyIndex(TNode< DescriptorArray > container, TNode< IntPtrT > key_index)
void InitializeJSObjectFromMap(TNode< HeapObject > object, TNode< Map > map, TNode< IntPtrT > instance_size, std::optional< TNode< HeapObject > > properties=std::nullopt, std::optional< TNode< FixedArray > > elements=std::nullopt, SlackTrackingMode slack_tracking_mode=kNoSlackTracking)
TNode< Uint32T > LoadMapEnumLength(TNode< Map > map)
TNode< BoolT > IsTypedArraySpeciesProtectorCellInvalid()
TNode< IntPtrT > AlignToAllocationAlignment(TNode< IntPtrT > value)
TNode< Smi > SmiMin(TNode< Smi > a, TNode< Smi > b)
TNode< TIndex > IntPtrOrSmiConstant(int value)
TNode< Int32T > GetNonRabGsabElementsKind(TNode< Int32T > elements_kind)
TNode< Int32T > TruncateHeapNumberValueToWord32(TNode< HeapNumber > object)
void BranchIfToBooleanIsTrue(TNode< Object > value, Label *if_true, Label *if_false)
TNode< Uint32T > ChangeNonNegativeNumberToUint32(TNode< Number > value)
TNode< Float16RawBitsT > TruncateFloat32ToFloat16(TNode< Float32T > value)
TNode< IntPtrT > TryIntPtrAdd(TNode< IntPtrT > a, TNode< IntPtrT > b, Label *if_overflow)
TNode< BoolT > IsExternalStringInstanceType(TNode< Int32T > instance_type)
TNode< BigInt > LoadFixedBigUint64ArrayElementAsTagged(TNode< RawPtrT > data_pointer, TNode< IntPtrT > offset)
TNode< PropertyDictionary > AllocatePropertyDictionary(int at_least_space_for)
TNode< BoolT > IsNumberArrayIndex(TNode< Number > number)
Uint32LessThanOrEqual IntPtrGreaterThanOrEqual
TNode< IntPtrT > LoadArrayLength(TNode< Array > array)
void TaggedToBigInt(TNode< Context > context, TNode< Object > value, Label *if_not_bigint, Label *if_bigint, Label *if_bigint64, TVariable< BigInt > *var_bigint, TVariable< Smi > *var_feedback)
TNode< BoolT > IsStringInstanceType(TNode< Int32T > instance_type)
TNode< BoolT > IsAlwaysSharedSpaceJSObjectInstanceType(TNode< Int32T > instance_type)
TNode< Int64T > CountLeadingZeros64(TNode< Word64T > value)
TNode< Smi > SelectSmiConstant(TNode< BoolT > condition, Tagged< Smi > true_value, Tagged< Smi > false_value)
TNode< Object > LoadJSArgumentsObjectLength(TNode< Context > context, TNode< JSArgumentsObject > array)
void StoreFixedDoubleArrayHole(TNode< FixedDoubleArray > array, TNode< IntPtrT > index)
TNode< Smi > TrySmiDiv(TNode< Smi > dividend, TNode< Smi > divisor, Label *bailout)
TNode< BoolT > IsUniqueNameNoCachedIndex(TNode< HeapObject > object)
void FastCheck(TNode< BoolT > condition)
TNode< HeapObject > AllocateRawDoubleAligned(TNode< IntPtrT > size_in_bytes, AllocationFlags flags, TNode< RawPtrT > top_address, TNode< RawPtrT > limit_address)
TNode< BoolT > IsHashTable(TNode< HeapObject > object)
TNode< IntPtrT > PositiveSmiUntag(TNode< Smi > value)
TNode< BoolT > Float64AlmostEqual(TNode< Float64T > x, TNode< Float64T > y, double max_relative_error=0.0000001)
TNode< BoolT > IsCallable(TNode< HeapObject > object)
TNode< BoolT > IsBigInt(TNode< HeapObject > object)
TNode< BoolT > IsJSTypedArray(TNode< HeapObject > object)
TNode< HeapObject > GetHeapObjectIfStrong(TNode< MaybeObject > value, Label *if_not_strong)
TNode< NameDictionary > CopyNameDictionary(TNode< NameDictionary > dictionary, Label *large_object_fallback)
void ForEachEnumerableOwnProperty(TNode< Context > context, TNode< Map > map, TNode< JSObject > object, PropertiesEnumerationMode mode, const ForEachKeyValueFunction &body, Label *bailout)
TNode< RawPtrT > LoadExternalStringResourceDataPtr(TNode< ExternalString > object)
TNode< ByteArray > AllocateByteArray(TNode< UintPtrT > length, AllocationFlags flags=AllocationFlag::kNone)
TNode< Float64T > ChangeTaggedToFloat64(TNode< Context > context, TNode< Object > input)
TNode< Uint32T > LoadNameHashAssumeComputed(TNode< Name > name)
TNode< BoolT > IsJSFunctionInstanceType(TNode< Int32T > instance_type)
void StoreObjectField(TNode< HeapObject > object, int offset, TNode< Smi > value)
TNode< BoolT > IsValidFastJSArrayCapacity(TNode< IntPtrT > capacity)
void FillFixedArrayWithValue(ElementsKind kind, TNode< FixedArrayBase > array, TNode< TIndex > from_index, TNode< TIndex > to_index, RootIndex value_root_index)
void PossiblyGrowElementsCapacity(ElementsKind kind, TNode< HeapObject > array, TNode< BInt > length, TVariable< FixedArrayBase > *var_elements, TNode< BInt > growth, Label *bailout)
TNode< Int32T > TruncateWordToInt32(TNode< WordT > value)
TNode< UintPtrT > DecodeWordFromWord32(TNode< Word32T > word32)
TNode< BoolT > IsInRange(TNode< Word32T > value, U lower_limit, U higher_limit)
std::function< void(TNode< Name > key, LazyNode< Object > value)> ForEachKeyValueFunction
TNode< Int64T > CountTrailingZeros64(TNode< Word64T > value)
TNode< BigInt > BigIntFromUint32Pair(TNode< UintPtrT > low, TNode< UintPtrT > high)
TNode< BoolT > IsNullOrJSReceiver(TNode< HeapObject > object)
TNode< IntPtrT > LoadMapConstructorFunctionIndex(TNode< Map > map)
void CopyElements(ElementsKind kind, TNode< FixedArrayBase > dst_elements, TNode< IntPtrT > dst_index, TNode< FixedArrayBase > src_elements, TNode< IntPtrT > src_index, TNode< IntPtrT > length, WriteBarrierMode write_barrier=UPDATE_WRITE_BARRIER)
TNode< Smi > GetCapacity(TNode< Dictionary > dictionary)
TNode< Map > GetInstanceTypeMap(InstanceType instance_type)
void SetPropertyLength(TNode< Context > context, TNode< JSAny > array, TNode< Number > length)
TNode< Boolean > SelectBooleanConstant(TNode< BoolT > condition)
TNode< IntPtrT > LoadMapInstanceSizeInWords(TNode< Map > map)
TNode< MaybeObject > LoadFeedbackVectorSlot(TNode< FeedbackVector > feedback_vector, TNode< TIndex > slot, int additional_offset=0)
TNode< BoolT > IsPropertyCell(TNode< HeapObject > object)
TNode< Map > LoadObjectFunctionInitialMap(TNode< NativeContext > native_context)
TNode< BoolT > IsJSPromise(TNode< HeapObject > object)
TNode< Number > SmiMul(TNode< Smi > a, TNode< Smi > b)
TNode< BoolT > IsJSArrayBuffer(TNode< HeapObject > object)
TNode< RawPtr< Uint8T > > IntlAsciiCollationWeightsL3()
TNode< Uint32T > NumberOfEntries(TNode< Array > array)
TNode< FixedArrayBase > CloneFixedArray(TNode< FixedArrayBase > source, ExtractFixedArrayFlags flags=ExtractFixedArrayFlag::kAllFixedArraysDontCopyCOW)
TNode< TValue > LoadArrayElement(TNode< Array > array, int array_header_size, TNode< TIndex > index, int additional_offset=0)
TNode< BoolT > IsFixedArray(TNode< HeapObject > object)
void LoadPropertyFromFastObject(TNode< HeapObject > object, TNode< Map > map, TNode< DescriptorArray > descriptors, TNode< IntPtrT > name_index, TVariable< Uint32T > *var_details, TVariable< Object > *var_value)
TNode< TIndex > CalculateNewElementsCapacity(TNode< TIndex > old_capacity)
void StoreTrustedPointerFieldNoWriteBarrier(TNode< HeapObject > object, int offset, IndirectPointerTag tag, TNode< ExposedTrustedObject > value)
Uint32LessThanOrEqual Int32GreaterThanOrEqual TNode< Smi > SmiMax(TNode< Smi > a, TNode< Smi > b)
void IncrementCallCount(TNode< FeedbackVector > feedback_vector, TNode< UintPtrT > slot_id)
TNode< BoolT > IsHeapNumberUint32(TNode< HeapNumber > number)
void TaggedToWord32OrBigInt(TNode< Context > context, TNode< Object > value, Label *if_number, TVariable< Word32T > *var_word32, Label *if_bigint, Label *if_bigint64, TVariable< BigInt > *var_maybe_bigint)
TNode< Object > CloneIfMutablePrimitive(TNode< Object > object)
TNode< WordT > TimesDoubleSize(TNode< WordT > value)
TNode< BoolT > IsJSGeneratorMap(TNode< Map > map)
TNode< BoolT > IsJSPrimitiveWrapper(TNode< HeapObject > object)
TNode< TaggedIndex > IntPtrToTaggedIndex(TNode< IntPtrT > value)
TNode< BoolT > IsJSSharedArray(TNode< HeapObject > object)
TNode< Uint32T > EnsureOnlyHasSimpleProperties(TNode< Map > map, TNode< Int32T > instance_type, Label *bailout)
TNode< T > LoadDescriptorArrayElement(TNode< DescriptorArray > object, TNode< IntPtrT > index, int additional_offset)
TNode< Float16RawBitsT > BitcastUint32ToFloat16(TNode< Uint32T > value)
TNode< BoolT > IsJSTypedArrayInstanceType(TNode< Int32T > instance_type)
TNode< HeapObject > GetHeapObjectAssumeWeak(TNode< MaybeObject > value)
TNode< Smi > TrySmiAdd(TNode< Smi > a, TNode< Smi > b, Label *if_overflow)
TNode< Smi > GetNextEnumerationIndex(TNode< Dictionary > dictionary)
TNode< BoolT > IsFixedArrayWithKind(TNode< HeapObject > object, ElementsKind kind)
void StoreHeapNumberValue(TNode< HeapNumber > object, TNode< Float64T > value)
TNode< WordT > TimesSystemPointerSize(TNode< WordT > value)
void StoreMapNoWriteBarrier(TNode< HeapObject > object, RootIndex map_root_index)
TNode< Int32T > PopulationCount32(TNode< Word32T > value)
TNode< Object > CallGetterIfAccessor(TNode< Object > value, TNode< Union< JSReceiver, PropertyCell > > holder, TNode< Uint32T > details, TNode< Context > context, TNode< JSAny > receiver, TNode< Object > name, Label *if_bailout, GetOwnPropertyMode mode=kCallJSGetterDontUseCachedName, ExpectedReceiverMode expected_receiver_mode=kExpectingJSReceiver)
void AddToDictionary(TNode< Dictionary > dictionary, TNode< Name > key, TNode< Object > value, Label *bailout, std::optional< TNode< IntPtrT > > insertion_index=std::nullopt)
TNode< BoolT > IsOrderedNameDictionary(TNode< HeapObject > object)
TNode< BoolT > IsJSAsyncGeneratorObject(TNode< HeapObject > object)
TNode< String > AllocateSlicedString(RootIndex map_root_index, TNode< Uint32T > length, TNode< String > parent, TNode< Smi > offset)
TNode< PropertyDictionary > AllocatePropertyDictionaryWithCapacity(TNode< IntPtrT > capacity, AllocationFlags=AllocationFlag::kNone)
void NumberDictionaryLookup(TNode< NumberDictionary > dictionary, TNode< IntPtrT > intptr_index, Label *if_found, TVariable< IntPtrT > *var_entry, Label *if_not_found)
TNode< Object > LoadSharedFunctionInfoUntrustedData(TNode< SharedFunctionInfo > sfi)
TNode< Uint64T > LoadUint64Ptr(TNode< RawPtrT > ptr, TNode< IntPtrT > index)
TNode< BoolT > JSAnyIsNotPrimitive(TNode< HeapObject > object)
TNode< RawPtrT > LoadCodeInstructionStart(TNode< Code > code, CodeEntrypointTag tag)
static constexpr int kFixedSlotCountAboveFp
static V8_INLINE constexpr int SlotOffset(int index)
Definition contexts.h:516
static int ArrayMapIndex(ElementsKind elements_kind)
Definition contexts.h:676
static const int kNoContext
Definition contexts.h:577
static constexpr int ToKeyIndex(int descriptor_number)
static constexpr int ToValueIndex(int descriptor_number)
static constexpr int ToDetailsIndex(int descriptor_number)
static V8_EXPORT_PRIVATE ExternalReference isolate_address()
static V8_EXPORT_PRIVATE ExternalReference address_of_pending_message(LocalIsolate *local_isolate)
static ExternalReference Create(const SCTableReference &table_ref)
Handle< String > InternalizeString(base::Vector< const char > str, bool convert_encoding=false)
Definition factory.h:216
static int GetMaxLengthForNewSpaceAllocation(ElementsKind kind)
static constexpr int kHeaderSize
static constexpr int kLengthOffset
static constexpr int kMaxLength
static const int kMinCapacity
Definition hash-table.h:100
static constexpr int kMapOffset
int MaxNumberToStringCacheSize() const
Definition heap-inl.h:397
static InternalIndex NotFound()
static const int kExternalPointerTableBasePointerOffset
static V8_INLINE constexpr bool IsValidSmi(T value)
v8::internal::Factory * factory()
Definition isolate.h:1527
static const int kInitialMaxFastElementArray
Definition js-array.h:144
static constexpr uint32_t kMaxFastArrayLength
Definition js-array.h:136
static constexpr int kSizeWithoutPrototype
static const int kFieldsAdded
Definition js-objects.h:954
static const int kMapCacheSize
Definition js-objects.h:949
static const uint32_t kMaxGap
Definition js-objects.h:931
static const uint32_t kMinAddedElementsCapacity
Definition js-objects.h:649
static constexpr MachineType Float64()
static constexpr MachineType Pointer()
static constexpr MachineType Uint8()
constexpr bool IsNone() const
static constexpr MachineType Int32()
static constexpr MachineType AnyTagged()
static constexpr MachineType Uint32()
static constexpr MachineType Uint16()
static constexpr MachineType TaggedPointer()
static constexpr MachineType UintPtr()
static constexpr MachineRepresentation PointerRepresentation()
static constexpr MachineType IntPtr()
static const int kNoSlackTracking
Definition map.h:349
static const int kSlackTrackingCounterEnd
Definition map.h:348
static constexpr std::optional< RootIndex > TryGetMapRootIdxFor(InstanceType type)
Definition map.h:891
static constexpr uint32_t kBitsPerCellLog2
Definition marking.h:99
static constexpr uint32_t kBytesPerCellLog2
Definition marking.h:103
static constexpr uint32_t kBytesPerCell
Definition marking.h:102
static constexpr intptr_t FlagsOffset()
static constexpr MainThreadFlags kIsInYoungGenerationMask
static constexpr MainThreadFlags kPointersFromHereAreInterestingMask
static constexpr intptr_t GetAlignmentMaskForAssembler()
static constexpr intptr_t MarkingBitmapOffset()
static constexpr int kFlagsDefault
Definition dictionary.h:262
static const int kFlagsIndex
Definition dictionary.h:247
static constexpr int kEmptyHashField
Definition name.h:133
static constexpr int kHashNotComputedMask
Definition name.h:131
static const unsigned int kDoesNotContainCachedArrayIndexMask
Definition name.h:190
static const unsigned int kDoesNotContainIntegerOrForwardingIndexMask
Definition name.h:197
static const int kNoHashSentinel
static constexpr PropertyConstness kConstIfDictConstnessTracking
static const int kAttributesDontEnumMask
static const int kProtectorInvalid
Definition protectors.h:16
PrototypeCheckAssembler(compiler::CodeAssemblerState *state, Flags flags, TNode< NativeContext > native_context, TNode< Map > initial_prototype_map, base::Vector< DescriptorIndexNameValue > properties)
const TNode< NativeContext > native_context_
void CheckAndBranch(TNode< HeapObject > prototype, Label *if_unmodified, Label *if_modified)
const base::Vector< DescriptorIndexNameValue > properties_
static constexpr bool IsImmortalImmovable(RootIndex root_index)
Definition roots.h:616
static V8_INLINE constexpr int32_t SizeFor(int32_t length)
static V8_INLINE constexpr int32_t SizeFor(int32_t length)
static constexpr int ToInt(const Tagged< Object > object)
Definition smi.h:33
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
static bool constexpr IsValid(T value)
Definition smi.h:67
static constexpr int kMinValue
Definition smi.h:100
static constexpr int kMaxValue
Definition smi.h:101
V8_EXPORT_PRIVATE bool Enabled()
Definition counters.cc:32
static const int32_t kMaxOneByteCharCode
Definition string.h:500
static constexpr int kMetaTableDeletedElementCountFieldIndex
static constexpr int kMetaTableEnumerationDataStartIndex
static constexpr int kMetaTableElementCountFieldIndex
static TNode UncheckedCast(compiler::Node *node)
Definition tnode.h:413
TNode< String > TryToDirect(Label *if_bailout)
TNode< RawPtrT > TryToSequential(StringPointerKind ptr_kind, Label *if_bailout)
ToDirectStringAssembler(compiler::CodeAssemblerState *state, TNode< String > string, Flags flags=Flags())
TNode< RawPtrT > PointerToData(Label *if_bailout)
static const int kTransitionLengthIndex
static const int kMaxNumberOfTransitions
static constexpr bool kUninterestingPagesCanBeSkipped
void push_back(const T &value)
TNode< BoolT > Word32NotEqual(TNode< Word32T > left, TNode< Word32T > right)
TNode< IntPtrT > IntPtrMul(TNode< IntPtrT > left, TNode< IntPtrT > right)
TNode< IntPtrT > IntPtrAdd(TNode< IntPtrT > left, TNode< IntPtrT > right)
TNode< IntPtrT > WordOr(TNode< IntPtrT > left, TNode< IntPtrT > right)
TNode< PairT< Word32T, Word32T > > Int32PairAdd(TNode< Word32T > lhs_lo_word, TNode< Word32T > lhs_hi_word, TNode< Word32T > rhs_lo_word, TNode< Word32T > rhs_hi_word)
TNode< Int32T > Signed(TNode< Word32T > x)
void SetDynamicJSParameterCount(TNode< Uint16T > parameter_count)
void Comment(MessageWithSourceLocation message, Args &&... args)
TNode< RawPtrT > RawPtrSub(TNode< RawPtrT > left, TNode< IntPtrT > right)
TNode< IntPtrT > IntPtrConstant(intptr_t value)
void OptimizedStoreFieldUnsafeNoWriteBarrier(MachineRepresentation rep, TNode< HeapObject > object, int offset, Node *value)
TNode< Int64T > Int64Constant(int64_t value)
void StoreEphemeronKey(Node *base, Node *offset, Node *value)
void UnsafeStoreNoWriteBarrier(MachineRepresentation rep, Node *base, Node *value)
TNode< UintPtrT > ChangeUint32ToWord(TNode< Word32T > value)
TNode< T > UncheckedCast(Node *value)
TNode< IntPtrT > WordShl(TNode< IntPtrT > left, TNode< IntegralT > right)
TNode< Int32T > Int32Mul(TNode< Int32T > left, TNode< Int32T > right)
TNode< BoolT > WordEqual(TNode< WordT > left, TNode< WordT > right)
void GotoIfNot(TNode< IntegralT > condition, Label *false_label, GotoHint goto_hint=GotoHint::kNone)
void PopAndReturn(Node *pop, Node *value)
TNode< IntPtrT > WordSar(TNode< IntPtrT > left, TNode< IntegralT > right)
TNode< Int32T > Word32BitwiseNot(TNode< Int32T > a)
TNode< Uint32T > Uint64HighWordConstantNoLowWord(uint64_t value)
void OptimizedStoreIndirectPointerField(TNode< HeapObject > object, int offset, IndirectPointerTag tag, Node *value)
void StoreToObject(MachineRepresentation rep, TNode< Object > object, TNode< IntPtrT > offset, Node *value, StoreToObjectWriteBarrier write_barrier)
Node * LoadFromObject(MachineType type, TNode< Object > object, TNode< IntPtrT > offset)
TNode< Object > LoadFullTagged(Node *base)
TNode< Uint32T > Unsigned(TNode< Word32T > x)
TNode< Int32T > Word32And(TNode< Int32T > left, TNode< Int32T > right)
TNode< Int32T > UniqueInt32Constant(int32_t value)
TNode< Uint32T > UniqueUint32Constant(int32_t value)
bool TryToSmiConstant(TNode< IntegralT > node, Tagged< Smi > *out_value)
TNode< T > ReinterpretCast(Node *value)
TNode< Int32T > Int32Add(TNode< Int32T > left, TNode< Int32T > right)
TNode< IntPtrT > BitcastTaggedToWord(TNode< Smi > node)
TNode< String > StringConstant(const char *str)
TNode< Uint64T > Uint64Constant(uint64_t value)
void TailCallRuntime(Runtime::FunctionId function, TNode< Object > context, TArgs... args)
TNode< Int64T > TruncateFloat64ToInt64(TNode< Float64T > value)
TNode< Uint64T > Uint64Add(TNode< Uint64T > left, TNode< Uint64T > right)
void StoreFullTaggedNoWriteBarrier(TNode< RawPtrT > base, TNode< Object > tagged_value)
TNode< Smi > SmiConstant(Tagged< Smi > value)
void GotoIf(TNode< IntegralT > condition, Label *true_label, GotoHint goto_hint=GotoHint::kNone)
Node * Load(MachineType type, Node *base)
void OptimizedStoreField(MachineRepresentation rep, TNode< HeapObject > object, int offset, Node *value)
void OptimizedStoreMap(TNode< HeapObject > object, TNode< Map >)
TNode< Float64T > RoundIntPtrToFloat64(Node *value)
TNode< IntPtrT > ChangeInt32ToIntPtr(TNode< Word32T > value)
TNode< Int32T > Int32Sub(TNode< Int32T > left, TNode< Int32T > right)
TNode< Int32T > Word32Or(TNode< Int32T > left, TNode< Int32T > right)
TNode< Int32T > Word32Shl(TNode< Int32T > left, TNode< Int32T > right)
TNode< BoolT > IntPtrEqual(TNode< WordT > left, TNode< WordT > right)
bool TryToIntPtrConstant(TNode< IntegralT > node, intptr_t *out_value)
TNode< IntPtrT > WordAnd(TNode< IntPtrT > left, TNode< IntPtrT > right)
TNode< Int32T > TruncateFloat32ToInt32(TNode< Float32T > value)
void Switch(Node *index, Label *default_label, const int32_t *case_values, Label **case_labels, size_t case_count)
bool TryToInt32Constant(TNode< IntegralT > node, int32_t *out_value)
TNode< Int64T > Word64And(TNode< Int64T > left, TNode< Int64T > right)
TNode< Int64T > Int64Sub(TNode< Int64T > left, TNode< Int64T > right)
TNode< IntPtrT > IntPtrSub(TNode< IntPtrT > left, TNode< IntPtrT > right)
TNode< typename std::tuple_element< index, std::tuple< T1, T2 > >::type > Projection(TNode< PairT< T1, T2 > > value)
void OptimizedStoreIndirectPointerFieldNoWriteBarrier(TNode< HeapObject > object, int offset, IndirectPointerTag tag, Node *value)
TNode< Uint32T > Uint64HighWordConstant(uint64_t value)
TNode< Uint32T > Uint32Sub(TNode< Uint32T > left, TNode< Uint32T > right)
TNode< Float64T > Float64Constant(double value)
TNode< Int64T > Word64Xor(TNode< Int64T > left, TNode< Int64T > right)
TNode< Float64T > ChangeUintPtrToFloat64(TNode< UintPtrT > value)
TNode< BoolT > BoolConstant(bool value)
TNode< Int64T > Word64Shr(TNode< Int64T > left, TNode< Uint64T > right)
TNode< Uint32T > Word32Shr(TNode< Uint32T > left, TNode< Uint32T > right)
TNode< Uint64T > Uint64Sub(TNode< Uint64T > left, TNode< Uint64T > right)
TNode< Uint32T > Uint64LowWordConstant(uint64_t value)
TNode< ExternalReference > ExternalConstant(ExternalReference address)
TNode< Int32T > Int32Constant(int32_t value)
TNode< BoolT > WordNotEqual(TNode< WordT > left, TNode< WordT > right)
Node * CallCFunction(Node *function, std::optional< MachineType > return_type, CArgs... cargs)
TNode< Uint32T > Uint32Add(TNode< Uint32T > left, TNode< Uint32T > right)
TNode< Uint32T > Uint32Constant(uint32_t value)
TNode< Type > HeapConstantNoHole(Handle< Type > object)
TNode< Type > UnalignedLoad(TNode< RawPtrT > base, TNode< IntPtrT > offset)
TNode< BoolT > Word32Equal(TNode< Word32T > left, TNode< Word32T > right)
TNode< T > CallRuntime(Runtime::FunctionId function, TNode< Object > context, TArgs... args)
TNode< PairT< Word32T, Word32T > > Int32PairSub(TNode< Word32T > lhs_lo_word, TNode< Word32T > lhs_hi_word, TNode< Word32T > rhs_lo_word, TNode< Word32T > rhs_hi_word)
TNode< UintPtrT > UintPtrConstant(uintptr_t value)
TNode< ExternalReference > IsolateField(IsolateFieldId id)
void Store(Node *base, Node *value)
TNode< RawPtrT > RawPtrAdd(TNode< RawPtrT > left, TNode< IntPtrT > right)
TNode< Int64T > Word64Shl(TNode< Int64T > left, TNode< Int64T > right)
TNode< UintPtrT > WordShr(TNode< UintPtrT > left, TNode< IntegralT > right)
TNode< UintPtrT > UintPtrAdd(TNode< UintPtrT > left, TNode< UintPtrT > right)
TNode< Int64T > Word64Not(TNode< Int64T > value)
TNode< T > CallBuiltin(Builtin id, TNode< Object > context, TArgs... args)
void Branch(TNode< IntegralT > condition, Label *true_label, Label *false_label, BranchHint branch_hint=BranchHint::kNone)
TNode< HeapObject > OptimizedAllocate(TNode< IntPtrT > size, AllocationType allocation)
void StoreNoWriteBarrier(MachineRepresentation rep, Node *base, Node *value)
TNode< BoolT > Word64Equal(TNode< Word64T > left, TNode< Word64T > right)
#define CAST(x)
Operand const offset_
Register const value_
#define BIND(label)
#define CSA_DCHECK_BRANCH(csa,...)
#define TVARIABLE(...)
MachineType mt
CodeStubAssembler & csa
#define CSA_SLOW_DCHECK(csa,...)
#define CSA_DCHECK(csa,...)
#define CSA_CHECK(csa, x)
#define HEAP_CONSTANT_ACCESSOR(rootIndexName, rootAccessorName, name)
#define HEAP_CONSTANT_TEST(rootIndexName, rootAccessorName, name)
#define V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL
Definition globals.h:242
#define COMPRESS_POINTERS_BOOL
Definition globals.h:99
#define V8_COMPRESS_POINTERS_8GB_BOOL
Definition globals.h:608
#define ALIGN_TO_ALLOCATION_ALIGNMENT(value)
Definition globals.h:1796
#define V8_ENABLE_SANDBOX_BOOL
Definition globals.h:160
JSRegExp::Flags flags_
const MapRef map_
Tagged< NativeContext > native_context_
int start
uint32_t count
int end
LineAndColumn current
LineAndColumn previous
#define RAB_GSAB_TYPED_ARRAYS(V)
#define TYPED_ARRAYS(V)
base::Vector< const DirectHandle< Object > > args
Definition execution.cc:74
DirectHandle< Object > new_target
Definition execution.cc:75
Label label
BytecodeAssembler & assembler_
#define V8_ALLOCATION_SITE_TRACKING_BOOL
Isolate * isolate
#define HEAP_IMMUTABLE_IMMOVABLE_OBJECT_LIST(V)
#define HEAP_IMMOVABLE_OBJECT_LIST(V)
#define HEAP_MUTABLE_IMMOVABLE_OBJECT_LIST(V)
OptionalOpIndex index
int32_t offset
TNode< Context > context
TNode< Object > target
std::optional< TNode< JSArray > > a
TNode< Object > receiver
ArrayReduceDirection direction
std::map< const std::string, const std::string > map
icu::number::FormattedNumber formatted
double second
double increment
ZoneVector< RpoNumber > & result
Builtin builtin
MovableLabel handler
int x
uint32_t const mask
AllocationFlags
InstructionOperand source
const int length_
Definition mul-fft.cc:473
int r
Definition mul-fft.cc:298
constexpr unsigned CountPopulation(T value)
Definition bits.h:26
constexpr bool IsPowerOfTwo(T value)
Definition bits.h:187
V8_INLINE Dest bit_cast(Source const &source)
Definition macros.h:95
V8_INLINE constexpr std::optional< RootIndex > UniqueMapOfInstanceType(InstanceType type)
TNode< Float64T > Float64Add(TNode< Float64T > a, TNode< Float64T > b)
void Store(LiftoffAssembler *assm, LiftoffRegister src, MemOperand dst, ValueKind kind)
const uint32_t kSharedStringTag
const intptr_t kHeapObjectTagMask
Definition v8-internal.h:75
constexpr int kMinInt
Definition globals.h:375
V8_INLINE IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
Definition handles-inl.h:72
PropertiesEnumerationMode
Definition globals.h:2856
constexpr int kCodePointerTableEntrySizeLog2
const uint32_t kStringEncodingMask
constexpr uint64_t kExternalPointerTagShift
constexpr int kIntSize
Definition globals.h:400
constexpr int kTaggedSize
Definition globals.h:542
bool StoreModeHandlesCOW(KeyedAccessStoreMode store_mode)
Definition globals.h:2728
bool StoreModeIsInBounds(KeyedAccessStoreMode store_mode)
Definition globals.h:2724
bool StoreModeCanGrow(KeyedAccessStoreMode store_mode)
Definition globals.h:2742
constexpr double kMaxSafeInteger
Definition globals.h:1985
constexpr uint64_t kFP64SignMask
Definition conversions.h:30
constexpr int kMaxRegularHeapObjectSize
Definition globals.h:680
constexpr int kBitsPerByte
Definition globals.h:682
@ SKIP_WRITE_BARRIER
Definition objects.h:52
@ UPDATE_WRITE_BARRIER
Definition objects.h:55
@ UPDATE_EPHEMERON_KEY_WRITE_BARRIER
Definition objects.h:54
@ UNSAFE_SKIP_WRITE_BARRIER
Definition objects.h:53
constexpr bool IsHoleyElementsKind(ElementsKind kind)
constexpr int GB
Definition v8-internal.h:57
constexpr intptr_t kObjectAlignment
Definition globals.h:930
OrdinaryToPrimitiveHint
Definition globals.h:1860
constexpr JSDispatchHandle kInvalidDispatchHandle(0xffffffff<< kJSDispatchHandleShift)
constexpr int kOneByteSize
Definition globals.h:703
bool IsNumber(Tagged< Object > obj)
constexpr uint32_t kStringRepresentationAndEncodingMask
constexpr uint32_t kSeqOneByteStringTag
static V8_INLINE constexpr bool IsSharedExternalPointerType(ExternalPointerTagRange tag_range)
bool IsSpecialReceiverInstanceType(InstanceType instance_type)
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit bit_field2
Definition map-inl.h:123
constexpr uint64_t kExternalPointerPayloadMask
const int kSmiTagSize
Definition v8-internal.h:87
const uint32_t kUncachedExternalStringTag
bool IsSealedElementsKind(ElementsKind kind)
constexpr ExternalPointerTagRange kAnyExternalPointerTagRange(kFirstExternalPointerTag, kLastExternalPointerTag)
static constexpr InstanceType PROPERTY_DICTIONARY_TYPE
bool IsTypedArrayElementsKind(ElementsKind kind)
constexpr uint64_t kExternalPointerTagMask
bool IsRabGsabTypedArrayElementsKind(ElementsKind kind)
constexpr int kCodePointerTableEntryCodeObjectOffset
constexpr int kTrustedPointerTableEntrySizeLog2
const uint32_t kThinStringTagBit
constexpr size_t kJSDispatchTableReservationSize
Definition globals.h:567
constexpr bool IsSmiElementsKind(ElementsKind kind)
const Address kWeakHeapObjectMask
Definition globals.h:967
const uint32_t kUncachedExternalStringMask
constexpr uint64_t kHoleNanInt64
Definition globals.h:1960
constexpr bool IsObjectElementsKind(ElementsKind kind)
const uint32_t kNotInternalizedTag
constexpr uint64_t kMaxSafeIntegerUint64
Definition globals.h:1983
constexpr int kSystemPointerSizeLog2
Definition globals.h:494
constexpr int kMaxInt31
Definition globals.h:384
bool IsNonextensibleElementsKind(ElementsKind kind)
@ FIRST_ANY_NONEXTENSIBLE_ELEMENTS_KIND
@ HOLEY_NONEXTENSIBLE_ELEMENTS
@ SLOW_STRING_WRAPPER_ELEMENTS
@ PACKED_NONEXTENSIBLE_ELEMENTS
@ LAST_ANY_NONEXTENSIBLE_ELEMENTS_KIND
@ TERMINAL_FAST_ELEMENTS_KIND
@ FIRST_RAB_GSAB_FIXED_TYPED_ARRAY_ELEMENTS_KIND
@ LAST_RAB_GSAB_FIXED_TYPED_ARRAY_ELEMENTS_KIND
@ FAST_STRING_WRAPPER_ELEMENTS
const uint32_t kStringTag
constexpr int kFP16MantissaBits
Definition conversions.h:39
constexpr intptr_t kObjectAlignment8GbHeapMask
Definition globals.h:935
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in allocation gc speed threshold for starting incremental marking via a task in percent of available threshold for starting incremental marking immediately in percent of available Use a single schedule for determining a marking schedule between JS and C objects schedules the minor GC task with kUserVisible priority max worker number of concurrent for NumberOfWorkerThreads start background threads that allocate memory 
concurrent_array_buffer_sweeping use parallel threads to clear weak refs in the atomic pause trace progress of the incremental marking trace object counts and memory usage report a tick only when allocated zone memory changes by this amount TracingFlags::gc_stats TracingFlags::gc_stats track native contexts that are expected to be garbage collected verify heap pointers before and after GC memory reducer runs GC with ReduceMemoryFootprint flag Maximum number of memory reducer GCs scheduled Old gen GC speed is computed directly from gc tracer counters Perform compaction on full GCs based on V8 s default heuristics Perform compaction on every full GC Perform code space compaction when finalizing a full GC with stack Stress GC compaction to flush out bugs with moving objects flush of baseline code when it has not been executed recently Use time base code flushing instead of age Use a progress bar to scan large objects in increments when incremental marking is active force incremental marking for small heaps and run it more often force marking at random points between and force scavenge at random points between and reclaim otherwise unreachable unmodified wrapper objects when possible less compaction in non memory reducing mode use high priority threads for concurrent Marking Test mode only flag It allows an unit test to select evacuation candidates use incremental marking for CppHeap cppheap_concurrent_marking c value for membalancer A special constant to balance between memory and space tradeoff The smaller the more memory it uses enable use of SSE4 instructions if available enable use of AVX VNNI instructions if available enable use of POPCNT instruction if available force all emitted branches to be in long mode(MIPS/PPC only)") DEFINE_BOOL(partial_constant_pool
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
const char * ElementsKindToString(ElementsKind kind)
constexpr int kJSArgcReceiverSlots
Definition globals.h:2778
const uint32_t kOneByteStringTag
constexpr uint64_t kFP64Infinity
Definition conversions.h:32
ElementsKind GetCorrespondingNonRabGsabElementsKind(ElementsKind typed_array_kind)
bool IsNullOrUndefined(Tagged< Object > obj, Isolate *isolate)
constexpr size_t kCodePointerTableReservationSize
bool IsSmiOrObjectElementsKind(ElementsKind kind)
constexpr TrustedPointerHandle kNullTrustedPointerHandle
constexpr int kSystemPointerSize
Definition globals.h:410
constexpr intptr_t kObjectAlignment8GbHeap
Definition globals.h:934
constexpr int kAdditiveSafeIntegerBitLength
Definition globals.h:1995
const int kWeakHeapObjectTag
Definition v8-internal.h:73
constexpr int ElementsKindToShiftSize(ElementsKind elements_kind)
constexpr int kTaggedSizeLog2
Definition globals.h:543
constexpr uint64_t kFP16InfinityAndNaNInfimum
Definition conversions.h:33
constexpr intptr_t kObjectAlignmentMask
Definition globals.h:931
static const int kInvalidEnumCacheSentinel
bool NeedsBoundsCheck(CheckBounds check_bounds)
DONT_OVERRIDE DISABLE_ALLOCATION_SITES HOLEY_ELEMENTS
const intptr_t kHeapObjectReferenceTagMask
Definition v8-internal.h:76
constexpr bool SmiValuesAre31Bits()
bool IsBooleanMap(Tagged< Map > map)
Definition map-inl.h:745
const uint32_t kStringRepresentationMask
typename detail::FlattenUnionHelper< Union<>, Ts... >::type UnionOf
Definition union.h:123
constexpr int kInt32Size
Definition globals.h:401
bool IsFastElementsKind(ElementsKind kind)
@ LAST_CUSTOM_ELEMENTS_RECEIVER
@ INTERNALIZED_TWO_BYTE_STRING_TYPE
DONT_OVERRIDE DISABLE_ALLOCATION_SITES DISABLE_ALLOCATION_SITES HOLEY_DOUBLE_ELEMENTS
static const int kMaxNumberOfDescriptors
Map::Bits1::HasPrototypeSlotBit Map::Bits1::HasNamedInterceptorBit Map::Bits1::IsUndetectableBit Map::Bits1::IsConstructorBit Map::Bits2::IsImmutablePrototypeBit Map::Bits3::IsDeprecatedBit Map::Bits3::IsPrototypeMapBit bit_field3
Definition map-inl.h:137
constexpr uint32_t kTrustedPointerHandleShift
constexpr uint16_t kFP16Infinity
Definition conversions.h:41
constexpr uint32_t kCodePointerHandleShift
constexpr uint32_t kHoleNanLower32
Definition globals.h:1953
const int kHeapObjectTag
Definition v8-internal.h:72
constexpr uint64_t kFP16DenormalThreshold
Definition conversions.h:36
const int kSmiShiftSize
@ kExternalPointerNullTag
const int kSmiValueSize
bool StoreModeIgnoresTypeArrayOOB(KeyedAccessStoreMode store_mode)
Definition globals.h:2738
const uint32_t kIsIndirectStringTag
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr int JSParameterCount(int param_count_without_receiver)
Definition globals.h:2782
constexpr bool SmiValuesAre32Bits()
constexpr uint32_t kHoleNanUpper32
Definition globals.h:1952
const uint32_t kInternalizedTag
constexpr IndirectPointerHandle kNullIndirectPointerHandle
const uint32_t kSharedStringMask
constexpr int kJSDispatchTableEntrySizeLog2
Definition globals.h:562
constexpr size_t kTrustedPointerTableReservationSize
const intptr_t kSmiTagMask
Definition v8-internal.h:88
const uint32_t kIsNotInternalizedMask
return value
Definition map-inl.h:893
constexpr intptr_t kSmiSignMask
Definition globals.h:925
constexpr int kFP64MantissaBits
Definition conversions.h:28
bool IsPrivateSymbol(Tagged< Object > obj)
constexpr bool Is64()
bool IsTypedArrayOrRabGsabTypedArrayElementsKind(ElementsKind kind)
constexpr int kDoubleSizeLog2
Definition globals.h:421
static constexpr Address kNullAddress
Definition v8-internal.h:53
const uint32_t kIsNotStringMask
constexpr int kIeeeDoubleExponentWordOffset
Definition globals.h:1763
constexpr bool IsDoubleElementsKind(ElementsKind kind)
constexpr uint16_t kFP16qNaN
Definition conversions.h:40
constexpr uint64_t kTrustedPointerTableMarkBit
constexpr int kDoubleSize
Definition globals.h:407
constexpr int kSizetSize
Definition globals.h:404
constexpr intptr_t kDoubleAlignmentMask
Definition globals.h:950
JSArrayBuffer::IsDetachableBit is_shared
ElementsKind GetInitialFastElementsKind()
constexpr uint64_t kFP64To16RebiasExponentAndRound
Definition conversions.h:53
constexpr uint32_t kCodePointerHandleMarker
const uint32_t kClearedWeakHeapObjectLower32
Definition globals.h:981
constexpr int64_t kMinAdditiveSafeInteger
Definition globals.h:1993
constexpr uint32_t kMaxUInt32
Definition globals.h:387
kInstanceDescriptorsOffset kTransitionsOrPrototypeInfoOffset IsNull(value)||IsJSProxy(value)||IsWasmObject(value)||(IsJSObject(value) &&(HeapLayout
Definition map-inl.h:70
constexpr int kIeeeDoubleMantissaWordOffset
Definition globals.h:1762
const uint32_t kIsIndirectStringMask
constexpr int ElementsKindToByteSize(ElementsKind elements_kind)
constexpr int kIntptrSize
Definition globals.h:408
constexpr uint64_t kExternalPointerMarkBit
static V8_INLINE constexpr bool ExternalPointerCanBeEmpty(ExternalPointerTagRange tag_range)
!IsContextMap !IsContextMap native_context
Definition map-inl.h:877
constexpr uint64_t kFP64To16DenormalMagic
Definition conversions.h:59
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
V8_INLINE Local< Primitive > Null(Isolate *isolate)
V8_INLINE Local< Boolean > False(Isolate *isolate)
V8_INLINE Local< Primitive > Undefined(Isolate *isolate)
constexpr std::array< std::remove_cv_t< T >, N > to_array(T(&a)[N])
i::Address Load(i::Address address)
Definition unwinder.cc:19
#define STRING_TYPE_LIST(V)
Operation
Definition operation.h:43
uint32_t compare
BytecodeSequenceNode * parent_
#define UNREACHABLE()
Definition logging.h:67
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define CHECK(condition)
Definition logging.h:124
#define CHECK_GT(lhs, rhs)
#define CHECK_LT(lhs, rhs)
#define CHECK_LE(lhs, rhs)
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define CHECK_NOT_NULL(val)
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_LT(v1, v2)
Definition logging.h:489
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define DCHECK_GT(v1, v2)
Definition logging.h:487
#define USE(...)
Definition macros.h:293
#define V8_EXPORT_PRIVATE
Definition macros.h:460
constexpr bool IsAligned(T value, U alignment)
Definition macros.h:403
#define arraysize(array)
Definition macros.h:67
constexpr size_t Size() const
constexpr bool IsEmpty() const
#define OFFSET_OF_DATA_START(Type)
Symbol file
Symbol method
#define FIELD_SIZE(Name)
Definition utils.h:259
#define STATIC_ASSERT_FIELD_OFFSETS_EQUAL(Offset1, Offset2)
Definition utils.h:262
#define V8_STATIC_ROOTS_BOOL
Definition v8config.h:1001
std::unique_ptr< ValueMirror > value
std::unique_ptr< ValueMirror > key
wasm::ValueType type