v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
builtins-internal-gen.cc
Go to the documentation of this file.
1// Copyright 2017 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include <optional>
6
7#include "src/api/api.h"
14#include "src/common/globals.h"
23#include "src/runtime/runtime.h"
24
25namespace v8 {
26namespace internal {
27
29
30// -----------------------------------------------------------------------------
31// TurboFan support builtins.
32
33TF_BUILTIN(CopyFastSmiOrObjectElements, CodeStubAssembler) {
34 auto js_object = Parameter<JSObject>(Descriptor::kObject);
35
36 // Load the {object}s elements.
38 CAST(LoadObjectField(js_object, JSObject::kElementsOffset));
40 CloneFixedArray(source, ExtractFixedArrayFlag::kFixedArrays);
41 StoreObjectField(js_object, JSObject::kElementsOffset, target);
42 Return(target);
43}
44
45TF_BUILTIN(GrowFastDoubleElements, CodeStubAssembler) {
46 auto object = Parameter<JSObject>(Descriptor::kObject);
47 auto key = Parameter<Smi>(Descriptor::kKey);
48
49 Label runtime(this, Label::kDeferred);
50 TNode<FixedArrayBase> elements = LoadElements(object);
51 elements = TryGrowElementsCapacity(object, elements, PACKED_DOUBLE_ELEMENTS,
52 key, &runtime);
53 Return(elements);
54
55 BIND(&runtime);
56 TailCallRuntime(Runtime::kGrowArrayElements, NoContextConstant(), object,
57 key);
58}
59
60TF_BUILTIN(GrowFastSmiOrObjectElements, CodeStubAssembler) {
61 auto object = Parameter<JSObject>(Descriptor::kObject);
62 auto key = Parameter<Smi>(Descriptor::kKey);
63
64 Label runtime(this, Label::kDeferred);
65 TNode<FixedArrayBase> elements = LoadElements(object);
66 elements =
67 TryGrowElementsCapacity(object, elements, PACKED_ELEMENTS, key, &runtime);
68 Return(elements);
69
70 BIND(&runtime);
71 TailCallRuntime(Runtime::kGrowArrayElements, NoContextConstant(), object,
72 key);
73}
74
75TF_BUILTIN(ReturnReceiver, CodeStubAssembler) {
76 auto receiver = Parameter<JSAny>(Descriptor::kReceiver);
77 Return(receiver);
78}
79
80TF_BUILTIN(DebugBreakTrampoline, CodeStubAssembler) {
81 Label tailcall_to_shared(this);
82 auto context = Parameter<Context>(Descriptor::kContext);
83 auto new_target = Parameter<Object>(Descriptor::kJSNewTarget);
84 auto arg_count =
85 UncheckedParameter<Int32T>(Descriptor::kJSActualArgumentsCount);
86#ifdef V8_JS_LINKAGE_INCLUDES_DISPATCH_HANDLE
87 auto dispatch_handle =
88 UncheckedParameter<JSDispatchHandleT>(Descriptor::kJSDispatchHandle);
89#else
90 auto dispatch_handle = InvalidDispatchHandleConstant();
91#endif
92 auto function = Parameter<JSFunction>(Descriptor::kJSTarget);
93
94 // Check break-at-entry flag on the debug info.
96 ExternalConstant(ExternalReference::debug_break_at_entry_function());
97 TNode<ExternalReference> isolate_ptr =
98 ExternalConstant(ExternalReference::isolate_address());
100 CAST(LoadObjectField(function, JSFunction::kSharedFunctionInfoOffset));
102 CallCFunction(f, MachineType::UintPtr(),
103 std::make_pair(MachineType::Pointer(), isolate_ptr),
104 std::make_pair(MachineType::TaggedPointer(), shared)));
105 GotoIf(IntPtrEqual(result, IntPtrConstant(0)), &tailcall_to_shared);
106
107 CallRuntime(Runtime::kDebugBreakAtEntry, context, function);
108 Goto(&tailcall_to_shared);
109
110 BIND(&tailcall_to_shared);
111 // Tail call into code object on the SharedFunctionInfo.
112 // TODO(saelo): this is not safe. We either need to validate the parameter
113 // count here or obtain the code from the dispatch table.
114 TNode<Code> code = GetSharedFunctionInfoCode(shared);
115 TailCallJSCode(code, context, function, new_target, arg_count,
116 dispatch_handle);
117}
118
120 public:
123
126 ExternalReference::heap_is_marking_flag_address(this->isolate()));
127 return Word32NotEqual(Load<Uint8T>(is_marking_addr), Int32Constant(0));
128 }
129
131 TNode<ExternalReference> is_minor_marking_addr = ExternalConstant(
132 ExternalReference::heap_is_minor_marking_flag_address(this->isolate()));
133 return Word32NotEqual(Load<Uint8T>(is_minor_marking_addr),
134 Int32Constant(0));
135 }
136
138 TNode<ExternalReference> is_shared_space_isolate_addr = ExternalConstant(
139 ExternalReference::is_shared_space_isolate_flag_address(
140 this->isolate()));
141 return Word32NotEqual(Load<Uint8T>(is_shared_space_isolate_addr),
142 Int32Constant(0));
143 }
144
146 TNode<ExternalReference> uses_shared_heap_addr =
147 IsolateField(IsolateFieldId::kUsesSharedHeapFlag);
148 return Word32NotEqual(Load<Uint8T>(uses_shared_heap_addr),
149 Int32Constant(0));
150 }
151
153 TNode<IntPtrT> cell;
155 GetMarkBit(object, &cell, &mask);
156 // Marked only requires checking a single bit here.
158 }
159
161 SaveFPRegsMode fp_mode) {
162 Label slow_path(this), next(this);
165
166 // Load address of SlotSet
167 TNode<IntPtrT> slot_set = LoadSlotSet(page, &slow_path);
168 TNode<IntPtrT> slot_offset = IntPtrSub(slot, chunk);
169 TNode<IntPtrT> num_buckets_address =
172 Load(MachineType::Pointer(), num_buckets_address, IntPtrConstant(0)));
173
174 // Load bucket
175 TNode<IntPtrT> bucket =
176 LoadBucket(slot_set, slot_offset, num_buckets, &slow_path);
177
178 // Update cell
179 SetBitInCell(bucket, slot_offset);
180 Goto(&next);
181
182 BIND(&slow_path);
183 {
184 TNode<ExternalReference> function =
185 ExternalConstant(ExternalReference::insert_remembered_set_function());
187 function, MachineTypeOf<Int32T>::value, fp_mode,
188 std::make_pair(MachineTypeOf<IntPtrT>::value, page),
189 std::make_pair(MachineTypeOf<IntPtrT>::value, slot_offset));
190 Goto(&next);
191 }
192
193 BIND(&next);
194 }
195
204
206 TNode<IntPtrT> num_buckets, Label* slow_path) {
207 TNode<WordT> bucket_index =
209 CSA_CHECK(this, IntPtrLessThan(bucket_index, num_buckets));
211 Load(MachineType::Pointer(), slot_set,
212 WordShl(bucket_index, kSystemPointerSizeLog2)));
213 GotoIf(WordEqual(bucket, IntPtrConstant(0)), slow_path);
214 return bucket;
215 }
216
217 void SetBitInCell(TNode<IntPtrT> bucket, TNode<WordT> slot_offset) {
218 // Load cell value
219 TNode<WordT> cell_offset = WordAnd(
224 TNode<IntPtrT> cell_address =
225 UncheckedCast<IntPtrT>(IntPtrAdd(bucket, cell_offset));
226 TNode<IntPtrT> old_cell_value =
227 ChangeInt32ToIntPtr(Load<Int32T>(cell_address));
228
229 // Calculate new cell value
230 TNode<WordT> bit_index = WordAnd(WordShr(slot_offset, kTaggedSizeLog2),
232 TNode<IntPtrT> new_cell_value = UncheckedCast<IntPtrT>(
233 WordOr(old_cell_value, WordShl(IntPtrConstant(1), bit_index)));
234
235 // Update cell value
237 TruncateIntPtrToInt32(new_cell_value));
238 }
239
241 Label marking_is_on(this), marking_is_off(this), next(this);
242
243 auto slot =
244 UncheckedParameter<IntPtrT>(WriteBarrierDescriptor::kSlotAddress);
245 Branch(IsMarking(), &marking_is_on, &marking_is_off);
246
247 BIND(&marking_is_off);
248 GenerationalOrSharedBarrierSlow(slot, &next, fp_mode);
249
250 BIND(&marking_is_on);
251 WriteBarrierDuringMarking(slot, &next, fp_mode);
252
253 BIND(&next);
254 }
255
257 CSA_DCHECK(this, IsMarking());
258
259 // For this barrier, the slot contains an index into a pointer table and not
260 // directly a pointer to a HeapObject. Further, the slot address is tagged
261 // with the indirect pointer tag of the slot, so it cannot directly be
262 // dereferenced but needs to be decoded first.
264 IndirectPointerWriteBarrierDescriptor::kSlotAddress);
266 IndirectPointerWriteBarrierDescriptor::kObject));
268 IndirectPointerWriteBarrierDescriptor::kIndirectPointerTag);
269
272 write_barrier_indirect_pointer_marking_from_code_function());
274 function, MachineTypeOf<Int32T>::value, fp_mode,
275 std::make_pair(MachineTypeOf<IntPtrT>::value, object),
276 std::make_pair(MachineTypeOf<IntPtrT>::value, slot),
277 std::make_pair(MachineTypeOf<IntPtrT>::value, tag));
278 }
279
281 SaveFPRegsMode fp_mode) {
282 // When incremental marking is not on, the fast and out-of-line fast path of
283 // the write barrier already checked whether we need to run the generational
284 // or shared barrier slow path.
285 Label generational_barrier(this), shared_barrier(this);
286
288
289 InYoungGeneration(value, &generational_barrier, &shared_barrier);
290
291 BIND(&generational_barrier);
292 if (!v8_flags.sticky_mark_bits) {
293 CSA_DCHECK(this,
295 }
296 GenerationalBarrierSlow(slot, next, fp_mode);
297
298 // TODO(333906585): With sticky-mark bits and without the shared barrier
299 // support, we actually never jump here. Don't put it under the flag though,
300 // since the assert below has been useful.
301 BIND(&shared_barrier);
303 SharedBarrierSlow(slot, next, fp_mode);
304 }
305
307 SaveFPRegsMode fp_mode) {
309 UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
310 InsertIntoRememberedSet(object, slot, fp_mode);
311 Goto(next);
312 }
313
315 SaveFPRegsMode fp_mode) {
317 ExternalReference::shared_barrier_from_code_function());
319 UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
321 function, MachineTypeOf<Int32T>::value, fp_mode,
322 std::make_pair(MachineTypeOf<IntPtrT>::value, object),
323 std::make_pair(MachineTypeOf<IntPtrT>::value, slot));
324 Goto(next);
325 }
326
328 SaveFPRegsMode fp_mode) {
329 // When incremental marking is on, we need to perform generational, shared
330 // and incremental marking write barrier.
331 Label incremental_barrier(this);
332
333 GenerationalOrSharedBarrierDuringMarking(slot, &incremental_barrier,
334 fp_mode);
335
336 BIND(&incremental_barrier);
337 IncrementalWriteBarrier(slot, fp_mode);
338 Goto(next);
339 }
340
342 Label* next,
343 SaveFPRegsMode fp_mode) {
344 Label generational_barrier_check(this), shared_barrier_check(this),
345 shared_barrier_slow(this), generational_barrier_slow(this);
346
347 // During incremental marking we always reach this slow path, so we need to
348 // check whether this is an old-to-new or old-to-shared reference.
350 UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
351
352 if (!v8_flags.sticky_mark_bits) {
353 // With sticky markbits we know everything will be old after the GC so no
354 // need to check the age.
355 InYoungGeneration(object, next, &generational_barrier_check);
356
357 BIND(&generational_barrier_check);
358 }
359
361
362 if (!v8_flags.sticky_mark_bits) {
363 // With sticky markbits we know everything will be old after the GC so no
364 // need to track old-to-new references.
365 InYoungGeneration(value, &generational_barrier_slow,
366 &shared_barrier_check);
367
368 BIND(&generational_barrier_slow);
369 GenerationalBarrierSlow(slot, next, fp_mode);
370
371 BIND(&shared_barrier_check);
372 }
373
374 InSharedHeap(value, &shared_barrier_slow, next);
375
376 BIND(&shared_barrier_slow);
377
378 SharedBarrierSlow(slot, next, fp_mode);
379 }
380
381 void InYoungGeneration(TNode<IntPtrT> object, Label* true_label,
382 Label* false_label) {
383 if (v8_flags.sticky_mark_bits) {
384 // This method is currently only used when marking is disabled. Checking
385 // markbits while marking is active may result in unexpected results.
387
388 Label not_read_only(this);
389
390 TNode<BoolT> is_read_only_page =
392 Branch(is_read_only_page, false_label, &not_read_only);
393
394 BIND(&not_read_only);
395 Branch(IsUnmarked(object), true_label, false_label);
396 } else {
397 TNode<BoolT> object_is_young =
399 Branch(object_is_young, true_label, false_label);
400 }
401 }
402
403 void InSharedHeap(TNode<IntPtrT> object, Label* true_label,
404 Label* false_label) {
405 TNode<BoolT> object_is_young =
407
408 Branch(object_is_young, true_label, false_label);
409 }
410
412 SaveFPRegsMode fp_mode, Label* next) {
413 Label check_is_unmarked(this, Label::kDeferred);
414
415 if (!v8_flags.sticky_mark_bits) {
416 // With sticky markbits, InYoungGeneration and IsUnmarked below are
417 // equivalent.
418 InYoungGeneration(value, &check_is_unmarked, next);
419
420 BIND(&check_is_unmarked);
421 }
422
423 GotoIfNot(IsUnmarked(value), next);
424
425 {
427 ExternalReference::write_barrier_marking_from_code_function());
429 UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
431 function, MachineTypeOf<Int32T>::value, fp_mode,
432 std::make_pair(MachineTypeOf<IntPtrT>::value, object),
433 std::make_pair(MachineTypeOf<IntPtrT>::value, slot));
434 Goto(next);
435 }
436 }
437
439 SaveFPRegsMode fp_mode, Label* next) {
440 Label marking_cpp_slow_path(this);
441
442 IsValueUnmarkedOrRecordSlot(value, &marking_cpp_slow_path, next);
443
444 BIND(&marking_cpp_slow_path);
445 {
447 ExternalReference::write_barrier_marking_from_code_function());
449 UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
451 function, MachineTypeOf<Int32T>::value, fp_mode,
452 std::make_pair(MachineTypeOf<IntPtrT>::value, object),
453 std::make_pair(MachineTypeOf<IntPtrT>::value, slot));
454 Goto(next);
455 }
456 }
457
459 Label* false_label) {
460 // This code implements the following condition:
461 // IsUnmarked(value) ||
462 // OnEvacuationCandidate(value) &&
463 // !SkipEvacuationCandidateRecording(value)
464
465 // 1) IsUnmarked(value) || ....
466 GotoIf(IsUnmarked(value), true_label);
467
468 // 2) OnEvacuationCandidate(value) &&
469 // !SkipEvacuationCandidateRecording(value)
471 false_label);
472
473 {
475 UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
476 Branch(
478 false_label, true_label);
479 }
480 }
481
483 Label next(this), write_into_shared_object(this),
484 write_into_local_object(this),
485 local_object_and_value(this, Label::kDeferred);
486
488 UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
490
491 // Without a shared heap, all objects are local. This is the fast path
492 // always used when no shared heap exists.
493 GotoIfNot(UsesSharedHeap(), &local_object_and_value);
494
495 // From the point-of-view of the shared space isolate (= the main isolate)
496 // shared heap objects are just local objects.
497 GotoIf(IsSharedSpaceIsolate(), &local_object_and_value);
498
499 // These checks here are now only reached by client isolates (= worker
500 // isolates). Now first check whether incremental marking is activated for
501 // that particular object's space. Incrementally marking might only be
502 // enabled for either local or shared objects on client isolates.
504
505 // We now know that incremental marking is enabled for the given object.
506 // Decide whether to run the shared or local incremental marking barrier.
507 InSharedHeap(object, &write_into_shared_object, &write_into_local_object);
508
509 BIND(&write_into_shared_object);
510
511 // Run the shared incremental marking barrier.
512 IncrementalWriteBarrierShared(object, slot, value, fp_mode, &next);
513
514 BIND(&write_into_local_object);
515
516 // When writing into a local object we can ignore stores of shared object
517 // values since for those no slot recording or marking is required.
518 InSharedHeap(value, &next, &local_object_and_value);
519
520 // Both object and value are now guaranteed to be local objects, run the
521 // local incremental marking barrier.
522 BIND(&local_object_and_value);
523 IncrementalWriteBarrierLocal(slot, value, fp_mode, &next);
524
525 BIND(&next);
526 }
527
529 TNode<IntPtrT> value,
530 SaveFPRegsMode fp_mode, Label* next) {
531 Label shared_marking_cpp_slow_path(this);
532
533 IsValueUnmarkedOrRecordSlot(value, &shared_marking_cpp_slow_path, next);
534
535 BIND(&shared_marking_cpp_slow_path);
536 {
538 ExternalReference::write_barrier_shared_marking_from_code_function());
540 function, MachineTypeOf<Int32T>::value, fp_mode,
541 std::make_pair(MachineTypeOf<IntPtrT>::value, object),
542 std::make_pair(MachineTypeOf<IntPtrT>::value, slot));
543
544 Goto(next);
545 }
546 }
547
549 SaveFPRegsMode fp_mode, Label* next) {
550 Label is_minor(this), is_major(this);
551 Branch(IsMinorMarking(), &is_minor, &is_major);
552
553 BIND(&is_minor);
554 IncrementalWriteBarrierMinor(slot, value, fp_mode, next);
555
556 BIND(&is_major);
557 IncrementalWriteBarrierMajor(slot, value, fp_mode, next);
558 }
559
562 Return(TrueConstant());
563 return;
564 }
565
566 WriteBarrier(fp_mode);
567 IncrementCounter(isolate()->counters()->write_barriers(), 1);
568 Return(TrueConstant());
569 }
570
573 Return(TrueConstant());
574 return;
575 }
576
578 Unreachable();
579 return;
580 }
581
583 IncrementCounter(isolate()->counters()->write_barriers(), 1);
584 Return(TrueConstant());
585 }
586
589 ExternalReference::ephemeron_key_write_barrier_function());
590 TNode<ExternalReference> isolate_constant =
592 // In this method we limit the allocatable registers so we have to use
593 // UncheckedParameter. Parameter does not work because the checked cast
594 // needs more registers.
595 auto address =
596 UncheckedParameter<IntPtrT>(WriteBarrierDescriptor::kSlotAddress);
598 UncheckedParameter<Object>(WriteBarrierDescriptor::kObject));
599
601 function, MachineTypeOf<Int32T>::value, fp_mode,
602 std::make_pair(MachineTypeOf<IntPtrT>::value, object),
603 std::make_pair(MachineTypeOf<IntPtrT>::value, address),
605 isolate_constant));
606
607 IncrementCounter(isolate()->counters()->write_barriers(), 1);
608 Return(TrueConstant());
609 }
610};
611
613 GenerateRecordWrite(SaveFPRegsMode::kSave);
614}
615
617 GenerateRecordWrite(SaveFPRegsMode::kIgnore);
618}
619
// Indirect-pointer write barrier entry points; the two variants differ only
// in whether floating-point registers are preserved across the C call.
TF_BUILTIN(IndirectPointerBarrierSaveFP, WriteBarrierCodeStubAssembler) {
  GenerateIndirectPointerBarrier(SaveFPRegsMode::kSave);
}

TF_BUILTIN(IndirectPointerBarrierIgnoreFP, WriteBarrierCodeStubAssembler) {
  GenerateIndirectPointerBarrier(SaveFPRegsMode::kIgnore);
}

// Ephemeron-key write barrier entry points (same FP-register convention).
TF_BUILTIN(EphemeronKeyBarrierSaveFP, WriteBarrierCodeStubAssembler) {
  GenerateEphemeronKeyBarrier(SaveFPRegsMode::kSave);
}

TF_BUILTIN(EphemeronKeyBarrierIgnoreFP, WriteBarrierCodeStubAssembler) {
  GenerateEphemeronKeyBarrier(SaveFPRegsMode::kIgnore);
}
635
636#ifdef V8_IS_TSAN
637class TSANRelaxedStoreCodeStubAssembler : public CodeStubAssembler {
638 public:
639 explicit TSANRelaxedStoreCodeStubAssembler(
640 compiler::CodeAssemblerState* state)
641 : CodeStubAssembler(state) {}
642
643 TNode<ExternalReference> GetExternalReference(int size) {
644 if (size == kInt8Size) {
645 return ExternalConstant(
646 ExternalReference::tsan_relaxed_store_function_8_bits());
647 } else if (size == kInt16Size) {
648 return ExternalConstant(
649 ExternalReference::tsan_relaxed_store_function_16_bits());
650 } else if (size == kInt32Size) {
651 return ExternalConstant(
652 ExternalReference::tsan_relaxed_store_function_32_bits());
653 } else {
654 CHECK_EQ(size, kInt64Size);
655 return ExternalConstant(
656 ExternalReference::tsan_relaxed_store_function_64_bits());
657 }
658 }
659
660 void GenerateTSANRelaxedStore(SaveFPRegsMode fp_mode, int size) {
661 TNode<ExternalReference> function = GetExternalReference(size);
662 auto address = UncheckedParameter<IntPtrT>(TSANStoreDescriptor::kAddress);
663 TNode<IntPtrT> value = BitcastTaggedToWord(
664 UncheckedParameter<Object>(TSANStoreDescriptor::kValue));
665 CallCFunctionWithCallerSavedRegisters(
666 function, MachineType::Int32(), fp_mode,
667 std::make_pair(MachineType::IntPtr(), address),
668 std::make_pair(MachineType::IntPtr(), value));
669 Return(UndefinedConstant());
670 }
671};
672
// Builtin entry points for TSAN relaxed-store instrumentation, one per store
// width (8/16/32/64 bits) crossed with the FP-register save mode.
TF_BUILTIN(TSANRelaxedStore8IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kIgnore, kInt8Size);
}

TF_BUILTIN(TSANRelaxedStore8SaveFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt8Size);
}

TF_BUILTIN(TSANRelaxedStore16IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kIgnore, kInt16Size);
}

TF_BUILTIN(TSANRelaxedStore16SaveFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt16Size);
}

TF_BUILTIN(TSANRelaxedStore32IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kIgnore, kInt32Size);
}

TF_BUILTIN(TSANRelaxedStore32SaveFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt32Size);
}

TF_BUILTIN(TSANRelaxedStore64IgnoreFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kIgnore, kInt64Size);
}

TF_BUILTIN(TSANRelaxedStore64SaveFP, TSANRelaxedStoreCodeStubAssembler) {
  GenerateTSANRelaxedStore(SaveFPRegsMode::kSave, kInt64Size);
}
704
705class TSANSeqCstStoreCodeStubAssembler : public CodeStubAssembler {
706 public:
707 explicit TSANSeqCstStoreCodeStubAssembler(compiler::CodeAssemblerState* state)
708 : CodeStubAssembler(state) {}
709
710 TNode<ExternalReference> GetExternalReference(int size) {
711 if (size == kInt8Size) {
712 return ExternalConstant(
713 ExternalReference::tsan_seq_cst_store_function_8_bits());
714 } else if (size == kInt16Size) {
715 return ExternalConstant(
716 ExternalReference::tsan_seq_cst_store_function_16_bits());
717 } else if (size == kInt32Size) {
718 return ExternalConstant(
719 ExternalReference::tsan_seq_cst_store_function_32_bits());
720 } else {
721 CHECK_EQ(size, kInt64Size);
722 return ExternalConstant(
723 ExternalReference::tsan_seq_cst_store_function_64_bits());
724 }
725 }
726
727 void GenerateTSANSeqCstStore(SaveFPRegsMode fp_mode, int size) {
728 TNode<ExternalReference> function = GetExternalReference(size);
729 auto address = UncheckedParameter<IntPtrT>(TSANStoreDescriptor::kAddress);
730 TNode<IntPtrT> value = BitcastTaggedToWord(
731 UncheckedParameter<Object>(TSANStoreDescriptor::kValue));
732 CallCFunctionWithCallerSavedRegisters(
733 function, MachineType::Int32(), fp_mode,
734 std::make_pair(MachineType::IntPtr(), address),
735 std::make_pair(MachineType::IntPtr(), value));
736 Return(UndefinedConstant());
737 }
738};
739
// Builtin entry points for TSAN sequentially-consistent store
// instrumentation, one per store width (8/16/32/64 bits) crossed with the
// FP-register save mode.
TF_BUILTIN(TSANSeqCstStore8IgnoreFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kIgnore, kInt8Size);
}

TF_BUILTIN(TSANSeqCstStore8SaveFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kSave, kInt8Size);
}

TF_BUILTIN(TSANSeqCstStore16IgnoreFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kIgnore, kInt16Size);
}

TF_BUILTIN(TSANSeqCstStore16SaveFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kSave, kInt16Size);
}

TF_BUILTIN(TSANSeqCstStore32IgnoreFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kIgnore, kInt32Size);
}

TF_BUILTIN(TSANSeqCstStore32SaveFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kSave, kInt32Size);
}

TF_BUILTIN(TSANSeqCstStore64IgnoreFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kIgnore, kInt64Size);
}

TF_BUILTIN(TSANSeqCstStore64SaveFP, TSANSeqCstStoreCodeStubAssembler) {
  GenerateTSANSeqCstStore(SaveFPRegsMode::kSave, kInt64Size);
}
771
772class TSANRelaxedLoadCodeStubAssembler : public CodeStubAssembler {
773 public:
774 explicit TSANRelaxedLoadCodeStubAssembler(compiler::CodeAssemblerState* state)
775 : CodeStubAssembler(state) {}
776
777 TNode<ExternalReference> GetExternalReference(int size) {
778 if (size == kInt32Size) {
779 return ExternalConstant(
780 ExternalReference::tsan_relaxed_load_function_32_bits());
781 } else {
782 CHECK_EQ(size, kInt64Size);
783 return ExternalConstant(
784 ExternalReference::tsan_relaxed_load_function_64_bits());
785 }
786 }
787
788 void GenerateTSANRelaxedLoad(SaveFPRegsMode fp_mode, int size) {
789 TNode<ExternalReference> function = GetExternalReference(size);
790 auto address = UncheckedParameter<IntPtrT>(TSANLoadDescriptor::kAddress);
791 CallCFunctionWithCallerSavedRegisters(
792 function, MachineType::Int32(), fp_mode,
793 std::make_pair(MachineType::IntPtr(), address));
794 Return(UndefinedConstant());
795 }
796};
797
// Builtin entry points for TSAN relaxed-load instrumentation, one per load
// width (32/64 bits) crossed with the FP-register save mode.
TF_BUILTIN(TSANRelaxedLoad32IgnoreFP, TSANRelaxedLoadCodeStubAssembler) {
  GenerateTSANRelaxedLoad(SaveFPRegsMode::kIgnore, kInt32Size);
}

TF_BUILTIN(TSANRelaxedLoad32SaveFP, TSANRelaxedLoadCodeStubAssembler) {
  GenerateTSANRelaxedLoad(SaveFPRegsMode::kSave, kInt32Size);
}

TF_BUILTIN(TSANRelaxedLoad64IgnoreFP, TSANRelaxedLoadCodeStubAssembler) {
  GenerateTSANRelaxedLoad(SaveFPRegsMode::kIgnore, kInt64Size);
}

TF_BUILTIN(TSANRelaxedLoad64SaveFP, TSANRelaxedLoadCodeStubAssembler) {
  GenerateTSANRelaxedLoad(SaveFPRegsMode::kSave, kInt64Size);
}
813#endif // V8_IS_TSAN
814
816 public:
819
821 TNode<NameDictionary> properties,
822 TNode<IntPtrT> key_index,
823 TNode<Context> context) {
824 // Overwrite the entry itself (see NameDictionary::SetEntry).
825 TNode<Hole> filler = TheHoleConstant();
826 DCHECK(RootsTable::IsImmortalImmovable(RootIndex::kTheHoleValue));
827 StoreFixedArrayElement(properties, key_index, filler, SKIP_WRITE_BARRIER);
828 StoreValueByKeyIndex<NameDictionary>(properties, key_index, filler,
830 StoreDetailsByKeyIndex<NameDictionary>(properties, key_index,
831 SmiConstant(0));
832
833 // Update bookkeeping information (see NameDictionary::ElementRemoved).
835 TNode<Smi> new_nof = SmiSub(nof, SmiConstant(1));
836 SetNumberOfElements<NameDictionary>(properties, new_nof);
837 TNode<Smi> num_deleted =
839 TNode<Smi> new_deleted = SmiAdd(num_deleted, SmiConstant(1));
840 SetNumberOfDeletedElements<NameDictionary>(properties, new_deleted);
841
842 // Shrink the dictionary if necessary (see NameDictionary::Shrink).
843 Label shrinking_done(this);
844 TNode<Smi> capacity = GetCapacity<NameDictionary>(properties);
845 GotoIf(SmiGreaterThan(new_nof, SmiShr(capacity, 2)), &shrinking_done);
846 GotoIf(SmiLessThan(new_nof, SmiConstant(16)), &shrinking_done);
847
848 TNode<NameDictionary> new_properties =
849 CAST(CallRuntime(Runtime::kShrinkNameDictionary, context, properties));
850
851 StoreJSReceiverPropertiesOrHash(receiver, new_properties);
852
853 Goto(&shrinking_done);
854 BIND(&shrinking_done);
855 }
856
859 TNode<IntPtrT> key_index,
860 TNode<Context> context) {
861 Label shrunk(this), done(this);
862 TVARIABLE(SwissNameDictionary, shrunk_table);
863
864 SwissNameDictionaryDelete(properties, key_index, &shrunk, &shrunk_table);
865 Goto(&done);
866 BIND(&shrunk);
867 StoreJSReceiverPropertiesOrHash(receiver, shrunk_table.value());
868 Goto(&done);
869
870 BIND(&done);
871 }
872
873 template <typename Dictionary>
875 TNode<Dictionary> properties, TNode<Name> name,
876 TNode<Context> context, Label* dont_delete,
877 Label* notfound) {
878 TVARIABLE(IntPtrT, var_name_index);
879 Label dictionary_found(this, &var_name_index);
880 NameDictionaryLookup<Dictionary>(properties, name, &dictionary_found,
881 &var_name_index, notfound);
882
883 BIND(&dictionary_found);
884 TNode<IntPtrT> key_index = var_name_index.value();
885 TNode<Uint32T> details = LoadDetailsByKeyIndex(properties, key_index);
887 dont_delete);
888
889 DictionarySpecificDelete(receiver, properties, key_index, context);
890
891 Return(TrueConstant());
892 }
893};
894
896 auto receiver = Parameter<Object>(Descriptor::kObject);
897 auto key = Parameter<Object>(Descriptor::kKey);
898 auto language_mode = Parameter<Smi>(Descriptor::kLanguageMode);
899 auto context = Parameter<Context>(Descriptor::kContext);
900
901 TVARIABLE(IntPtrT, var_index);
902 TVARIABLE(Name, var_unique);
903 Label if_index(this, &var_index), if_unique_name(this), if_notunique(this),
904 if_notfound(this), slow(this), if_proxy(this);
905
906 GotoIf(TaggedIsSmi(receiver), &slow);
907 TNode<Map> receiver_map = LoadMap(CAST(receiver));
908 TNode<Uint16T> instance_type = LoadMapInstanceType(receiver_map);
909 GotoIf(InstanceTypeEqual(instance_type, JS_PROXY_TYPE), &if_proxy);
910 GotoIf(IsCustomElementsReceiverInstanceType(instance_type), &slow);
911 TryToName(key, &if_index, &var_index, &if_unique_name, &var_unique, &slow,
912 &if_notunique);
913
914 BIND(&if_index);
915 {
916 Comment("integer index");
917 Goto(&slow); // TODO(jkummerow): Implement more smarts here.
918 }
919
920 BIND(&if_unique_name);
921 {
922 Comment("key is unique name");
923 CheckForAssociatedProtector(var_unique.value(), &slow);
924
925 Label dictionary(this), dont_delete(this);
926 GotoIf(IsDictionaryMap(receiver_map), &dictionary);
927
928 // Fast properties need to clear recorded slots and mark the deleted
929 // property as mutable, which can only be done in C++.
930 Goto(&slow);
931
932 BIND(&dictionary);
933 {
934 InvalidateValidityCellIfPrototype(receiver_map);
935
936 TNode<PropertyDictionary> properties =
937 CAST(LoadSlowProperties(CAST(receiver)));
938 DeleteDictionaryProperty(CAST(receiver), properties, var_unique.value(),
939 context, &dont_delete, &if_notfound);
940 }
941
942 BIND(&dont_delete);
943 {
944 static_assert(LanguageModeSize == 2);
945 GotoIf(SmiNotEqual(language_mode, SmiConstant(LanguageMode::kSloppy)),
946 &slow);
947 Return(FalseConstant());
948 }
949 }
950
951 BIND(&if_notunique);
952 {
953 // If the string was not found in the string table, then no object can
954 // have a property with that name.
955 TryInternalizeString(CAST(key), &if_index, &var_index, &if_unique_name,
956 &var_unique, &if_notfound, &slow);
957 }
958
959 BIND(&if_notfound);
960 Return(TrueConstant());
961
962 BIND(&if_proxy);
963 {
964 TNode<Name> name = CAST(CallBuiltin(Builtin::kToName, context, key));
965 GotoIf(IsPrivateSymbol(name), &slow);
966 TailCallBuiltin(Builtin::kProxyDeleteProperty, context, receiver, name,
967 language_mode);
968 }
969
970 BIND(&slow);
971 {
972 TailCallRuntime(Runtime::kDeleteProperty, context, receiver, key,
973 language_mode);
974 }
975}
976
977namespace {
978
979class SetOrCopyDataPropertiesAssembler : public CodeStubAssembler {
980 public:
981 explicit SetOrCopyDataPropertiesAssembler(compiler::CodeAssemblerState* state)
982 : CodeStubAssembler(state) {}
983
984 protected:
985 TNode<JSObject> AllocateJsObjectTarget(TNode<Context> context) {
986 const TNode<NativeContext> native_context = LoadNativeContext(context);
987 const TNode<JSFunction> object_function = Cast(
988 LoadContextElement(native_context, Context::OBJECT_FUNCTION_INDEX));
989 const TNode<Map> map =
990 Cast(LoadJSFunctionPrototypeOrInitialMap(object_function));
991 const TNode<JSObject> target = AllocateJSObjectFromMap(map);
992 return target;
993 }
994 TNode<Object> SetOrCopyDataProperties(
995 TNode<Context> context, TNode<JSReceiver> target, TNode<Object> source,
996 Label* if_runtime,
997 std::optional<TNode<IntPtrT>> excluded_property_count = std::nullopt,
998 std::optional<TNode<IntPtrT>> excluded_property_base = std::nullopt,
999 bool use_set = true) {
1000 Label if_done(this), if_noelements(this),
1001 if_sourcenotjsobject(this, Label::kDeferred);
1002
1003 // JSPrimitiveWrapper wrappers for numbers don't have any enumerable own
1004 // properties, so we can immediately skip the whole operation if {source} is
1005 // a Smi.
1006 GotoIf(TaggedIsSmi(source), &if_done);
1007
1008 // Otherwise check if {source} is a proper JSObject, and if not, defer
1009 // to testing for non-empty strings below.
1010 TNode<Map> source_map = LoadMap(CAST(source));
1011 TNode<Uint16T> source_instance_type = LoadMapInstanceType(source_map);
1012 GotoIfNot(IsJSObjectInstanceType(source_instance_type),
1013 &if_sourcenotjsobject);
1014
1015 TNode<FixedArrayBase> source_elements = LoadElements(CAST(source));
1016 GotoIf(IsEmptyFixedArray(source_elements), &if_noelements);
1017 Branch(IsEmptySlowElementDictionary(source_elements), &if_noelements,
1018 if_runtime);
1019
1020 BIND(&if_noelements);
1021 {
1022 // If the target is deprecated, the object will be updated on first
1023 // store. If the source for that store equals the target, this will
1024 // invalidate the cached representation of the source. Handle this case
1025 // in runtime.
1026 TNode<Map> target_map = LoadMap(target);
1027 GotoIf(IsDeprecatedMap(target_map), if_runtime);
1028 if (use_set) {
1029 TNode<BoolT> target_is_simple_receiver = IsSimpleObjectMap(target_map);
1030 ForEachEnumerableOwnProperty(
1031 context, source_map, CAST(source), kEnumerationOrder,
1032 [=, this](TNode<Name> key, LazyNode<Object> value) {
1034 state(), context, target, target_is_simple_receiver, key,
1036 },
1037 if_runtime);
1038 } else {
1039 ForEachEnumerableOwnProperty(
1040 context, source_map, CAST(source), kEnumerationOrder,
1041 [=, this](TNode<Name> key, LazyNode<Object> value) {
1042 Label skip(this);
1043 if (excluded_property_count.has_value()) {
1044 BuildFastLoop<IntPtrT>(
1045 IntPtrConstant(0), excluded_property_count.value(),
1046 [&](TNode<IntPtrT> index) {
1047 auto offset = Signed(TimesSystemPointerSize(index));
1048 TNode<IntPtrT> location = Signed(
1049 IntPtrSub(excluded_property_base.value(), offset));
1050 auto property = LoadFullTagged(location);
1051
1052 Label continue_label(this);
1053 BranchIfSameValue(key, property, &skip, &continue_label);
1054 Bind(&continue_label);
1055 },
1056 1, LoopUnrollingMode::kNo, IndexAdvanceMode::kPost);
1057 }
1058
1059 CallBuiltin(Builtin::kCreateDataProperty, context, target, key,
1060 value());
1061 Goto(&skip);
1062 Bind(&skip);
1063 },
1064 if_runtime);
1065 }
1066 Goto(&if_done);
1067 }
1068
1069 BIND(&if_sourcenotjsobject);
1070 {
1071 // Handle other JSReceivers in the runtime.
1072 GotoIf(IsJSReceiverInstanceType(source_instance_type), if_runtime);
1073
1074 // Non-empty strings are the only non-JSReceivers that need to be
1075 // handled explicitly by Object.assign() and CopyDataProperties.
1076 GotoIfNot(IsStringInstanceType(source_instance_type), &if_done);
1077 TNode<Uint32T> source_length = LoadStringLengthAsWord32(CAST(source));
1078 Branch(Word32Equal(source_length, Uint32Constant(0)), &if_done,
1079 if_runtime);
1080 }
1081
1082 BIND(&if_done);
1083 return target;
1084 }
1085};
1086
1087} // namespace
1088
1089TF_BUILTIN(CopyDataPropertiesWithExcludedPropertiesOnStack,
1090 SetOrCopyDataPropertiesAssembler) {
1091 auto source = UncheckedParameter<Object>(Descriptor::kSource);
1092 auto excluded_property_count =
1093 UncheckedParameter<IntPtrT>(Descriptor::kExcludedPropertyCount);
1094 auto excluded_properties =
1095 UncheckedParameter<IntPtrT>(Descriptor::kExcludedPropertyBase);
1096 auto context = Parameter<Context>(Descriptor::kContext);
1097
1098 // first check undefine or null
1099 Label if_runtime(this, Label::kDeferred);
1100 GotoIf(IsNullOrUndefined(source), &if_runtime);
1101
1102 TNode<JSReceiver> target = AllocateJsObjectTarget(context);
1103 Return(SetOrCopyDataProperties(context, target, source, &if_runtime,
1104 excluded_property_count, excluded_properties,
1105 false));
1106
1107 BIND(&if_runtime);
1108 // The excluded_property_base is passed as a raw stack pointer, but is
1109 // bitcasted to a Smi . This is safe because the stack pointer is aligned, so
1110 // it looks like a Smi to the GC.
1111 CSA_DCHECK(this, IntPtrEqual(WordAnd(excluded_properties,
1112 IntPtrConstant(kSmiTagMask)),
1113 IntPtrConstant(kSmiTag)));
1114 TailCallRuntime(Runtime::kCopyDataPropertiesWithExcludedPropertiesOnStack,
1115 context, source, SmiTag(excluded_property_count),
1116 BitcastWordToTaggedSigned(excluded_properties));
1117}
1118
1119TF_BUILTIN(CopyDataPropertiesWithExcludedProperties,
1120 SetOrCopyDataPropertiesAssembler) {
1121 auto source = UncheckedParameter<Object>(Descriptor::kSource);
1122
1123 auto excluded_property_count_smi =
1124 UncheckedParameter<Smi>(Descriptor::kExcludedPropertyCount);
1125 auto context = Parameter<Context>(Descriptor::kContext);
1126
1127 auto excluded_property_count = SmiToIntPtr(excluded_property_count_smi);
1128 CodeStubArguments arguments(this, excluded_property_count);
1129
1130 TNode<IntPtrT> excluded_properties =
1131 ReinterpretCast<IntPtrT>(arguments.AtIndexPtr(
1132 IntPtrSub(excluded_property_count, IntPtrConstant(2))));
1133
1134 arguments.PopAndReturn(CallBuiltin<JSAny>(
1135 Builtin::kCopyDataPropertiesWithExcludedPropertiesOnStack, context,
1136 source, excluded_property_count, excluded_properties));
1137}
1138
1139// ES #sec-copydataproperties
1140TF_BUILTIN(CopyDataProperties, SetOrCopyDataPropertiesAssembler) {
1141 auto target = Parameter<JSObject>(Descriptor::kTarget);
1142 auto source = Parameter<Object>(Descriptor::kSource);
1143 auto context = Parameter<Context>(Descriptor::kContext);
1144
1145 CSA_DCHECK(this, TaggedNotEqual(target, source));
1146
1147 Label if_runtime(this, Label::kDeferred);
1148 SetOrCopyDataProperties(context, target, source, &if_runtime, std::nullopt,
1149 std::nullopt, false);
1150 Return(UndefinedConstant());
1151
1152 BIND(&if_runtime);
1153 TailCallRuntime(Runtime::kCopyDataProperties, context, target, source);
1154}
1155
1156TF_BUILTIN(SetDataProperties, SetOrCopyDataPropertiesAssembler) {
1157 auto target = Parameter<JSReceiver>(Descriptor::kTarget);
1158 auto source = Parameter<Object>(Descriptor::kSource);
1159 auto context = Parameter<Context>(Descriptor::kContext);
1160
1161 Label if_runtime(this, Label::kDeferred);
1162 GotoIfForceSlowPath(&if_runtime);
1163 SetOrCopyDataProperties(context, target, source, &if_runtime, std::nullopt,
1164 std::nullopt, true);
1165 Return(UndefinedConstant());
1166
1167 BIND(&if_runtime);
1168 TailCallRuntime(Runtime::kSetDataProperties, context, target, source);
1169}
1170
1172 auto receiver = Parameter<JSReceiver>(Descriptor::kReceiver);
1173 auto context = Parameter<Context>(Descriptor::kContext);
1174
1175 Label if_empty(this), if_runtime(this, Label::kDeferred);
1176 TNode<Map> receiver_map = CheckEnumCache(receiver, &if_empty, &if_runtime);
1177 Return(receiver_map);
1178
1179 BIND(&if_empty);
1180 Return(EmptyFixedArrayConstant());
1181
1182 BIND(&if_runtime);
1183 TailCallRuntime(Runtime::kForInEnumerate, context, receiver);
1184}
1185
1187 // The {enumerator} is either a Map or a FixedArray.
1188 auto enumerator = Parameter<HeapObject>(Descriptor::kEnumerator);
1189 auto index = Parameter<TaggedIndex>(Descriptor::kVectorIndex);
1190 auto feedback_vector = Parameter<FeedbackVector>(Descriptor::kFeedbackVector);
1191 TNode<UintPtrT> vector_index = Unsigned(TaggedIndexToIntPtr(index));
1192
1193 TNode<FixedArray> cache_array;
1194 TNode<Smi> cache_length;
1195 ForInPrepare(enumerator, vector_index, feedback_vector, &cache_array,
1197 Return(cache_array, cache_length);
1198}
1199
1201 auto key = Parameter<String>(Descriptor::kKey);
1202 auto object = Parameter<JSAnyNotSmi>(Descriptor::kObject);
1203 auto context = Parameter<Context>(Descriptor::kContext);
1204
1205 Label if_true(this), if_false(this);
1206 TNode<Oddball> result = HasProperty(context, object, key, kForInHasProperty);
1207 Branch(IsTrue(result), &if_true, &if_false);
1208
1209 BIND(&if_true);
1210 Return(key);
1211
1212 BIND(&if_false);
1213 Return(UndefinedConstant());
1214}
1215
1217 auto lhs = Parameter<Object>(Descriptor::kLeft);
1218 auto rhs = Parameter<Object>(Descriptor::kRight);
1219
1220 Label if_true(this), if_false(this);
1221 BranchIfSameValue(lhs, rhs, &if_true, &if_false);
1222
1223 BIND(&if_true);
1224 Return(TrueConstant());
1225
1226 BIND(&if_false);
1227 Return(FalseConstant());
1228}
1229
1230TF_BUILTIN(SameValueNumbersOnly, CodeStubAssembler) {
1231 auto lhs = Parameter<Object>(Descriptor::kLeft);
1232 auto rhs = Parameter<Object>(Descriptor::kRight);
1233
1234 Label if_true(this), if_false(this);
1235 BranchIfSameValue(lhs, rhs, &if_true, &if_false, SameValueMode::kNumbersOnly);
1236
1237 BIND(&if_true);
1238 Return(TrueConstant());
1239
1240 BIND(&if_false);
1241 Return(FalseConstant());
1242}
1243
1245 public:
1247
1250
1251 void GenerateAdaptor(int formal_parameter_count);
1252};
1253
// Generates the body of an AdaptorWithBuiltinExitFrameN builtin: pads the
// pushed argument count up to {formal_parameter_count} if needed and
// tail-calls the CEntry stub with a builtin exit frame so that the C++
// builtin at {c_function} runs with stack-walkable frame metadata.
void CppBuiltinsAdaptorAssembler::GenerateAdaptor(int formal_parameter_count) {
  auto context = Parameter<Context>(Descriptor::kContext);
  auto target = Parameter<JSFunction>(Descriptor::kTarget);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto c_function = UncheckedParameter<WordT>(Descriptor::kCFunction);
  auto actual_argc =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);

  // The logic contained here is mirrored for TurboFan inlining in
  // JSTypedLowering::ReduceJSCall{Function,Construct}. Keep these in sync.

  // Make sure we operate in the context of the called function.
  CSA_DCHECK(this, TaggedEqual(context, LoadJSFunctionContext(target)));

  static_assert(kDontAdaptArgumentsSentinel == 0);
  // The code below relies on |actual_argc| to include receiver.
  static_assert(i::JSParameterCount(0) == 1);
  TVARIABLE(Int32T, pushed_argc, actual_argc);

  // It's guaranteed that the receiver is pushed to the stack, thus both
  // kDontAdaptArgumentsSentinel and JSParameterCount(0) cases don't require
  // arguments adaptation. Just use the latter version for consistency.
  DCHECK_NE(kDontAdaptArgumentsSentinel, formal_parameter_count);
  if (formal_parameter_count > i::JSParameterCount(0)) {
    TNode<Int32T> formal_count = Int32Constant(formal_parameter_count);

    // The number of arguments pushed is the maximum of actual arguments count
    // and formal parameters count.
    Label done_argc(this);
    GotoIf(Int32GreaterThanOrEqual(pushed_argc.value(), formal_count),
           &done_argc);
    // Update pushed args.
    pushed_argc = formal_count;
    Goto(&done_argc);
    BIND(&done_argc);
  }

  // Update arguments count for CEntry to contain the number of arguments
  // including the receiver and the extra arguments.
  // NOTE(review): the second operand of this Int32Add (original line 1295)
  // was lost in this extraction — restore it from upstream before building.
  TNode<Int32T> argc =
      Int32Add(pushed_argc.value(),

  const bool builtin_exit_frame = true;
  const bool switch_to_central_stack = false;
  Builtin centry = Builtins::CEntry(1, ArgvMode::kStack, builtin_exit_frame,
                                    switch_to_central_stack);

  static_assert(BuiltinArguments::kNewTargetIndex == 0);
  static_assert(BuiltinArguments::kTargetIndex == 1);
  static_assert(BuiltinArguments::kArgcIndex == 2);
  static_assert(BuiltinArguments::kPaddingIndex == 3);

  // Unconditionally push argc, target and new target as extra stack arguments.
  // They will be used by stack frame iterators when constructing stack trace.
  TailCallBuiltin(centry, context,  // standard arguments for TailCallBuiltin
                  argc, c_function,  // register arguments
                  TheHoleConstant(),  // additional stack argument 1 (padding)
                  SmiFromInt32(argc),  // additional stack argument 2
                  target,  // additional stack argument 3
                  new_target);  // additional stack argument 4
}
1316
// Adaptor instantiations for formal parameter counts 0-5; the count passed
// to GenerateAdaptor includes the receiver (i::JSParameterCount).
TF_BUILTIN(AdaptorWithBuiltinExitFrame0, CppBuiltinsAdaptorAssembler) {
  GenerateAdaptor(i::JSParameterCount(0));
}

TF_BUILTIN(AdaptorWithBuiltinExitFrame1, CppBuiltinsAdaptorAssembler) {
  GenerateAdaptor(i::JSParameterCount(1));
}

TF_BUILTIN(AdaptorWithBuiltinExitFrame2, CppBuiltinsAdaptorAssembler) {
  GenerateAdaptor(i::JSParameterCount(2));
}

TF_BUILTIN(AdaptorWithBuiltinExitFrame3, CppBuiltinsAdaptorAssembler) {
  GenerateAdaptor(i::JSParameterCount(3));
}

TF_BUILTIN(AdaptorWithBuiltinExitFrame4, CppBuiltinsAdaptorAssembler) {
  GenerateAdaptor(i::JSParameterCount(4));
}

TF_BUILTIN(AdaptorWithBuiltinExitFrame5, CppBuiltinsAdaptorAssembler) {
  GenerateAdaptor(i::JSParameterCount(5));
}
1340
1342 auto val = UncheckedParameter<Float64T>(Descriptor::kValue);
1343 Return(ChangeFloat64ToTagged(val));
1344}
1345
1346TF_BUILTIN(AllocateInYoungGeneration, CodeStubAssembler) {
1347 auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
1348 CSA_CHECK(this, IsValidPositiveSmi(requested_size));
1349
1350 TNode<Smi> allocation_flags =
1351 SmiConstant(Smi::FromInt(AllocateDoubleAlignFlag::encode(false)));
1352 TailCallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
1353 SmiFromIntPtr(requested_size), allocation_flags);
1354}
1355
1356TF_BUILTIN(AllocateInOldGeneration, CodeStubAssembler) {
1357 auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
1358 CSA_CHECK(this, IsValidPositiveSmi(requested_size));
1359
1360 TNode<Smi> runtime_flags =
1361 SmiConstant(Smi::FromInt(AllocateDoubleAlignFlag::encode(false)));
1362 TailCallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
1363 SmiFromIntPtr(requested_size), runtime_flags);
1364}
1365
#if V8_ENABLE_WEBASSEMBLY
// Wasm flavor of the young-generation slow-path allocation; tail calls the
// same runtime entry as AllocateInYoungGeneration.
TF_BUILTIN(WasmAllocateInYoungGeneration, CodeStubAssembler) {
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  // Double alignment is not requested.
  TNode<Smi> flags =
      SmiConstant(Smi::FromInt(AllocateDoubleAlignFlag::encode(false)));
  TailCallRuntime(Runtime::kAllocateInYoungGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), flags);
}

// Wasm flavor of the old-generation slow-path allocation.
TF_BUILTIN(WasmAllocateInOldGeneration, CodeStubAssembler) {
  auto requested_size = UncheckedParameter<IntPtrT>(Descriptor::kRequestedSize);
  CSA_CHECK(this, IsValidPositiveSmi(requested_size));

  TNode<Smi> flags =
      SmiConstant(Smi::FromInt(AllocateDoubleAlignFlag::encode(false)));
  TailCallRuntime(Runtime::kAllocateInOldGeneration, NoContextConstant(),
                  SmiFromIntPtr(requested_size), flags);
}
#endif  // V8_ENABLE_WEBASSEMBLY
1387
1389 auto message_id = Parameter<Smi>(Descriptor::kMessageOrMessageId);
1390 TailCallRuntime(Runtime::kAbort, NoContextConstant(), message_id);
1391}
1392
1394 auto message = Parameter<String>(Descriptor::kMessageOrMessageId);
1395 TailCallRuntime(Runtime::kAbortCSADcheck, NoContextConstant(), message);
1396}
1397
// Concrete CEntry stub configurations. The name encodes the result size
// (Return1/Return2), where argv lives (on stack vs. in a register), and
// whether a builtin exit frame is constructed.
void Builtins::Generate_CEntry_Return1_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  // result_size=1, argv on stack, no builtin exit frame, no central stack.
  Generate_CEntry(masm, 1, ArgvMode::kStack, false, false);
}

void Builtins::Generate_CEntry_Return1_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  // result_size=1, argv on stack, builds a builtin exit frame.
  Generate_CEntry(masm, 1, ArgvMode::kStack, true, false);
}

void Builtins::Generate_CEntry_Return1_ArgvInRegister_NoBuiltinExit(
    MacroAssembler* masm) {
  // result_size=1, argv passed in a register, no builtin exit frame.
  Generate_CEntry(masm, 1, ArgvMode::kRegister, false, false);
}

void Builtins::Generate_CEntry_Return2_ArgvOnStack_NoBuiltinExit(
    MacroAssembler* masm) {
  // result_size=2, argv on stack, no builtin exit frame.
  Generate_CEntry(masm, 2, ArgvMode::kStack, false, false);
}

void Builtins::Generate_CEntry_Return2_ArgvOnStack_BuiltinExit(
    MacroAssembler* masm) {
  // result_size=2, argv on stack, builds a builtin exit frame.
  Generate_CEntry(masm, 2, ArgvMode::kStack, true, false);
}

void Builtins::Generate_CEntry_Return2_ArgvInRegister_NoBuiltinExit(
    MacroAssembler* masm) {
  // result_size=2, argv passed in a register, no builtin exit frame.
  Generate_CEntry(masm, 2, ArgvMode::kRegister, false, false);
}

void Builtins::Generate_WasmCEntry(MacroAssembler* masm) {
  // Like Return1_ArgvOnStack_NoBuiltinExit, but switches to the central
  // stack (final argument true).
  Generate_CEntry(masm, 1, ArgvMode::kStack, false, true);
}
1431
#if !defined(V8_TARGET_ARCH_ARM)
// Non-ARM targets have no dedicated implementation (see the guard); emit a
// call to Builtin::kIllegal instead.
void Builtins::Generate_MemCopyUint8Uint8(MacroAssembler* masm) {
  masm->CallBuiltin(Builtin::kIllegal);
}
#endif  // !defined(V8_TARGET_ARCH_ARM)

#ifndef V8_TARGET_ARCH_IA32
// Non-IA32 targets have no dedicated implementation (see the guard); emit a
// call to Builtin::kIllegal instead.
void Builtins::Generate_MemMove(MacroAssembler* masm) {
  masm->CallBuiltin(Builtin::kIllegal);
}
#endif  // V8_TARGET_ARCH_IA32
1443
// Return sequence for Sparkplug (baseline) frames; traps in builds compiled
// without Sparkplug support, where this must never be reached.
void Builtins::Generate_BaselineLeaveFrame(MacroAssembler* masm) {
#ifdef V8_ENABLE_SPARKPLUG
  EmitReturnBaseline(masm);
#else
  masm->Trap();
#endif  // V8_ENABLE_SPARKPLUG
}
1451
#if defined(V8_ENABLE_MAGLEV) && !defined(V8_ENABLE_LEAPTIERING)
// Delegates to MacroAssembler::OptimizeCodeOrTailCallOptimizedCodeSlot with
// the flags/feedback-vector registers from the call descriptor. Control does
// not fall through that call, hence the trailing trap.
void Builtins::Generate_MaglevOptimizeCodeOrTailCallOptimizedCodeSlot(
    MacroAssembler* masm) {
  using D = MaglevOptimizeCodeOrTailCallOptimizedCodeSlotDescriptor;
  Register flags = D::GetRegisterParameter(D::kFlags);
  Register feedback_vector = D::GetRegisterParameter(D::kFeedbackVector);
  Register temporary = D::GetRegisterParameter(D::kTemporary);
  masm->AssertFeedbackVector(feedback_vector, temporary);
  masm->OptimizeCodeOrTailCallOptimizedCodeSlot(flags, feedback_vector);
  masm->Trap();
}
#else
// Without Maglev (or with leaptiering) this entry point must never be
// reached.
void Builtins::Generate_MaglevOptimizeCodeOrTailCallOptimizedCodeSlot(
    MacroAssembler* masm) {
  masm->Trap();
}
#endif  // V8_ENABLE_MAGLEV && !V8_ENABLE_LEAPTIERING
1469
1470#ifndef V8_ENABLE_MAGLEV
1471// static
1473 bool save_new_target) {
1474 masm->Trap();
1475}
1476#endif // !V8_ENABLE_MAGLEV
1477
1478void Builtins::Generate_MaglevFunctionEntryStackCheck_WithoutNewTarget(
1479 MacroAssembler* masm) {
1481}
1482
1483void Builtins::Generate_MaglevFunctionEntryStackCheck_WithNewTarget(
1484 MacroAssembler* masm) {
1486}
1487
1488// ES6 [[Get]] operation.
1490 auto object = Parameter<JSAny>(Descriptor::kObject);
1491 auto key = Parameter<Object>(Descriptor::kKey);
1492 auto context = Parameter<Context>(Descriptor::kContext);
1493 // TODO(duongn): consider tailcalling to GetPropertyWithReceiver(object,
1494 // object, key, OnNonExistent::kReturnUndefined).
1495 Label if_notfound(this), if_proxy(this, Label::kDeferred),
1496 if_slow(this, Label::kDeferred);
1497
1498 CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
1500 TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
1501 TNode<Name> unique_name, Label* next_holder,
1502 Label* if_bailout) {
1503 TVARIABLE(Object, var_value);
1504 Label if_found(this);
1505 // If we get here then it's guaranteed that |object| (and thus the
1506 // |receiver|) is a JSReceiver.
1507 TryGetOwnProperty(context, receiver, CAST(holder), holder_map,
1508 holder_instance_type, unique_name, &if_found,
1509 &var_value, next_holder, if_bailout,
1510 kExpectingJSReceiver);
1511 BIND(&if_found);
1512 Return(var_value.value());
1513 };
1514
1515 CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
1517 TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
1518 TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
1519 // Not supported yet.
1520 Use(next_holder);
1521 Goto(if_bailout);
1522 };
1523
1524 TryPrototypeChainLookup(object, object, key, lookup_property_in_holder,
1525 lookup_element_in_holder, &if_notfound, &if_slow,
1526 &if_proxy);
1527
1528 BIND(&if_notfound);
1529 Return(UndefinedConstant());
1530
1531 BIND(&if_slow);
1532 TailCallRuntime(Runtime::kGetProperty, context, object, key);
1533
1534 BIND(&if_proxy);
1535 {
1536 // Convert the {key} to a Name first.
1537 TNode<Object> name = CallBuiltin(Builtin::kToName, context, key);
1538
1539 // The {object} is a JSProxy instance, look up the {name} on it, passing
1540 // {object} both as receiver and holder. If {name} is absent we can safely
1541 // return undefined from here.
1542 TailCallBuiltin(Builtin::kProxyGetProperty, context, object, name, object,
1543 SmiConstant(OnNonExistent::kReturnUndefined));
1544 }
1545}
1546
1547// ES6 [[Get]] operation with Receiver.
1548TF_BUILTIN(GetPropertyWithReceiver, CodeStubAssembler) {
1549 auto object = Parameter<JSAny>(Descriptor::kObject);
1550 auto key = Parameter<Object>(Descriptor::kKey);
1551 auto context = Parameter<Context>(Descriptor::kContext);
1552 auto receiver = Parameter<JSAny>(Descriptor::kReceiver);
1553 auto on_non_existent = Parameter<Object>(Descriptor::kOnNonExistent);
1554 Label if_notfound(this), if_proxy(this, Label::kDeferred),
1555 if_slow(this, Label::kDeferred);
1556
1557 CodeStubAssembler::LookupPropertyInHolder lookup_property_in_holder =
1559 TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
1560 TNode<Name> unique_name, Label* next_holder,
1561 Label* if_bailout) {
1562 TVARIABLE(Object, var_value);
1563 Label if_found(this);
1564 TryGetOwnProperty(context, receiver, CAST(holder), holder_map,
1565 holder_instance_type, unique_name, &if_found,
1566 &var_value, next_holder, if_bailout,
1567 kExpectingAnyReceiver);
1568 BIND(&if_found);
1569 Return(var_value.value());
1570 };
1571
1572 CodeStubAssembler::LookupElementInHolder lookup_element_in_holder =
1574 TNode<Map> holder_map, TNode<Int32T> holder_instance_type,
1575 TNode<IntPtrT> index, Label* next_holder, Label* if_bailout) {
1576 // Not supported yet.
1577 Use(next_holder);
1578 Goto(if_bailout);
1579 };
1580
1581 TryPrototypeChainLookup(receiver, object, key, lookup_property_in_holder,
1582 lookup_element_in_holder, &if_notfound, &if_slow,
1583 &if_proxy);
1584
1585 BIND(&if_notfound);
1586 Label throw_reference_error(this);
1587 GotoIf(TaggedEqual(on_non_existent,
1589 &throw_reference_error);
1590 CSA_DCHECK(this, TaggedEqual(on_non_existent,
1591 SmiConstant(OnNonExistent::kReturnUndefined)));
1592 Return(UndefinedConstant());
1593
1594 BIND(&throw_reference_error);
1595 Return(CallRuntime(Runtime::kThrowReferenceError, context, key));
1596
1597 BIND(&if_slow);
1598 TailCallRuntime(Runtime::kGetPropertyWithReceiver, context, object, key,
1599 receiver, on_non_existent);
1600
1601 BIND(&if_proxy);
1602 {
1603 // Convert the {key} to a Name first.
1604 TNode<Name> name = CAST(CallBuiltin(Builtin::kToName, context, key));
1605
1606 // Proxy cannot handle private symbol so bailout.
1607 GotoIf(IsPrivateSymbol(name), &if_slow);
1608
1609 // The {object} is a JSProxy instance, look up the {name} on it, passing
1610 // {object} both as receiver and holder. If {name} is absent we can safely
1611 // return undefined from here.
1612 TailCallBuiltin(Builtin::kProxyGetProperty, context, object, name, receiver,
1613 on_non_existent);
1614 }
1615}
1616
1617// ES6 [[Set]] operation.
1619 auto context = Parameter<Context>(Descriptor::kContext);
1620 auto receiver = Parameter<JSAny>(Descriptor::kReceiver);
1621 auto key = Parameter<Object>(Descriptor::kKey);
1622 auto value = Parameter<Object>(Descriptor::kValue);
1623
1625 value, LanguageMode::kStrict);
1626}
1627
1628// ES6 CreateDataProperty(), specialized for the case where objects are still
1629// being initialized, and have not yet been made accessible to the user. Thus,
1630// any operation here should be unobservable until after the object has been
1631// returned.
1632TF_BUILTIN(CreateDataProperty, CodeStubAssembler) {
1633 auto context = Parameter<Context>(Descriptor::kContext);
1634 auto receiver = Parameter<JSObject>(Descriptor::kReceiver);
1635 auto key = Parameter<Object>(Descriptor::kKey);
1636 auto value = Parameter<Object>(Descriptor::kValue);
1637
1639 key, value);
1640}
1641
// Instantiates an asm.js module by calling into the runtime; on failure
// (the runtime returns Smi 0) falls back to invoking {function} as ordinary
// JavaScript via its code object.
TF_BUILTIN(InstantiateAsmJs, CodeStubAssembler) {
  Label tailcall_to_function(this);
  auto function = Parameter<JSFunction>(Descriptor::kTarget);
  auto context = Parameter<Context>(Descriptor::kContext);
  auto new_target = Parameter<Object>(Descriptor::kNewTarget);
  auto arg_count =
      UncheckedParameter<Int32T>(Descriptor::kActualArgumentsCount);
#ifdef V8_JS_LINKAGE_INCLUDES_DISPATCH_HANDLE
  // Under this linkage, the dispatch handle arrives as an explicit parameter.
  auto dispatch_handle =
      UncheckedParameter<JSDispatchHandleT>(Descriptor::kDispatchHandle);
#elif defined(V8_ENABLE_LEAPTIERING)
  // Otherwise, with leaptiering, load the handle from the function itself.
  TNode<JSDispatchHandleT> dispatch_handle = ReinterpretCast<JSDispatchHandleT>(
      LoadJSFunctionDispatchHandle(function));
#else
  auto dispatch_handle = InvalidDispatchHandleConstant();
#endif

  // This builtin is used on functions with different parameter counts.
  SetSupportsDynamicParameterCount(function, dispatch_handle);

  // Retrieve arguments from caller (stdlib, foreign, heap).
  CodeStubArguments args(this, arg_count);

  TNode<Object> stdlib = args.GetOptionalArgumentValue(0);
  TNode<Object> foreign = args.GetOptionalArgumentValue(1);
  TNode<Object> heap = args.GetOptionalArgumentValue(2);

  // Call runtime, on success just pass the result to the caller and pop all
  // arguments. A smi 0 is returned on failure, an object on success.
  TNode<JSAny> maybe_result_or_smi_zero = CallRuntime<JSAny>(
      Runtime::kInstantiateAsmJs, context, function, stdlib, foreign, heap);
  GotoIf(TaggedIsSmi(maybe_result_or_smi_zero), &tailcall_to_function);
  args.PopAndReturn(maybe_result_or_smi_zero);

  BIND(&tailcall_to_function);
  // On failure, tail call back to regular JavaScript by re-calling the given
  // function which has been reset to the compile lazy builtin.

  TNode<Code> code = LoadJSFunctionCode(function);
  TailCallJSCode(code, context, function, new_target, arg_count,
                 dispatch_handle);
}
1684
1685TF_BUILTIN(FindNonDefaultConstructorOrConstruct, CodeStubAssembler) {
1686 auto this_function = Parameter<JSFunction>(Descriptor::kThisFunction);
1687 auto new_target = Parameter<Object>(Descriptor::kNewTarget);
1688 auto context = Parameter<Context>(Descriptor::kContext);
1689
1690 TVARIABLE(Object, constructor);
1691 Label found_default_base_ctor(this, &constructor),
1692 found_something_else(this, &constructor);
1693
1694 FindNonDefaultConstructor(this_function, constructor,
1695 &found_default_base_ctor, &found_something_else);
1696
1697 BIND(&found_default_base_ctor);
1698 {
1699 // Create an object directly, without calling the default base ctor.
1700 TNode<Object> instance = CallBuiltin(Builtin::kFastNewObject, context,
1701 constructor.value(), new_target);
1702 Return(TrueConstant(), instance);
1703 }
1704
1705 BIND(&found_something_else);
1706 {
1707 // Not a base ctor (or bailed out).
1708 Return(FalseConstant(), constructor.value());
1709 }
1710}
1711
1712// Dispatcher for different implementations of the [[GetOwnProperty]] internal
1713// method, returning a PropertyDescriptorObject (a Struct representation of the
1714// spec PropertyDescriptor concept)
1715TF_BUILTIN(GetOwnPropertyDescriptor, CodeStubAssembler) {
1716 auto context = Parameter<Context>(Descriptor::kContext);
1717 auto receiver = Parameter<JSReceiver>(Descriptor::kReceiver);
1718 auto key = Parameter<Name>(Descriptor::kKey);
1719
1720 Label call_runtime(this);
1721
1722 TNode<Map> map = LoadMap(receiver);
1723 TNode<Uint16T> instance_type = LoadMapInstanceType(map);
1724
1725 GotoIf(IsSpecialReceiverInstanceType(instance_type), &call_runtime);
1726 TailCallBuiltin(Builtin::kOrdinaryGetOwnPropertyDescriptor, context, receiver,
1727 key);
1728
1729 BIND(&call_runtime);
1730 TailCallRuntime(Runtime::kGetOwnPropertyDescriptorObject, context, receiver,
1731 key);
1732}
1733
1735
1736} // namespace internal
1737} // namespace v8
#define BIND(label)
#define TVARIABLE(...)
#define CSA_DCHECK(csa,...)
#define CSA_CHECK(csa, x)
#define TF_BUILTIN(Name, AssemblerBase)
static constexpr U encode(T value)
Definition bit-field.h:55
static constexpr int kNewTargetIndex
static constexpr int kPaddingIndex
static constexpr int kArgcIndex
static constexpr int kTargetIndex
static void Generate_CEntry(MacroAssembler *masm, int result_size, ArgvMode argv_mode, bool builtin_exit_frame, bool switch_to_central_stack)
static void Generate_MaglevFunctionEntryStackCheck(MacroAssembler *masm, bool save_new_target)
static constexpr Builtin CEntry(int result_size, ArgvMode argv_mode, bool builtin_exit_frame=false, bool switch_to_central_stack=false)
TNode< IntPtrT > MemoryChunkFromAddress(TNode< IntPtrT > address)
TNode< Smi > SmiFromInt32(TNode< Int32T > value)
TNode< Smi > GetNumberOfElements(TNode< Dictionary > dictionary)
void NameDictionaryLookup(TNode< Dictionary > dictionary, TNode< Name > unique_name, Label *if_found, TVariable< IntPtrT > *var_name_index, Label *if_not_found, LookupMode mode=kFindExisting)
TNode< Int32T > TruncateIntPtrToInt32(TNode< IntPtrT > value)
std::function< void( TNode< JSAnyNotSmi > receiver, TNode< JSAnyNotSmi > holder, TNode< Map > map, TNode< Int32T > instance_type, TNode< IntPtrT > key, Label *next_holder, Label *if_bailout)> LookupElementInHolder
TNode< Uint32T > LoadDetailsByKeyIndex(TNode< ContainerType > container, TNode< IntPtrT > key_index)
void SetNumberOfDeletedElements(TNode< Dictionary > dictionary, TNode< Smi > num_deleted_smi)
void StoreFixedArrayElement(TNode< FixedArray > object, int index, TNode< Object > value, WriteBarrierMode barrier_mode=UPDATE_WRITE_BARRIER, CheckBounds check_bounds=CheckBounds::kAlways)
TNode< BoolT > TaggedEqual(TNode< AnyTaggedT > a, TNode< AnyTaggedT > b)
TNode< Smi > GetNumberOfDeletedElements(TNode< Dictionary > dictionary)
void StoreValueByKeyIndex(TNode< ContainerType > container, TNode< IntPtrT > key_index, TNode< Object > value, WriteBarrierMode write_barrier=UPDATE_WRITE_BARRIER)
TNode< IntPtrT > PageMetadataFromMemoryChunk(TNode< IntPtrT > address)
void SetNumberOfElements(TNode< Dictionary > dictionary, TNode< Smi > num_elements_smi)
TNode< BoolT > IsSetWord32(TNode< Word32T > word32)
void GetMarkBit(TNode< IntPtrT > object, TNode< IntPtrT > *cell, TNode< IntPtrT > *mask)
std::function< void( TNode< JSAnyNotSmi > receiver, TNode< JSAnyNotSmi > holder, TNode< Map > map, TNode< Int32T > instance_type, TNode< Name > key, Label *next_holder, Label *if_bailout)> LookupPropertyInHolder
void StoreDetailsByKeyIndex(TNode< ContainerType > container, TNode< IntPtrT > key_index, TNode< Smi > details)
void IncrementCounter(StatsCounter *counter, int delta)
TNode< BoolT > IsPageFlagSet(TNode< IntPtrT > object, int mask)
TNode< Smi > SmiShr(TNode< Smi > a, int shift)
TNode< Smi > GetCapacity(TNode< Dictionary > dictionary)
void GenerateAdaptor(int formal_parameter_count)
CppBuiltinsAdaptorAssembler(compiler::CodeAssemblerState *state)
void DictionarySpecificDelete(TNode< JSReceiver > receiver, TNode< NameDictionary > properties, TNode< IntPtrT > key_index, TNode< Context > context)
void DeleteDictionaryProperty(TNode< JSReceiver > receiver, TNode< Dictionary > properties, TNode< Name > name, TNode< Context > context, Label *dont_delete, Label *notfound)
void DictionarySpecificDelete(TNode< JSReceiver > receiver, TNode< SwissNameDictionary > properties, TNode< IntPtrT > key_index, TNode< Context > context)
DeletePropertyBaseAssembler(compiler::CodeAssemblerState *state)
static V8_EXPORT_PRIVATE ExternalReference isolate_address()
static void SetProperty(compiler::CodeAssemblerState *state, TNode< Context > context, TNode< JSReceiver > receiver, TNode< BoolT > is_simple_receiver, TNode< Name > name, TNode< Object > value, LanguageMode language_mode)
static void CreateDataProperty(compiler::CodeAssemblerState *state, TNode< Context > context, TNode< JSObject > receiver, TNode< Object > key, TNode< Object > value)
static constexpr MachineType Pointer()
static constexpr MachineType TaggedPointer()
static constexpr MachineType UintPtr()
static constexpr MainThreadFlags kIsOnlyOldOrMajorGCInProgressMask
static constexpr MainThreadFlags kIncrementalMarking
static constexpr MainThreadFlags kIsInYoungGenerationMask
static constexpr MainThreadFlags kInSharedHeap
static constexpr MainThreadFlags kSkipEvacuationSlotsRecordingMask
static constexpr MainThreadFlags kEvacuationCandidateMask
static constexpr intptr_t SlotSetOffset(RememberedSetType remembered_set_type)
static const int kAttributesDontDeleteMask
static constexpr bool IsImmortalImmovable(RootIndex root_index)
Definition roots.h:616
static constexpr Tagged< Smi > FromInt(int value)
Definition smi.h:38
void IncrementalWriteBarrierMajor(TNode< IntPtrT > slot, TNode< IntPtrT > value, SaveFPRegsMode fp_mode, Label *next)
void InSharedHeap(TNode< IntPtrT > object, Label *true_label, Label *false_label)
TNode< BoolT > IsUnmarked(TNode< IntPtrT > object)
void GenerateEphemeronKeyBarrier(SaveFPRegsMode fp_mode)
void GenerationalBarrierSlow(TNode< IntPtrT > slot, Label *next, SaveFPRegsMode fp_mode)
TNode< IntPtrT > LoadBucket(TNode< IntPtrT > slot_set, TNode< WordT > slot_offset, TNode< IntPtrT > num_buckets, Label *slow_path)
void GenerationalOrSharedBarrierSlow(TNode< IntPtrT > slot, Label *next, SaveFPRegsMode fp_mode)
void GenerationalOrSharedBarrierDuringMarking(TNode< IntPtrT > slot, Label *next, SaveFPRegsMode fp_mode)
void IndirectPointerWriteBarrier(SaveFPRegsMode fp_mode)
void GenerateIndirectPointerBarrier(SaveFPRegsMode fp_mode)
void InsertIntoRememberedSet(TNode< IntPtrT > object, TNode< IntPtrT > slot, SaveFPRegsMode fp_mode)
void InYoungGeneration(TNode< IntPtrT > object, Label *true_label, Label *false_label)
void IsValueUnmarkedOrRecordSlot(TNode< IntPtrT > value, Label *true_label, Label *false_label)
void SetBitInCell(TNode< IntPtrT > bucket, TNode< WordT > slot_offset)
void SharedBarrierSlow(TNode< IntPtrT > slot, Label *next, SaveFPRegsMode fp_mode)
WriteBarrierCodeStubAssembler(compiler::CodeAssemblerState *state)
void IncrementalWriteBarrierMinor(TNode< IntPtrT > slot, TNode< IntPtrT > value, SaveFPRegsMode fp_mode, Label *next)
void WriteBarrierDuringMarking(TNode< IntPtrT > slot, Label *next, SaveFPRegsMode fp_mode)
void IncrementalWriteBarrier(TNode< IntPtrT > slot, SaveFPRegsMode fp_mode)
void IncrementalWriteBarrierShared(TNode< IntPtrT > object, TNode< IntPtrT > slot, TNode< IntPtrT > value, SaveFPRegsMode fp_mode, Label *next)
void IncrementalWriteBarrierLocal(TNode< IntPtrT > slot, TNode< IntPtrT > value, SaveFPRegsMode fp_mode, Label *next)
TNode< IntPtrT > LoadSlotSet(TNode< IntPtrT > page, Label *slow_path)
TNode< BoolT > Word32NotEqual(TNode< Word32T > left, TNode< Word32T > right)
TNode< IntPtrT > IntPtrAdd(TNode< IntPtrT > left, TNode< IntPtrT > right)
TNode< IntPtrT > WordOr(TNode< IntPtrT > left, TNode< IntPtrT > right)
TNode< IntPtrT > IntPtrConstant(intptr_t value)
Node * CallCFunctionWithCallerSavedRegisters(Node *function, MachineType return_type, SaveFPRegsMode mode, CArgs... cargs)
TNode< T > UncheckedCast(Node *value)
TNode< IntPtrT > WordShl(TNode< IntPtrT > left, TNode< IntegralT > right)
TNode< BoolT > WordEqual(TNode< WordT > left, TNode< WordT > right)
void GotoIfNot(TNode< IntegralT > condition, Label *false_label, GotoHint goto_hint=GotoHint::kNone)
void Return(TNode< Object > value)
TNode< Int32T > Int32Add(TNode< Int32T > left, TNode< Int32T > right)
TNode< IntPtrT > BitcastTaggedToWord(TNode< Smi > node)
TNode< Smi > SmiConstant(Tagged< Smi > value)
void GotoIf(TNode< IntegralT > condition, Label *true_label, GotoHint goto_hint=GotoHint::kNone)
Node * Load(MachineType type, Node *base)
TNode< IntPtrT > ChangeInt32ToIntPtr(TNode< Word32T > value)
TNode< IntPtrT > WordAnd(TNode< IntPtrT > left, TNode< IntPtrT > right)
TNode< IntPtrT > IntPtrSub(TNode< IntPtrT > left, TNode< IntPtrT > right)
void TailCallBuiltin(Builtin id, TNode< Object > context, TArgs... args)
TNode< BoolT > BoolConstant(bool value)
TNode< ExternalReference > ExternalConstant(ExternalReference address)
TNode< Int32T > Int32Constant(int32_t value)
TNode< BoolT > Word32Equal(TNode< Word32T > left, TNode< Word32T > right)
TNode< T > CallRuntime(Runtime::FunctionId function, TNode< Object > context, TArgs... args)
TNode< ExternalReference > IsolateField(IsolateFieldId id)
TNode< UintPtrT > WordShr(TNode< UintPtrT > left, TNode< IntegralT > right)
void Branch(TNode< IntegralT > condition, Label *true_label, Label *false_label, BranchHint branch_hint=BranchHint::kNone)
void StoreNoWriteBarrier(MachineRepresentation rep, Node *base, Node *value)
TNode< T > Parameter(int value, const SourceLocation &loc=SourceLocation::Current())
#define CAST(x)
#define V8_ENABLE_SANDBOX_BOOL
Definition globals.h:160
base::Vector< const DirectHandle< Object > > args
Definition execution.cc:74
DirectHandle< Object > new_target
Definition execution.cc:75
#define V8_DISABLE_WRITE_BARRIERS_BOOL
TNode< Object > target
TNode< Object > receiver
ZoneVector< RpoNumber > & result
LiftoffAssembler::CacheState state
uint32_t const mask
TNode< Oddball > UndefinedConstant(JSGraph *jsgraph)
@ kEnumerationOrder
Definition globals.h:2859
constexpr int kInt64Size
Definition globals.h:402
@ SKIP_WRITE_BARRIER
Definition objects.h:52
constexpr int kInt16Size
Definition globals.h:398
bool IsSpecialReceiverInstanceType(InstanceType instance_type)
constexpr uint16_t kDontAdaptArgumentsSentinel
Definition globals.h:2779
constexpr int kSystemPointerSizeLog2
Definition globals.h:494
bool IsCustomElementsReceiverInstanceType(InstanceType instance_type)
bool IsNullOrUndefined(Tagged< Object > obj, Isolate *isolate)
Handle< To > UncheckedCast(Handle< From > value)
Definition handles-inl.h:55
constexpr int kTaggedSizeLog2
Definition globals.h:543
constexpr int kInt32Size
Definition globals.h:401
void EmitReturnBaseline(MacroAssembler *masm)
Definition baseline.cc:92
V8_EXPORT_PRIVATE FlagValues v8_flags
constexpr int JSParameterCount(int param_count_without_receiver)
Definition globals.h:2782
const intptr_t kSmiTagMask
Definition v8-internal.h:88
return value
Definition map-inl.h:893
bool IsPrivateSymbol(Tagged< Object > obj)
const int kSmiTag
Definition v8-internal.h:86
constexpr int kInt8Size
Definition globals.h:393
static const size_t LanguageModeSize
Definition globals.h:753
!IsContextMap !IsContextMap native_context
Definition map-inl.h:877
Tagged< To > Cast(Tagged< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition casting.h:150
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define CHECK_EQ(lhs, rhs)
#define DCHECK(condition)
Definition logging.h:482