v8
V8 is Google’s open source high-performance JavaScript and WebAssembly engine, written in C++.
Loading...
Searching...
No Matches
js-native-context-specialization.cc
Go to the documentation of this file.
1// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
7#include <optional>
8
9#include "src/base/logging.h"
12#include "src/common/globals.h"
32#include "src/flags/flags.h"
33#include "src/handles/handles.h"
34#include "src/heap/factory.h"
40#include "src/objects/string.h"
41
42namespace v8 {
43namespace internal {
44namespace compiler {
45
46namespace {
47
48bool HasNumberMaps(JSHeapBroker* broker, ZoneVector<MapRef> const& maps) {
49 for (MapRef map : maps) {
50 if (map.IsHeapNumberMap()) return true;
51 }
52 return false;
53}
54
55bool HasOnlyJSArrayMaps(JSHeapBroker* broker, ZoneVector<MapRef> const& maps) {
56 for (MapRef map : maps) {
57 if (!map.IsJSArrayMap()) return false;
58 }
59 return true;
60}
61
62} // namespace
63
// NOTE(review): this scrape is missing original lines 64 and 69 (the
// constructor's qualified-name line and one member initializer --
// presumably broker_(broker); confirm against upstream before building).
// Constructor: caches canonical handles to the target native context's
// global object and global proxy, and stores the per-compilation zones.
65 Editor* editor, JSGraph* jsgraph, JSHeapBroker* broker, Flags flags,
66 Zone* zone, Zone* shared_zone)
67 : AdvancedReducer(editor),
68 jsgraph_(jsgraph),
70 flags_(flags),
71 global_object_(
72 broker->target_native_context().global_object(broker).object()),
73 global_proxy_(
74 broker->target_native_context().global_proxy_object(broker).object()),
75 zone_(zone),
76 shared_zone_(shared_zone),
77 type_cache_(TypeCache::Get()),
78 created_strings_(zone) {}
79
// NOTE(review): original lines 80 (the Reduce(Node*) signature) and 89, 93,
// 121, 123, 125 (the return statements for the AsyncFunctionResolve,
// FindNonDefaultConstructorOrConstruct and DefineProperty cases) are
// missing from this scrape; restore from upstream before building.
// Top-level dispatcher: routes {node} to the matching Reduce* method by
// opcode, and returns NoChange() for opcodes this reducer does not handle.
81 switch (node->opcode()) {
82 case IrOpcode::kJSAdd:
83 return ReduceJSAdd(node);
84 case IrOpcode::kJSAsyncFunctionEnter:
85 return ReduceJSAsyncFunctionEnter(node);
86 case IrOpcode::kJSAsyncFunctionReject:
87 return ReduceJSAsyncFunctionReject(node);
88 case IrOpcode::kJSAsyncFunctionResolve:
90 case IrOpcode::kJSGetSuperConstructor:
91 return ReduceJSGetSuperConstructor(node);
92 case IrOpcode::kJSFindNonDefaultConstructorOrConstruct:
94 case IrOpcode::kJSInstanceOf:
95 return ReduceJSInstanceOf(node);
96 case IrOpcode::kJSHasInPrototypeChain:
97 return ReduceJSHasInPrototypeChain(node);
98 case IrOpcode::kJSOrdinaryHasInstance:
99 return ReduceJSOrdinaryHasInstance(node);
100 case IrOpcode::kJSPromiseResolve:
101 return ReduceJSPromiseResolve(node);
102 case IrOpcode::kJSResolvePromise:
103 return ReduceJSResolvePromise(node);
104 case IrOpcode::kJSLoadGlobal:
105 return ReduceJSLoadGlobal(node);
106 case IrOpcode::kJSStoreGlobal:
107 return ReduceJSStoreGlobal(node);
108 case IrOpcode::kJSLoadNamed:
109 return ReduceJSLoadNamed(node);
110 case IrOpcode::kJSLoadNamedFromSuper:
111 return ReduceJSLoadNamedFromSuper(node);
112 case IrOpcode::kJSSetNamedProperty:
113 return ReduceJSSetNamedProperty(node);
114 case IrOpcode::kJSHasProperty:
115 return ReduceJSHasProperty(node);
116 case IrOpcode::kJSLoadProperty:
117 return ReduceJSLoadProperty(node);
118 case IrOpcode::kJSSetKeyedProperty:
119 return ReduceJSSetKeyedProperty(node);
120 case IrOpcode::kJSDefineKeyedOwnProperty:
122 case IrOpcode::kJSDefineNamedOwnProperty:
124 case IrOpcode::kJSDefineKeyedOwnPropertyInLiteral:
126 case IrOpcode::kJSStoreInArrayLiteral:
127 return ReduceJSStoreInArrayLiteral(node);
128 case IrOpcode::kJSToObject:
129 return ReduceJSToObject(node);
130 case IrOpcode::kJSToString:
131 return ReduceJSToString(node);
132 case IrOpcode::kJSGetIterator:
133 return ReduceJSGetIterator(node);
134 default:
135 break;
136 }
137 return NoChange();
138}
139
140// If {node} is a HeapConstant<String>, return the String's length. If {node} is
141// a number, return the maximum size that a stringified number can have.
142// Otherwise, we can't easily convert {node} into a String, and we return
143// nullopt.
144// static
// NOTE(review): original lines 145 (the function signature, a static member
// returning an optional length) and 155 (the return value for the number
// case -- presumably a constant bounding the length of a stringified
// double) are missing from this scrape; restore before building.
146 JSHeapBroker* broker, Node* node) {
147 HeapObjectMatcher matcher(node);
148 if (matcher.HasResolvedValue() && matcher.Ref(broker).IsString()) {
149 StringRef input = matcher.Ref(broker).AsString();
150 return input.length();
151 }
152
153 NumberMatcher number_matcher(node);
154 if (number_matcher.HasResolvedValue()) {
156 }
157
158 // We don't support objects with possibly monkey-patched prototype.toString
159 // as it might have side-effects, so we shouldn't attempt lowering them.
160 return std::nullopt;
161}
162
// NOTE(review): original lines 163 (the ReduceJSToString signature), 182 and
// 186 (parts of the broker()/handle call chains) are missing from this
// scrape; restore before building.
// Constant-folds JSToString: a string constant input folds to itself, and a
// number constant input folds to a HeapConstant holding its string form.
164 DCHECK_EQ(IrOpcode::kJSToString, node->opcode());
165 Node* const input = node->InputAt(0);
166
167 HeapObjectMatcher matcher(input);
168 if (matcher.HasResolvedValue() && matcher.Ref(broker()).IsString()) {
169 Reduction reduction = Changed(input); // JSToString(x:string) => x
170 ReplaceWithValue(node, reduction.replacement());
171 return reduction;
172 }
173
174 // TODO(turbofan): This optimization is weaker than what we used to have
175 // in js-typed-lowering for OrderedNumbers. We don't have types here though,
176 // so alternative approach should be designed if this causes performance
177 // regressions and the stronger optimization should be re-implemented.
178 NumberMatcher number_matcher(input);
179 if (number_matcher.HasResolvedValue()) {
180 DirectHandle<Object> num_obj =
181 broker()
183 ->factory()
184 ->NewNumber<AllocationType::kOld>(number_matcher.ResolvedValue());
185 Handle<String> num_str =
187 num_obj);
188 Node* reduced = graph()->NewNode(
189 common()->HeapConstant(broker()->CanonicalPersistentHandle(num_str)));
190
191 ReplaceWithValue(node, reduced);
192 return Replace(reduced);
193 }
194
195 return NoChange();
196}
197
198// Return a String from {node}, which should be either a HeapConstant<String>
199// (in which case we return the String), or a number (in which case we convert
200// it to a String).
// NOTE(review): original lines 201 (the function signature), 207 and 217
// (parts of the broker()/handle call chains) are missing from this scrape;
// restore before building.
202 DCHECK(IrOpcode::IsConstantOpcode(node->opcode()));
203 NumberMatcher number_matcher(node);
204 if (number_matcher.HasResolvedValue()) {
205 DirectHandle<Object> num_obj =
206 broker()
208 ->factory()
209 ->NewNumber<AllocationType::kOld>(number_matcher.ResolvedValue());
210 // Note that we do not store the result of NumberToString in
211 // {created_strings_}, because the latter is used to know if strings are
212 // safe to be used in the background, but we always have as additional
213 // information the node from which the string was created ({node} is that
214 // case), and if this node is a kHeapNumber, then we know that we must have
215 // created the string, and that there it is safe to read. So, we don't need
216 // {created_strings_} in that case.
218 num_obj);
219 } else {
220 HeapObjectMatcher matcher(node);
221 if (matcher.HasResolvedValue() && matcher.Ref(broker()).IsString()) {
222 return matcher.Ref(broker()).AsString().object();
223 } else {
// UNREACHABLE: callers must only pass string or number constants.
224 UNREACHABLE();
225 }
226 }
227}
228
229namespace {
230bool IsStringConstant(JSHeapBroker* broker, Node* node) {
231 HeapObjectMatcher matcher(node);
232 return matcher.HasResolvedValue() && matcher.Ref(broker).IsString();
233}
234
235bool IsStringWithNonAccessibleContent(JSHeapBroker* broker, Node* node) {
236 HeapObjectMatcher matcher(node);
237 if (matcher.HasResolvedValue() && matcher.Ref(broker).IsString()) {
238 StringRef input = matcher.Ref(broker).AsString();
239 return !input.IsContentAccessible();
240 }
241 return false;
242}
243} // namespace
244
// NOTE(review): original lines 245 (the ReduceJSAsyncFunctionEnter signature
// line), 249 (presumably the receiver value input -- {receiver} is used
// below but never declared here) and 263 (the SharedFunctionInfoRef
// declaration feeding {shared}) are missing from this scrape; restore
// before building.
// Lowers JSAsyncFunctionEnter to CreatePromise + CreateAsyncFunctionObject,
// guarded by the promise-hook protector dependency.
247 DCHECK_EQ(IrOpcode::kJSAsyncFunctionEnter, node->opcode());
248 Node* closure = NodeProperties::GetValueInput(node, 0);
250 Node* context = NodeProperties::GetContextInput(node);
251 Node* frame_state = NodeProperties::GetFrameStateInput(node);
252 Node* effect = NodeProperties::GetEffectInput(node);
253 Node* control = NodeProperties::GetControlInput(node);
254
255 if (!dependencies()->DependOnPromiseHookProtector()) return NoChange();
256
257 // Create the promise for the async function.
258 Node* promise = effect =
259 graph()->NewNode(javascript()->CreatePromise(), context, effect);
260
261 // Create the JSAsyncFunctionObject based on the SharedFunctionInfo
262 // extracted from the top-most frame in {frame_state}.
264 broker(),
265 FrameStateInfoOf(frame_state->op()).shared_info().ToHandleChecked());
266 DCHECK(shared.is_compiled());
267 int register_count =
268 shared.internal_formal_parameter_count_without_receiver() +
269 shared.GetBytecodeArray(broker()).register_count();
270 MapRef fixed_array_map = broker()->fixed_array_map();
271 AllocationBuilder ab(jsgraph(), broker(), effect, control);
272 if (!ab.CanAllocateArray(register_count, fixed_array_map)) {
273 return NoChange();
274 }
275 Node* value = effect =
276 graph()->NewNode(javascript()->CreateAsyncFunctionObject(register_count),
277 closure, receiver, promise, context, effect, control);
278 ReplaceWithValue(node, value, effect, control);
279 return Replace(value);
280}
281
// NOTE(review): original lines 282 (the signature line), 296 (the LoadField
// operator argument for reading the promise), 304 and 307 (the frame-state
// builder call and its trailing argument) are missing from this scrape;
// restore before building.
// Lowers JSAsyncFunctionReject: loads the promise from the async function
// object, builds a lazy-deopt continuation frame state returning that
// promise, and rejects it (with debug events suppressed).
284 DCHECK_EQ(IrOpcode::kJSAsyncFunctionReject, node->opcode());
285 Node* async_function_object = NodeProperties::GetValueInput(node, 0);
286 Node* reason = NodeProperties::GetValueInput(node, 1);
287 Node* context = NodeProperties::GetContextInput(node);
288 Node* frame_state = NodeProperties::GetFrameStateInput(node);
289 Node* effect = NodeProperties::GetEffectInput(node);
290 Node* control = NodeProperties::GetControlInput(node);
291
292 if (!dependencies()->DependOnPromiseHookProtector()) return NoChange();
293
294 // Load the promise from the {async_function_object}.
295 Node* promise = effect = graph()->NewNode(
297 async_function_object, effect, control);
298
299 // Create a nested frame state inside the current method's most-recent
300 // {frame_state} that will ensure that lazy deoptimizations at this
301 // point will still return the {promise} instead of the result of the
302 // JSRejectPromise operation (which yields undefined).
303 Node* parameters[] = {promise};
305 jsgraph(), Builtin::kAsyncFunctionLazyDeoptContinuation, context,
306 parameters, arraysize(parameters), frame_state,
308
309 // Disable the additional debug event for the rejection since a
310 // debug event already happend for the exception that got us here.
311 Node* debug_event = jsgraph()->FalseConstant();
312 effect = graph()->NewNode(javascript()->RejectPromise(), promise, reason,
313 debug_event, context, frame_state, effect, control);
314 ReplaceWithValue(node, promise, effect, control);
315 return Replace(promise);
316}
317
// NOTE(review): original lines 318 (the signature line), 332 (the LoadField
// operator argument), 340 and 343 (the frame-state builder call and its
// trailing argument) are missing from this scrape; restore before building.
// Lowers JSAsyncFunctionResolve: mirrors the Reject lowering above, but
// resolves the promise loaded from the async function object.
320 DCHECK_EQ(IrOpcode::kJSAsyncFunctionResolve, node->opcode());
321 Node* async_function_object = NodeProperties::GetValueInput(node, 0);
322 Node* value = NodeProperties::GetValueInput(node, 1);
323 Node* context = NodeProperties::GetContextInput(node);
324 Node* frame_state = NodeProperties::GetFrameStateInput(node);
325 Node* effect = NodeProperties::GetEffectInput(node);
326 Node* control = NodeProperties::GetControlInput(node);
327
328 if (!dependencies()->DependOnPromiseHookProtector()) return NoChange();
329
330 // Load the promise from the {async_function_object}.
331 Node* promise = effect = graph()->NewNode(
333 async_function_object, effect, control);
334
335 // Create a nested frame state inside the current method's most-recent
336 // {frame_state} that will ensure that lazy deoptimizations at this
337 // point will still return the {promise} instead of the result of the
338 // JSResolvePromise operation (which yields undefined).
339 Node* parameters[] = {promise};
341 jsgraph(), Builtin::kAsyncFunctionLazyDeoptContinuation, context,
342 parameters, arraysize(parameters), frame_state,
344
345 effect = graph()->NewNode(javascript()->ResolvePromise(), promise, value,
346 context, frame_state, effect, control);
347 ReplaceWithValue(node, promise, effect, control);
348 return Replace(promise);
349}
350
351// Concatenates {left} and {right}. The result is fairly similar to creating a
352// new ConsString with {left} and {right} and then flattening it, which we don't
353// do because String::Flatten does not support background threads. Rather than
354// implementing a full String::Flatten for background threads, we prefered to
355// implement this Concatenate function, which, unlike String::Flatten, doesn't
356// need to replace ConsStrings by ThinStrings.
// NOTE(review): this scrape is missing original lines 357 (the function
// signature), 384, 386 (the second half of the young-generation check and
// part of the broker() chain), 395, 397 (the {require_guard} declaration
// feeding its use below), 410, 417-418, 429, 436-437 (the declarations of
// {flat}, the DisallowGarbageCollection scope {no_gc}, and the
// {access_guard} used below). Restore from upstream before building.
358 Handle<String> left, Handle<String> right) {
359 if (left->length() == 0) return right;
360 if (right->length() == 0) return left;
361
362 // Repeated concatenations have a quadratic cost (eg, "s+=a;s+=b;s+=c;...").
363 // Rather than doing static analysis to determine how many concatenations we
364 // there are and how many uses the result of each concatenation have, we
365 // generate ConsString when the result of the concatenation would have more
366 // than {kConstantStringFlattenMaxSize} characters, and flattened SeqString
367 // otherwise.
368 // TODO(dmercadier): ideally, we would like to get rid of this constant, and
369 // always flatten. This requires some care to avoid the quadratic worst-case.
370 constexpr int32_t kConstantStringFlattenMaxSize = 100;
371
372 int32_t length = left->length() + right->length();
373 if (length > kConstantStringFlattenMaxSize) {
374 // The generational write-barrier doesn't work in background threads, so,
375 // if {left} or {right} are in the young generation, we would have to copy
376 // them to the local heap (which is old) before creating the (old)
377 // ConsString. But, copying a ConsString instead of flattening it to a
378 // SeqString makes no sense here (since flattening would be faster and use
379 // less memory). Thus, if one of {left} or {right} is a young string, we'll
380 // build a SeqString rather than a ConsString, regardless of {length}.
381 // TODO(dmercadier, dinfuehr): always build a ConsString here once the
382 // generational write-barrier supports background threads.
383 if (!LocalHeap::Current() || (!HeapLayout::InYoungGeneration(*left) &&
385 return broker()
387 ->factory()
388 ->NewConsString(left, right, AllocationType::kOld)
389 .ToHandleChecked();
390 }
391 }
392
393 // If one of the string is not in readonly space, then we need a
394 // SharedStringAccessGuardIfNeeded before accessing its content.
396 *left, broker()->local_isolate_or_isolate()) ||
398 *right, broker()->local_isolate_or_isolate());
399
400 // Check string representation of both strings. This does not require the
401 // SharedStringAccessGuardIfNeeded as the representation is stable.
402 const bool result_is_one_byte_string =
403 left->IsOneByteRepresentation() && right->IsOneByteRepresentation();
404
405 if (result_is_one_byte_string) {
406 // {left} and {right} are 1-byte ==> the result will be 1-byte.
407 // Note that we need a canonical handle, because we insert in
408 // {created_strings_} the handle's address, which is kinda meaningless if
409 // the handle isn't canonical.
411 broker()
412 ->local_isolate_or_isolate()
413 ->factory()
414 ->NewRawOneByteString(length, AllocationType::kOld)
415 .ToHandleChecked());
416 created_strings_.insert(flat);
419 require_guard ? broker()->local_isolate_or_isolate() : nullptr);
420 String::WriteToFlat(*left, flat->GetChars(no_gc, access_guard), 0,
421 left->length(), access_guard);
422 String::WriteToFlat(*right,
423 flat->GetChars(no_gc, access_guard) + left->length(), 0,
424 right->length(), access_guard);
425 return flat;
426 } else {
427 // One (or both) of {left} and {right} is 2-byte ==> the result will be
428 // 2-byte.
430 broker()
431 ->local_isolate_or_isolate()
432 ->factory()
433 ->NewRawTwoByteString(length, AllocationType::kOld)
434 .ToHandleChecked());
435 created_strings_.insert(flat);
438 require_guard ? broker()->local_isolate_or_isolate() : nullptr);
439 String::WriteToFlat(*left, flat->GetChars(no_gc, access_guard), 0,
440 left->length(), access_guard);
441 String::WriteToFlat(*right,
442 flat->GetChars(no_gc, access_guard) + left->length(), 0,
443 right->length(), access_guard);
444 return flat;
445 }
446}
447
// NOTE(review): original line 448 (the StringCanSafelyBeRead signature line,
// taking the {node} parameter) is missing from this scrape; restore before
// building.
// Returns whether {str} (the string behind constant {node}) may be read on
// the current thread: always on the main thread; on background threads only
// if the content is accessible or this reducer created the string itself
// (tracked via {created_strings_}).
450 DCHECK(node->opcode() == IrOpcode::kHeapConstant ||
451 node->opcode() == IrOpcode::kNumberConstant);
452 if (broker()->IsMainThread()) {
453 // All strings are safe to be read on the main thread.
454 return true;
455 }
456 if (node->opcode() == IrOpcode::kNumberConstant) {
457 // If {node} is a number constant, then {str} is the stringification of this
458 // number which we must have created ourselves.
459 return true;
460 }
461 return !IsStringWithNonAccessibleContent(broker(), node) ||
462 created_strings_.find(str) != created_strings_.end();
463}
464
// NOTE(review): original lines 465 (the ReduceJSAdd signature), 486, 488
// (the CanonicalPersistentHandle calls producing {left}/{right}), 496 and
// 502 (the second half of the young-generation check and part of a broker()
// chain) are missing from this scrape; restore before building.
// Constant-folds string addition: if both operands have a statically known
// maximum string length, at least one is a string constant, and the result
// fits in String::kMaxLength, replaces the JSAdd with a HeapConstant of the
// concatenation (or a ConsString when content cannot be read safely).
466 // TODO(turbofan): This has to run together with the inlining and
467 // native context specialization to be able to leverage the string
468 // constant-folding for optimizing property access, but we should
469 // nevertheless find a better home for this at some point.
470 DCHECK_EQ(IrOpcode::kJSAdd, node->opcode());
471
472 Node* const lhs = node->InputAt(0);
473 Node* const rhs = node->InputAt(1);
474
475 std::optional<size_t> lhs_len = GetMaxStringLength(broker(), lhs);
476 std::optional<size_t> rhs_len = GetMaxStringLength(broker(), rhs);
477 if (!lhs_len || !rhs_len) return NoChange();
478
479 // Fold if at least one of the parameters is a string constant and the
480 // addition won't throw due to too long result.
481 if (*lhs_len + *rhs_len <= String::kMaxLength &&
482 (IsStringConstant(broker(), lhs) || IsStringConstant(broker(), rhs))) {
483 // We need canonical handles for {left} and {right}, in order to be able to
484 // search {created_strings_} if needed.
485 Handle<String> left =
487 Handle<String> right =
489
490 if (!(StringCanSafelyBeRead(lhs, left) &&
491 StringCanSafelyBeRead(rhs, right))) {
492 // One of {lhs} or {rhs} is not safe to be read in the background.
493
494 if (left->length() + right->length() > ConsString::kMinLength &&
495 (!LocalHeap::Current() || (!HeapLayout::InYoungGeneration(*left) &&
497 // We can create a ConsString with {left} and {right}, without needing
498 // to read their content (and this ConsString will not introduce
499 // old-to-new pointers from the background).
500 Handle<String> concatenated =
501 broker()
503 ->factory()
504 ->NewConsString(left, right, AllocationType::kOld)
505 .ToHandleChecked();
506 Node* reduced = graph()->NewNode(common()->HeapConstant(
507 broker()->CanonicalPersistentHandle(concatenated)));
508 ReplaceWithValue(node, reduced);
509 return Replace(reduced);
510 } else {
511 // Concatenating those strings would not produce a ConsString but rather
512 // a flat string (because the result is small). And, since the strings
513 // are not safe to be read in the background, this wouldn't be safe.
514 // Or, one of the string is in the young generation, and since the
515 // generational barrier doesn't support background threads, we cannot
516 // create the ConsString.
517 return NoChange();
518 }
519 }
520
521 Handle<String> concatenated = Concatenate(left, right);
522 Node* reduced = graph()->NewNode(common()->HeapConstant(
523 broker()->CanonicalPersistentHandle(concatenated)));
524
525 ReplaceWithValue(node, reduced);
526 return Replace(reduced);
527 }
528
529 return NoChange();
530}
531
// NOTE(review): original line 532 (the ReduceJSGetSuperConstructor signature
// line) is missing from this scrape; restore before building.
// Constant-folds the super-constructor lookup: when the constructor is a
// known JSFunction with a stable map, the function's prototype is the super
// constructor, guarded by a stable-map code dependency.
534 DCHECK_EQ(IrOpcode::kJSGetSuperConstructor, node->opcode());
535 Node* constructor = NodeProperties::GetValueInput(node, 0);
536
537 // Check if the input is a known JSFunction.
538 HeapObjectMatcher m(constructor);
539 if (!m.HasResolvedValue() || !m.Ref(broker()).IsJSFunction()) {
540 return NoChange();
541 }
542 JSFunctionRef function = m.Ref(broker()).AsJSFunction();
543 MapRef function_map = function.map(broker());
544 HeapObjectRef function_prototype = function_map.prototype(broker());
545
546 // We can constant-fold the super constructor access if the
547 // {function}s map is stable, i.e. we can use a code dependency
548 // to guard against [[Prototype]] changes of {function}.
549 if (function_map.is_stable()) {
550 dependencies()->DependOnStableMap(function_map);
551 Node* value = jsgraph()->ConstantNoHole(function_prototype, broker());
552 ReplaceWithValue(node, value);
553 return Replace(value);
554 }
555
556 return NoChange();
557}
558
// NOTE(review): this scrape is missing several original lines: 560 and 562
// (the signature and the node-wrapper declaration {n}), 574 (the
// exception-handler check feeding the early bail-out), 610 (the tail of the
// class-fields condition), 623 (the condition on {kind} -- presumably a
// default-derived-constructor test), 631 (presumably the branch condition
// distinguishing a base from a derived ctor), 676 (the
// DependOnStablePrototypeChain call receiver), 682 and 687 (the
// effect-edge check and the Projection-user assertion). Restore from
// upstream before building.
// Walks the class hierarchy from {this_function} upward, skipping default
// derived constructors, and reduces the bytecode to either a constant
// constructor or a builtin Create call, rewiring the two Projection uses.
563 Node* this_function = n.this_function();
564 Node* new_target = n.new_target();
565 Node* effect = n.effect();
566 Control control = n.control();
567
568 // If the JSFindNonDefaultConstructorOrConstruct operation is inside a try
569 // catch, wiring up the graph is complex (reason: if
570 // JSFindNonDefaultConstructorOrConstruct reduces to a constant which is
571 // something else than a default base ctor, it cannot throw an exception, and
572 // the try-catch structure has to be rewired). As this use case is rare, give
573 // up optimizing it here.
575 return NoChange();
576 }
577
578 // TODO(v8:13091): Don't produce incomplete stack traces when debug is active.
579 // We already deopt when a breakpoint is set. But it would be even nicer to
580 // avoid producting incomplete stack traces when when debug is active, even if
581 // there are no breakpoints - then a user inspecting stack traces via Dev
582 // Tools would always see the full stack trace.
583
584 // Check if the input is a known JSFunction.
585 HeapObjectMatcher m(this_function);
586 if (!m.HasResolvedValue() || !m.Ref(broker()).IsJSFunction()) {
587 return NoChange();
588 }
589
590 JSFunctionRef this_function_ref = m.Ref(broker()).AsJSFunction();
591 MapRef function_map = this_function_ref.map(broker());
592 HeapObjectRef current = function_map.prototype(broker());
593 // The uppermost JSFunction on the class hierarchy (above it, there can be
594 // other JSObjects, e.g., Proxies).
595 OptionalJSObjectRef last_function;
596
597 Node* return_value;
598 Node* ctor_or_instance;
599
600 // Walk the class inheritance tree until we find a ctor which is not a default
601 // derived ctor.
602 while (true) {
603 if (!current.IsJSFunction()) {
604 return NoChange();
605 }
606 JSFunctionRef current_function = current.AsJSFunction();
607
608 // If there are class fields, bail out. TODO(v8:13091): Handle them here.
609 if (current_function.shared(broker())
611 return NoChange();
612 }
613
614 // If there are private methods, bail out. TODO(v8:13091): Handle them here.
615 if (current_function.context(broker())
616 .scope_info(broker())
617 .ClassScopeHasPrivateBrand()) {
618 return NoChange();
619 }
620
621 FunctionKind kind = current_function.shared(broker()).kind();
622
624 // The hierarchy walk will end here; this is the last change to bail out
625 // before creating new nodes.
626 if (!dependencies()->DependOnArrayIteratorProtector()) {
627 return NoChange();
628 }
629 last_function = current_function;
630
632 return_value = jsgraph()->BooleanConstant(true);
633
634 // Generate a builtin call for creating the instance.
635 Node* constructor =
636 jsgraph()->ConstantNoHole(current_function, broker());
637
638 // In the current FrameState setup, the two outputs of this bytecode are
639 // poked at indices slot(index(reg_2)) (boolean_output) and
640 // slot(index(reg_2) + 1) (object_output). Now we're reducing this
641 // bytecode to a builtin call which only has one output (object_output).
642 // Change where in the FrameState the output is poked at.
643
644 // The current poke location points to the location for boolean_ouput.
645 // We move the poke location by -1, since the poke location decreases
646 // when the register index increases (see
647 // BytecodeGraphBuilder::Environment::BindRegistersToProjections).
648
649 // The location for boolean_output is already hard-wired to true (which
650 // is the correct value here) in
651 // BytecodeGraphBuilder::VisitFindNonDefaultConstructorOrConstruct.
652
653 FrameState old_frame_state = n.frame_state();
654 auto old_poke_offset = old_frame_state.frame_state_info()
655 .state_combine()
656 .GetOffsetToPokeAt();
657 FrameState new_frame_state = CloneFrameState(
658 jsgraph(), old_frame_state,
659 OutputFrameStateCombine::PokeAt(old_poke_offset - 1));
660
661 effect = ctor_or_instance = graph()->NewNode(
662 jsgraph()->javascript()->Create(), constructor, new_target,
663 n.context(), new_frame_state, effect, control);
664 } else {
665 return_value = jsgraph()->BooleanConstant(false);
666 ctor_or_instance =
667 jsgraph()->ConstantNoHole(current_function, broker());
668 }
669 break;
670 }
671
672 // Keep walking up the class tree.
673 current = current_function.map(broker()).prototype(broker());
674 }
675
677 function_map, WhereToStart::kStartAtReceiver, last_function);
678
679 // Update the uses of {node}.
680 for (Edge edge : node->use_edges()) {
681 Node* const user = edge.from();
683 edge.UpdateTo(effect);
684 } else if (NodeProperties::IsControlEdge(edge)) {
685 edge.UpdateTo(control);
686 } else {
688 switch (ProjectionIndexOf(user->op())) {
689 case 0:
690 Replace(user, return_value);
691 break;
692 case 1:
693 Replace(user, ctor_or_instance);
694 break;
695 default:
696 UNREACHABLE();
697 }
698 }
699 }
700 node->Kill();
701 return Replace(return_value);
702}
703
// NOTE(review): this scrape is missing several original lines: 704 (the
// ReduceJSInstanceOf signature), 722 (the feedback lookup call producing
// {feedback}), 749-750 and 763 (parts of the not-found branch, including
// its return), 759 (a comment or input-shuffle line between the two
// ReplaceValueInput calls), 780-781 (the dependency call for the
// found-on-proto case) and 817-820 (the operator change / effect-control
// rewiring after the input rewrite). Restore from upstream before building.
// Lowers instanceof against a known (or feedback-provided) right-hand-side
// object: either falls back to OrdinaryHasInstance when there is no
// @@hasInstance handler, or inlines a call to the constant @@hasInstance
// handler followed by a ToBoolean on its result.
706 FeedbackParameter const& p = n.Parameters();
707 Node* object = n.left();
708 Node* constructor = n.right();
709 TNode<Object> context = n.context();
710 FrameState frame_state = n.frame_state();
711 Effect effect = n.effect();
712 Control control = n.control();
713
714 // Check if the right hand side is a known {receiver}, or
715 // we have feedback from the InstanceOfIC.
716 OptionalJSObjectRef receiver;
717 HeapObjectMatcher m(constructor);
718 if (m.HasResolvedValue() && m.Ref(broker()).IsJSObject()) {
719 receiver = m.Ref(broker()).AsJSObject();
720 } else if (p.feedback().IsValid()) {
721 ProcessedFeedback const& feedback =
723 if (feedback.IsInsufficient()) return NoChange();
724 receiver = feedback.AsInstanceOf().value();
725 } else {
726 return NoChange();
727 }
728
729 if (!receiver.has_value()) return NoChange();
730
731 MapRef receiver_map = receiver->map(broker());
732 NameRef name = broker()->has_instance_symbol();
733 PropertyAccessInfo access_info =
734 broker()->GetPropertyAccessInfo(receiver_map, name, AccessMode::kLoad);
735
736 // TODO(v8:11457) Support dictionary mode holders here.
737 if (access_info.IsInvalid() || access_info.HasDictionaryHolder()) {
738 return NoChange();
739 }
740 access_info.RecordDependencies(dependencies());
741
742 PropertyAccessBuilder access_builder(jsgraph(), broker());
743
744 if (access_info.IsNotFound()) {
745 // If there's no @@hasInstance handler, the OrdinaryHasInstance operation
746 // takes over, but that requires the constructor to be callable.
747 if (!receiver_map.is_callable()) return NoChange();
748
751
752 // Monomorphic property access.
753 access_builder.BuildCheckMaps(constructor, &effect, control,
754 access_info.lookup_start_object_maps());
755
756 // Lower to OrdinaryHasInstance(C, O).
757 NodeProperties::ReplaceValueInput(node, constructor, 0);
758 NodeProperties::ReplaceValueInput(node, object, 1);
760 static_assert(n.FeedbackVectorIndex() == 2);
761 node->RemoveInput(n.FeedbackVectorIndex());
762 NodeProperties::ChangeOp(node, javascript()->OrdinaryHasInstance());
764 }
765
766 if (access_info.IsFastDataConstant()) {
767 OptionalJSObjectRef holder = access_info.holder();
768 bool found_on_proto = holder.has_value();
769 JSObjectRef holder_ref = found_on_proto ? holder.value() : receiver.value();
770 if (access_info.field_representation().IsDouble()) return NoChange();
771 OptionalObjectRef constant = holder_ref.GetOwnFastConstantDataProperty(
772 broker(), access_info.field_representation(), access_info.field_index(),
773 dependencies());
774 if (!constant.has_value() || !constant->IsHeapObject() ||
775 !constant->AsHeapObject().map(broker()).is_callable()) {
776 return NoChange();
777 }
778
779 if (found_on_proto) {
782 holder.value());
783 }
784
785 // Check that {constructor} is actually {receiver}.
786 constructor = access_builder.BuildCheckValue(constructor, &effect, control,
787 *receiver);
788
789 // Monomorphic property access.
790 access_builder.BuildCheckMaps(constructor, &effect, control,
791 access_info.lookup_start_object_maps());
792
793 // Create a nested frame state inside the current method's most-recent frame
794 // state that will ensure that deopts that happen after this point will not
795 // fallback to the last Checkpoint--which would completely re-execute the
796 // instanceof logic--but rather create an activation of a version of the
797 // ToBoolean stub that finishes the remaining work of instanceof and returns
798 // to the caller without duplicating side-effects upon a lazy deopt.
799 Node* continuation_frame_state = CreateStubBuiltinContinuationFrameState(
800 jsgraph(), Builtin::kToBooleanLazyDeoptContinuation, context, nullptr,
801 0, frame_state, ContinuationFrameStateMode::LAZY);
802
803 // Call the @@hasInstance handler.
804 Node* target = jsgraph()->ConstantNoHole(*constant, broker());
805 Node* feedback = jsgraph()->UndefinedConstant();
806 // Value inputs plus context, frame state, effect, control.
807 static_assert(JSCallNode::ArityForArgc(1) + 4 == 8);
808 node->EnsureInputCount(graph()->zone(), 8);
809 node->ReplaceInput(JSCallNode::TargetIndex(), target);
810 node->ReplaceInput(JSCallNode::ReceiverIndex(), constructor);
811 node->ReplaceInput(JSCallNode::ArgumentIndex(0), object);
812 node->ReplaceInput(3, feedback);
813 node->ReplaceInput(4, context);
814 node->ReplaceInput(5, continuation_frame_state);
815 node->ReplaceInput(6, effect);
816 node->ReplaceInput(7, control);
821
822 // Rewire the value uses of {node} to ToBoolean conversion of the result.
823 Node* value = graph()->NewNode(simplified()->ToBoolean(), node);
824 for (Edge edge : node->use_edges()) {
825 if (NodeProperties::IsValueEdge(edge) && edge.from() != value) {
826 edge.UpdateTo(value);
827 Revisit(edge.from());
828 }
829 }
830 return Changed(node);
831 }
832
833 return NoChange();
834}
835
// NOTE(review): this scrape is missing several original lines: 836-837 (the
// function signature and return type -- the kIsInPrototypeChain /
// kIsNotInPrototypeChain / kMayBeInPrototypeChain results used below
// suggest an InferHasInPrototypeChainResult enum), 840, 842 (the map
// inference call producing {result}), 854, 858, 872 (the early returns of
// kMayBeInPrototypeChain inside the loop), 893 (a return in the unstable-
// prototype case), 897-900 (the DependOnStablePrototypeChains call) and
// 905 (the final return). Restore from upstream before building.
// Statically decides whether every / no receiver map has {prototype} in its
// prototype chain, installing stable-map and stable-prototype-chain
// dependencies so the answer stays valid.
839 ZoneRefSet<Map> receiver_maps;
841 broker(), receiver, effect, &receiver_maps);
843
844 ZoneVector<MapRef> receiver_map_refs(zone());
845
846 // Try to determine either that all of the {receiver_maps} have the given
847 // {prototype} in their chain, or that none do. If we can't tell, return
848 // kMayBeInPrototypeChain.
849 bool all = true;
850 bool none = true;
851 for (MapRef map : receiver_maps) {
852 receiver_map_refs.push_back(map);
853 if (result == NodeProperties::kUnreliableMaps && !map.is_stable()) {
855 }
856 while (true) {
857 if (IsSpecialReceiverInstanceType(map.instance_type())) {
859 }
860 if (!map.IsJSObjectMap()) {
861 all = false;
862 break;
863 }
864 HeapObjectRef map_prototype = map.prototype(broker());
865 if (map_prototype.equals(prototype)) {
866 none = false;
867 break;
868 }
869 map = map_prototype.map(broker());
870 // TODO(v8:11457) Support dictionary mode protoypes here.
871 if (!map.is_stable() || map.is_dictionary_map()) {
873 }
874 if (map.oddball_type(broker()) == OddballType::kNull) {
875 all = false;
876 break;
877 }
878 }
879 }
880 DCHECK_IMPLIES(all, !none);
881 if (!all && !none) return kMayBeInPrototypeChain;
882
883 {
884 OptionalJSObjectRef last_prototype;
885 if (all) {
886 // We don't need to protect the full chain if we found the prototype, we
887 // can stop at {prototype}. In fact we could stop at the one before
888 // {prototype} but since we're dealing with multiple receiver maps this
889 // might be a different object each time, so it's much simpler to include
890 // {prototype}. That does, however, mean that we must check {prototype}'s
891 // map stability.
892 if (!prototype.IsJSObject() || !prototype.map(broker()).is_stable()) {
894 }
895 last_prototype = prototype.AsJSObject();
896 }
901 last_prototype);
902 }
903
904 DCHECK_EQ(all, !none);
906}
907
// NOTE(review): this scrape is missing original lines 908 (the signature),
// 913 (the effect input declaration feeding {effect} below), 919, 921 and
// 923 (the InferHasInPrototypeChain result declaration, the check that the
// result is not kMayBeInPrototypeChain, and the boolean-constant
// construction for {result_in_chain}). Restore before building.
// Constant-folds JSHasInPrototypeChain when the prototype is a compile-time
// constant and InferHasInPrototypeChain gives a definite answer.
910 DCHECK_EQ(IrOpcode::kJSHasInPrototypeChain, node->opcode());
911 Node* value = NodeProperties::GetValueInput(node, 0);
912 Node* prototype = NodeProperties::GetValueInput(node, 1);
914
915 // Check if we can constant-fold the prototype chain walk
916 // for the given {value} and the {prototype}.
917 HeapObjectMatcher m(prototype);
918 if (m.HasResolvedValue()) {
920 InferHasInPrototypeChain(value, effect, m.Ref(broker()));
922 Node* result_in_chain =
924 ReplaceWithValue(node, result_in_chain);
925 return Replace(result_in_chain);
926 }
927 }
928
929 return NoChange();
930}
931
933 Node* node) {
934 DCHECK_EQ(IrOpcode::kJSOrdinaryHasInstance, node->opcode());
935 Node* constructor = NodeProperties::GetValueInput(node, 0);
936 Node* object = NodeProperties::GetValueInput(node, 1);
937
938 // Check if the {constructor} is known at compile time.
939 HeapObjectMatcher m(constructor);
940 if (!m.HasResolvedValue()) return NoChange();
941
942 if (m.Ref(broker()).IsJSBoundFunction()) {
943 // OrdinaryHasInstance on bound functions turns into a recursive invocation
944 // of the instanceof operator again.
945 JSBoundFunctionRef function = m.Ref(broker()).AsJSBoundFunction();
946 Node* feedback = jsgraph()->UndefinedConstant();
948 JSInstanceOfNode::LeftIndex());
950 node,
951 jsgraph()->ConstantNoHole(function.bound_target_function(broker()),
952 broker()),
953 JSInstanceOfNode::RightIndex());
954 node->InsertInput(zone(), JSInstanceOfNode::FeedbackVectorIndex(),
955 feedback);
956 NodeProperties::ChangeOp(node, javascript()->InstanceOf(FeedbackSource()));
957 return Changed(node).FollowedBy(ReduceJSInstanceOf(node));
958 }
959
960 if (m.Ref(broker()).IsJSFunction()) {
961 // Optimize if we currently know the "prototype" property.
962
963 JSFunctionRef function = m.Ref(broker()).AsJSFunction();
964
965 // TODO(neis): Remove the has_prototype_slot condition once the broker is
966 // always enabled.
967 if (!function.map(broker()).has_prototype_slot() ||
968 !function.has_instance_prototype(broker()) ||
969 function.PrototypeRequiresRuntimeLookup(broker())) {
970 return NoChange();
971 }
972
973 HeapObjectRef prototype =
975 Node* prototype_constant = jsgraph()->ConstantNoHole(prototype, broker());
976
977 // Lower the {node} to JSHasInPrototypeChain.
978 NodeProperties::ReplaceValueInput(node, object, 0);
979 NodeProperties::ReplaceValueInput(node, prototype_constant, 1);
980 NodeProperties::ChangeOp(node, javascript()->HasInPrototypeChain());
982 }
983
984 return NoChange();
985}
986
987// ES section #sec-promise-resolve
989 DCHECK_EQ(IrOpcode::kJSPromiseResolve, node->opcode());
990 Node* constructor = NodeProperties::GetValueInput(node, 0);
991 Node* value = NodeProperties::GetValueInput(node, 1);
992 Node* context = NodeProperties::GetContextInput(node);
996
997 // Check if the {constructor} is the %Promise% function.
998 HeapObjectMatcher m(constructor);
999 if (!m.HasResolvedValue() ||
1000 !m.Ref(broker()).equals(native_context().promise_function(broker()))) {
1001 return NoChange();
1002 }
1003
1004 // Only optimize if {value} cannot be a JSPromise.
1005 MapInference inference(broker(), value, effect);
1006 if (!inference.HaveMaps() ||
1007 inference.AnyOfInstanceTypesAre(JS_PROMISE_TYPE)) {
1008 return NoChange();
1009 }
1010
1011 if (!dependencies()->DependOnPromiseHookProtector()) return NoChange();
1012
1013 // Create a %Promise% instance and resolve it with {value}.
1014 Node* promise = effect =
1015 graph()->NewNode(javascript()->CreatePromise(), context, effect);
1016
1017 // Create a nested frame state inside the current method's most-recent
1018 // {frame_state} that will ensure that lazy deoptimizations at this
1019 // point will still return the {promise} instead of the result of the
1020 // ResolvePromise operation (which yields undefined).
1021 Node* parameters[] = {promise};
1023 jsgraph(), Builtin::kAsyncFunctionLazyDeoptContinuation, context,
1024 parameters, arraysize(parameters), frame_state,
1026
1027 effect = graph()->NewNode(javascript()->ResolvePromise(), promise, value,
1028 context, frame_state, effect, control);
1029 ReplaceWithValue(node, promise, effect, control);
1030 return Replace(promise);
1031}
1032
1033// ES section #sec-promise-resolve-functions
1035 DCHECK_EQ(IrOpcode::kJSResolvePromise, node->opcode());
1036 Node* promise = NodeProperties::GetValueInput(node, 0);
1037 Node* resolution = NodeProperties::GetValueInput(node, 1);
1038 Node* context = NodeProperties::GetContextInput(node);
1041
1042 // Check if we know something about the {resolution}.
1043 MapInference inference(broker(), resolution, effect);
1044 if (!inference.HaveMaps()) return NoChange();
1045 ZoneRefSet<Map> const& resolution_maps = inference.GetMaps();
1046
1047 // Compute property access info for "then" on {resolution}.
1048 ZoneVector<PropertyAccessInfo> access_infos(graph()->zone());
1049 AccessInfoFactory access_info_factory(broker(), graph()->zone());
1050
1051 for (MapRef map : resolution_maps) {
1052 access_infos.push_back(broker()->GetPropertyAccessInfo(
1053 map, broker()->then_string(), AccessMode::kLoad));
1054 }
1055 PropertyAccessInfo access_info =
1056 access_info_factory.FinalizePropertyAccessInfosAsOne(access_infos,
1058
1059 // TODO(v8:11457) Support dictionary mode prototypes here.
1060 if (access_info.IsInvalid() || access_info.HasDictionaryHolder()) {
1061 return inference.NoChange();
1062 }
1063
1064 // Only optimize when {resolution} definitely doesn't have a "then" property.
1065 if (!access_info.IsNotFound()) return inference.NoChange();
1066
1067 if (!inference.RelyOnMapsViaStability(dependencies())) {
1068 return inference.NoChange();
1069 }
1070
1073
1074 // Simply fulfill the {promise} with the {resolution}.
1075 Node* value = effect =
1076 graph()->NewNode(javascript()->FulfillPromise(), promise, resolution,
1077 context, effect, control);
1078 ReplaceWithValue(node, value, effect, control);
1079 return Replace(value);
1080}
1081
1082namespace {
1083
1084FieldAccess ForPropertyCellValue(MachineRepresentation representation,
1085 Type type, OptionalMapRef map, NameRef name) {
1087 if (representation == MachineRepresentation::kTaggedSigned) {
1089 } else if (representation == MachineRepresentation::kTaggedPointer) {
1091 }
1092 MachineType r = MachineType::TypeForRepresentation(representation);
1093 FieldAccess access = {
1094 kTaggedBase, PropertyCell::kValueOffset, name.object(), map, type, r,
1095 kind, "PropertyCellValue"};
1096 return access;
1097}
1098
1099} // namespace
1100
1101// TODO(neis): Try to merge this with ReduceNamedAccess by introducing a new
1102// PropertyAccessInfo kind for global accesses and using the existing mechanism
1103// for building loads/stores.
1104// Note: The "receiver" parameter is only used for DCHECKS, but that's on
1105// purpose. This way we can assert the super property access cases won't hit the
1106// code which hasn't been modified to support super property access.
1108 Node* node, Node* lookup_start_object, Node* receiver, Node* value,
1109 NameRef name, AccessMode access_mode, Node* key,
1110 PropertyCellRef property_cell, Node* effect) {
1111 if (!property_cell.Cache(broker())) {
1112 TRACE_BROKER_MISSING(broker(), "usable data for " << property_cell);
1113 return NoChange();
1114 }
1115
1116 ObjectRef property_cell_value = property_cell.value(broker());
1117 if (property_cell_value.IsPropertyCellHole()) {
1118 // The property cell is no longer valid.
1119 return NoChange();
1120 }
1121
1122 PropertyDetails property_details = property_cell.property_details();
1123 PropertyCellType property_cell_type = property_details.cell_type();
1124 DCHECK_EQ(PropertyKind::kData, property_details.kind());
1125
1126 Node* control = NodeProperties::GetControlInput(node);
1127 if (effect == nullptr) {
1128 effect = NodeProperties::GetEffectInput(node);
1129 }
1130
1131 // We have additional constraints for stores.
1132 if (access_mode == AccessMode::kStore) {
1133 DCHECK_EQ(receiver, lookup_start_object);
1134 if (property_details.IsReadOnly()) {
1135 // Don't even bother trying to lower stores to read-only data properties.
1136 // TODO(neis): We could generate code that checks if the new value equals
1137 // the old one and then does nothing or deopts, respectively.
1138 return NoChange();
1139 } else if (property_cell_type == PropertyCellType::kUndefined) {
1140 return NoChange();
1141 } else if (property_cell_type == PropertyCellType::kConstantType) {
1142 // We rely on stability further below.
1143 if (property_cell_value.IsHeapObject() &&
1144 !property_cell_value.AsHeapObject().map(broker()).is_stable()) {
1145 return NoChange();
1146 }
1147 }
1148 } else if (access_mode == AccessMode::kHas) {
1149 DCHECK_EQ(receiver, lookup_start_object);
1150 // has checks cannot follow the fast-path used by loads when these
1151 // conditions hold.
1152 if ((property_details.IsConfigurable() || !property_details.IsReadOnly()) &&
1153 property_details.cell_type() != PropertyCellType::kConstant &&
1154 property_details.cell_type() != PropertyCellType::kUndefined)
1155 return NoChange();
1156 }
1157
1158 // Ensure that {key} matches the specified {name} (if {key} is given).
1159 if (key != nullptr) {
1160 effect = BuildCheckEqualsName(name, key, effect, control);
1161 }
1162
1163 // If we have a {lookup_start_object} to validate, we do so by checking that
1164 // its map is the (target) global proxy's map. This guarantees that in fact
1165 // the lookup start object is the global proxy.
1166 // Note: we rely on the map constant below being the same as what is used in
1167 // NativeContextRef::GlobalIsDetached().
1168 if (lookup_start_object != nullptr) {
1169 effect = graph()->NewNode(
1170 simplified()->CheckMaps(
1173 native_context().global_proxy_object(broker()).map(broker()))),
1174 lookup_start_object, effect, control);
1175 }
1176
1177 if (access_mode == AccessMode::kLoad || access_mode == AccessMode::kHas) {
1178 // Load from non-configurable, read-only data property on the global
1179 // object can be constant-folded, even without deoptimization support.
1180 if (!property_details.IsConfigurable() && property_details.IsReadOnly()) {
1181 value = access_mode == AccessMode::kHas
1182 ? jsgraph()->TrueConstant()
1183 : jsgraph()->ConstantNoHole(property_cell_value, broker());
1184 } else {
1185 // Record a code dependency on the cell if we can benefit from the
1186 // additional feedback, or the global property is configurable (i.e.
1187 // can be deleted or reconfigured to an accessor property).
1188 if (property_details.cell_type() != PropertyCellType::kMutable ||
1189 property_details.IsConfigurable()) {
1190 dependencies()->DependOnGlobalProperty(property_cell);
1191 }
1192
1193 // Load from constant/undefined global property can be constant-folded.
1194 if (property_details.cell_type() == PropertyCellType::kConstant ||
1195 property_details.cell_type() == PropertyCellType::kUndefined) {
1196 value = access_mode == AccessMode::kHas
1197 ? jsgraph()->TrueConstant()
1198 : jsgraph()->ConstantNoHole(property_cell_value, broker());
1199 DCHECK(!property_cell_value.IsHeapObject() ||
1200 !property_cell_value.IsPropertyCellHole());
1201 } else {
1202 DCHECK_NE(AccessMode::kHas, access_mode);
1203
1204 // Load from constant type cell can benefit from type feedback.
1205 OptionalMapRef map;
1206 Type property_cell_value_type = Type::NonInternal();
1208 if (property_details.cell_type() == PropertyCellType::kConstantType) {
1209 // Compute proper type based on the current value in the cell.
1210 if (property_cell_value.IsSmi()) {
1211 property_cell_value_type = Type::SignedSmall();
1212 representation = MachineRepresentation::kTaggedSigned;
1213 } else if (property_cell_value.IsHeapNumber()) {
1214 property_cell_value_type = Type::Number();
1216 } else {
1217 MapRef property_cell_value_map =
1218 property_cell_value.AsHeapObject().map(broker());
1219 property_cell_value_type =
1220 Type::For(property_cell_value_map, broker());
1222
1223 // We can only use the property cell value map for map check
1224 // elimination if it's stable, i.e. the HeapObject wasn't
1225 // mutated without the cell state being updated.
1226 if (property_cell_value_map.is_stable()) {
1227 dependencies()->DependOnStableMap(property_cell_value_map);
1228 map = property_cell_value_map;
1229 }
1230 }
1231 }
1232 value = effect = graph()->NewNode(
1233 simplified()->LoadField(ForPropertyCellValue(
1234 representation, property_cell_value_type, map, name)),
1235 jsgraph()->ConstantNoHole(property_cell, broker()), effect,
1236 control);
1237 }
1238 }
1239 } else if (access_mode == AccessMode::kStore) {
1240 DCHECK_EQ(receiver, lookup_start_object);
1241 DCHECK(!property_details.IsReadOnly());
1242 switch (property_details.cell_type()) {
1244 // Record a code dependency on the cell, and just deoptimize if the new
1245 // value doesn't match the previous value stored inside the cell.
1246 dependencies()->DependOnGlobalProperty(property_cell);
1247 Node* check = graph()->NewNode(
1248 simplified()->ReferenceEqual(), value,
1249 jsgraph()->ConstantNoHole(property_cell_value, broker()));
1250 effect = graph()->NewNode(
1251 simplified()->CheckIf(DeoptimizeReason::kValueMismatch), check,
1252 effect, control);
1253 break;
1254 }
1256 // Record a code dependency on the cell, and just deoptimize if the new
1257 // value's type doesn't match the type of the previous value in the
1258 // cell.
1259 dependencies()->DependOnGlobalProperty(property_cell);
1260 Type property_cell_value_type;
1262 if (property_cell_value.IsHeapObject()) {
1263 MapRef property_cell_value_map =
1264 property_cell_value.AsHeapObject().map(broker());
1265 dependencies()->DependOnStableMap(property_cell_value_map);
1266
1267 // Check that the {value} is a HeapObject.
1268 value = effect = graph()->NewNode(simplified()->CheckHeapObject(),
1269 value, effect, control);
1270 // Check {value} map against the {property_cell_value} map.
1271 effect = graph()->NewNode(
1272 simplified()->CheckMaps(CheckMapsFlag::kNone,
1273 ZoneRefSet<Map>(property_cell_value_map)),
1274 value, effect, control);
1275 property_cell_value_type = Type::OtherInternal();
1277 } else {
1278 // Check that the {value} is a Smi.
1279 value = effect = graph()->NewNode(
1280 simplified()->CheckSmi(FeedbackSource()), value, effect, control);
1281 property_cell_value_type = Type::SignedSmall();
1282 representation = MachineRepresentation::kTaggedSigned;
1283 }
1284 effect =
1285 graph()->NewNode(simplified()->StoreField(ForPropertyCellValue(
1286 representation, property_cell_value_type,
1287 OptionalMapRef(), name)),
1288 jsgraph()->ConstantNoHole(property_cell, broker()),
1289 value, effect, control);
1290 break;
1291 }
1293 // Record a code dependency on the cell, and just deoptimize if the
1294 // property ever becomes read-only.
1295 dependencies()->DependOnGlobalProperty(property_cell);
1296 effect =
1297 graph()->NewNode(simplified()->StoreField(ForPropertyCellValue(
1299 Type::NonInternal(), OptionalMapRef(), name)),
1300 jsgraph()->ConstantNoHole(property_cell, broker()),
1301 value, effect, control);
1302 break;
1303 }
1306 UNREACHABLE();
1307 }
1308 } else {
1309 return NoChange();
1310 }
1311
1312 ReplaceWithValue(node, value, effect, control);
1313 return Replace(value);
1314}
1315
1317 JSLoadGlobalNode n(node);
1318 LoadGlobalParameters const& p = n.Parameters();
1319 if (!p.feedback().IsValid()) return NoChange();
1320
1321 ProcessedFeedback const& processed =
1323 if (processed.IsInsufficient()) return NoChange();
1324
1325 GlobalAccessFeedback const& feedback = processed.AsGlobalAccess();
1326 if (feedback.IsScriptContextSlot()) {
1327 Effect effect = n.effect();
1328 Control control = n.control();
1329 Node* script_context =
1330 jsgraph()->ConstantNoHole(feedback.script_context(), broker());
1331 Node* value;
1332 if ((v8_flags.script_context_mutable_heap_number ||
1333 v8_flags.const_tracking_let) &&
1334 !feedback.immutable()) {
1335 // We collect feedback only for mutable context slots.
1336 value = effect = graph()->NewNode(
1337 javascript()->LoadScriptContext(0, feedback.slot_index()),
1338 script_context, effect, control);
1339 } else {
1340 value = effect =
1341 graph()->NewNode(javascript()->LoadContext(0, feedback.slot_index(),
1342 feedback.immutable()),
1343 script_context, effect);
1344 }
1345 ReplaceWithValue(node, value, effect, control);
1346 return Replace(value);
1347 } else if (feedback.IsPropertyCell()) {
1348 return ReduceGlobalAccess(node, nullptr, nullptr, nullptr, p.name(),
1349 AccessMode::kLoad, nullptr,
1350 feedback.property_cell());
1351 } else {
1352 DCHECK(feedback.IsMegamorphic());
1353 return NoChange();
1354 }
1355}
1356
1358 JSStoreGlobalNode n(node);
1359 StoreGlobalParameters const& p = n.Parameters();
1360 Node* value = n.value();
1361 if (!p.feedback().IsValid()) return NoChange();
1362
1363 ProcessedFeedback const& processed =
1365 if (processed.IsInsufficient()) return NoChange();
1366
1367 GlobalAccessFeedback const& feedback = processed.AsGlobalAccess();
1368 if (feedback.IsScriptContextSlot()) {
1369 if (feedback.immutable()) return NoChange();
1370 Node* effect = n.effect();
1371 Node* control = n.control();
1372 Node* script_context =
1373 jsgraph()->ConstantNoHole(feedback.script_context(), broker());
1374 if (v8_flags.script_context_mutable_heap_number ||
1375 v8_flags.const_tracking_let) {
1376 effect = control = graph()->NewNode(
1377 javascript()->StoreScriptContext(0, feedback.slot_index()), value,
1378 script_context, effect, control);
1379 } else {
1380 effect =
1381 graph()->NewNode(javascript()->StoreContext(0, feedback.slot_index()),
1382 value, script_context, effect, control);
1383 }
1384 ReplaceWithValue(node, value, effect, control);
1385 return Replace(value);
1386 } else if (feedback.IsPropertyCell()) {
1387 return ReduceGlobalAccess(node, nullptr, nullptr, value, p.name(),
1388 AccessMode::kStore, nullptr,
1389 feedback.property_cell());
1390 } else {
1391 DCHECK(feedback.IsMegamorphic());
1392 return NoChange();
1393 }
1394}
1395
1397 Node* node, Node* value, MegaDOMPropertyAccessFeedback const& feedback,
1398 FeedbackSource const& source) {
1399 DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
1400 node->opcode() == IrOpcode::kJSLoadProperty);
1401 // TODO(mslekova): Add support and tests for kJSLoadNamedFromSuper.
1402 static_assert(JSLoadNamedNode::ObjectIndex() == 0 &&
1403 JSLoadPropertyNode::ObjectIndex() == 0,
1404 "Assumptions about ObjectIndex have changed, please update "
1405 "this function.");
1406
1407 Node* effect = NodeProperties::GetEffectInput(node);
1408 Node* control = NodeProperties::GetControlInput(node);
1409 Node* frame_state = NodeProperties::GetFrameStateInput(node);
1410
1411 Node* lookup_start_object = NodeProperties::GetValueInput(node, 0);
1412
1413 if (!dependencies()->DependOnMegaDOMProtector()) {
1414 return NoChange();
1415 }
1416
1417 FunctionTemplateInfoRef function_template_info = feedback.info();
1418 int16_t range_start =
1419 function_template_info.allowed_receiver_instance_type_range_start();
1420 int16_t range_end =
1421 function_template_info.allowed_receiver_instance_type_range_end();
1422 DCHECK_IMPLIES(range_start == 0, range_end == 0);
1423 DCHECK_LE(range_start, range_end);
1424
1425 // TODO(mslekova): This could be a new InstanceTypeCheck operator
1426 // that gets lowered later on (e.g. during generic lowering).
1427 Node* receiver_map = effect =
1428 graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
1429 lookup_start_object, effect, control);
1430 Node* receiver_instance_type = effect = graph()->NewNode(
1432 receiver_map, effect, control);
1433
1434 if (v8_flags.experimental_embedder_instance_types && range_start != 0) {
1435 // Embedder instance ID is set, doing a simple range check.
1436 Node* diff_to_start =
1437 graph()->NewNode(simplified()->NumberSubtract(), receiver_instance_type,
1438 jsgraph()->ConstantNoHole(range_start));
1439 Node* range_length = jsgraph()->ConstantNoHole(range_end - range_start);
1440
1441 // TODO(mslekova): Once we have the InstanceTypeCheck operator, we could
1442 // lower it to Uint32LessThan later on to perform what is done in bounds.h.
1443 Node* check = graph()->NewNode(simplified()->NumberLessThanOrEqual(),
1444 diff_to_start, range_length);
1445 effect = graph()->NewNode(
1446 simplified()->CheckIf(DeoptimizeReason::kWrongInstanceType), check,
1447 effect, control);
1448 } else if (function_template_info.is_signature_undefined(broker())) {
1449 // Signature is undefined, enough to check if the receiver is a JSApiObject.
1450 Node* check =
1451 graph()->NewNode(simplified()->NumberEqual(), receiver_instance_type,
1452 jsgraph()->ConstantNoHole(JS_API_OBJECT_TYPE));
1453 effect = graph()->NewNode(
1454 simplified()->CheckIf(DeoptimizeReason::kWrongInstanceType), check,
1455 effect, control);
1456 } else {
1457 // Calling out to builtin to do signature check.
1459 isolate(), Builtin::kCallFunctionTemplate_CheckCompatibleReceiver);
1460 int stack_arg_count = callable.descriptor().GetStackParameterCount() +
1461 1 /* implicit receiver */;
1462
1464 graph()->zone(), callable.descriptor(), stack_arg_count,
1466
1467 Node* inputs[8] = {
1468 jsgraph()->HeapConstantNoHole(callable.code()),
1469 jsgraph()->ConstantNoHole(function_template_info, broker()),
1470 jsgraph()->Int32Constant(stack_arg_count),
1471 lookup_start_object,
1473 frame_state,
1474 effect,
1475 control};
1476
1477 value = effect = control =
1478 graph()->NewNode(common()->Call(call_descriptor), 8, inputs);
1479 return Replace(value);
1480 }
1481
1482 value = InlineApiCall(lookup_start_object, frame_state, nullptr /*value*/,
1483 &effect, &control, function_template_info, source);
1484 ReplaceWithValue(node, value, effect, control);
1485 return Replace(value);
1486}
1487
1489 Node* node, Node* value, NamedAccessFeedback const& feedback,
1490 AccessMode access_mode, Node* key) {
1491 DCHECK(node->opcode() == IrOpcode::kJSLoadNamed ||
1492 node->opcode() == IrOpcode::kJSSetNamedProperty ||
1493 node->opcode() == IrOpcode::kJSLoadProperty ||
1494 node->opcode() == IrOpcode::kJSSetKeyedProperty ||
1495 node->opcode() == IrOpcode::kJSDefineNamedOwnProperty ||
1496 node->opcode() == IrOpcode::kJSDefineKeyedOwnPropertyInLiteral ||
1497 node->opcode() == IrOpcode::kJSHasProperty ||
1498 node->opcode() == IrOpcode::kJSLoadNamedFromSuper ||
1499 node->opcode() == IrOpcode::kJSDefineKeyedOwnProperty);
1500 static_assert(JSLoadNamedNode::ObjectIndex() == 0 &&
1501 JSSetNamedPropertyNode::ObjectIndex() == 0 &&
1502 JSLoadPropertyNode::ObjectIndex() == 0 &&
1503 JSSetKeyedPropertyNode::ObjectIndex() == 0 &&
1504 JSDefineNamedOwnPropertyNode::ObjectIndex() == 0 &&
1505 JSSetNamedPropertyNode::ObjectIndex() == 0 &&
1506 JSDefineKeyedOwnPropertyInLiteralNode::ObjectIndex() == 0 &&
1507 JSHasPropertyNode::ObjectIndex() == 0 &&
1508 JSDefineKeyedOwnPropertyNode::ObjectIndex() == 0);
1509 static_assert(JSLoadNamedFromSuperNode::ReceiverIndex() == 0);
1510
1511 Node* context = NodeProperties::GetContextInput(node);
1515
1516 // receiver = the object we pass to the accessor (if any) as the "this" value.
1518 // lookup_start_object = the object where we start looking for the property.
1519 Node* lookup_start_object;
1520 if (node->opcode() == IrOpcode::kJSLoadNamedFromSuper) {
1521 DCHECK(v8_flags.super_ic);
1523 // Lookup start object is the __proto__ of the home object.
1524 lookup_start_object = effect =
1525 BuildLoadPrototypeFromObject(n.home_object(), effect, control);
1526 } else {
1527 lookup_start_object = receiver;
1528 }
1529
1530 // Either infer maps from the graph or use the feedback.
1531 ZoneVector<MapRef> inferred_maps(zone());
1532 if (!InferMaps(lookup_start_object, effect, &inferred_maps)) {
1533 for (MapRef map : feedback.maps()) {
1534 inferred_maps.push_back(map);
1535 }
1536 }
1537 RemoveImpossibleMaps(lookup_start_object, &inferred_maps);
1538
1539 // Check if we have an access o.x or o.x=v where o is the target native
1540 // contexts' global proxy, and turn that into a direct access to the
1541 // corresponding global object instead.
1542 if (inferred_maps.size() == 1) {
1543 MapRef lookup_start_object_map = inferred_maps[0];
1544 if (lookup_start_object_map.equals(
1545 native_context().global_proxy_object(broker()).map(broker()))) {
1546 if (!native_context().GlobalIsDetached(broker())) {
1547 OptionalPropertyCellRef cell =
1548 native_context().global_object(broker()).GetPropertyCell(
1549 broker(), feedback.name());
1550 if (!cell.has_value()) return NoChange();
1551 // Note: The map check generated by ReduceGlobalAccesses ensures that we
1552 // will deopt when/if GlobalIsDetached becomes true.
1553 return ReduceGlobalAccess(node, lookup_start_object, receiver, value,
1554 feedback.name(), access_mode, key, *cell,
1555 effect);
1556 }
1557 }
1558 }
1559
1560 ZoneVector<PropertyAccessInfo> access_infos(zone());
1561 {
1562 ZoneVector<PropertyAccessInfo> access_infos_for_feedback(zone());
1563 for (MapRef map : inferred_maps) {
1564 if (map.is_deprecated()) continue;
1565
1566 // TODO(v8:12547): Support writing to objects in shared space, which need
1567 // a write barrier that calls Object::Share to ensure the RHS is shared.
1568 if (InstanceTypeChecker::IsAlwaysSharedSpaceJSObject(
1569 map.instance_type()) &&
1570 access_mode == AccessMode::kStore) {
1571 return NoChange();
1572 }
1573
1574 PropertyAccessInfo access_info =
1575 broker()->GetPropertyAccessInfo(map, feedback.name(), access_mode);
1576 access_infos_for_feedback.push_back(access_info);
1577 }
1578
1579 AccessInfoFactory access_info_factory(broker(), graph()->zone());
1580 if (!access_info_factory.FinalizePropertyAccessInfos(
1581 access_infos_for_feedback, access_mode, &access_infos)) {
1582 return NoChange();
1583 }
1584 }
1585
1586 // Ensure that {key} matches the specified name (if {key} is given).
1587 if (key != nullptr) {
1588 effect = BuildCheckEqualsName(feedback.name(), key, effect, control);
1589 }
1590
1591 // Collect call nodes to rewire exception edges.
1592 ZoneVector<Node*> if_exception_nodes(zone());
1593 ZoneVector<Node*>* if_exceptions = nullptr;
1594 Node* if_exception = nullptr;
1595 if (NodeProperties::IsExceptionalCall(node, &if_exception)) {
1596 if_exceptions = &if_exception_nodes;
1597 }
1598
1599 PropertyAccessBuilder access_builder(jsgraph(), broker());
1600
1601 // Check for the monomorphic cases.
1602 if (access_infos.size() == 1) {
1603 PropertyAccessInfo access_info = access_infos.front();
1604 if (receiver != lookup_start_object) {
1605 // Super property access. lookup_start_object is a JSReceiver or
1606 // null. It can't be a number, a string etc. So trying to build the
1607 // checks in the "else if" branch doesn't make sense.
1608
1609 access_builder.BuildCheckMaps(
1610 lookup_start_object, &effect, control,
1611 access_info.lookup_start_object_maps(),
1612 feedback.has_deprecated_map_without_migration_target());
1613
1615 access_info.lookup_start_object_maps())) {
1616 // In order to be able to use StringWrapperLength, we need a TypeGuard
1617 // when all input maps are StringWrapper maps.
1618 lookup_start_object = effect =
1619 graph()->NewNode(common()->TypeGuard(Type::StringWrapper()),
1620 lookup_start_object, effect, control);
1622 broker(), access_info.lookup_start_object_maps())) {
1623 // In order to be able to use TypedArrayLength, we need a TypeGuard
1624 // when all input maps are TypedArray maps. We need this only when
1625 // all maps are non-RAB/GSAB maps, since TypedArrayLength only handles
1626 // non-RAB/GSAB maps.
1627 lookup_start_object = effect =
1628 graph()->NewNode(common()->TypeGuard(Type::TypedArray()),
1629 lookup_start_object, effect, control);
1630 }
1631
1632 } else if (!access_builder.TryBuildStringCheck(
1633 broker(), access_info.lookup_start_object_maps(), &receiver,
1634 &effect, control) &&
1635 !access_builder.TryBuildNumberCheck(
1636 broker(), access_info.lookup_start_object_maps(), &receiver,
1637 &effect, control)) {
1638 // Try to build string check or number check if possible. Otherwise build
1639 // a map check.
1640
1641 // TryBuildStringCheck and TryBuildNumberCheck don't update the receiver
1642 // if they fail.
1643 DCHECK_EQ(receiver, lookup_start_object);
1644 if (HasNumberMaps(broker(), access_info.lookup_start_object_maps())) {
1645 // We need to also let Smi {receiver}s through in this case, so
1646 // we construct a diamond, guarded by the Sminess of the {receiver}
1647 // and if {receiver} is not a Smi just emit a sequence of map checks.
1648 Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
1649 Node* branch = graph()->NewNode(common()->Branch(), check, control);
1650
1651 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
1652 Node* etrue = effect;
1653
1654 Control if_false{graph()->NewNode(common()->IfFalse(), branch)};
1655 Effect efalse = effect;
1656 access_builder.BuildCheckMaps(
1657 receiver, &efalse, if_false, access_info.lookup_start_object_maps(),
1658 feedback.has_deprecated_map_without_migration_target());
1659
1660 control = graph()->NewNode(common()->Merge(2), if_true, if_false);
1661 effect =
1662 graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
1663 } else {
1664 access_builder.BuildCheckMaps(
1665 receiver, &effect, control, access_info.lookup_start_object_maps(),
1666 feedback.has_deprecated_map_without_migration_target());
1667 }
1668
1670 access_info.lookup_start_object_maps())) {
1671 // In order to be able to use StringWrapperLength, we need a TypeGuard
1672 // when all input maps are StringWrapper maps. Note that, alternatively,
1673 // we could have a CheckStringWrapper, but it makes things simpler to
1674 // just rely on CheckMaps. This is slightly suboptimal in case the code
1675 // contains multiple string wrappers with different properties, but this
1676 // should be a rare case.
1677 lookup_start_object = receiver = effect =
1678 graph()->NewNode(common()->TypeGuard(Type::StringWrapper()),
1679 lookup_start_object, effect, control);
1681 broker(), access_info.lookup_start_object_maps())) {
1682 // In order to be able to use TypedArrayLength, we need a TypeGuard
1683 // when all input maps are TypedArray maps. We need this only when
1684 // all maps are non-RAB/GSAB maps, since TypedArrayLength only handles
1685 // non-RAB/GSAB maps.
1686 lookup_start_object = receiver = effect =
1687 graph()->NewNode(common()->TypeGuard(Type::TypedArray()),
1688 lookup_start_object, effect, control);
1689 }
1690 } else {
1691 // At least one of TryBuildStringCheck & TryBuildNumberCheck succeeded
1692 // and updated the receiver. Update lookup_start_object to match (they
1693 // should be the same).
1694 lookup_start_object = receiver;
1695 }
1696
1697 // Generate the actual property access.
1698 std::optional<ValueEffectControl> continuation = BuildPropertyAccess(
1699 lookup_start_object, receiver, value, context, frame_state, effect,
1700 control, feedback.name(), if_exceptions, access_info, access_mode);
1701 if (!continuation) {
1702 // At this point we maybe have added nodes into the graph (e.g. via
1703 // NewNode or BuildCheckMaps) in some cases but we haven't connected them
1704 // to End since we haven't called ReplaceWithValue. Since they are nodes
1705 // which are not connected with End, they will be removed by graph
1706 // trimming.
1707 return NoChange();
1708 }
1709 value = continuation->value();
1710 effect = continuation->effect();
1711 control = continuation->control();
1712 } else {
1713 // The final states for every polymorphic branch. We join them with
1714 // Merge+Phi+EffectPhi at the bottom.
1715 ZoneVector<Node*> values(zone());
1716 ZoneVector<Node*> effects(zone());
1717 ZoneVector<Node*> controls(zone());
1718
1719 Node* receiverissmi_control = nullptr;
1720 Node* receiverissmi_effect = effect;
1721
1722 if (receiver == lookup_start_object) {
1723 // Check if {receiver} may be a number.
1724 bool receiverissmi_possible = false;
1725 for (PropertyAccessInfo const& access_info : access_infos) {
1726 if (HasNumberMaps(broker(), access_info.lookup_start_object_maps())) {
1727 receiverissmi_possible = true;
1728 break;
1729 }
1730 }
1731
1732 // Handle the case that {receiver} may be a number.
1733 if (receiverissmi_possible) {
1734 Node* check = graph()->NewNode(simplified()->ObjectIsSmi(), receiver);
1735 Node* branch = graph()->NewNode(common()->Branch(), check, control);
1736 control = graph()->NewNode(common()->IfFalse(), branch);
1737 receiverissmi_control = graph()->NewNode(common()->IfTrue(), branch);
1738 receiverissmi_effect = effect;
1739 }
1740 }
1741
1742 // Generate code for the various different property access patterns.
1743 Node* fallthrough_control = control;
1744 for (size_t j = 0; j < access_infos.size(); ++j) {
1745 PropertyAccessInfo const& access_info = access_infos[j];
1746 Node* this_value = value;
1747 Node* this_lookup_start_object = lookup_start_object;
1748 Node* this_receiver = receiver;
1749 Effect this_effect = effect;
1750 Control this_control{fallthrough_control};
1751
1752 // Perform map check on {lookup_start_object}.
1753 ZoneVector<MapRef> const& lookup_start_object_maps =
1754 access_info.lookup_start_object_maps();
1755 {
1756 // Whether to insert a dedicated MapGuard node into the
1757 // effect to be able to learn from the control flow.
1758 bool insert_map_guard = true;
1759
1760 // Check maps for the {lookup_start_object}s.
1761 if (j == access_infos.size() - 1) {
1762 // Last map check on the fallthrough control path, do a
1763 // conditional eager deoptimization exit here.
1764 access_builder.BuildCheckMaps(
1765 lookup_start_object, &this_effect, this_control,
1766 lookup_start_object_maps,
1767 feedback.has_deprecated_map_without_migration_target());
1768 fallthrough_control = nullptr;
1769
1770 // Don't insert a MapGuard in this case, as the CheckMaps
1771 // node already gives you all the information you need
1772 // along the effect chain.
1773 insert_map_guard = false;
1774 } else {
1775 // Explicitly branch on the {lookup_start_object_maps}.
1776 ZoneRefSet<Map> maps(lookup_start_object_maps.begin(),
1777 lookup_start_object_maps.end(), graph()->zone());
1778 Node* check = this_effect =
1779 graph()->NewNode(simplified()->CompareMaps(maps),
1780 lookup_start_object, this_effect, this_control);
1781 Node* branch =
1782 graph()->NewNode(common()->Branch(), check, this_control);
1783 fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
1784 this_control = graph()->NewNode(common()->IfTrue(), branch);
1785 }
1786
1787 // The Number case requires special treatment to also deal with Smis.
1788 if (HasNumberMaps(broker(), lookup_start_object_maps)) {
1789 // Join this check with the "receiver is smi" check above.
1790 DCHECK_EQ(receiver, lookup_start_object);
1791 DCHECK_NOT_NULL(receiverissmi_effect);
1792 DCHECK_NOT_NULL(receiverissmi_control);
1793 this_control = graph()->NewNode(common()->Merge(2), this_control,
1794 receiverissmi_control);
1795 this_effect = graph()->NewNode(common()->EffectPhi(2), this_effect,
1796 receiverissmi_effect, this_control);
1797 receiverissmi_effect = receiverissmi_control = nullptr;
1798
1799 // The {lookup_start_object} can also be a Smi in this case, so
1800 // a MapGuard doesn't make sense for this at all.
1801 insert_map_guard = false;
1802 }
1803
1804 // Introduce a MapGuard to learn from this on the effect chain.
1805 if (insert_map_guard) {
1806 ZoneRefSet<Map> maps(lookup_start_object_maps.begin(),
1807 lookup_start_object_maps.end(), graph()->zone());
1808 this_effect =
1809 graph()->NewNode(simplified()->MapGuard(maps),
1810 lookup_start_object, this_effect, this_control);
1811 }
1812
1813 // If all {lookup_start_object_maps} are Strings we also need to rename
1814 // the {lookup_start_object} here to make sure that TurboFan knows that
1815 // along this path the {this_lookup_start_object} is a String. This is
1816 // because we want strict checking of types, for example for
1817 // StringLength operators.
1818 if (HasOnlyStringMaps(broker(), lookup_start_object_maps)) {
1819 DCHECK_EQ(receiver, lookup_start_object);
1820 this_lookup_start_object = this_receiver = this_effect =
1821 graph()->NewNode(common()->TypeGuard(Type::String()),
1822 lookup_start_object, this_effect, this_control);
1823 } else if (HasOnlyStringWrapperMaps(broker(),
1824 lookup_start_object_maps)) {
1825 bool receiver_is_lookup_start =
1826 this_lookup_start_object == this_receiver;
1827 DCHECK_IMPLIES(access_mode != AccessMode::kLoad,
1828 receiver_is_lookup_start);
1829 this_lookup_start_object = this_effect =
1830 graph()->NewNode(common()->TypeGuard(Type::StringWrapper()),
1831 lookup_start_object, this_effect, this_control);
1832 if (receiver_is_lookup_start) {
1833 this_receiver = this_lookup_start_object;
1834 }
1836 broker(), lookup_start_object_maps)) {
1837 bool receiver_is_lookup_start =
1838 this_lookup_start_object == this_receiver;
1839 DCHECK_IMPLIES(access_mode != AccessMode::kLoad,
1840 receiver_is_lookup_start);
1841 this_lookup_start_object = this_effect =
1842 graph()->NewNode(common()->TypeGuard(Type::TypedArray()),
1843 lookup_start_object, this_effect, this_control);
1844 if (receiver_is_lookup_start) {
1845 this_receiver = this_lookup_start_object;
1846 }
1847 }
1848 }
1849
1850 // Generate the actual property access.
1851 std::optional<ValueEffectControl> continuation = BuildPropertyAccess(
1852 this_lookup_start_object, this_receiver, this_value, context,
1853 frame_state, this_effect, this_control, feedback.name(),
1854 if_exceptions, access_info, access_mode);
1855 if (!continuation) {
1856 // At this point we maybe have added nodes into the graph (e.g. via
1857 // NewNode or BuildCheckMaps) in some cases but we haven't connected
1858 // them to End since we haven't called ReplaceWithValue. Since they are
1859 // nodes which are not connected with End, they will be removed by graph
1860 // trimming.
1861 return NoChange();
1862 }
1863
1864 values.push_back(continuation->value());
1865 effects.push_back(continuation->effect());
1866 controls.push_back(continuation->control());
1867 }
1868
1869 DCHECK_NULL(fallthrough_control);
1870
1871 // Generate the final merge point for all (polymorphic) branches.
1872 int const control_count = static_cast<int>(controls.size());
1873 if (control_count == 0) {
1874 value = effect = control = jsgraph()->Dead();
1875 } else if (control_count == 1) {
1876 value = values.front();
1877 effect = effects.front();
1878 control = controls.front();
1879 } else {
1880 control = graph()->NewNode(common()->Merge(control_count), control_count,
1881 &controls.front());
1882 values.push_back(control);
1883 value = graph()->NewNode(
1884 common()->Phi(MachineRepresentation::kTagged, control_count),
1885 control_count + 1, &values.front());
1886 effects.push_back(control);
1887 effect = graph()->NewNode(common()->EffectPhi(control_count),
1888 control_count + 1, &effects.front());
1889 }
1890 }
1891
1892 // Properly rewire IfException edges if {node} is inside a try-block.
1893 if (!if_exception_nodes.empty()) {
1894 DCHECK_NOT_NULL(if_exception);
1895 DCHECK_EQ(if_exceptions, &if_exception_nodes);
1896 int const if_exception_count = static_cast<int>(if_exceptions->size());
1897 Node* merge = graph()->NewNode(common()->Merge(if_exception_count),
1898 if_exception_count, &if_exceptions->front());
1899 if_exceptions->push_back(merge);
1900 Node* ephi =
1901 graph()->NewNode(common()->EffectPhi(if_exception_count),
1902 if_exception_count + 1, &if_exceptions->front());
1903 Node* phi = graph()->NewNode(
1904 common()->Phi(MachineRepresentation::kTagged, if_exception_count),
1905 if_exception_count + 1, &if_exceptions->front());
1906 ReplaceWithValue(if_exception, phi, ephi, merge);
1907 }
1908
1909 ReplaceWithValue(node, value, effect, control);
1910 return Replace(value);
1911}
1912
// Reduces a JSLoadNamed node. Constant-folds two well-known patterns on a
// constant receiver — the "prototype" property of a JSFunction and the
// "length" property of a String — and otherwise defers to the generic
// named-property access path (ReducePropertyAccess) when feedback is valid.
// NOTE(review): this listing has elided lines (embedded numbering jumps
// 1919->1921, 1933->1935, 1947->1949). The matcher 'm' used below is
// declared on an elided line — presumably a HeapObjectMatcher over
// 'receiver'; likewise 'prototype' is initialized on an elided line —
// presumably from function.instance_prototype(broker()). Confirm against
// the real source file.
 1914 JSLoadNamedNode n(node);
 1915 NamedAccess const& p = n.Parameters();
 1916 Node* const receiver = n.object();
 1917 NameRef name = p.name();
 1918
 1919 // Check if we have a constant receiver.
 1921 if (m.HasResolvedValue()) {
 1922 ObjectRef object = m.Ref(broker());
 1923 if (object.IsJSFunction() && name.equals(broker()->prototype_string())) {
 1924 // Optimize "prototype" property of functions.
 1925 JSFunctionRef function = object.AsJSFunction();
 1926 // TODO(neis): Remove the has_prototype_slot condition once the broker is
 1927 // always enabled.
 1928 if (!function.map(broker()).has_prototype_slot() ||
 1929 !function.has_instance_prototype(broker()) ||
 1930 function.PrototypeRequiresRuntimeLookup(broker())) {
// Bail out: the prototype cannot be constant-folded safely.
 1931 return NoChange();
 1932 }
 1933 HeapObjectRef prototype =
 1935 Node* value = jsgraph()->ConstantNoHole(prototype, broker());
 1936 ReplaceWithValue(node, value);
 1937 return Replace(value);
 1938 } else if (object.IsString() && name.equals(broker()->length_string())) {
 1939 // Constant-fold "length" property on constant strings.
 1940 Node* value = jsgraph()->ConstantNoHole(object.AsString().length());
 1941 ReplaceWithValue(node, value);
 1942 return Replace(value);
 1943 }
 1944 }
 1945
// Without valid feedback there is nothing further to specialize on.
 1946 if (!p.feedback().IsValid()) return NoChange();
 1947 return ReducePropertyAccess(node, nullptr, name, jsgraph()->Dead(),
 1949}
1950
// Reduces a super-property named load (presumably
// JSNativeContextSpecialization::ReduceJSLoadNamedFromSuper — the signature
// line is elided in this listing; verify against the real source). Simply
// forwards to the generic named-property access path when feedback is valid.
// NOTE(review): the node-wrapper declaration (embedded line 1953) and the
// trailing ReducePropertyAccess arguments (line 1959) are elided here.
 1952 Node* node) {
 1954 NamedAccess const& p = n.Parameters();
 1955 NameRef name = p.name();
 1956
 1957 if (!p.feedback().IsValid()) return NoChange();
 1958 return ReducePropertyAccess(node, nullptr, name, jsgraph()->Dead(),
 1960}
1961
// Reduces a JSGetIterator node by desugaring it into three steps:
//   1. LoadNamed of the @@iterator property from the receiver,
//   2. a Call of that property (with an eager-deopt checkpoint before it),
//   3. a check that the call result is a JSReceiver,
// throwing TypeError via CallRuntime on the two failure paths (property is
// undefined / result is not a receiver). If the original node sits inside a
// try-block (has an IfException use), the exception edges of all desugared
// nodes are merged into pre-allocated 5-input Merge/EffectPhi/Phi nodes.
// NOTE(review): this listing elides several lines (embedded numbering jumps
// 2023->2025, 2084->2086, 2088->2089->2093), including the tail of the lazy
// frame-state construction, the feedback lookup, and the Call operator
// parameters — confirm against the real source file.
 1963 JSGetIteratorNode n(node);
 1964 GetIteratorParameters const& p = n.Parameters();
 1965
 1966 TNode<Object> receiver = n.receiver();
 1967 TNode<Object> context = n.context();
 1968 FrameState frame_state = n.frame_state();
 1969 Effect effect = n.effect();
 1970 Control control = n.control();
 1971
 1972 Node* iterator_exception_node = nullptr;
 1973 Node* if_exception_merge = nullptr;
 1974 Node* if_exception_effect_phi = nullptr;
 1975 Node* if_exception_phi = nullptr;
 1976 bool has_exception_node =
 1977 NodeProperties::IsExceptionalCall(node, &iterator_exception_node);
 1978 int exception_node_index = 0;
 1979 if (has_exception_node) {
 1980 DCHECK_NOT_NULL(iterator_exception_node);
 1981 // If there exists an IfException node for the iterator node, we need
 1982 // to merge all the desugared nodes exception. The iterator node will be
 1983 // desugared to LoadNamed, Call, CallRuntime, we can pre-allocate the
 1984 // nodes with 4 inputs here and we use dead_node as a placeholder for the
 1985 // input, which will be replaced.
 1986 // We use dead_node as a placeholder for the original exception node before
 1987 // it's uses are rewired.
 1988
 1989 Node* dead_node = jsgraph()->Dead();
 1990 if_exception_merge =
 1991 graph()->NewNode(common()->Merge(5), dead_node, dead_node, dead_node,
 1992 dead_node, dead_node);
 1993 if_exception_effect_phi =
 1994 graph()->NewNode(common()->EffectPhi(5), dead_node, dead_node,
 1995 dead_node, dead_node, dead_node, if_exception_merge);
 1996 if_exception_phi = graph()->NewNode(
 1997 common()->Phi(MachineRepresentation::kTagged, 5), dead_node, dead_node,
 1998 dead_node, dead_node, dead_node, if_exception_merge);
 1999 // Rewire the original exception node uses.
 2000 ReplaceWithValue(iterator_exception_node, if_exception_phi,
 2001 if_exception_effect_phi, if_exception_merge);
 2002 if_exception_merge->ReplaceInput(exception_node_index,
 2003 iterator_exception_node);
 2004 if_exception_effect_phi->ReplaceInput(exception_node_index,
 2005 iterator_exception_node);
 2006 if_exception_phi->ReplaceInput(exception_node_index,
 2007 iterator_exception_node);
 2008 exception_node_index++;
 2009 }
 2010
 2011 // Load iterator property operator
 2012 NameRef iterator_symbol = broker()->iterator_symbol();
 2013 const Operator* load_op =
 2014 javascript()->LoadNamed(iterator_symbol, p.loadFeedback());
 2015
 2016 // Lazy deopt of the load iterator property
 2017 // TODO(v8:10047): Use TaggedIndexConstant here once deoptimizer supports it.
 2018 Node* call_slot = jsgraph()->SmiConstant(p.callFeedback().slot.ToInt());
 2019 Node* call_feedback = jsgraph()->HeapConstantNoHole(p.callFeedback().vector);
 2020 Node* lazy_deopt_parameters[] = {receiver, call_slot, call_feedback};
 2021 Node* lazy_deopt_frame_state = CreateStubBuiltinContinuationFrameState(
 2022 jsgraph(), Builtin::kGetIteratorWithFeedbackLazyDeoptContinuation,
 2023 context, lazy_deopt_parameters, arraysize(lazy_deopt_parameters),
// NOTE(review): frame-state tail (embedded line 2024) elided in listing.
 2025 Node* load_property =
 2026 graph()->NewNode(load_op, receiver, n.feedback_vector(), context,
 2027 lazy_deopt_frame_state, effect, control);
// The LoadNamed node is both the new effect and control dependency.
 2028 effect = load_property;
 2029 control = load_property;
 2030
 2031 // Merge the exception path for LoadNamed.
 2032 if (has_exception_node) {
 2033 Node* if_exception =
 2034 graph()->NewNode(common()->IfException(), effect, control);
 2035 if_exception_merge->ReplaceInput(exception_node_index, if_exception);
 2036 if_exception_phi->ReplaceInput(exception_node_index, if_exception);
 2037 if_exception_effect_phi->ReplaceInput(exception_node_index, if_exception);
 2038 exception_node_index++;
 2039 control = graph()->NewNode(common()->IfSuccess(), control);
 2040 }
 2041
// If the loaded @@iterator property is undefined, throw an iterator error.
 2042 Node* check = graph()->NewNode(simplified()->ReferenceEqual(), load_property,
 2043 jsgraph()->UndefinedConstant());
 2044 Node* branch =
 2045 graph()->NewNode(common()->Branch(BranchHint::kFalse), check, control);
 2046
 2047 {
 2048 Node* if_not_iterator = graph()->NewNode(common()->IfTrue(), branch);
 2049 Node* effect_not_iterator = effect;
 2050 Node* control_not_iterator = if_not_iterator;
 2051 Node* call_runtime = effect_not_iterator = control_not_iterator =
 2052 graph()->NewNode(
 2053 javascript()->CallRuntime(Runtime::kThrowIteratorError, 1),
 2054 receiver, context, frame_state, effect_not_iterator,
 2055 control_not_iterator);
 2056 // Merge the exception path for CallRuntime.
 2057 if (has_exception_node) {
 2058 Node* if_exception = graph()->NewNode(
 2059 common()->IfException(), effect_not_iterator, control_not_iterator);
 2060 if_exception_merge->ReplaceInput(exception_node_index, if_exception);
 2061 if_exception_phi->ReplaceInput(exception_node_index, if_exception);
 2062 if_exception_effect_phi->ReplaceInput(exception_node_index, if_exception);
 2063 exception_node_index++;
 2064 control_not_iterator =
 2065 graph()->NewNode(common()->IfSuccess(), control_not_iterator);
 2066 }
 2067 Node* throw_node =
 2068 graph()->NewNode(common()->Throw(), call_runtime, control_not_iterator);
 2069 MergeControlToEnd(graph(), common(), throw_node);
 2070 }
 2071
 2072 control = graph()->NewNode(common()->IfFalse(), branch);
 2073
 2074 // Eager deopt of call iterator property
 2075 Node* parameters[] = {receiver, load_property, call_slot, call_feedback};
 2076 Node* eager_deopt_frame_state = CreateStubBuiltinContinuationFrameState(
 2077 jsgraph(), Builtin::kCallIteratorWithFeedback, context, parameters,
 2078 arraysize(parameters), frame_state, ContinuationFrameStateMode::EAGER);
 2079 Node* deopt_checkpoint = graph()->NewNode(
 2080 common()->Checkpoint(), eager_deopt_frame_state, effect, control);
 2081 effect = deopt_checkpoint;
 2082
 2083 // Call iterator property operator
 2084 ProcessedFeedback const& feedback =
// NOTE(review): feedback source (line 2085) and parts of the speculation /
// Call-operator construction (lines 2087, 2090-2092) are elided in listing.
 2086 SpeculationMode mode = feedback.IsInsufficient()
 2088 : feedback.AsCall().speculation_mode();
 2089 const Operator* call_op = javascript()->Call(
 2093 // Lazy deopt to check the call result is JSReceiver.
 2094 Node* call_lazy_deopt_frame_state = CreateStubBuiltinContinuationFrameState(
 2095 jsgraph(), Builtin::kCallIteratorWithFeedbackLazyDeoptContinuation,
 2096 context, nullptr, 0, frame_state, ContinuationFrameStateMode::LAZY);
 2097 Node* call_property = effect = control =
 2098 graph()->NewNode(call_op, load_property, receiver, n.feedback_vector(),
 2099 context, call_lazy_deopt_frame_state, effect, control);
 2100
 2101 // Merge the exception path for Call.
 2102 if (has_exception_node) {
 2103 Node* if_exception =
 2104 graph()->NewNode(common()->IfException(), effect, control);
 2105 if_exception_merge->ReplaceInput(exception_node_index, if_exception);
 2106 if_exception_phi->ReplaceInput(exception_node_index, if_exception);
 2107 if_exception_effect_phi->ReplaceInput(exception_node_index, if_exception);
 2108 exception_node_index++;
 2109 control = graph()->NewNode(common()->IfSuccess(), control);
 2110 }
 2111
 2112 // If the result is not JSReceiver, throw invalid iterator exception.
 2113 Node* is_receiver =
 2114 graph()->NewNode(simplified()->ObjectIsReceiver(), call_property);
 2115 Node* branch_node = graph()->NewNode(common()->Branch(BranchHint::kTrue),
 2116 is_receiver, control);
 2117 {
 2118 Node* if_not_receiver = graph()->NewNode(common()->IfFalse(), branch_node);
 2119 Node* effect_not_receiver = effect;
 2120 Node* control_not_receiver = if_not_receiver;
 2121 Node* call_runtime = effect_not_receiver = control_not_receiver =
 2122 graph()->NewNode(
 2123 javascript()->CallRuntime(Runtime::kThrowSymbolIteratorInvalid, 0),
 2124 context, frame_state, effect_not_receiver, control_not_receiver);
 2125 // Merge the exception path for CallRuntime.
 2126 if (has_exception_node) {
 2127 Node* if_exception = graph()->NewNode(
 2128 common()->IfException(), effect_not_receiver, control_not_receiver);
 2129 if_exception_merge->ReplaceInput(exception_node_index, if_exception);
 2130 if_exception_phi->ReplaceInput(exception_node_index, if_exception);
 2131 if_exception_effect_phi->ReplaceInput(exception_node_index, if_exception);
 2132 exception_node_index++;
 2133 control_not_receiver =
 2134 graph()->NewNode(common()->IfSuccess(), control_not_receiver);
 2135 }
 2136 Node* throw_node =
 2137 graph()->NewNode(common()->Throw(), call_runtime, control_not_receiver);
 2138 MergeControlToEnd(graph(), common(), throw_node);
 2139 }
 2140 Node* if_receiver = graph()->NewNode(common()->IfTrue(), branch_node);
 2141 ReplaceWithValue(node, call_property, effect, if_receiver);
 2142
 2143 if (has_exception_node) {
// All five pre-allocated exception slots must now be filled exactly.
 2144 DCHECK_EQ(exception_node_index, if_exception_merge->InputCount());
 2145 DCHECK_EQ(exception_node_index, if_exception_effect_phi->InputCount() - 1);
 2146 DCHECK_EQ(exception_node_index, if_exception_phi->InputCount() - 1);
 2147#ifdef DEBUG
 2148 for (Node* input : if_exception_merge->inputs()) {
 2149 DCHECK(!input->IsDead());
 2150 }
 2151 for (Node* input : if_exception_effect_phi->inputs()) {
 2152 DCHECK(!input->IsDead());
 2153 }
 2154 for (Node* input : if_exception_phi->inputs()) {
 2155 DCHECK(!input->IsDead());
 2156 }
 2157#endif
 2158 }
 2159 return Replace(if_receiver);
 2160}
2161
// Tail of a named-property store reduction (presumably
// ReduceJSSetNamedProperty — its signature and node-wrapper declaration,
// embedded lines 2162-2163, are elided in this listing; confirm against the
// real source). Forwards to the generic property-access path when feedback
// is valid; the trailing ReducePropertyAccess arguments (line 2167) are
// also elided.
 2164 NamedAccess const& p = n.Parameters();
 2165 if (!p.feedback().IsValid()) return NoChange();
 2166 return ReducePropertyAccess(node, nullptr, p.name(), n.value(),
 2168}
2169
// Reduces a JSDefineNamedOwnProperty node by forwarding to the generic
// property-access path when feedback is valid.
// NOTE(review): the node-wrapper declaration (embedded line 2172) and the
// trailing ReducePropertyAccess arguments (lines 2176-2177) are elided in
// this listing; confirm against the real source file.
 2171 Node* node) {
 2173 DefineNamedOwnPropertyParameters const& p = n.Parameters();
 2174 if (!p.feedback().IsValid()) return NoChange();
 2175 return ReducePropertyAccess(node, nullptr, p.name(), n.value(),
 2178}
2179
// Reduces an indexed element access where the receiver is known to be a
// String: inserts a CheckString, computes the length, and lowers the access
// to an indexed single-character load (BuildIndexedStringLoad). Stores and
// 'in' checks bail out because strings are immutable and 'has' throws on
// primitives.
// NOTE(review): the signature head and the 'receiver' declaration (embedded
// lines 2180, 2182) are elided in this listing.
 2181 Node* node, Node* index, Node* value, KeyedAccessMode const& keyed_mode) {
 2183 Node* effect = NodeProperties::GetEffectInput(node);
 2184 Node* control = NodeProperties::GetControlInput(node);
 2185
 2186 // Strings are immutable in JavaScript.
 2187 if (keyed_mode.access_mode() == AccessMode::kStore) return NoChange();
 2188
 2189 // `in` cannot be used on strings.
 2190 if (keyed_mode.access_mode() == AccessMode::kHas) return NoChange();
 2191
 2192 // Ensure that the {receiver} is actually a String.
 2193 receiver = effect = graph()->NewNode(
 2194 simplified()->CheckString(FeedbackSource()), receiver, effect, control);
 2195
 2196 // Determine the {receiver} length.
 2197 Node* length = graph()->NewNode(simplified()->StringLength(), receiver);
 2198
 2199 // Load the single character string from {receiver} or yield undefined
 2200 // if the {index} is out of bounds (depending on the {load_mode}).
 2201 value = BuildIndexedStringLoad(receiver, index, length, &effect, &control,
 2202 keyed_mode.load_mode());
 2203
 2204 ReplaceWithValue(node, value, effect, control);
 2205 return Replace(value);
 2206}
2207
// File-local helper: if {receiver} is a compile-time-constant JSTypedArray
// whose backing store is off-heap, returns its ref; otherwise std::nullopt.
// On-heap typed arrays are rejected because their data pointer can move.
// NOTE(review): the matcher 'm' is declared on an elided line (embedded
// numbering jumps 2211->2213) — presumably a HeapObjectMatcher over
// 'receiver'; confirm against the real source file.
2208namespace {
2209
2210OptionalJSTypedArrayRef GetTypedArrayConstant(JSHeapBroker* broker,
2211 Node* receiver) {
2213 if (!m.HasResolvedValue()) return std::nullopt;
2214 ObjectRef object = m.Ref(broker);
2215 if (!object.IsJSTypedArray()) return std::nullopt;
2216 JSTypedArrayRef typed_array = object.AsJSTypedArray();
2217 if (typed_array.is_on_heap()) return std::nullopt;
2218 return typed_array;
2219}
2220
2221} // namespace
2222
// Filters {maps} in place: when a root map can be inferred for {object} (and
// that root map is not an abandoned prototype map), removes every candidate
// that is an abandoned prototype map or whose root map differs from the
// inferred one — such maps are impossible for this object.
// NOTE(review): the signature head (embedded line 2223) is elided in this
// listing; this is presumably
// JSNativeContextSpecialization::RemoveImpossibleMaps.
 2224 Node* object, ZoneVector<MapRef>* maps) const {
 2225 OptionalMapRef root_map = InferRootMap(object);
 2226 if (root_map.has_value() && !root_map->is_abandoned_prototype_map()) {
// Classic erase-remove over the candidate set.
 2227 maps->erase(
 2228 std::remove_if(maps->begin(), maps->end(),
 2229 [root_map, this](MapRef map) {
 2230 return map.is_abandoned_prototype_map() ||
 2231 !map.FindRootMap(broker()).equals(*root_map);
 2232 }),
 2233 maps->end());
 2234 }
 2235}
2236
 2237// Possibly refine the feedback using inferred map information from the graph.
// Only loads and 'has' accesses are refined; stores keep the raw feedback.
// Returns the original feedback unchanged when map inference fails.
// NOTE(review): the signature head (embedded lines 2238-2239) is elided in
// this listing.
 2240 ElementAccessFeedback const& feedback, Node* receiver,
 2241 Effect effect) const {
 2242 AccessMode access_mode = feedback.keyed_mode().access_mode();
 2243 bool use_inference =
 2244 access_mode == AccessMode::kLoad || access_mode == AccessMode::kHas;
 2245 if (!use_inference) return feedback;
 2246
 2247 ZoneVector<MapRef> inferred_maps(zone());
 2248 if (!InferMaps(receiver, effect, &inferred_maps)) return feedback;
 2249
 2250 RemoveImpossibleMaps(receiver, &inferred_maps);
 2251 // TODO(neis): After Refine, the resulting feedback can still contain
 2252 // impossible maps when a target is kept only because more than one of its
 2253 // sources was inferred. Think of a way to completely rule out impossible
 2254 // maps.
 2255 return feedback.Refine(broker(), inferred_maps);
 2256}
2257
// Reduces a keyed element access (load/store/has/define/store-in-literal)
// using ElementAccessFeedback. High-level flow: refine feedback via inferred
// maps, try constant-folding on HeapConstant receivers, special-case
// string receivers, compute ElementAccessInfos, install prototype-chain /
// protector dependencies for stores and 'has', then emit either a
// monomorphic access (single CheckMaps or transition) or a polymorphic
// CompareMaps branch chain joined with Merge/Phi/EffectPhi.
// NOTE(review): this listing elides several lines (embedded numbering jumps
// e.g. 2272->2276, 2287->2289, 2309->2311, 2374->2376, 2384->2386,
// 2415->2418, 2448->2450) — including the receiver/effect/control
// declarations and parts of some NewNode argument lists; confirm against
// the real source file.
 2259 Node* node, Node* index, Node* value,
 2260 ElementAccessFeedback const& feedback) {
 2261 DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
 2262 node->opcode() == IrOpcode::kJSSetKeyedProperty ||
 2263 node->opcode() == IrOpcode::kJSStoreInArrayLiteral ||
 2264 node->opcode() == IrOpcode::kJSDefineKeyedOwnPropertyInLiteral ||
 2265 node->opcode() == IrOpcode::kJSHasProperty ||
 2266 node->opcode() == IrOpcode::kJSDefineKeyedOwnProperty);
 2267 static_assert(JSLoadPropertyNode::ObjectIndex() == 0 &&
 2268 JSSetKeyedPropertyNode::ObjectIndex() == 0 &&
 2269 JSStoreInArrayLiteralNode::ArrayIndex() == 0 &&
 2270 JSDefineKeyedOwnPropertyInLiteralNode::ObjectIndex() == 0 &&
 2271 JSHasPropertyNode::ObjectIndex() == 0);
 2272
 2276 Node* context = NodeProperties::GetContextInput(node);
 2277
 2278 // TODO(neis): It's odd that we do optimizations below that don't really care
 2279 // about the feedback, but we don't do them when the feedback is megamorphic.
 2280 if (feedback.transition_groups().empty()) return NoChange();
 2281
 2282 ElementAccessFeedback const& refined_feedback =
 2283 TryRefineElementAccessFeedback(feedback, receiver, effect);
 2284
 2285 AccessMode access_mode = refined_feedback.keyed_mode().access_mode();
// Constant receiver + load/has: try to constant-fold the whole access.
 2286 if ((access_mode == AccessMode::kLoad || access_mode == AccessMode::kHas) &&
 2287 receiver->opcode() == IrOpcode::kHeapConstant) {
 2289 node, index, access_mode, refined_feedback.keyed_mode().load_mode());
 2290 if (reduction.Changed()) return reduction;
 2291 }
 2292
 2293 if (!refined_feedback.transition_groups().empty() &&
 2294 refined_feedback.HasOnlyStringMaps(broker())) {
 2295 return ReduceElementAccessOnString(node, index, value,
 2296 refined_feedback.keyed_mode());
 2297 }
 2298
 2299 AccessInfoFactory access_info_factory(broker(), graph()->zone());
 2300 ZoneVector<ElementAccessInfo> access_infos(zone());
 2301 if (!access_info_factory.ComputeElementAccessInfos(refined_feedback,
 2302 &access_infos) ||
 2303 access_infos.empty()) {
 2304 return NoChange();
 2305 }
 2306
 2307 // Do not optimize AccessMode::kDefine for typed arrays.
 2308 if (access_mode == AccessMode::kDefine) {
 2309 for (const ElementAccessInfo& access_info : access_infos) {
 2311 access_info.elements_kind())) {
 2312 return NoChange();
 2313 }
 2314 }
 2315 }
 2316
 2317 // For holey stores or growing stores, we need to check that the prototype
 2318 // chain contains no setters for elements, and we need to guard those checks
 2319 // via code dependencies on the relevant prototype maps.
 2320 if (access_mode == AccessMode::kStore) {
 2321 // TODO(turbofan): We could have a fast path here, that checks for the
 2322 // common case of Array or Object prototype only and therefore avoids
 2323 // the zone allocation of this vector.
 2324 ZoneVector<MapRef> prototype_maps(zone());
 2325 for (ElementAccessInfo const& access_info : access_infos) {
 2326 for (MapRef receiver_map : access_info.lookup_start_object_maps()) {
 2327 // If the {receiver_map} has a prototype and its elements backing
 2328 // store is either holey, or we have a potentially growing store,
 2329 // then we need to check that all prototypes have stable maps with
 2330 // no element accessors and no throwing behavior for elements (and we
 2331 // need to guard against changes to that below).
 2332 if ((IsHoleyOrDictionaryElementsKind(receiver_map.elements_kind()) ||
 2333 StoreModeCanGrow(feedback.keyed_mode().store_mode())) &&
 2334 !receiver_map.PrototypesElementsDoNotHaveAccessorsOrThrow(
 2335 broker(), &prototype_maps)) {
 2336 return NoChange();
 2337 }
 2338
 2339 // TODO(v8:12547): Support writing to objects in shared space, which
 2340 // need a write barrier that calls Object::Share to ensure the RHS is
 2341 // shared.
 2342 if (InstanceTypeChecker::IsAlwaysSharedSpaceJSObject(
 2343 receiver_map.instance_type())) {
 2344 return NoChange();
 2345 }
 2346 }
 2347 }
 2348 for (MapRef prototype_map : prototype_maps) {
 2349 dependencies()->DependOnStableMap(prototype_map);
 2350 }
 2351 } else if (access_mode == AccessMode::kHas) {
 2352 // If we have any fast arrays, we need to check and depend on
 2353 // NoElementsProtector.
 2354 for (ElementAccessInfo const& access_info : access_infos) {
 2355 if (IsFastElementsKind(access_info.elements_kind())) {
 2356 if (!dependencies()->DependOnNoElementsProtector()) return NoChange();
 2357 break;
 2358 }
 2359 }
 2360 }
 2361
 2362 // Check for the monomorphic case.
 2363 PropertyAccessBuilder access_builder(jsgraph(), broker());
 2364 if (access_infos.size() == 1) {
 2365 ElementAccessInfo access_info = access_infos.front();
 2366
 2367 if (!access_info.transition_sources().empty()) {
 2368 DCHECK_EQ(access_info.lookup_start_object_maps().size(), 1);
 2369 // Perform possible elements kind transitions.
 2370 MapRef transition_target = access_info.lookup_start_object_maps().front();
 2371 ZoneRefSet<Map> sources(access_info.transition_sources().begin(),
 2372 access_info.transition_sources().end(),
 2373 graph()->zone());
 2374 effect = graph()->NewNode(simplified()->TransitionElementsKindOrCheckMap(
 2376 sources, transition_target)),
 2377 receiver, effect, control);
 2378 } else {
 2379 // Perform map check on the {receiver}.
 2380 access_builder.BuildCheckMaps(receiver, &effect, control,
 2381 access_info.lookup_start_object_maps());
 2382 }
 2383
 2384 // Access the actual element.
 2386 BuildElementAccess(receiver, index, value, effect, control, context,
 2387 access_info, feedback.keyed_mode());
 2388 value = continuation.value();
 2389 effect = continuation.effect();
 2390 control = continuation.control();
 2391 } else {
 2392 // The final states for every polymorphic branch. We join them with
 2393 // Merge+Phi+EffectPhi at the bottom.
 2394 ZoneVector<Node*> values(zone());
 2395 ZoneVector<Node*> effects(zone());
 2396 ZoneVector<Node*> controls(zone());
 2397
 2398 // Generate code for the various different element access patterns.
 2399 Node* fallthrough_control = control;
 2400 for (size_t j = 0; j < access_infos.size(); ++j) {
 2401 ElementAccessInfo const& access_info = access_infos[j];
 2402 Node* this_receiver = receiver;
 2403 Node* this_value = value;
 2404 Node* this_index = index;
 2405 Effect this_effect = effect;
 2406 Control this_control{fallthrough_control};
 2407
 2408 // Perform possible elements kind transitions.
 2409 MapRef transition_target = access_info.lookup_start_object_maps().front();
 2410 for (MapRef transition_source : access_info.transition_sources()) {
 2411 DCHECK_EQ(access_info.lookup_start_object_maps().size(), 1);
 2412 this_effect = graph()->NewNode(
 2413 simplified()->TransitionElementsKind(ElementsTransition(
 2414 IsSimpleMapChangeTransition(transition_source.elements_kind(),
 2415 transition_target.elements_kind())
 2418 transition_source, transition_target)),
 2419 receiver, this_effect, this_control);
 2420 }
 2421
 2422 // Perform map check(s) on {receiver}.
 2423 ZoneVector<MapRef> const& receiver_maps =
 2424 access_info.lookup_start_object_maps();
 2425 if (j == access_infos.size() - 1) {
 2426 // Last map check on the fallthrough control path, do a
 2427 // conditional eager deoptimization exit here.
 2428 access_builder.BuildCheckMaps(receiver, &this_effect, this_control,
 2429 receiver_maps);
 2430 fallthrough_control = nullptr;
 2431 } else {
 2432 // Explicitly branch on the {receiver_maps}.
 2433 ZoneRefSet<Map> maps(receiver_maps.begin(), receiver_maps.end(),
 2434 graph()->zone());
 2435 Node* check = this_effect =
 2436 graph()->NewNode(simplified()->CompareMaps(maps), receiver,
 2437 this_effect, fallthrough_control);
 2438 Node* branch =
 2439 graph()->NewNode(common()->Branch(), check, fallthrough_control);
 2440 fallthrough_control = graph()->NewNode(common()->IfFalse(), branch);
 2441 this_control = graph()->NewNode(common()->IfTrue(), branch);
 2442
 2443 // Introduce a MapGuard to learn from this on the effect chain.
 2444 this_effect = graph()->NewNode(simplified()->MapGuard(maps), receiver,
 2445 this_effect, this_control);
 2446 }
 2447
 2448 // Access the actual element.
 2450 this_receiver, this_index, this_value, this_effect, this_control,
 2451 context, access_info, feedback.keyed_mode());
 2452 values.push_back(continuation.value());
 2453 effects.push_back(continuation.effect());
 2454 controls.push_back(continuation.control());
 2455 }
 2456
 2457 DCHECK_NULL(fallthrough_control);
 2458
 2459 // Generate the final merge point for all (polymorphic) branches.
 2460 int const control_count = static_cast<int>(controls.size());
 2461 if (control_count == 0) {
 2462 value = effect = control = jsgraph()->Dead();
 2463 } else if (control_count == 1) {
 2464 value = values.front();
 2465 effect = effects.front();
 2466 control = controls.front();
 2467 } else {
 2468 control = graph()->NewNode(common()->Merge(control_count), control_count,
 2469 &controls.front());
 2470 values.push_back(control);
 2471 value = graph()->NewNode(
 2472 common()->Phi(MachineRepresentation::kTagged, control_count),
 2473 control_count + 1, &values.front());
 2474 effects.push_back(control);
 2475 effect = graph()->NewNode(common()->EffectPhi(control_count),
 2476 control_count + 1, &effects.front());
 2477 }
 2478 }
 2479
 2480 ReplaceWithValue(node, value, effect, control);
 2481 return Replace(value);
 2482}
2483
// Attempts to constant-fold an element load (or 'has' check) on a
// HeapConstant receiver: for a constant integer key it tries the receiver's
// own constant elements (including the COW-array trick, which guards with a
// ReferenceEqual check + CheckIf deopt), or a constant character of a
// constant string; failing that, for constant strings it still
// strength-reduces the access using the known (immutable) string length.
// NOTE(review): the signature head and the receiver declaration (embedded
// lines 2484, 2489) are elided in this listing, as is part of the elements
// load (line 2525).
 2485 Node* node, Node* key, AccessMode access_mode,
 2486 KeyedAccessLoadMode load_mode) {
 2487 DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
 2488 node->opcode() == IrOpcode::kJSHasProperty);
 2490 Node* effect = NodeProperties::GetEffectInput(node);
 2491 Node* control = NodeProperties::GetControlInput(node);
 2492
 2493 HeapObjectMatcher mreceiver(receiver);
 2494 HeapObjectRef receiver_ref = mreceiver.Ref(broker());
 2495 if (receiver_ref.IsNull() || receiver_ref.IsUndefined() ||
 2496 // The 'in' operator throws a TypeError on primitive values.
 2497 (receiver_ref.IsString() && access_mode == AccessMode::kHas)) {
 2498 return NoChange();
 2499 }
 2500
 2501 // Check whether we're accessing a known element on the {receiver} and can
 2502 // constant-fold the load.
 2503 NumberMatcher mkey(key);
 2504 if (mkey.IsInteger() &&
 2505 mkey.IsInRange(0.0, static_cast<double>(JSObject::kMaxElementIndex))) {
 2506 static_assert(JSObject::kMaxElementIndex <= kMaxUInt32);
 2507 const uint32_t index = static_cast<uint32_t>(mkey.ResolvedValue());
 2508 OptionalObjectRef element;
 2509
 2510 if (receiver_ref.IsJSObject()) {
 2511 JSObjectRef jsobject_ref = receiver_ref.AsJSObject();
 2512 OptionalFixedArrayBaseRef elements =
 2513 jsobject_ref.elements(broker(), kRelaxedLoad);
 2514 if (elements.has_value()) {
 2515 element = jsobject_ref.GetOwnConstantElement(broker(), *elements, index,
 2516 dependencies());
 2517 if (!element.has_value() && receiver_ref.IsJSArray()) {
 2518 // We didn't find a constant element, but if the receiver is a
 2519 // cow-array we can exploit the fact that any future write to the
 2520 // element will replace the whole elements storage.
 2521 element = receiver_ref.AsJSArray().GetOwnCowElement(broker(),
 2522 *elements, index);
 2523 if (element.has_value()) {
// Guard the fold: deopt if the COW elements store was ever replaced.
 2524 Node* actual_elements = effect = graph()->NewNode(
 2526 receiver, effect, control);
 2527 Node* check = graph()->NewNode(
 2528 simplified()->ReferenceEqual(), actual_elements,
 2529 jsgraph()->ConstantNoHole(*elements, broker()));
 2530 effect = graph()->NewNode(
 2531 simplified()->CheckIf(
 2532 DeoptimizeReason::kCowArrayElementsChanged),
 2533 check, effect, control);
 2534 }
 2535 }
 2536 }
 2537 } else if (receiver_ref.IsString()) {
 2538 element =
 2539 receiver_ref.AsString().GetCharAsStringOrUndefined(broker(), index);
 2540 }
 2541
 2542 if (element.has_value()) {
// 'has' folds to true; a load folds to the constant element itself.
 2543 Node* value = access_mode == AccessMode::kHas
 2544 ? jsgraph()->TrueConstant()
 2545 : jsgraph()->ConstantNoHole(*element, broker());
 2546 ReplaceWithValue(node, value, effect, control);
 2547 return Replace(value);
 2548 }
 2549 }
 2550
 2551 // For constant Strings we can eagerly strength-reduce the keyed
 2552 // accesses using the known length, which doesn't change.
 2553 if (receiver_ref.IsString()) {
 2554 DCHECK_NE(access_mode, AccessMode::kHas);
 2555 // Ensure that {key} is less than {receiver} length.
 2556 Node* length = jsgraph()->ConstantNoHole(receiver_ref.AsString().length());
 2557
 2558 // Load the single character string from {receiver} or yield
 2559 // undefined if the {key} is out of bounds (depending on the
 2560 // {load_mode}).
 2561 Node* value = BuildIndexedStringLoad(receiver, key, length, &effect,
 2562 &control, load_mode);
 2563 ReplaceWithValue(node, value, effect, control);
 2564 return Replace(value);
 2565 }
 2566
 2567 return NoChange();
 2568}
2569
// Central dispatch for all property accesses: fetches the processed feedback
// for {source}, optionally refines element-access feedback to named-access
// when the key is a compile-time unique name, then routes to the matching
// specialized reducer (eager deopt on insufficient feedback, named access,
// mega-DOM access, or element access).
// NOTE(review): the switch's case-label lines are elided in this excerpt.
2571 Node* node, Node* key, OptionalNameRef static_name, Node* value,
2572 FeedbackSource const& source, AccessMode access_mode) {
     // Exactly one of {key} (dynamic) and {static_name} (static) is given.
2573 DCHECK_EQ(key == nullptr, static_name.has_value());
2574 DCHECK(node->opcode() == IrOpcode::kJSLoadProperty ||
2575 node->opcode() == IrOpcode::kJSSetKeyedProperty ||
2576 node->opcode() == IrOpcode::kJSStoreInArrayLiteral ||
2577 node->opcode() == IrOpcode::kJSDefineKeyedOwnPropertyInLiteral ||
2578 node->opcode() == IrOpcode::kJSHasProperty ||
2579 node->opcode() == IrOpcode::kJSLoadNamed ||
2580 node->opcode() == IrOpcode::kJSSetNamedProperty ||
2581 node->opcode() == IrOpcode::kJSDefineNamedOwnProperty ||
2582 node->opcode() == IrOpcode::kJSLoadNamedFromSuper ||
2583 node->opcode() == IrOpcode::kJSDefineKeyedOwnProperty);
2584 DCHECK_GE(node->op()->ControlOutputCount(), 1);
2585
2586 ProcessedFeedback const* feedback =
2587 &broker()->GetFeedbackForPropertyAccess(source, access_mode, static_name);
2588
     // A keyed access whose key turns out to be a unique, non-array-index
     // name can be refined into (cheaper) named-access feedback.
2589 if (feedback->kind() == ProcessedFeedback::kElementAccess &&
2590 feedback->AsElementAccess().transition_groups().empty()) {
2591 HeapObjectMatcher m_key(key);
2592 if (m_key.HasResolvedValue() && m_key.Ref(broker()).IsName()) {
2593 NameRef name_key = m_key.Ref(broker()).AsName();
2594 if (name_key.IsUniqueName() && !name_key.object()->IsArrayIndex()) {
2595 feedback = &feedback->AsElementAccess().Refine(
2596 broker(), m_key.Ref(broker()).AsName());
2597 }
2598 }
2599 }
2600
2601 switch (feedback->kind()) {
2603 return ReduceEagerDeoptimize(
2604 node,
2605 DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess);
2607 return ReduceNamedAccess(node, value, feedback->AsNamedAccess(),
2608 access_mode, key);
2610 DCHECK_EQ(access_mode, AccessMode::kLoad);
2613 node, value, feedback->AsMegaDOMPropertyAccess(), source);
2615 DCHECK_EQ(feedback->AsElementAccess().keyed_mode().access_mode(),
2616 access_mode);
2617 DCHECK_NE(node->opcode(), IrOpcode::kJSLoadNamedFromSuper);
2618 return ReduceElementAccess(node, key, value, feedback->AsElementAccess());
2619 default:
2620 UNREACHABLE();
2621 }
2622}
2623
// Replaces {node} with an unconditional eager Deoptimize (with {reason}) when
// the kBailoutOnUninitialized flag is set; otherwise leaves the node alone.
// The deoptimize is merged into the graph's end and {node} becomes Dead.
2625 Node* node, DeoptimizeReason reason) {
2626 if (!(flags() & kBailoutOnUninitialized)) return NoChange();
2627
2628 Node* effect = NodeProperties::GetEffectInput(node);
2629 Node* control = NodeProperties::GetControlInput(node);
2630 Node* frame_state =
2632 Node* deoptimize =
2633 graph()->NewNode(common()->Deoptimize(reason, FeedbackSource()),
2634 frame_state, effect, control);
2635 MergeControlToEnd(graph(), common(), deoptimize);
     // Turn the original node into a Dead node; uses are unreachable now.
2636 node->TrimInputCount(0);
2637 NodeProperties::ChangeOp(node, common()->Dead());
2638 return Changed(node);
2639}
2640
// Reduces JSHasProperty (the 'in' operator) through the generic
// property-access path. The {value} is unused for a 'has' access, so a Dead
// node is passed as placeholder. Requires valid feedback.
2642 JSHasPropertyNode n(node);
2643 PropertyAccess const& p = n.Parameters();
2644 Node* value = jsgraph()->Dead();
2645 if (!p.feedback().IsValid()) return NoChange();
2646 return ReducePropertyAccess(node, n.key(), std::nullopt, value,
2648}
2649
// Optimizes a JSLoadProperty whose key comes from a JSForInNext in fast
// enum-cache mode: turns the generic load into a (possibly map-checked)
// LoadFieldByIndex via the map's enum cache indices.
2651 Node* node) {
2652 // We can optimize a property load if it's being used inside a for..in:
2653 // for (name in receiver) {
2654 // value = receiver[name];
2655 // ...
2656 // }
2657 //
2658 // If the for..in is in fast-mode, we know that the {receiver} has {name}
2659 // as own property, otherwise the enumeration wouldn't include it. The graph
2660 // constructed by the BytecodeGraphBuilder in this case looks like this:
2661
2662 // receiver
2663 // ^ ^
2664 // | |
2665 // | +-+
2666 // | |
2667 // | JSToObject
2668 // | ^
2669 // | |
2670 // | |
2671 // | JSForInNext
2672 // | ^
2673 // | |
2674 // +----+ |
2675 // | |
2676 // | |
2677 // JSLoadProperty
2678
2679 // If the for..in has only seen maps with enum cache consisting of keys
2680 // and indices so far, we can turn the {JSLoadProperty} into a map check
2681 // on the {receiver} and then just load the field value dynamically via
2682 // the {LoadFieldByIndex} operator. The map check is only necessary when
2683 // TurboFan cannot prove that there is no observable side effect between
2684 // the {JSForInNext} and the {JSLoadProperty} node.
2685 //
2686 // We can do a similar optimization when the receiver of {JSLoadProperty} is
2687 // not identical to the receiver of {JSForInNext}:
2688 // for (name in receiver) {
2689 // value = object[name];
2690 // ...
2691 // }
2692 //
2693 // This is because when the key is {JSForInNext}, we will generate a
2694 // {GetEnumeratedKeyedProperty} bytecode for {JSLoadProperty}. If the bytecode
2695 // always manages to use the enum cache, we will keep the inline cache in
2696 // uninitialized state. So if the graph is as below, we can firstly do a map
2697 // check on {object} and then turn the {JSLoadProperty} into the
2698 // {LoadFieldByIndex}. This is also safe when the bytecode has never been
2699 // profiled. When it happens to pass the map check, we can use the fast
2700 // path. Otherwise it will trigger a deoptimization.
2701
2702 // object receiver
2703 // ^ ^
2704 // | |
2705 // | |
2706 // | |
2707 // | JSToObject
2708 // | ^
2709 // | |
2710 // | |
2711 // | JSForInNext
2712 // | ^
2713 // | |
2714 // +----+ +-----+
2715 // | |
2716 // | |
2717 // JSLoadProperty (insufficient feedback)
2718
2719 // Also note that it's safe to look through the {JSToObject}, since the
2720 // [[Get]] operation does an implicit ToObject anyway, and these operations
2721 // are not observable.
2722
2723 DCHECK_EQ(IrOpcode::kJSLoadProperty, node->opcode());
2726 Node* effect = NodeProperties::GetEffectInput(node);
2727 Node* control = NodeProperties::GetControlInput(node);
2728
     // Only the keys+indices enum-cache mode carries the data we need.
2729 if (name.Parameters().mode() != ForInMode::kUseEnumCacheKeysAndIndices) {
2730 return NoChange();
2731 }
2732
2733 Node* object = name.receiver();
2734 Node* cache_type = name.cache_type();
2735 Node* index = name.index();
     // Look through an intervening JSToObject (not observable, see above).
2736 if (object->opcode() == IrOpcode::kJSToObject) {
2737 object = NodeProperties::GetValueInput(object, 0);
2738 }
2739 bool speculating_object_is_receiver = false;
2740 if (object != receiver) {
2741 JSLoadPropertyNode n(node);
2742 PropertyAccess const& p = n.Parameters();
2743
2745 FeedbackSource(p.feedback()), AccessMode::kLoad, std::nullopt);
2746 // When the feedback is uninitialized, it is either a load from a
2747 // {GetEnumeratedKeyedProperty} which always hits the enum cache, or a keyed
2748 // load that had never been reached. In either case, we can check the map
2749 // of the receiver and use the enum cache if the map matches the {cache_type}.
2750 if (feedback.kind() != ProcessedFeedback::kInsufficient) {
2751 return NoChange();
2752 }
2753
2754 // Ensure that {receiver} is a HeapObject.
2755 receiver = effect = graph()->NewNode(simplified()->CheckHeapObject(),
2756 receiver, effect, control);
2757 speculating_object_is_receiver = true;
2758 }
2759
2760 // No need to repeat the map check if we can prove that there's no
2761 // observable side effect between {effect} and {name}. But we always need a
2762 // map check when {object} is not identical to {receiver}.
2764 speculating_object_is_receiver) {
2765 // Check that the {receiver} map is still valid.
2766 Node* receiver_map = effect =
2767 graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()),
2768 receiver, effect, control);
2769 Node* check = graph()->NewNode(simplified()->ReferenceEqual(), receiver_map,
2770 cache_type);
2771 effect =
2772 graph()->NewNode(simplified()->CheckIf(DeoptimizeReason::kWrongMap),
2773 check, effect, control);
2774 }
2775
2776 // Load the enum cache indices from the {cache_type}.
2777 Node* descriptor_array = effect = graph()->NewNode(
2778 simplified()->LoadField(AccessBuilder::ForMapDescriptors()), cache_type,
2779 effect, control);
2780 Node* enum_cache = effect = graph()->NewNode(
2782 descriptor_array, effect, control);
2783 Node* enum_indices = effect = graph()->NewNode(
2784 simplified()->LoadField(AccessBuilder::ForEnumCacheIndices()), enum_cache,
2785 effect, control);
2786
2787 // Ensure that the {enum_indices} are valid.
2788 Node* check = graph()->NewNode(
2789 simplified()->BooleanNot(),
2790 graph()->NewNode(simplified()->ReferenceEqual(), enum_indices,
2791 jsgraph()->EmptyFixedArrayConstant()));
2792 effect = graph()->NewNode(
2793 simplified()->CheckIf(DeoptimizeReason::kWrongEnumIndices), check, effect,
2794 control);
2795
2796 // Determine the key from the {enum_indices}.
2797 Node* key = effect = graph()->NewNode(
2798 simplified()->LoadElement(
2800 enum_indices, index, effect, control);
2801
2802 // Load the actual field value.
2803 Node* value = effect = graph()->NewNode(simplified()->LoadFieldByIndex(),
2804 receiver, key, effect, control);
2805 ReplaceWithValue(node, value, effect, control);
2806 return Replace(value);
2807}
2808
// Reduces JSLoadProperty. First tries the specialized for..in enum-cache
// path when the key is a JSForInNext (helper call line elided in this
// excerpt); otherwise falls back to the generic feedback-driven path.
2810 JSLoadPropertyNode n(node);
2811 PropertyAccess const& p = n.Parameters();
2812 Node* name = n.key();
2813
2814 if (name->opcode() == IrOpcode::kJSForInNext) {
2816 if (reduction.Changed()) return reduction;
2817 }
2818
2819 if (!p.feedback().IsValid()) return NoChange();
     // Loads carry no incoming value; pass a Dead placeholder.
2820 Node* value = jsgraph()->Dead();
2821 return ReducePropertyAccess(node, name, std::nullopt, value,
2823}
2824
// Reduces a keyed store through the generic property-access path, passing
// the stored value along. Requires valid feedback. (The enclosing function's
// signature line is elided in this excerpt.)
2827 PropertyAccess const& p = n.Parameters();
2828 if (!p.feedback().IsValid()) return NoChange();
2829 return ReducePropertyAccess(node, n.key(), std::nullopt, n.value(),
2831}
2832
// Reduces a keyed own-property definition through the generic
// property-access path. Requires valid feedback.
2834 Node* node) {
2836 PropertyAccess const& p = n.Parameters();
2837 if (!p.feedback().IsValid()) return NoChange();
2838 return ReducePropertyAccess(node, n.key(), std::nullopt, n.value(),
2841}
2842
// Inlines the call to an accessor-constant getter: either a JSFunction call
// or an inlined API (FunctionTemplateInfo) call. Returns the value node, or
// nullptr when inlining is refused (super IC with an API getter). Updates
// {*effect}/{*control} in place and records IfException projections into
// {if_exceptions} when inside a try-block.
2844 Node* receiver, ConvertReceiverMode receiver_mode,
2845 Node* lookup_start_object, Node* context, Node* frame_state, Node** effect,
2846 Node** control, ZoneVector<Node*>* if_exceptions,
2847 PropertyAccessInfo const& access_info) {
2848 ObjectRef constant = access_info.constant().value();
2849
2850 if (access_info.IsDictionaryProtoAccessorConstant()) {
2851 // For fast mode holders we recorded dependencies in BuildPropertyLoad.
2852 for (const MapRef map : access_info.lookup_start_object_maps()) {
2854 map, access_info.name(), constant, PropertyKind::kAccessor);
2855 }
2856 }
2857
2858 Node* target = jsgraph()->ConstantNoHole(constant, broker());
2859 // Introduce the call to the getter function.
2860 Node* value;
2861 if (constant.IsJSFunction()) {
2862 Node* feedback = jsgraph()->UndefinedConstant();
2863 value = *effect = *control = graph()->NewNode(
2866 receiver_mode),
2867 target, receiver, feedback, context, frame_state, *effect, *control);
2868 } else {
2869 // Disable optimizations for super ICs using API getters, so that we get
2870 // the correct receiver checks.
2871 if (receiver != lookup_start_object) {
2872 return nullptr;
2873 }
2874 value = InlineApiCall(receiver, frame_state, nullptr, effect, control,
2875 constant.AsFunctionTemplateInfo(), FeedbackSource());
2876 }
2877 // Remember to rewire the IfException edge if this is inside a try-block.
2878 if (if_exceptions != nullptr) {
2879 // Create the appropriate IfException/IfSuccess projections.
2880 Node* const if_exception =
2881 graph()->NewNode(common()->IfException(), *control, *effect);
2882 Node* const if_success = graph()->NewNode(common()->IfSuccess(), *control);
2883 if_exceptions->push_back(if_exception);
2884 *control = if_success;
2885 }
2886 return value;
2887}
2888
// Inlines the call to an accessor-constant setter (JSFunction call or
// inlined API call), passing {value} as the stored value. Updates
// {*effect}/{*control} in place and records IfException projections into
// {if_exceptions} when inside a try-block. Returns nothing: setters produce
// no value.
2890 Node* receiver, Node* value, Node* context, Node* frame_state,
2891 Node** effect, Node** control, ZoneVector<Node*>* if_exceptions,
2892 PropertyAccessInfo const& access_info) {
2893 ObjectRef constant = access_info.constant().value();
2894 Node* target = jsgraph()->ConstantNoHole(constant, broker());
2895 // Introduce the call to the setter function.
2896 if (constant.IsJSFunction()) {
2897 Node* feedback = jsgraph()->UndefinedConstant();
2898 *effect = *control = graph()->NewNode(
2902 target, receiver, value, feedback, context, frame_state, *effect,
2903 *control);
2904 } else {
2905 InlineApiCall(receiver, frame_state, value, effect, control,
2906 constant.AsFunctionTemplateInfo(), FeedbackSource());
2907 }
2908 // Remember to rewire the IfException edge if this is inside a try-block.
2909 if (if_exceptions != nullptr) {
2910 // Create the appropriate IfException/IfSuccess projections.
2911 Node* const if_exception =
2912 graph()->NewNode(common()->IfException(), *control, *effect);
2913 Node* const if_success = graph()->NewNode(common()->IfSuccess(), *control);
2914 if_exceptions->push_back(if_exception);
2915 *control = if_success;
2916 }
2917}
2918
// File-local helper: fills {inputs} (advancing {cursor}) with the inputs for
// a CallApiCallbackOptimized[NoProfiling] builtin call — code, function
// reference, argc, the FunctionTemplateInfo, receiver, optional value,
// context, frame state, effect and control — and returns the matching
// CallDescriptor. {value} != nullptr only for setters (argc == 1).
2919namespace {
2920CallDescriptor* PushRegularApiCallInputs(
2921 JSGraph* jsgraph, JSHeapBroker* broker, Node* receiver, Node* frame_state,
2922 Node* value, Node** effect, Node** control,
2923 FunctionTemplateInfoRef function_template_info, Node** inputs,
2924 int& cursor) {
2925 // Only setters have a value.
2926 int const argc = value == nullptr ? 0 : 1;
2927
2928 // The builtin always expects the receiver as the first param on the stack.
     // Skipping the profiling-enabled variant requires the NoProfiling
     // protector dependency to hold.
2929 bool no_profiling = broker->dependencies()->DependOnNoProfilingProtector();
2930 Callable call_api_callback = Builtins::CallableFor(
2931 jsgraph->isolate(), no_profiling
2932 ? Builtin::kCallApiCallbackOptimizedNoProfiling
2933 : Builtin::kCallApiCallbackOptimized);
2934
2935 Node* func_templ =
2936 jsgraph->HeapConstantNoHole(function_template_info.object());
2937 ApiFunction function(function_template_info.callback(broker));
2938 Node* function_reference = jsgraph->graph()->NewNode(
2941 function_template_info.c_functions(broker).data(),
2942 function_template_info.c_signatures(broker).data(),
2943 static_cast<uint32_t>(
2944 function_template_info.c_functions(broker).size()))));
2945 Node* code = jsgraph->HeapConstantNoHole(call_api_callback.code());
2946
2947 // Add CallApiCallbackStub's register argument as well.
2948 Node* context =
2950 inputs[cursor++] = code;
2951 inputs[cursor++] = function_reference;
2952 inputs[cursor++] = jsgraph->ConstantNoHole(argc);
2953 inputs[cursor++] = func_templ;
2954 inputs[cursor++] = receiver;
2955 if (value) {
2956 inputs[cursor++] = value;
2957 }
2958 inputs[cursor++] = context;
2959 inputs[cursor++] = frame_state;
2960 inputs[cursor++] = *effect;
2961 inputs[cursor++] = *control;
2962
2963 CallInterfaceDescriptor call_interface_descriptor =
2964 call_api_callback.descriptor();
2966 jsgraph->zone(), call_interface_descriptor,
2967 call_interface_descriptor.GetStackParameterCount() + argc +
2968 1 /* implicit receiver */,
2970}
2971} // namespace
2972
// Emits an inlined API (FunctionTemplateInfo) accessor call. Prefers a
// FastApiCall when an overload with a C function address matches {argc};
// otherwise emits a regular Call to the CallApiCallback builtin. Returns
// nullptr when the template has no callback data. Updates
// {*effect}/{*control} in place. {value} != nullptr only for setters.
2974 Node* receiver, Node* frame_state, Node* value, Node** effect,
2975 Node** control, FunctionTemplateInfoRef function_template_info,
2976 const FeedbackSource& feedback) {
2977 compiler::OptionalObjectRef maybe_callback_data =
2978 function_template_info.callback_data(broker());
2979 // Check if the function has an associated C++ code to execute.
2980 if (!maybe_callback_data.has_value()) {
2981 // TODO(ishell): consider generating "return undefined" for empty function
2982 // instead of failing.
2983 TRACE_BROKER_MISSING(broker(), "call code for function template info "
2984 << function_template_info);
2985 return nullptr;
2986 }
2987
2988 // Only setters have a value.
2989 int const argc = value == nullptr ? 0 : 1;
2990
2991 // Handles overloaded functions.
2993 broker(), function_template_info, argc);
2994
2995 if (c_function.address) {
     // Fast path: direct call into the C callback via FastApiCall.
2996 const int input_count = 14;
2997 Node* inputs[input_count];
2998
2999 int cursor = 0;
3000 inputs[cursor++] = receiver;
3001 if (value) {
3002 inputs[cursor++] = value;
3003 }
3004 inputs[cursor++] =
3005 jsgraph()->ConstantNoHole(maybe_callback_data.value(), broker());
3006
3007 auto call_descriptor = PushRegularApiCallInputs(
3008 jsgraph(), broker(), receiver, frame_state, value, effect, control,
3009 function_template_info, inputs, cursor);
3010
3011 // The input_count is constant, but getters have fewer parameters than
3012 // setters.
3013 DCHECK_LE(cursor, input_count);
3014
3015 return *effect = *control = graph()->NewNode(
3016 simplified()->FastApiCall(c_function, feedback, call_descriptor),
3017 cursor, inputs);
3018 }
3019
     // Slow path: regular builtin call through CallApiCallback.
3020 Node* inputs[11];
3021 int cursor = 0;
3022 CallDescriptor* call_descriptor = PushRegularApiCallInputs(
3023 jsgraph(), broker(), receiver, frame_state, value, effect, control,
3024 function_template_info, inputs, cursor);
3025
3026 return *effect = *control =
3027 graph()->NewNode(common()->Call(call_descriptor), cursor, inputs);
3028}
3029
// Lowers a named property load according to {access_info}: not-found yields
// undefined; accessor constants become inlined getter calls; module exports
// load from their Cell; String/StringWrapper/TypedArray lengths use
// dedicated simplified operators; data fields go through
// PropertyAccessBuilder. Returns empty optional when the load cannot be
// built (e.g. a dictionary-prototype constant fold fails).
3030std::optional<JSNativeContextSpecialization::ValueEffectControl>
3032 Node* lookup_start_object, Node* receiver, Node* context, Node* frame_state,
3033 Node* effect, Node* control, NameRef name, ZoneVector<Node*>* if_exceptions,
3034 PropertyAccessInfo const& access_info) {
3035 // Determine actual holder and perform prototype chain checks.
3036 OptionalJSObjectRef holder = access_info.holder();
3037 if (holder.has_value() && !access_info.HasDictionaryHolder()) {
3040 holder.value());
3041 }
3042
3043 // Generate the actual property access.
3044 Node* value;
3045 if (access_info.IsNotFound()) {
3046 value = jsgraph()->UndefinedConstant();
3047 } else if (access_info.IsFastAccessorConstant() ||
3048 access_info.IsDictionaryProtoAccessorConstant()) {
3049 ConvertReceiverMode receiver_mode =
3050 receiver == lookup_start_object
3054 receiver, receiver_mode, lookup_start_object, context, frame_state,
3055 &effect, &control, if_exceptions, access_info);
3056 } else if (access_info.IsModuleExport()) {
3057 Node* cell = jsgraph()->ConstantNoHole(
3058 access_info.constant().value().AsCell(), broker());
3059 value = effect =
3061 cell, effect, control);
3062 } else if (access_info.IsStringLength()) {
3063 DCHECK_EQ(receiver, lookup_start_object);
3064 value = graph()->NewNode(simplified()->StringLength(), receiver);
3065 } else if (access_info.IsStringWrapperLength()) {
3066 value = graph()->NewNode(simplified()->StringWrapperLength(),
3067 lookup_start_object);
3068 } else if (access_info.IsTypedArrayLength()) {
3069 if (receiver != lookup_start_object) {
3070 // We're accessing the TypedArray length via a prototype (a TypedArray
3071 // object in the prototype chain, objects below it not having a "length"
3072 // property, reading via super.length). That will throw a TypeError.
3073 value = effect = control = graph()->NewNode(
3074 javascript()->CallRuntime(Runtime::kThrowTypeError, 3),
3075 jsgraph()->ConstantNoHole(
3076 static_cast<int>(MessageTemplate::kIncompatibleMethodReceiver)),
3077 jsgraph()->HeapConstantNoHole(factory()->TypedArrayLength_string()),
3078 receiver, context, frame_state, effect, control);
3079
3080 // Remember to rewire the IfException edge if this is inside a try-block.
3081 if (if_exceptions != nullptr) {
3082 // Create the appropriate IfException/IfSuccess projections.
3083 Node* const if_exception =
3084 graph()->NewNode(common()->IfException(), control, effect);
3085 Node* const if_success =
3086 graph()->NewNode(common()->IfSuccess(), control);
3087 if_exceptions->push_back(if_exception);
3088 control = if_success;
3089 }
3090
3091 } else {
       // NOTE(review): this binding copies the ZoneVector; `const&` would
       // avoid the copy.
3092 const ZoneVector<MapRef> maps = access_info.lookup_start_object_maps();
3093 DCHECK_EQ(maps.size(), 1);
3094 value = graph()->NewNode(
3095 simplified()->TypedArrayLength(maps[0].elements_kind()),
3096 lookup_start_object);
3097 }
3098 } else {
3099 DCHECK(access_info.IsDataField() || access_info.IsFastDataConstant() ||
3100 access_info.IsDictionaryProtoDataConstant());
3101 PropertyAccessBuilder access_builder(jsgraph(), broker());
3102 if (access_info.IsDictionaryProtoDataConstant()) {
3103 auto maybe_value =
3104 access_builder.FoldLoadDictPrototypeConstant(access_info);
3105 if (!maybe_value) return {};
3106 value = maybe_value.value();
3107 } else {
3108 value = access_builder.BuildLoadDataField(
3109 name, access_info, lookup_start_object, &effect, &control);
3110 }
3111 }
3112 if (value != nullptr) {
3113 return ValueEffectControl(value, effect, control);
3114 }
3115 return std::optional<ValueEffectControl>();
3116}
3117
// Lowers an AccessMode::kHas property test to a boolean constant derived
// from {access_info} (found vs. not found), after the holder's prototype
// chain checks. Dictionary-mode holders are not supported.
3120 Node* effect, Node* control, PropertyAccessInfo const& access_info) {
3121 // TODO(v8:11457) Support property tests for dictionary mode prototypes.
3122 DCHECK(!access_info.HasDictionaryHolder());
3123
3124 // Determine actual holder and perform prototype chain checks.
3125 OptionalJSObjectRef holder = access_info.holder();
3126 if (holder.has_value()) {
3129 holder.value());
3130 }
3131
3132 return ValueEffectControl(
3133 jsgraph()->BooleanConstant(!access_info.IsNotFound()), effect, control);
3134}
3135
// Dispatches a property access to the matching builder based on
// {access_mode}: load -> BuildPropertyLoad, store/define -> BuildPropertyStore
// (receiver must equal lookup start), has -> BuildPropertyTest.
// NOTE(review): some case-label lines are elided in this excerpt.
3136std::optional<JSNativeContextSpecialization::ValueEffectControl>
3138 Node* lookup_start_object, Node* receiver, Node* value, Node* context,
3139 Node* frame_state, Node* effect, Node* control, NameRef name,
3140 ZoneVector<Node*>* if_exceptions, PropertyAccessInfo const& access_info,
3141 AccessMode access_mode) {
3142 switch (access_mode) {
3143 case AccessMode::kLoad:
3144 return BuildPropertyLoad(lookup_start_object, receiver, context,
3145 frame_state, effect, control, name,
3146 if_exceptions, access_info);
3147 case AccessMode::kStore:
3150 DCHECK_EQ(receiver, lookup_start_object);
3151 return BuildPropertyStore(receiver, value, context, frame_state, effect,
3152 control, name, if_exceptions, access_info,
3153 access_mode);
3154 case AccessMode::kHas:
3155 DCHECK_EQ(receiver, lookup_start_object);
3156 return BuildPropertyTest(effect, control, access_info);
3157 }
3158 UNREACHABLE();
3159}
3160
// Lowers a named property store/define according to {access_info}: accessor
// constants become inlined setter calls; data fields get representation
// checks (CheckNumber/CheckSmi/CheckMaps/CheckHeapObject), HeapNumber
// boxing/mutation for double fields, and — for map-transitioning stores —
// an atomically-observed region that may first grow the out-of-object
// properties backing store.
// NOTE(review): some lines (case labels, a few struct fields) are elided in
// this excerpt.
3163 Node* receiver, Node* value, Node* context, Node* frame_state, Node* effect,
3164 Node* control, NameRef name, ZoneVector<Node*>* if_exceptions,
3165 PropertyAccessInfo const& access_info, AccessMode access_mode) {
3166 // Determine actual holder and perform prototype chain checks.
3167 PropertyAccessBuilder access_builder(jsgraph(), broker());
3168 OptionalJSObjectRef holder = access_info.holder();
3169 if (holder.has_value()) {
3171 DCHECK_NE(AccessMode::kDefine, access_mode);
3174 holder.value());
3175 }
3176
3177 DCHECK(!access_info.IsNotFound());
3178
3179 // Generate the actual property access.
3180 if (access_info.IsFastAccessorConstant()) {
3181 InlinePropertySetterCall(receiver, value, context, frame_state, &effect,
3182 &control, if_exceptions, access_info);
3183 } else {
3184 DCHECK(access_info.IsDataField() || access_info.IsFastDataConstant());
3185 DCHECK(access_mode == AccessMode::kStore ||
3186 access_mode == AccessMode::kStoreInLiteral ||
3187 access_mode == AccessMode::kDefine);
3188 FieldIndex const field_index = access_info.field_index();
3189 Type const field_type = access_info.field_type();
3190 MachineRepresentation const field_representation =
3192 access_info.field_representation());
3193 Node* storage = receiver;
     // Out-of-object fields live in the properties backing store.
3194 if (!field_index.is_inobject()) {
3195 storage = effect = graph()->NewNode(
3196 simplified()->LoadField(
3198 storage, effect, control);
3199 }
     // Storing to a known-constant field without a transition always
     // deoptimizes: emit an unconditional CheckIf(false).
3200 if (access_info.IsFastDataConstant() && access_mode == AccessMode::kStore &&
3201 !access_info.HasTransitionMap()) {
3202 Node* deoptimize = graph()->NewNode(
3203 simplified()->CheckIf(DeoptimizeReason::kStoreToConstant),
3204 jsgraph()->FalseConstant(), effect, control);
3205 return ValueEffectControl(jsgraph()->UndefinedConstant(), deoptimize,
3206 control);
3207 }
3208 FieldAccess field_access = {
3210 field_index.offset(),
3211 name.object(),
3212 OptionalMapRef(),
3213 field_type,
3214 MachineType::TypeForRepresentation(field_representation),
3216 "BuildPropertyStore",
3217 access_info.GetConstFieldInfo(),
3218 access_mode == AccessMode::kStoreInLiteral};
3219
3220 switch (field_representation) {
3222 value = effect =
3223 graph()->NewNode(simplified()->CheckNumber(FeedbackSource()), value,
3224 effect, control);
3225 if (access_info.HasTransitionMap()) {
3226 // Allocate a HeapNumber for the new property.
3227 AllocationBuilder a(jsgraph(), broker(), effect, control);
3228 a.Allocate(sizeof(HeapNumber), AllocationType::kYoung,
3229 Type::OtherInternal());
3230 a.Store(AccessBuilder::ForMap(), broker()->heap_number_map());
3231 FieldAccess value_field_access = AccessBuilder::ForHeapNumberValue();
3232 value_field_access.const_field_info = field_access.const_field_info;
3233 a.Store(value_field_access, value);
3234 value = effect = a.Finish();
3235
3236 field_access.type = Type::Any();
3239 } else {
3240 // We just store directly to the HeapNumber.
3241 FieldAccess const storage_access = {
3243 field_index.offset(),
3244 name.object(),
3245 OptionalMapRef(),
3246 Type::OtherInternal(),
3249 "BuildPropertyStore",
3250 access_info.GetConstFieldInfo(),
3251 access_mode == AccessMode::kStoreInLiteral};
3252 storage = effect =
3253 graph()->NewNode(simplified()->LoadField(storage_access), storage,
3254 effect, control);
3255 FieldAccess value_field_access = AccessBuilder::ForHeapNumberValue();
3256 value_field_access.const_field_info = field_access.const_field_info;
3257 value_field_access.is_store_in_literal =
3258 field_access.is_store_in_literal;
3259 field_access = value_field_access;
3260 }
3261 break;
3262 }
3266 if (field_representation == MachineRepresentation::kTaggedSigned) {
         // Smi-only field: checking the value is a Smi lets us skip the
         // write barrier entirely.
3267 value = effect = graph()->NewNode(
3268 simplified()->CheckSmi(FeedbackSource()), value, effect, control);
3269 field_access.write_barrier_kind = kNoWriteBarrier;
3270
3271 } else if (field_representation ==
3273 OptionalMapRef field_map = access_info.field_map();
3274 if (field_map.has_value()) {
3275 // Emit a map check for the value.
3276 effect = graph()->NewNode(
3277 simplified()->CheckMaps(CheckMapsFlag::kNone,
3278 ZoneRefSet<Map>(*field_map)),
3279 value, effect, control);
3280 } else {
3281 // Ensure that {value} is a HeapObject.
3282 value = effect = graph()->NewNode(simplified()->CheckHeapObject(),
3283 value, effect, control);
3284 }
3286
3287 } else {
3288 DCHECK(field_representation == MachineRepresentation::kTagged);
3289 }
3290 break;
3308 UNREACHABLE();
3309 }
3310 // Check if we need to perform a transitioning store.
3311 OptionalMapRef transition_map = access_info.transition_map();
3312 if (transition_map.has_value()) {
3313 // Check if we need to grow the properties backing store
3314 // with this transitioning store.
3315 MapRef transition_map_ref = transition_map.value();
3316 MapRef original_map = transition_map_ref.GetBackPointer(broker()).AsMap();
3317 if (!field_index.is_inobject()) {
3318 // If slack tracking ends after this compilation started but before it's
3319 // finished, then {original_map} could be out-of-sync with
3320 // {transition_map_ref}. In particular, its UnusedPropertyFields could
3321 // be non-zero, which would lead us to not extend the property backing
3322 // store, while the underlying Map has actually zero
3323 // UnusedPropertyFields. Thus, we install a dependency on {original_map}
3324 // now, so that if such a situation happens, we'll throw away the code.
3326 }
3327 if (original_map.UnusedPropertyFields() == 0) {
3328 DCHECK(!field_index.is_inobject());
3329
3330 // Reallocate the properties {storage}.
3331 storage = effect = BuildExtendPropertiesBackingStore(
3332 original_map, storage, effect, control);
3333
3334 // Perform the actual store.
3335 effect = graph()->NewNode(simplified()->StoreField(field_access),
3336 storage, value, effect, control);
3337
3338 // Atomically switch to the new properties below.
3340 value = storage;
3341 storage = receiver;
3342 }
       // Map change plus field store must appear atomic to the outside
       // world, hence the observable region.
3343 effect = graph()->NewNode(
3344 common()->BeginRegion(RegionObservability::kObservable), effect);
3345 effect = graph()->NewNode(
3346 simplified()->StoreField(AccessBuilder::ForMap()), receiver,
3347 jsgraph()->ConstantNoHole(transition_map_ref, broker()), effect,
3348 control);
3349 effect = graph()->NewNode(simplified()->StoreField(field_access), storage,
3350 value, effect, control);
3351 effect = graph()->NewNode(common()->FinishRegion(),
3352 jsgraph()->UndefinedConstant(), effect);
3353 } else {
3354 // Regular non-transitioning field store.
3355 effect = graph()->NewNode(simplified()->StoreField(field_access), storage,
3356 value, effect, control);
3357 }
3358 }
3359
3360 return ValueEffectControl(value, effect, control);
3361}
3362
// Reduces JSDefineKeyedOwnPropertyInLiteral through the generic
// property-access path, but only for a resolved flags operand that passes
// the (elided in this excerpt) flag check; otherwise bails out.
3365 Node* node) {
3367 FeedbackParameter const& p = n.Parameters();
3368 if (!p.feedback().IsValid()) return NoChange();
3369
3370 NumberMatcher mflags(n.flags());
3371 CHECK(mflags.HasResolvedValue());
     // NOTE(review): the flag predicate tested here is on elided lines —
     // confirm against the full source.
3374 return NoChange();
3375
3376 return ReducePropertyAccess(node, n.name(), std::nullopt, n.value(),
3379}
3380
// Reduces JSStoreInArrayLiteral through the generic property-access path,
// using the literal's index as key. Requires valid feedback.
3382 Node* node) {
3384 FeedbackParameter const& p = n.Parameters();
3385 if (!p.feedback().IsValid()) return NoChange();
3386 return ReducePropertyAccess(node, n.index(), std::nullopt, n.value(),
3389}
3390
// Elides a JSToObject when map inference proves the input is already a
// JSReceiver (ToObject is then the identity); otherwise leaves the node.
3392 DCHECK_EQ(IrOpcode::kJSToObject, node->opcode());
3395
3396 MapInference inference(broker(), receiver, effect);
3397 if (!inference.HaveMaps() || !inference.AllOfInstanceTypesAreJSReceiver()) {
3398 return NoChange();
3399 }
3400
     // Input is provably a JSReceiver: ToObject is a no-op, forward it.
3401 ReplaceWithValue(node, receiver, effect);
3402 return Replace(receiver);
3403}
3404
3407 Node* receiver, Node* index, Node* value, Node* effect, Node* control,
3408 Node* context, ElementAccessInfo const& access_info,
3409 KeyedAccessMode const& keyed_mode) {
3410 // TODO(bmeurer): We currently specialize based on elements kind. We should
3411 // also be able to properly support strings and other JSObjects here.
3412 ElementsKind elements_kind = access_info.elements_kind();
3413 ZoneVector<MapRef> const& receiver_maps =
3414 access_info.lookup_start_object_maps();
3415
3416 if (IsTypedArrayElementsKind(elements_kind) ||
3417 IsRabGsabTypedArrayElementsKind(elements_kind)) {
3419 receiver, index, value, effect, control, context, elements_kind,
3420 keyed_mode);
3421 }
3422
3423 // Load the elements for the {receiver}.
3424 Node* elements = effect = graph()->NewNode(
3426 effect, control);
3427
3428 // Don't try to store to a copy-on-write backing store (unless supported by
3429 // the store mode).
3430 if (IsAnyStore(keyed_mode.access_mode()) &&
3431 IsSmiOrObjectElementsKind(elements_kind) &&
3432 !StoreModeHandlesCOW(keyed_mode.store_mode())) {
3433 effect = graph()->NewNode(
3434 simplified()->CheckMaps(CheckMapsFlag::kNone,
3435 ZoneRefSet<Map>(broker()->fixed_array_map())),
3436 elements, effect, control);
3437 }
3438
3439 // Check if the {receiver} is a JSArray.
3440 bool receiver_is_jsarray = HasOnlyJSArrayMaps(broker(), receiver_maps);
3441
3442 // Load the length of the {receiver}.
3443 Node* length = effect =
3444 receiver_is_jsarray
3445 ? graph()->NewNode(
3446 simplified()->LoadField(
3447 AccessBuilder::ForJSArrayLength(elements_kind)),
3448 receiver, effect, control)
3449 : graph()->NewNode(
3451 elements, effect, control);
3452
3453 // Check if we might need to grow the {elements} backing store.
3454 if (keyed_mode.IsStore() && StoreModeCanGrow(keyed_mode.store_mode())) {
3455 // For growing stores we validate the {index} below.
3456 } else if (keyed_mode.IsLoad() &&
3457 LoadModeHandlesOOB(keyed_mode.load_mode()) &&
3458 CanTreatHoleAsUndefined(receiver_maps)) {
3459 // Check that the {index} is a valid array index, we do the actual
3460 // bounds check below and just skip the store below if it's out of
3461 // bounds for the {receiver}.
3462 index = effect = graph()->NewNode(
3465 index, jsgraph()->ConstantNoHole(Smi::kMaxValue), effect, control);
3466 } else {
3467 // Check that the {index} is in the valid range for the {receiver}.
3468 index = effect = graph()->NewNode(
3471 index, length, effect, control);
3472 }
3473
3474 // Compute the element access.
3475 Type element_type = Type::NonInternal();
3476 MachineType element_machine_type = MachineType::AnyTagged();
3477 if (IsDoubleElementsKind(elements_kind)) {
3478 element_type = Type::Number();
3479 element_machine_type = MachineType::Float64();
3480 } else if (IsSmiElementsKind(elements_kind)) {
3481 element_type = Type::SignedSmall();
3482 element_machine_type = MachineType::TaggedSigned();
3483 }
3485 element_type, element_machine_type,
3487
3488 // Access the actual element.
3489 if (keyed_mode.access_mode() == AccessMode::kLoad) {
3490 // Compute the real element access type, which includes the hole in case
3491 // of holey backing stores.
3492 if (IsHoleyElementsKind(elements_kind)) {
3493 element_access.type =
3494 Type::Union(element_type, Type::Hole(), graph()->zone());
3495 }
3496 if (elements_kind == HOLEY_ELEMENTS ||
3497 elements_kind == HOLEY_SMI_ELEMENTS) {
3498 element_access.machine_type = MachineType::AnyTagged();
3499 }
3500
3501 // Check if we can return undefined for out-of-bounds loads.
3502 if (LoadModeHandlesOOB(keyed_mode.load_mode()) &&
3503 CanTreatHoleAsUndefined(receiver_maps)) {
3504 Node* check =
3505 graph()->NewNode(simplified()->NumberLessThan(), index, length);
3506 Node* branch =
3507 graph()->NewNode(common()->Branch(BranchHint::kTrue), check, control);
3508
3509 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
3510 Node* etrue = effect;
3511 Node* vtrue;
3512 {
3513 // Do a real bounds check against {length}. This is in order to
3514 // protect against a potential typer bug leading to the elimination of
3515 // the NumberLessThan above.
3516 if (v8_flags.turbo_typer_hardening) {
3517 index = etrue =
3522 index, length, etrue, if_true);
3523 }
3524
3525 // Perform the actual load
3526 vtrue = etrue =
3527 graph()->NewNode(simplified()->LoadElement(element_access),
3528 elements, index, etrue, if_true);
3529
3530 // Handle loading from holey backing stores correctly by mapping
3531 // the hole to undefined.
3532 if (elements_kind == HOLEY_ELEMENTS ||
3533 elements_kind == HOLEY_SMI_ELEMENTS) {
3534 // Turn the hole into undefined.
3535 vtrue = graph()->NewNode(simplified()->ConvertTaggedHoleToUndefined(),
3536 vtrue);
3537 } else if (elements_kind == HOLEY_DOUBLE_ELEMENTS) {
3538 // Return the signaling NaN hole directly if all uses are
3539 // truncating.
3540 if (LoadModeHandlesHoles(keyed_mode.load_mode())) {
3541 vtrue = graph()->NewNode(simplified()->ChangeFloat64HoleToTagged(),
3542 vtrue);
3543 } else {
3544 vtrue = etrue = graph()->NewNode(
3545 simplified()->CheckFloat64Hole(
3547 vtrue, etrue, if_true);
3548 }
3549 }
3550 }
3551
3552 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
3553 Node* efalse = effect;
3554 Node* vfalse;
3555 {
3556 // Materialize undefined for out-of-bounds loads.
3557 vfalse = jsgraph()->UndefinedConstant();
3558 }
3559
3560 control = graph()->NewNode(common()->Merge(2), if_true, if_false);
3561 effect = graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
3562 value = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
3563 vtrue, vfalse, control);
3564 } else {
3565 // Perform the actual load.
3566 value = effect =
3567 graph()->NewNode(simplified()->LoadElement(element_access), elements,
3568 index, effect, control);
3569
3570 // Handle loading from holey backing stores correctly, by either mapping
3571 // the hole to undefined if possible, or deoptimizing otherwise.
3572 if (elements_kind == HOLEY_ELEMENTS ||
3573 elements_kind == HOLEY_SMI_ELEMENTS) {
3574 // Check if we are allowed to turn the hole into undefined.
3575 if (CanTreatHoleAsUndefined(receiver_maps)) {
3576 // Turn the hole into undefined.
3577 value = graph()->NewNode(simplified()->ConvertTaggedHoleToUndefined(),
3578 value);
3579 } else {
3580 // Bailout if we see the hole.
3581 value = effect = graph()->NewNode(simplified()->CheckNotTaggedHole(),
3582 value, effect, control);
3583 }
3584 } else if (elements_kind == HOLEY_DOUBLE_ELEMENTS) {
3585 // Perform the hole check on the result.
3586 // Check if we are allowed to return the hole directly.
3587 if (CanTreatHoleAsUndefined(receiver_maps)) {
3588 if (LoadModeHandlesHoles(keyed_mode.load_mode())) {
3589 // Return the signaling NaN hole directly if all uses are
3590 // truncating.
3591 value = graph()->NewNode(simplified()->ChangeFloat64HoleToTagged(),
3592 value);
3593 } else {
3594 value = effect = graph()->NewNode(
3595 simplified()->CheckFloat64Hole(
3597 value, effect, control);
3598 }
3599 } else {
3600 value = effect = graph()->NewNode(
3601 simplified()->CheckFloat64Hole(
3603 value, effect, control);
3604 }
3605 }
3606 }
3607 } else if (keyed_mode.access_mode() == AccessMode::kHas) {
3608 // For packed arrays with NoElementsProctector valid, a bound check
3609 // is equivalent to HasProperty.
3610 value = effect = graph()->NewNode(simplified()->SpeculativeNumberLessThan(
3612 index, length, effect, control);
3613 if (IsHoleyElementsKind(elements_kind)) {
3614 // If the index is in bounds, do a load and hole check.
3615
3616 Node* branch = graph()->NewNode(common()->Branch(), value, control);
3617
3618 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
3619 Node* efalse = effect;
3620 Node* vfalse = jsgraph()->FalseConstant();
3621
3622 element_access.type =
3623 Type::Union(element_type, Type::Hole(), graph()->zone());
3624
3625 if (elements_kind == HOLEY_ELEMENTS ||
3626 elements_kind == HOLEY_SMI_ELEMENTS) {
3627 element_access.machine_type = MachineType::AnyTagged();
3628 }
3629
3630 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
3631 Node* etrue = effect;
3632
3633 Node* checked = etrue = graph()->NewNode(
3636 index, length, etrue, if_true);
3637
3638 Node* element = etrue =
3639 graph()->NewNode(simplified()->LoadElement(element_access), elements,
3640 checked, etrue, if_true);
3641
3642 Node* vtrue;
3643 if (CanTreatHoleAsUndefined(receiver_maps)) {
3644 if (elements_kind == HOLEY_ELEMENTS ||
3645 elements_kind == HOLEY_SMI_ELEMENTS) {
3646 // Check if we are allowed to turn the hole into undefined.
3647 // Turn the hole into undefined.
3648 vtrue = graph()->NewNode(simplified()->ReferenceEqual(), element,
3649 jsgraph()->TheHoleConstant());
3650 } else {
3651 vtrue =
3652 graph()->NewNode(simplified()->NumberIsFloat64Hole(), element);
3653 }
3654
3655 // has == !IsHole
3656 vtrue = graph()->NewNode(simplified()->BooleanNot(), vtrue);
3657 } else {
3658 if (elements_kind == HOLEY_ELEMENTS ||
3659 elements_kind == HOLEY_SMI_ELEMENTS) {
3660 // Bailout if we see the hole.
3661 etrue = graph()->NewNode(simplified()->CheckNotTaggedHole(), element,
3662 etrue, if_true);
3663 } else {
3664 etrue = graph()->NewNode(
3665 simplified()->CheckFloat64Hole(
3667 element, etrue, if_true);
3668 }
3669
3670 vtrue = jsgraph()->TrueConstant();
3671 }
3672
3673 control = graph()->NewNode(common()->Merge(2), if_true, if_false);
3674 effect = graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
3675 value = graph()->NewNode(common()->Phi(MachineRepresentation::kTagged, 2),
3676 vtrue, vfalse, control);
3677 }
3678 } else {
3679 DCHECK(keyed_mode.access_mode() == AccessMode::kStore ||
3680 keyed_mode.access_mode() == AccessMode::kStoreInLiteral ||
3681 keyed_mode.access_mode() == AccessMode::kDefine);
3682
3683 if (IsSmiElementsKind(elements_kind)) {
3684 value = effect = graph()->NewNode(
3685 simplified()->CheckSmi(FeedbackSource()), value, effect, control);
3686 } else if (IsDoubleElementsKind(elements_kind)) {
3687 value = effect = graph()->NewNode(
3688 simplified()->CheckNumber(FeedbackSource()), value, effect, control);
3689 // Make sure we do not store signalling NaNs into double arrays.
3690 value = graph()->NewNode(simplified()->NumberSilenceNaN(), value);
3691 }
3692
3693 // Ensure that copy-on-write backing store is writable.
3694 if (IsSmiOrObjectElementsKind(elements_kind) &&
3696 elements = effect =
3697 graph()->NewNode(simplified()->EnsureWritableFastElements(), receiver,
3698 elements, effect, control);
3699 } else if (StoreModeCanGrow(keyed_mode.store_mode())) {
3700 // Determine the length of the {elements} backing store.
3701 Node* elements_length = effect = graph()->NewNode(
3703 elements, effect, control);
3704
3705 // Validate the {index} depending on holeyness:
3706 //
3707 // For HOLEY_*_ELEMENTS the {index} must not exceed the {elements}
3708 // backing store capacity plus the maximum allowed gap, as otherwise
3709 // the (potential) backing store growth would normalize and thus
3710 // the elements kind of the {receiver} would change to slow mode.
3711 //
3712 // For JSArray PACKED_*_ELEMENTS the {index} must be within the range
3713 // [0,length+1[ to be valid. In case {index} equals {length},
3714 // the {receiver} will be extended, but kept packed.
3715 //
3716 // Non-JSArray PACKED_*_ELEMENTS always grow by adding holes because they
3717 // lack the magical length property, which requires a map transition.
3718 // So we can assume that this did not happen if we did not see this map.
3719 Node* limit =
3720 IsHoleyElementsKind(elements_kind)
3721 ? graph()->NewNode(simplified()->NumberAdd(), elements_length,
3722 jsgraph()->ConstantNoHole(JSObject::kMaxGap))
3723 : receiver_is_jsarray
3724 ? graph()->NewNode(simplified()->NumberAdd(), length,
3725 jsgraph()->OneConstant())
3726 : elements_length;
3727 index = effect = graph()->NewNode(
3730 index, limit, effect, control);
3731
3732 // Grow {elements} backing store if necessary.
3734 IsDoubleElementsKind(elements_kind)
3737 elements = effect = graph()->NewNode(
3738 simplified()->MaybeGrowFastElements(mode, FeedbackSource()), receiver,
3739 elements, index, elements_length, effect, control);
3740
3741 // If we didn't grow {elements}, it might still be COW, in which case we
3742 // copy it now.
3743 if (IsSmiOrObjectElementsKind(elements_kind) &&
3745 elements = effect =
3746 graph()->NewNode(simplified()->EnsureWritableFastElements(),
3747 receiver, elements, effect, control);
3748 }
3749
3750 // Also update the "length" property if {receiver} is a JSArray.
3751 if (receiver_is_jsarray) {
3752 Node* check =
3753 graph()->NewNode(simplified()->NumberLessThan(), index, length);
3754 Node* branch = graph()->NewNode(common()->Branch(), check, control);
3755
3756 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
3757 Node* etrue = effect;
3758 {
3759 // We don't need to do anything, the {index} is within
3760 // the valid bounds for the JSArray {receiver}.
3761 }
3762
3763 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
3764 Node* efalse = effect;
3765 {
3766 // Update the JSArray::length field. Since this is observable,
3767 // there must be no other check after this.
3768 Node* new_length = graph()->NewNode(simplified()->NumberAdd(), index,
3769 jsgraph()->OneConstant());
3770 efalse = graph()->NewNode(
3771 simplified()->StoreField(
3772 AccessBuilder::ForJSArrayLength(elements_kind)),
3773 receiver, new_length, efalse, if_false);
3774 }
3775
3776 control = graph()->NewNode(common()->Merge(2), if_true, if_false);
3777 effect =
3778 graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
3779 }
3780 }
3781
3782 // Perform the actual element access.
3783 effect = graph()->NewNode(simplified()->StoreElement(element_access),
3784 elements, index, value, effect, control);
3785 }
3786
3787 return ValueEffectControl(value, effect, control);
3788}
3789
3793 Node* receiver, Node* index, Node* value, Node* effect, Node* control,
3794 Node* context, ElementsKind elements_kind,
3795 KeyedAccessMode const& keyed_mode) {
3796 DCHECK(IsTypedArrayElementsKind(elements_kind) ||
3797 IsRabGsabTypedArrayElementsKind(elements_kind));
3798 // AccessMode::kDefine is not handled here. Optimization should be skipped by
3799 // caller.
3800 DCHECK(keyed_mode.access_mode() != AccessMode::kDefine);
3801
3802 Node* buffer_or_receiver = receiver;
3803 Node* length;
3804 Node* base_pointer;
3805 Node* external_pointer;
3806
3807 // Check if we can constant-fold information about the {receiver} (e.g.
3808 // for asm.js-like code patterns).
3809 OptionalJSTypedArrayRef typed_array =
3810 GetTypedArrayConstant(broker(), receiver);
3811 if (typed_array.has_value() &&
3812 // TODO(v8:11111): Add support for rab/gsab here.
3813 !IsRabGsabTypedArrayElementsKind(elements_kind)) {
3814 if (typed_array->map(broker()).elements_kind() != elements_kind) {
3815 // This case should never be reachable at runtime.
3818 [this](Node* n) { this->Revisit(n); });
3819 assembler.InitializeEffectControl(effect, control);
3820 assembler.Unreachable();
3822 Node* dead = jsgraph_->Dead();
3823 return ValueEffectControl{dead, dead, dead};
3824 } else {
3825 length =
3826 jsgraph()->ConstantNoHole(static_cast<double>(typed_array->length()));
3827
3828 DCHECK(!typed_array->is_on_heap());
3829 // Load the (known) data pointer for the {receiver} and set
3830 // {base_pointer} and {external_pointer} to the values that will allow
3831 // to generate typed element accesses using the known data pointer. The
3832 // data pointer might be invalid if the {buffer} was detached, so we
3833 // need to make sure that any access is properly guarded.
3834 base_pointer = jsgraph()->ZeroConstant();
3835 external_pointer = jsgraph()->PointerConstant(typed_array->data_ptr());
3836 }
3837 } else {
3838 // Load the {receiver}s length.
3840 [this](Node* n) { this->Revisit(n); });
3841 assembler.InitializeEffectControl(effect, control);
3842 length = assembler.TypedArrayLength(
3845 std::tie(effect, control) =
3847
3848 // Load the base pointer for the {receiver}. This will always be Smi
3849 // zero unless we allow on-heap TypedArrays, which is only the case
3850 // for Chrome. Node and Electron both set this limit to 0. Setting
3851 // the base to Smi zero here allows the EffectControlLinearizer to
3852 // optimize away the tricky part of the access later.
3854 base_pointer = jsgraph()->ZeroConstant();
3855 } else {
3856 base_pointer = effect = graph()->NewNode(
3858 receiver, effect, control);
3859 }
3860
3861 // Load the external pointer for the {receiver}.
3862 external_pointer = effect =
3863 graph()->NewNode(simplified()->LoadField(
3865 receiver, effect, control);
3866 }
3867
3868 // See if we can skip the detaching check.
3869 if (!dependencies()->DependOnArrayBufferDetachingProtector()) {
3870 // Load the buffer for the {receiver}.
3871 Node* buffer =
3872 typed_array.has_value()
3873 ? jsgraph()->ConstantNoHole(typed_array->buffer(broker()), broker())
3874 : (effect = graph()->NewNode(
3875 simplified()->LoadField(
3877 receiver, effect, control));
3878
3879 // Deopt if the {buffer} was detached.
3880 // Note: A detached buffer leads to megamorphic feedback.
3881 Node* buffer_bit_field = effect = graph()->NewNode(
3883 buffer, effect, control);
3884 Node* check = graph()->NewNode(
3885 simplified()->NumberEqual(),
3886 graph()->NewNode(
3887 simplified()->NumberBitwiseAnd(), buffer_bit_field,
3888 jsgraph()->ConstantNoHole(JSArrayBuffer::WasDetachedBit::kMask)),
3889 jsgraph()->ZeroConstant());
3890 effect = graph()->NewNode(
3891 simplified()->CheckIf(DeoptimizeReason::kArrayBufferWasDetached), check,
3892 effect, control);
3893
3894 // Retain the {buffer} instead of {receiver} to reduce live ranges.
3895 buffer_or_receiver = buffer;
3896 }
3897
3898 enum Situation { kBoundsCheckDone, kHandleOOB_SmiAndRangeCheckComputed };
3899 Situation situation;
3900 TNode<BoolT> check;
3901 if ((keyed_mode.IsLoad() && LoadModeHandlesOOB(keyed_mode.load_mode())) ||
3902 (keyed_mode.IsStore() &&
3904 // Only check that the {index} is in SignedSmall range. We do the actual
3905 // bounds check below and just skip the property access if it's out of
3906 // bounds for the {receiver}.
3907 index = effect = graph()->NewNode(simplified()->CheckSmi(FeedbackSource()),
3908 index, effect, control);
3910 graph()->NewNode(simplified()->NumberLessThan(), index, length));
3911
3913 [this](Node* n) { this->Revisit(n); });
3914 assembler.InitializeEffectControl(effect, control);
3915 TNode<BoolT> check_less_than_length =
3916 assembler.EnterMachineGraph<BoolT>(compare_length, UseInfo::Bool());
3917 TNode<Int32T> index_int32 = assembler.EnterMachineGraph<Int32T>(
3919 TNode<BoolT> check_non_negative =
3920 assembler.Int32LessThanOrEqual(assembler.Int32Constant(0), index_int32);
3922 assembler.Word32And(check_less_than_length, check_non_negative));
3923 std::tie(effect, control) =
3925
3926 situation = kHandleOOB_SmiAndRangeCheckComputed;
3927 } else {
3928 // Check that the {index} is in the valid range for the {receiver}.
3929 index = effect = graph()->NewNode(
3932 index, length, effect, control);
3933 situation = kBoundsCheckDone;
3934 }
3935
3936 // Access the actual element.
3937 ExternalArrayType external_array_type =
3938 GetArrayTypeFromElementsKind(elements_kind);
3939 switch (keyed_mode.access_mode()) {
3940 case AccessMode::kLoad: {
3941 // Check if we can return undefined for out-of-bounds loads.
3942 if (situation == kHandleOOB_SmiAndRangeCheckComputed) {
3943 DCHECK_NE(check, nullptr);
3944 Node* branch = graph()->NewNode(
3946 check, control);
3947
3948 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
3949 Node* etrue = effect;
3950 Node* vtrue;
3951 {
3952 // Do a real bounds check against {length}. This is in order to
3953 // protect against a potential typer bug leading to the elimination
3954 // of the NumberLessThan above.
3955 if (v8_flags.turbo_typer_hardening) {
3956 index = etrue = graph()->NewNode(
3961 index, length, etrue, if_true);
3962 }
3963
3964 // Perform the actual load
3965 vtrue = etrue = graph()->NewNode(
3966 simplified()->LoadTypedElement(external_array_type),
3967 buffer_or_receiver, base_pointer, external_pointer, index, etrue,
3968 if_true);
3969 }
3970
3971 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
3972 Node* efalse = effect;
3973 Node* vfalse;
3974 {
3975 // Materialize undefined for out-of-bounds loads.
3976 vfalse = jsgraph()->UndefinedConstant();
3977 }
3978
3979 control = graph()->NewNode(common()->Merge(2), if_true, if_false);
3980 effect =
3981 graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
3982 value =
3984 vtrue, vfalse, control);
3985 } else {
3986 // Perform the actual load.
3987 DCHECK_EQ(kBoundsCheckDone, situation);
3988 value = effect = graph()->NewNode(
3989 simplified()->LoadTypedElement(external_array_type),
3990 buffer_or_receiver, base_pointer, external_pointer, index, effect,
3991 control);
3992 }
3993 break;
3994 }
3997 UNREACHABLE();
3998 case AccessMode::kStore: {
3999 if (external_array_type == kExternalBigInt64Array ||
4000 external_array_type == kExternalBigUint64Array) {
4001 value = effect = graph()->NewNode(
4002 simplified()->SpeculativeToBigInt(BigIntOperationHint::kBigInt,
4003 FeedbackSource()),
4004 value, effect, control);
4005 } else {
4006 // Ensure that the {value} is actually a Number or an Oddball,
4007 // and truncate it to a Number appropriately.
4008 // TODO(panq): Eliminate the deopt loop introduced by the speculation.
4009 value = effect = graph()->NewNode(
4010 simplified()->SpeculativeToNumber(
4012 value, effect, control);
4013 }
4014
4015 // Introduce the appropriate truncation for {value}. Currently we
4016 // only need to do this for ClamedUint8Array {receiver}s, as the
4017 // other truncations are implicit in the StoreTypedElement, but we
4018 // might want to change that at some point.
4019 if (external_array_type == kExternalUint8ClampedArray) {
4020 value = graph()->NewNode(simplified()->NumberToUint8Clamped(), value);
4021 }
4022
4023 if (situation == kHandleOOB_SmiAndRangeCheckComputed) {
4024 // We have to detect OOB stores and handle them without deopt (by
4025 // simply not performing them).
4026 DCHECK_NE(check, nullptr);
4027 Node* branch = graph()->NewNode(
4029 check, control);
4030
4031 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
4032 Node* etrue = effect;
4033 {
4034 // Do a real bounds check against {length}. This is in order to
4035 // protect against a potential typer bug leading to the elimination
4036 // of the NumberLessThan above.
4037 if (v8_flags.turbo_typer_hardening) {
4038 index = etrue = graph()->NewNode(
4043 index, length, etrue, if_true);
4044 }
4045
4046 // Perform the actual store.
4047 etrue = graph()->NewNode(
4048 simplified()->StoreTypedElement(external_array_type),
4049 buffer_or_receiver, base_pointer, external_pointer, index, value,
4050 etrue, if_true);
4051 }
4052
4053 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
4054 Node* efalse = effect;
4055 {
4056 // Just ignore the out-of-bounds write.
4057 }
4058
4059 control = graph()->NewNode(common()->Merge(2), if_true, if_false);
4060 effect =
4061 graph()->NewNode(common()->EffectPhi(2), etrue, efalse, control);
4062 } else {
4063 // Perform the actual store
4064 DCHECK_EQ(kBoundsCheckDone, situation);
4065 effect = graph()->NewNode(
4066 simplified()->StoreTypedElement(external_array_type),
4067 buffer_or_receiver, base_pointer, external_pointer, index, value,
4068 effect, control);
4069 }
4070 break;
4071 }
4072 case AccessMode::kHas:
4073 if (situation == kHandleOOB_SmiAndRangeCheckComputed) {
4074 DCHECK_NE(check, nullptr);
4077 [this](Node* n) { this->Revisit(n); });
4078 assembler.InitializeEffectControl(effect, control);
4079 value = assembler.MachineSelectIf<Boolean>(check)
4080 .Then([&]() { return assembler.TrueConstant(); })
4081 .Else([&]() { return assembler.FalseConstant(); })
4082 .ExpectTrue()
4083 .Value();
4084 std::tie(effect, control) =
4086 } else {
4087 DCHECK_EQ(kBoundsCheckDone, situation);
4088 // For has-property on a typed array, all we need is a bounds check.
4089 value = jsgraph()->TrueConstant();
4090 }
4091 break;
4092 }
4093
4094 return ValueEffectControl(value, effect, control);
4095}
4096
4098 Node* receiver, Node* index, Node* length, Node** effect, Node** control,
4099 KeyedAccessLoadMode load_mode) {
4100 if (LoadModeHandlesOOB(load_mode) &&
4101 dependencies()->DependOnNoElementsProtector()) {
4102 // Ensure that the {index} is a valid String length.
4103 index = *effect = graph()->NewNode(
4106 index, jsgraph()->ConstantNoHole(String::kMaxLength), *effect,
4107 *control);
4108
4109 // Load the single character string from {receiver} or yield
4110 // undefined if the {index} is not within the valid bounds.
4111 Node* check =
4112 graph()->NewNode(simplified()->NumberLessThan(), index, length);
4113 Node* branch =
4114 graph()->NewNode(common()->Branch(BranchHint::kTrue), check, *control);
4115
4116 Node* if_true = graph()->NewNode(common()->IfTrue(), branch);
4117 // Do a real bounds check against {length}. This is in order to protect
4118 // against a potential typer bug leading to the elimination of the
4119 // NumberLessThan above.
4120 Node* etrue = *effect;
4121 if (v8_flags.turbo_typer_hardening) {
4122 etrue = index = graph()->NewNode(
4126 index, length, etrue, if_true);
4127 }
4128 Node* vtrue = etrue = graph()->NewNode(simplified()->StringCharCodeAt(),
4129 receiver, index, etrue, if_true);
4130 vtrue = graph()->NewNode(simplified()->StringFromSingleCharCode(), vtrue);
4131
4132 Node* if_false = graph()->NewNode(common()->IfFalse(), branch);
4133 Node* vfalse = jsgraph()->UndefinedConstant();
4134
4135 *control = graph()->NewNode(common()->Merge(2), if_true, if_false);
4136 *effect =
4137 graph()->NewNode(common()->EffectPhi(2), etrue, *effect, *control);
4139 vtrue, vfalse, *control);
4140 } else {
4141 // Ensure that {index} is less than {receiver} length.
4142 index = *effect = graph()->NewNode(
4145 index, length, *effect, *control);
4146
4147 // Return the character from the {receiver} as single character string.
4148 Node* value = *effect = graph()->NewNode(
4149 simplified()->StringCharCodeAt(), receiver, index, *effect, *control);
4150 value = graph()->NewNode(simplified()->StringFromSingleCharCode(), value);
4151 return value;
4152 }
4153}
4154
4156 MapRef map, Node* properties, Node* effect, Node* control) {
4157 // TODO(bmeurer/jkummerow): Property deletions can undo map transitions
4158 // while keeping the backing store around, meaning that even though the
4159 // map might believe that objects have no unused property fields, there
4160 // might actually be some. It would be nice to not create a new backing
4161 // store in that case (i.e. when properties->length() >= new_length).
4162 // However, introducing branches and Phi nodes here would make it more
4163 // difficult for escape analysis to get rid of the backing stores used
4164 // for intermediate states of chains of property additions. That makes
4165 // it unclear what the best approach is here.
4166 DCHECK_EQ(map.UnusedPropertyFields(), 0);
4167 int length = map.NextFreePropertyIndex() - map.GetInObjectProperties();
4168 // Under normal circumstances, NextFreePropertyIndex() will always be larger
4169 // than GetInObjectProperties(). However, an attacker able to corrupt heap
4170 // memory can break this invariant, in which case we'll get confused here,
4171 // potentially causing a sandbox violation. This CHECK defends against that.
4172 SBXCHECK_GE(length, 0);
4173 int new_length = length + JSObject::kFieldsAdded;
4174 // Collect the field values from the {properties}.
4175 ZoneVector<Node*> values(zone());
4176 values.reserve(new_length);
4177 for (int i = 0; i < length; ++i) {
4178 Node* value = effect = graph()->NewNode(
4180 properties, effect, control);
4181 values.push_back(value);
4182 }
4183 // Initialize the new fields to undefined.
4184 for (int i = 0; i < JSObject::kFieldsAdded; ++i) {
4185 values.push_back(jsgraph()->UndefinedConstant());
4186 }
4187
4188 // Compute new length and hash.
4189 Node* hash;
4190 if (length == 0) {
4191 hash = graph()->NewNode(
4193 graph()->NewNode(simplified()->ObjectIsSmi(), properties), properties,
4194 jsgraph()->SmiConstant(PropertyArray::kNoHashSentinel));
4195 hash = effect = graph()->NewNode(common()->TypeGuard(Type::SignedSmall()),
4196 hash, effect, control);
4197 hash = graph()->NewNode(
4198 simplified()->NumberShiftLeft(), hash,
4199 jsgraph()->ConstantNoHole(PropertyArray::HashField::kShift));
4200 } else {
4201 hash = effect = graph()->NewNode(
4203 properties, effect, control);
4204 hash = graph()->NewNode(
4205 simplified()->NumberBitwiseAnd(), hash,
4206 jsgraph()->ConstantNoHole(PropertyArray::HashField::kMask));
4207 }
4208 Node* new_length_and_hash =
4209 graph()->NewNode(simplified()->NumberBitwiseOr(),
4210 jsgraph()->ConstantNoHole(new_length), hash);
4211 // TDOO(jarin): Fix the typer to infer tighter bound for NumberBitwiseOr.
4212 new_length_and_hash = effect =
4213 graph()->NewNode(common()->TypeGuard(Type::SignedSmall()),
4214 new_length_and_hash, effect, control);
4215
4216 // Allocate and initialize the new properties.
4217 AllocationBuilder a(jsgraph(), broker(), effect, control);
4219 Type::OtherInternal());
4220 a.Store(AccessBuilder::ForMap(), jsgraph()->PropertyArrayMapConstant());
4221 a.Store(AccessBuilder::ForPropertyArrayLengthAndHash(), new_length_and_hash);
4222 for (int i = 0; i < new_length; ++i) {
4223 a.Store(AccessBuilder::ForFixedArraySlot(i), values[i]);
4224 }
4225 return a.Finish();
4226}
4227
4229 Node* value,
4230 Node* effect,
4231 Node* control) {
4232 DCHECK(name.IsUniqueName());
4233 Operator const* const op =
4234 name.IsSymbol() ? simplified()->CheckEqualsSymbol()
4236 return graph()->NewNode(op, jsgraph()->ConstantNoHole(name, broker()), value,
4237 effect, control);
4238}
4239
4241 ZoneVector<MapRef> const& receiver_maps) {
4242 // Check if all {receiver_maps} have one of the initial Array.prototype
4243 // or Object.prototype objects as their prototype (in any of the current
4244 // native contexts, as the global Array protector works isolate-wide).
4245 for (MapRef receiver_map : receiver_maps) {
4246 ObjectRef receiver_prototype = receiver_map.prototype(broker());
4247 if (!receiver_prototype.IsJSObject() ||
4248 !broker()->IsArrayOrObjectPrototype(receiver_prototype.AsJSObject())) {
4249 return false;
4250 }
4251 }
4252
4253 // Check if the array prototype chain is intact.
4255}
4256
4258 ZoneVector<MapRef>* maps) const {
4259 ZoneRefSet<Map> map_set;
4261 NodeProperties::InferMapsUnsafe(broker(), object, effect, &map_set);
4263 for (MapRef map : map_set) {
4264 maps->push_back(map);
4265 }
4266 return true;
4268 // For untrusted maps, we can still use the information
4269 // if the maps are stable.
4270 for (MapRef map : map_set) {
4271 if (!map.is_stable()) return false;
4272 }
4273 for (MapRef map : map_set) {
4274 maps->push_back(map);
4275 }
4276 return true;
4277 }
4278 return false;
4279}
4280
4282 HeapObjectMatcher m(object);
4283 if (m.HasResolvedValue()) {
4284 MapRef map = m.Ref(broker()).map(broker());
4285 return map.FindRootMap(broker());
4286 } else if (m.IsJSCreate()) {
4287 OptionalMapRef initial_map =
4289 if (initial_map.has_value()) {
4290 DCHECK(initial_map->equals(initial_map->FindRootMap(broker())));
4291 return *initial_map;
4292 }
4293 }
4294 return std::nullopt;
4295}
4296
4298 Node* object, Node* effect, Node* control) {
4299 Node* map = effect =
4300 graph()->NewNode(simplified()->LoadField(AccessBuilder::ForMap()), object,
4301 effect, control);
4302 return graph()->NewNode(
4303 simplified()->LoadField(AccessBuilder::ForMapPrototype()), map, effect,
4304 control);
4305}
4306
4307std::pair<Node*, Node*>
4309 JSGraphAssembler* gasm) {
4310 auto catch_scope = gasm->catch_scope();
4311 DCHECK(catch_scope->is_outermost());
4312
4313 if (catch_scope->has_handler() &&
4314 catch_scope->has_exceptional_control_flow()) {
4315 TNode<Object> handler_exception;
4316 Effect handler_effect{nullptr};
4317 Control handler_control{nullptr};
4319 &handler_exception, &handler_effect, &handler_control);
4320
4321 ReplaceWithValue(gasm->outermost_handler(), handler_exception,
4322 handler_effect, handler_control);
4323 }
4324
4325 return {gasm->effect(), gasm->control()};
4326}
4327
4331
4335
4339
4343
4347
4351
4352} // namespace compiler
4353} // namespace internal
4354} // namespace v8
JSGraph * jsgraph
Builtins::Kind kind
Definition builtins.cc:40
#define SBXCHECK_GE(lhs, rhs)
Definition check.h:65
static constexpr U kMask
Definition bit-field.h:41
static constexpr int kShift
Definition bit-field.h:39
static V8_EXPORT_PRIVATE Callable CallableFor(Isolate *isolate, Builtin builtin)
Definition builtins.cc:214
Handle< Code > code() const
Definition callable.h:22
CallInterfaceDescriptor descriptor() const
Definition callable.h:23
static const uint32_t kMinLength
Definition string.h:1029
static ExternalReference Create(const SCTableReference &table_ref)
V8_WARN_UNUSED_RESULT Handle< String > NumberToString(DirectHandle< Object > number, NumberCacheMode mode=NumberCacheMode::kBoth)
V8_WARN_UNUSED_RESULT HandleType< String >::MaybeType NewConsString(HandleType< String > left, HandleType< String > right, AllocationType allocation=AllocationType::kYoung)
Handle< Number > NewNumber(double value)
static V8_INLINE bool InYoungGeneration(Tagged< Object > object)
v8::internal::Factory * factory()
Definition isolate.h:1527
static const int kFieldsAdded
Definition js-objects.h:954
static constexpr uint32_t kMaxElementIndex
Definition js-objects.h:924
static const uint32_t kMaxGap
Definition js-objects.h:931
static constexpr size_t kMaxSizeInHeap
v8::internal::LocalFactory * factory()
static MachineType TypeForRepresentation(const MachineRepresentation &rep, bool isSigned=true)
static constexpr MachineType Float64()
static constexpr MachineType AnyTagged()
static constexpr MachineType TaggedSigned()
static constexpr MachineType TaggedPointer()
static constexpr int SizeFor(int length)
static const int kNoHashSentinel
PropertyCellType cell_type() const
constexpr bool IsDouble() const
static bool IsNeeded(Tagged< String > str, LocalIsolate *local_isolate)
Definition string-inl.h:76
static constexpr int kMaxValue
Definition smi.h:101
static void WriteToFlat(Tagged< String > source, SinkCharT *sink, uint32_t start, uint32_t length)
Definition string.cc:772
static const uint32_t kMaxLength
Definition string.h:511
static TNode UncheckedCast(compiler::Node *node)
Definition tnode.h:413
void push_back(const T &value)
static FieldAccess ForJSArrayBufferViewBuffer()
static ElementAccess ForFixedArrayElement()
static FieldAccess ForMap(WriteBarrierKind write_barrier=kMapWriteBarrier)
static FieldAccess ForFixedArraySlot(size_t index, WriteBarrierKind write_barrier_kind=kFullWriteBarrier)
static FieldAccess ForJSObjectPropertiesOrHashKnownPointer()
static FieldAccess ForJSArrayBufferBitField()
static FieldAccess ForJSArrayLength(ElementsKind elements_kind)
static FieldAccess ForJSAsyncFunctionObjectPromise()
static FieldAccess ForJSTypedArrayExternalPointer()
static FieldAccess ForPropertyArrayLengthAndHash()
static FieldAccess ForJSTypedArrayBasePointer()
static FieldAccess ForDescriptorArrayEnumCache()
bool ComputeElementAccessInfos(ElementAccessFeedback const &feedback, ZoneVector< ElementAccessInfo > *access_infos) const
bool FinalizePropertyAccessInfos(ZoneVector< PropertyAccessInfo > infos, AccessMode access_mode, ZoneVector< PropertyAccessInfo > *result) const
PropertyAccessInfo FinalizePropertyAccessInfosAsOne(ZoneVector< PropertyAccessInfo > infos, AccessMode access_mode) const
void ReplaceWithValue(Node *node, Node *value, Node *effect=nullptr, Node *control=nullptr)
void MergeControlToEnd(TFGraph *graph, CommonOperatorBuilder *common, Node *node)
static Reduction Replace(Node *node)
bool CanAllocateArray(int length, MapRef map, AllocationType allocation=AllocationType::kYoung)
const Operator * ExternalConstant(const ExternalReference &)
void DependOnConstantInDictionaryPrototypeChain(MapRef receiver_map, NameRef property_name, ObjectRef constant, PropertyKind kind)
HeapObjectRef DependOnPrototypeProperty(JSFunctionRef function)
void DependOnStablePrototypeChain(MapRef receiver_maps, WhereToStart start, OptionalJSObjectRef last_prototype=OptionalJSObjectRef())
void DependOnStablePrototypeChains(ZoneVector< MapRef > const &receiver_maps, WhereToStart start, OptionalJSObjectRef last_prototype=OptionalJSObjectRef())
bool HasOnlyStringMaps(JSHeapBroker *broker) const
ZoneVector< TransitionGroup > const & transition_groups() const
ZoneVector< MapRef > const & lookup_start_object_maps() const
Definition access-info.h:39
ZoneVector< MapRef > const & transition_sources() const
Definition access-info.h:42
FeedbackSource const & feedback() const
const FrameStateInfo & frame_state_info() const
ZoneVector< const CFunctionInfo * > c_signatures(JSHeapBroker *broker) const
ZoneVector< Address > c_functions(JSHeapBroker *broker) const
IndirectHandle< FunctionTemplateInfo > object() const
Address callback(JSHeapBroker *broker) const
bool is_signature_undefined(JSHeapBroker *broker) const
OptionalObjectRef callback_data(JSHeapBroker *broker) const
FeedbackSource const & callFeedback() const
FeedbackSource const & loadFeedback() const
void InitializeEffectControl(Node *effect, Node *control)
IndirectHandle< HeapObject > object() const
V8_EXPORT_PRIVATE MapRef map(JSHeapBroker *broker) const
static bool IsConstantOpcode(Value value)
Definition opcodes.h:1387
static constexpr int ArityForArgc(int parameters)
static constexpr int ArgumentIndex(int i)
ContextRef context(JSHeapBroker *broker) const
SharedFunctionInfoRef shared(JSHeapBroker *broker) const
void MergeExceptionalPaths(TNode< Object > *exception_out, Effect *effect_out, Control *control_out)
JSOperatorBuilder * javascript() const
Definition js-graph.h:104
SimplifiedOperatorBuilder * simplified() const
Definition js-graph.h:105
Node * HeapConstantNoHole(Handle< HeapObject > value)
Definition js-graph.cc:146
Isolate * isolate() const
Definition js-graph.h:106
Node * ConstantNoHole(ObjectRef ref, JSHeapBroker *broker)
Definition js-graph.cc:51
TNode< Boolean > BooleanConstant(bool is_true)
Definition js-graph.h:94
Node * SmiConstant(int32_t immediate)
Definition js-graph.h:99
LocalIsolate * local_isolate_or_isolate() const
Handle< T > CanonicalPersistentHandle(Tagged< T > object)
ProcessedFeedback const & GetFeedbackForPropertyAccess(FeedbackSource const &source, AccessMode mode, OptionalNameRef static_name)
CompilationDependencies * dependencies() const
ProcessedFeedback const & GetFeedbackForGlobalAccess(FeedbackSource const &source)
ProcessedFeedback const & GetFeedbackForInstanceOf(FeedbackSource const &source)
ProcessedFeedback const & GetFeedbackForCall(FeedbackSource const &source)
PropertyAccessInfo GetPropertyAccessInfo(MapRef map, NameRef name, AccessMode access_mode)
NativeContextRef target_native_context() const
Node * BuildIndexedStringLoad(Node *receiver, Node *index, Node *length, Node **effect, Node **control, KeyedAccessLoadMode load_mode)
Reduction ReduceNamedAccess(Node *node, Node *value, NamedAccessFeedback const &feedback, AccessMode access_mode, Node *key=nullptr)
Node * BuildExtendPropertiesBackingStore(MapRef map, Node *properties, Node *effect, Node *control)
static std::optional< size_t > GetMaxStringLength(JSHeapBroker *broker, Node *node)
Node * BuildCheckEqualsName(NameRef name, Node *value, Node *effect, Node *control)
ValueEffectControl BuildElementAccessForTypedArrayOrRabGsabTypedArray(Node *receiver, Node *index, Node *value, Node *effect, Node *control, Node *context, ElementsKind elements_kind, KeyedAccessMode const &keyed_mode)
Reduction ReducePropertyAccess(Node *node, Node *key, OptionalNameRef static_name, Node *value, FeedbackSource const &source, AccessMode access_mode)
Node * InlineApiCall(Node *receiver, Node *frame_state, Node *value, Node **effect, Node **control, FunctionTemplateInfoRef function_template_info, const FeedbackSource &feedback)
ValueEffectControl BuildElementAccess(Node *receiver, Node *index, Node *value, Node *effect, Node *control, Node *context, ElementAccessInfo const &access_info, KeyedAccessMode const &keyed_mode)
Node * BuildLoadPrototypeFromObject(Node *object, Node *effect, Node *control)
std::optional< ValueEffectControl > BuildPropertyAccess(Node *lookup_start_object, Node *receiver, Node *value, Node *context, Node *frame_state, Node *effect, Node *control, NameRef name, ZoneVector< Node * > *if_exceptions, PropertyAccessInfo const &access_info, AccessMode access_mode)
Reduction ReduceMegaDOMPropertyAccess(Node *node, Node *value, MegaDOMPropertyAccessFeedback const &feedback, FeedbackSource const &source)
bool InferMaps(Node *object, Effect effect, ZoneVector< MapRef > *maps) const
bool StringCanSafelyBeRead(Node *const node, Handle< String > str)
Handle< String > Concatenate(Handle< String > left, Handle< String > right)
InferHasInPrototypeChainResult InferHasInPrototypeChain(Node *receiver, Effect effect, HeapObjectRef prototype)
void InlinePropertySetterCall(Node *receiver, Node *value, Node *context, Node *frame_state, Node **effect, Node **control, ZoneVector< Node * > *if_exceptions, PropertyAccessInfo const &access_info)
ValueEffectControl BuildPropertyTest(Node *effect, Node *control, PropertyAccessInfo const &access_info)
Node * InlinePropertyGetterCall(Node *receiver, ConvertReceiverMode receiver_mode, Node *lookup_start_object, Node *context, Node *frame_state, Node **effect, Node **control, ZoneVector< Node * > *if_exceptions, PropertyAccessInfo const &access_info)
JSNativeContextSpecialization(Editor *editor, JSGraph *jsgraph, JSHeapBroker *broker, Flags flags, Zone *zone, Zone *shared_zone)
Reduction ReduceElementLoadFromHeapConstant(Node *node, Node *key, AccessMode access_mode, KeyedAccessLoadMode load_mode)
ValueEffectControl BuildPropertyStore(Node *receiver, Node *value, Node *context, Node *frame_state, Node *effect, Node *control, NameRef name, ZoneVector< Node * > *if_exceptions, PropertyAccessInfo const &access_info, AccessMode access_mode)
Reduction ReduceEagerDeoptimize(Node *node, DeoptimizeReason reason)
ZoneUnorderedSet< IndirectHandle< String >, IndirectHandle< String >::hash, IndirectHandle< String >::equal_to > created_strings_
void RemoveImpossibleMaps(Node *object, ZoneVector< MapRef > *maps) const
std::pair< Node *, Node * > ReleaseEffectAndControlFromAssembler(JSGraphAssembler *assembler)
std::optional< ValueEffectControl > BuildPropertyLoad(Node *lookup_start_object, Node *receiver, Node *context, Node *frame_state, Node *effect, Node *control, NameRef name, ZoneVector< Node * > *if_exceptions, PropertyAccessInfo const &access_info)
Reduction ReduceElementAccess(Node *node, Node *index, Node *value, ElementAccessFeedback const &feedback)
Reduction ReduceElementAccessOnString(Node *node, Node *index, Node *value, KeyedAccessMode const &keyed_mode)
ElementAccessFeedback const & TryRefineElementAccessFeedback(ElementAccessFeedback const &feedback, Node *receiver, Effect effect) const
bool CanTreatHoleAsUndefined(ZoneVector< MapRef > const &receiver_maps)
Reduction ReduceGlobalAccess(Node *node, Node *lookup_start_object, Node *receiver, Node *value, NameRef name, AccessMode access_mode, Node *key, PropertyCellRef property_cell, Node *effect=nullptr)
OptionalObjectRef GetOwnFastConstantDataProperty(JSHeapBroker *broker, Representation field_representation, FieldIndex index, CompilationDependencies *dependencies) const
OptionalObjectRef GetOwnConstantElement(JSHeapBroker *broker, FixedArrayBaseRef elements_ref, uint32_t index, CompilationDependencies *dependencies) const
OptionalFixedArrayBaseRef elements(JSHeapBroker *broker, RelaxedLoadTag) const
const Operator * Call(size_t arity, CallFrequency const &frequency=CallFrequency(), FeedbackSource const &feedback=FeedbackSource(), ConvertReceiverMode convert_mode=ConvertReceiverMode::kAny, SpeculationMode speculation_mode=SpeculationMode::kDisallowSpeculation, CallFeedbackRelation feedback_relation=CallFeedbackRelation::kUnrelated)
const Operator * LoadNamed(NameRef name, FeedbackSource const &feedback)
KeyedAccessStoreMode store_mode() const
KeyedAccessLoadMode load_mode() const
static CallDescriptor * GetStubCallDescriptor(Zone *zone, const CallInterfaceDescriptor &descriptor, int stack_parameter_count, CallDescriptor::Flags flags, Operator::Properties properties=Operator::kNoProperties, StubCallMode stub_mode=StubCallMode::kCallCodeObject)
Definition linkage.cc:587
const FeedbackSource & feedback() const
CommonOperatorBuilder * common() const
Node * PointerConstant(intptr_t value)
V8_WARN_UNUSED_RESULT bool RelyOnMapsViaStability(CompilationDependencies *dependencies)
V8_WARN_UNUSED_RESULT ZoneRefSet< Map > const & GetMaps()
V8_WARN_UNUSED_RESULT Reduction NoChange()
V8_WARN_UNUSED_RESULT bool HaveMaps() const
V8_WARN_UNUSED_RESULT bool AllOfInstanceTypesAreJSReceiver() const
V8_WARN_UNUSED_RESULT bool AnyOfInstanceTypesAre(InstanceType type) const
MapRef FindRootMap(JSHeapBroker *broker) const
HeapObjectRef prototype(JSHeapBroker *broker) const
HeapObjectRef GetBackPointer(JSHeapBroker *broker) const
ElementsKind elements_kind() const
IndirectHandle< Name > object() const
FeedbackSource const & feedback() const
static void ChangeOp(Node *node, const Operator *new_op)
static void ReplaceEffectInput(Node *node, Node *effect, int index=0)
static OptionalMapRef GetJSCreateMap(JSHeapBroker *broker, Node *receiver)
static Node * GetEffectInput(Node *node, int index=0)
static Node * GetContextInput(Node *node)
static Node * GetFrameStateInput(Node *node)
static Node * GetValueInput(Node *node, int index)
static void ReplaceValueInput(Node *node, Node *value, int index)
static Node * FindFrameStateBefore(Node *node, Node *unreachable_sentinel)
static bool NoObservableSideEffectBetween(Node *effect, Node *dominator)
static bool IsExceptionalCall(Node *node, Node **out_exception=nullptr)
static InferMapsResult InferMapsUnsafe(JSHeapBroker *broker, Node *receiver, Effect effect, ZoneRefSet< Map > *maps_out)
static Node * GetControlInput(Node *node, int index=0)
Inputs inputs() const
Definition node.h:478
const Operator * op() const
Definition node.h:50
int InputCount() const
Definition node.h:59
void ReplaceInput(int index, Node *new_to)
Definition node.h:76
Node * InputAt(int index) const
Definition node.h:70
static OutputFrameStateCombine PokeAt(size_t index)
GlobalAccessFeedback const & AsGlobalAccess() const
static MachineRepresentation ConvertRepresentation(Representation representation)
bool TryBuildNumberCheck(JSHeapBroker *broker, ZoneVector< MapRef > const &maps, Node **receiver, Effect *effect, Control control)
Node * BuildCheckValue(Node *receiver, Effect *effect, Control control, ObjectRef value)
bool TryBuildStringCheck(JSHeapBroker *broker, ZoneVector< MapRef > const &maps, Node **receiver, Effect *effect, Control control)
void BuildCheckMaps(Node *object, Effect *effect, Control control, ZoneVector< MapRef > const &maps, bool has_deprecated_map_without_migration_target=false)
std::optional< Node * > FoldLoadDictPrototypeConstant(PropertyAccessInfo const &access_info)
Node * BuildLoadDataField(NameRef name, PropertyAccessInfo const &access_info, Node *lookup_start_object, Node **effect, Node **control)
Representation field_representation() const
OptionalJSObjectRef holder() const
ZoneVector< MapRef > const & lookup_start_object_maps() const
void RecordDependencies(CompilationDependencies *dependencies)
FeedbackSource const & feedback() const
V8_WARN_UNUSED_RESULT bool Cache(JSHeapBroker *broker) const
PropertyDetails property_details() const
ObjectRef value(JSHeapBroker *broker) const
static Reduction Changed(Node *node)
Reduction FollowedBy(Reduction next) const
FeedbackSource const & feedback() const
Node * NewNode(const Operator *op, int input_count, Node *const *inputs, bool incomplete=false)
static Type Union(Type type1, Type type2, Zone *zone)
static Type For(MapRef type, JSHeapBroker *broker)
static UseInfo TruncatingWord32()
Definition use-info.h:200
Zone * zone_
Handle< Code > code
JSRegExp::Flags flags_
JSHeapBroker *const broker_
int start
DirectHandle< Object > new_target
Definition execution.cc:75
AssemblerT assembler
JSHeapBroker * broker
TNode< Context > context
std::optional< TNode< JSArray > > a
TNode< Object > receiver
std::map< const std::string, const std::string > map
#define TRACE_BROKER_MISSING(broker, x)
ZoneVector< RpoNumber > & result
MovableLabel continuation
int m
Definition mul-fft.cc:294
int n
Definition mul-fft.cc:296
int r
Definition mul-fft.cc:298
FastApiCallFunction GetFastApiCallTarget(JSHeapBroker *broker, FunctionTemplateInfoRef function_template_info, size_t arg_count)
V8_INLINE const Operation & Get(const Graph &graph, OpIndex index)
Definition graph.h:1231
bool HasOnlyNonResizableTypedArrayMaps(JSHeapBroker *broker, ZoneVector< MapRef > const &maps)
bool HasOnlyStringMaps(JSHeapBroker *broker, ZoneVector< MapRef > const &maps)
FrameState CloneFrameState(JSGraph *jsgraph, FrameState frame_state, OutputFrameStateCombine changed_state_combine)
size_t ProjectionIndexOf(const Operator *const op)
bool IsAnyStore(AccessMode mode)
Definition heap-refs.h:65
bool HasOnlyStringWrapperMaps(JSHeapBroker *broker, ZoneVector< MapRef > const &maps)
const FrameStateInfo & FrameStateInfoOf(const Operator *op)
FrameState CreateStubBuiltinContinuationFrameState(JSGraph *jsgraph, Builtin name, Node *context, Node *const *parameters, int parameter_count, Node *outer_frame_state, ContinuationFrameStateMode mode, const wasm::CanonicalSig *signature)
HeapConstantNoHole(BUILTIN_CODE(isolate(), AllocateInOldGeneration))) DEFINE_GETTER(ArrayConstructorStubConstant
ExternalArrayType GetArrayTypeFromElementsKind(ElementsKind kind)
Definition globals.h:142
ref_traits< T >::ref_type MakeRef(JSHeapBroker *broker, Tagged< T > object)
HeapObjectMatcherImpl< IrOpcode::kHeapConstant > HeapObjectMatcher
bool HasOnlyJSArrayMaps(base::Vector< const compiler::MapRef > maps)
Definition maglev-ir.h:859
bool StoreModeHandlesCOW(KeyedAccessStoreMode store_mode)
Definition globals.h:2728
bool StoreModeCanGrow(KeyedAccessStoreMode store_mode)
Definition globals.h:2742
constexpr bool IsHoleyElementsKind(ElementsKind kind)
bool IsSpecialReceiverInstanceType(InstanceType instance_type)
bool IsTypedArrayElementsKind(ElementsKind kind)
bool IsRabGsabTypedArrayElementsKind(ElementsKind kind)
constexpr bool IsSmiElementsKind(ElementsKind kind)
constexpr int kMaxDoubleStringLength
Definition globals.h:430
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in name
Definition flags.cc:2086
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
bool IsSmiOrObjectElementsKind(ElementsKind kind)
DONT_OVERRIDE DISABLE_ALLOCATION_SITES HOLEY_ELEMENTS
bool IsFastElementsKind(ElementsKind kind)
DONT_OVERRIDE DISABLE_ALLOCATION_SITES DISABLE_ALLOCATION_SITES HOLEY_DOUBLE_ELEMENTS
bool IsHoleyOrDictionaryElementsKind(ElementsKind kind)
bool StoreModeIgnoresTypeArrayOOB(KeyedAccessStoreMode store_mode)
Definition globals.h:2738
V8_EXPORT_PRIVATE FlagValues v8_flags
@ kExternalBigInt64Array
Definition globals.h:2463
@ kExternalUint8ClampedArray
Definition globals.h:2462
@ kExternalBigUint64Array
Definition globals.h:2464
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in allocation gc speed threshold for starting incremental marking via a task in percent of available threshold for starting incremental marking immediately in percent of available Use a single schedule for determining a marking schedule between JS and C objects schedules the minor GC task with kUserVisible priority max worker number of concurrent for NumberOfWorkerThreads start background threads that allocate memory 
concurrent_array_buffer_sweeping use parallel threads to clear weak refs in the atomic pause trace progress of the incremental marking trace object counts and memory usage report a tick only when allocated zone memory changes by this amount TracingFlags::gc_stats TracingFlags::gc_stats track native contexts that are expected to be garbage collected verify heap pointers before and after GC memory reducer runs GC with ReduceMemoryFootprint flag Maximum number of memory reducer GCs scheduled Old gen GC speed is computed directly from gc tracer counters Perform compaction on full GCs based on V8 s default heuristics Perform compaction on every full GC Perform code space compaction when finalizing a full GC with stack Stress GC compaction to flush out bugs with moving objects flush of baseline code when it has not been executed recently Use time base code flushing instead of age Use a progress bar to scan large objects in increments when incremental marking is active force incremental marking for small heaps and run it more often force marking at random points between and force scavenge at random points between and reclaim otherwise unreachable unmodified wrapper objects when possible less compaction in non memory reducing mode use high priority threads for concurrent Marking Test mode only flag It allows an unit test to select evacuation candidates use incremental marking for CppHeap cppheap_concurrent_marking c value for membalancer A special constant to balance between memory and space tradeoff The smaller the more memory it uses enable use of SSE4 instructions if available enable use of AVX VNNI instructions if available enable use of POPCNT instruction if available force all emitted branches to be in long enable use of partial constant none
Definition flags.cc:2422
return value
Definition map-inl.h:893
SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit SharedFunctionInfo::MaglevCompilationFailedBit SharedFunctionInfo::FunctionSyntaxKindBits SharedFunctionInfo::HasDuplicateParametersBit requires_instance_members_initializer
bool IsTypedArrayOrRabGsabTypedArrayElementsKind(ElementsKind kind)
bool LoadModeHandlesHoles(KeyedAccessLoadMode load_mode)
Definition globals.h:2695
constexpr bool IsDoubleElementsKind(ElementsKind kind)
constexpr uint32_t kMaxUInt32
Definition globals.h:387
bool LoadModeHandlesOOB(KeyedAccessLoadMode load_mode)
Definition globals.h:2689
@ kStartAtPrototype
Definition globals.h:1714
static constexpr RelaxedLoadTag kRelaxedLoad
Definition globals.h:2909
uint32_t equals
#define DCHECK_LE(v1, v2)
Definition logging.h:490
#define DCHECK_NULL(val)
Definition logging.h:491
#define CHECK(condition)
Definition logging.h:124
#define DCHECK_NOT_NULL(val)
Definition logging.h:492
#define DCHECK_IMPLIES(v1, v2)
Definition logging.h:493
#define DCHECK_NE(v1, v2)
Definition logging.h:486
#define DCHECK_GE(v1, v2)
Definition logging.h:488
#define DCHECK(condition)
Definition logging.h:482
#define DCHECK_EQ(v1, v2)
Definition logging.h:485
#define arraysize(array)
Definition macros.h:67
IndirectHandle< FeedbackVector > vector
bool IsInRange(const T &low, const T &high) const
HeapObjectRef Ref(JSHeapBroker *broker) const
#define OFFSET_OF_DATA_START(Type)
wasm::ValueType type