5#if V8_TARGET_ARCH_ARM64
105#define __ ACCESS_MASM(masm_)
// Constructor (truncated view: the signature line and several initializers are
// elided). Owns its MacroAssembler via unique_ptr, records how many registers
// the regexp needs saved, and binds start_label_ as the entry of generated code.
108 Zone* zone, Mode mode,
109 int registers_to_save)
110 : NativeRegExpMacroAssembler(isolate, zone),
111 masm_(
std::make_unique<MacroAssembler>(
114 no_root_array_scope_(
masm_.get()),
116 num_registers_(registers_to_save),
117 num_saved_registers_(registers_to_save),
// Cached registers hold two 32-bit values per X register, so the count must
// be even and small enough to fit the cache scheme.
125 static_assert(kNumCachedRegisters <= 16);
126 static_assert((kNumCachedRegisters % 2) == 0);
130 __ Bind(&start_label_);
// Defaulted destructor: unique_ptr members release the MacroAssembler.
133RegExpMacroAssemblerARM64::~RegExpMacroAssemblerARM64() =
default;
// Called when codegen is abandoned: notifies the MacroAssembler and unuses all
// labels so their destructors do not assert on unresolved branches.
// (One Unuse() line — presumably exit_label_ — is elided from this view.)
135void RegExpMacroAssemblerARM64::AbortedCodeGeneration() {
136 masm_->AbortedCodeGeneration();
137 entry_label_.Unuse();
138 start_label_.Unuse();
139 success_label_.Unuse();
140 backtrack_label_.Unuse();
142 check_preempt_label_.Unuse();
143 stack_overflow_label_.Unuse();
144 fallback_label_.Unuse();
// Slack (in slots) left below the regexp stack limit, from RegExpStack.
147int RegExpMacroAssemblerARM64::stack_limit_slack_slot_count() {
148 return RegExpStack::kStackLimitSlackSlotCount;
// Moves the current input position by `by` characters (offset is in bytes,
// hence the scale by char_size()).
151void RegExpMacroAssemblerARM64::AdvanceCurrentPosition(
int by) {
153 __ Add(current_input_offset(),
154 current_input_offset(), by * char_size());
// Adds `by` to register `reg`, dispatching on where the register currently
// lives (stacked vs. cached; several case labels are elided from this view).
159void RegExpMacroAssemblerARM64::AdvanceRegister(
int reg,
int by) {
163 switch (register_state) {
// Stacked register: load, add, store back.
165 __ Ldr(w10, register_location(
reg));
166 __ Add(w10, w10, by);
167 __ Str(w10, register_location(
reg));
171 __ Add(to_advance, to_advance, by);
// Cached MSW: the addend is shifted into the high 32 bits of the X register.
// The double static_cast via uint64_t avoids UB when shifting a negative
// value left.
178 to_advance, to_advance,
179 static_cast<int64_t
>(
static_cast<uint64_t
>(
static_cast<int64_t
>(by))
180 << kWRegSizeInBits));
// Pops a code offset from the backtrack stack and jumps to it. When a
// backtrack limit is configured, the count in the frame is incremented and
// compared against the limit first; if exceeded and fallback is allowed,
// control transfers to fallback_label_.
190void RegExpMacroAssemblerARM64::Backtrack() {
192 if (has_backtrack_limit()) {
194 UseScratchRegisterScope temps(
masm_.get());
195 Register scratch = temps.AcquireW();
// Increment the per-match backtrack counter kept in the frame.
196 __ Ldr(scratch,
MemOperand(frame_pointer(), kBacktrackCountOffset));
197 __ Add(scratch, scratch, 1);
198 __ Str(scratch,
MemOperand(frame_pointer(), kBacktrackCountOffset));
199 __ Cmp(scratch, Operand(backtrack_limit()));
203 if (can_fallback()) {
204 __ B(&fallback_label_);
// Target address = code start + popped 32-bit offset (the pop into w10 is
// elided from this view).
213 __ Add(x10, code_pointer(), Operand(w10, UXTW));
// Bind: binds a plain label; BindJumpTarget: binds a label that is the target
// of indirect jumps (bodies elided from this view).
218void RegExpMacroAssemblerARM64::Bind(Label*
label) {
222void RegExpMacroAssemblerARM64::BindJumpTarget(Label*
label) {
// Simple current-character / at-start checks. Each compares and either
// branches to the supplied label or backtracks when the label is null.
226void RegExpMacroAssemblerARM64::CheckCharacter(uint32_t c, Label* on_equal) {
227 CompareAndBranchOrBacktrack(current_character(), c, eq, on_equal);
// Branch if current character > limit (unsigned: hi).
230void RegExpMacroAssemblerARM64::CheckCharacterGT(base::uc16 limit,
232 CompareAndBranchOrBacktrack(current_character(), limit, hi, on_greater);
// Branch if position cp_offset is exactly at the start of input: offset of
// the previous character equals string_start_minus_one().
235void RegExpMacroAssemblerARM64::CheckAtStart(
int cp_offset,
236 Label* on_at_start) {
237 __ Add(w10, current_input_offset(),
238 Operand(-char_size() + cp_offset * char_size()));
239 __ Cmp(w10, string_start_minus_one());
240 BranchOrBacktrack(eq, on_at_start);
// Inverse of CheckAtStart: branch when NOT at the start.
243void RegExpMacroAssemblerARM64::CheckNotAtStart(
int cp_offset,
244 Label* on_not_at_start) {
245 __ Add(w10, current_input_offset(),
246 Operand(-char_size() + cp_offset * char_size()));
247 __ Cmp(w10, string_start_minus_one());
248 BranchOrBacktrack(ne, on_not_at_start);
// Branch if current character < limit (unsigned: lo).
251void RegExpMacroAssemblerARM64::CheckCharacterLT(base::uc16 limit,
253 CompareAndBranchOrBacktrack(current_character(), limit, lo, on_less);
// Compares a literal string `str` against the input at cp_offset, character
// by character, backtracking to on_failure on the first mismatch. Loads use
// post-indexed addressing to step through the input (loop header elided).
256void RegExpMacroAssemblerARM64::CheckCharacters(
257 base::Vector<const base::uc16> str,
int cp_offset, Label* on_failure,
258 bool check_end_of_string) {
// Fail early if the whole literal cannot fit before the end of input.
261 if (check_end_of_string) {
263 CheckPosition(cp_offset + str.length() - 1, on_failure);
// characters_address = input_end + current offset (+ cp_offset scaled).
268 __ Add(characters_address,
270 Operand(current_input_offset(), SXTW));
271 if (cp_offset != 0) {
272 __ Add(characters_address, characters_address, cp_offset * char_size());
276 if (
mode_ == LATIN1) {
// One-byte mode: load a byte and advance by 1.
277 __ Ldrb(w10,
MemOperand(characters_address, 1, PostIndex));
278 DCHECK_GE(String::kMaxOneByteCharCode, str[
i]);
// Two-byte mode: load a half-word and advance by 2.
280 __ Ldrh(w10,
MemOperand(characters_address, 2, PostIndex));
282 CompareAndBranchOrBacktrack(w10, str[
i], ne, on_failure);
// Greedy-loop optimization: if the position on top of the backtrack stack
// equals the current position, drop the stack entry and take on_equal
// (the peek into w10/x11 is elided from this view).
286void RegExpMacroAssemblerARM64::CheckGreedyLoop(Label* on_equal) {
288 __ Cmp(current_input_offset(), w10);
290 __ Add(backtrack_stackpointer(),
291 backtrack_stackpointer(), Operand(x11, LSL, kWRegSizeLog2));
292 BranchOrBacktrack(eq, on_equal);
// Spills the eight X registers (x0-x7) that cache the first
// kNumCachedRegisters regexp registers (two 32-bit values per X register)
// around C calls, and restores them afterwards.
295void RegExpMacroAssemblerARM64::PushCachedRegisters() {
296 CPURegList cached_registers(CPURegister::kRegister, kXRegSizeInBits, 0, 7);
297 DCHECK_EQ(kNumCachedRegisters, cached_registers.Count() * 2);
298 __ PushCPURegList(cached_registers);
301void RegExpMacroAssemblerARM64::PopCachedRegisters() {
302 CPURegList cached_registers(CPURegister::kRegister, kXRegSizeInBits, 0, 7);
303 DCHECK_EQ(kNumCachedRegisters, cached_registers.Count() * 2);
304 __ PopCPURegList(cached_registers);
// Case-insensitive back-reference check. Loads the capture's start/length,
// handles the empty-capture fast path, then for LATIN1 compares inline with
// ASCII case folding; otherwise calls out to a C helper. Backtracks to
// on_no_match on mismatch. (Several lines are elided from this view.)
307void RegExpMacroAssemblerARM64::CheckNotBackReferenceIgnoreCase(
308 int start_reg,
bool read_backward,
bool unicode, Label* on_no_match) {
311 Register capture_start_offset = w10;
// Fetch capture start/end: cached registers pack both halves in one X reg,
// otherwise load the pair from the frame.
319 if (start_reg < kNumCachedRegisters) {
320 __ Mov(capture_start_offset.X(), GetCachedRegister(start_reg));
321 __ Lsr(x11, GetCachedRegister(start_reg), kWRegSizeInBits);
323 __ Ldp(w11, capture_start_offset, capture_location(start_reg, x10));
325 __ Sub(capture_length, w11, capture_start_offset);
// Empty capture trivially matches.
330 __ CompareAndBranch(capture_length, Operand(0), eq, &fallthrough);
// Bounds checks: enough input before (read_backward) / after the position.
334 __ Add(w12, string_start_minus_one(), capture_length);
335 __ Cmp(current_input_offset(), w12);
336 BranchOrBacktrack(le, on_no_match);
338 __ Cmn(capture_length, current_input_offset());
339 BranchOrBacktrack(gt, on_no_match);
342 if (
mode_ == LATIN1) {
// Inline byte-wise compare with ASCII case folding.
347 Register capture_start_address = x12;
349 Register current_position_address = x14;
351 __ Add(capture_start_address,
353 Operand(capture_start_offset, SXTW));
354 __ Add(capture_end_address, capture_start_address,
355 Operand(capture_length, SXTW));
356 __ Add(current_position_address,
358 Operand(current_input_offset(), SXTW));
// When reading backward, compare against the capture_length bytes that
// precede the current position.
361 __ Sub(current_position_address, current_position_address,
362 Operand(capture_length, SXTW));
367 __ Ldrb(w10,
MemOperand(capture_start_address, 1, PostIndex));
368 __ Ldrb(w11,
MemOperand(current_position_address, 1, PostIndex));
370 __ B(eq, &loop_check);
// Mismatch: try again with bit 0x20 set on both (lower-cases ASCII letters).
373 __ Orr(w10, w10, 0x20);
374 __ Orr(w11, w11, 0x20);
377 __ Sub(w10, w10,
'a');
378 __ Cmp(w10,
'z' -
'a');
379 __ B(ls, &loop_check);
// Latin-1 letters outside a-z: 224-254 fold too, except 247 (÷).
381 __ Sub(w10, w10, 224 -
'a');
382 __ Cmp(w10, 254 - 224);
383 __ Ccmp(w10, 247 - 224, ZFlag, ls);
386 __ Bind(&loop_check);
387 __ Cmp(capture_start_address, capture_end_address);
392 BranchOrBacktrack(al, on_no_match);
// Success: recompute current_input_offset from the final address.
396 __ Sub(current_input_offset().
X(), current_position_address, input_end());
398 __ Sub(current_input_offset().
X(), current_input_offset().
X(),
399 Operand(capture_length, SXTW));
// Debug-only sanity check that the offset still fits in 32 bits and is <= 0.
402 __ Cmp(current_input_offset().
X(), Operand(current_input_offset(), SXTW));
403 __ Ccmp(current_input_offset(), 0, NoFlag, eq);
405 __ Check(le, AbortReason::kOffsetOutOfRange);
// UC16 path: call the C case-insensitive compare helper.
409 int argument_count = 4;
411 PushCachedRegisters();
// x0: capture start address, w2: length, x1: current position address.
421 __ Add(x0, input_end(), Operand(capture_start_offset, SXTW));
423 __ Mov(w2, capture_length);
425 __ Add(x1, input_end(), Operand(current_input_offset(), SXTW));
427 __ Sub(x1, x1, Operand(capture_length, SXTW));
430 __ Mov(x3, ExternalReference::isolate_address(
isolate()));
433 AllowExternalCallThatCantCauseGC scope(
masm_.get());
434 ExternalReference function =
436 ? ExternalReference::re_case_insensitive_compare_unicode()
437 : ExternalReference::re_case_insensitive_compare_non_unicode();
438 CallCFunctionFromIrregexpCode(function, argument_count);
445 PopCachedRegisters();
// Zero return value means the strings were not equal.
446 BranchOrBacktrack(eq, on_no_match);
// Advance (or retreat, when reading backward) past the matched capture.
450 __ Sub(current_input_offset(), current_input_offset(), capture_length);
452 __ Add(current_input_offset(), current_input_offset(), capture_length);
456 __ Bind(&fallthrough);
// Case-sensitive back-reference check: byte/half-word-wise compare of the
// captured substring against the input at the current position; backtracks to
// on_no_match on mismatch. (Several lines are elided from this view.)
459void RegExpMacroAssemblerARM64::CheckNotBackReference(
int start_reg,
461 Label* on_no_match) {
464 Register capture_start_address = x12;
466 Register current_position_address = x14;
// Fetch capture start/end (cached pair or frame slot pair).
471 if (start_reg < kNumCachedRegisters) {
472 __ Mov(x10, GetCachedRegister(start_reg));
473 __ Lsr(x11, GetCachedRegister(start_reg), kWRegSizeInBits);
475 __ Ldp(w11, w10, capture_location(start_reg, x10));
477 __ Sub(capture_length, w11, w10);
// Empty capture trivially matches.
482 __ CompareAndBranch(capture_length, Operand(0), eq, &fallthrough);
// Bounds checks for backward/forward reads.
486 __ Add(w12, string_start_minus_one(), capture_length);
487 __ Cmp(current_input_offset(), w12);
488 BranchOrBacktrack(le, on_no_match);
490 __ Cmn(capture_length, current_input_offset());
491 BranchOrBacktrack(gt, on_no_match);
// Compute absolute addresses of the capture and the current position.
495 __ Add(capture_start_address, input_end(), Operand(w10, SXTW));
496 __ Add(capture_end_address,
497 capture_start_address,
498 Operand(capture_length, SXTW));
499 __ Add(current_position_address,
501 Operand(current_input_offset(), SXTW));
// When reading backward, compare the capture_length units before the cursor.
504 __ Sub(current_position_address, current_position_address,
505 Operand(capture_length, SXTW));
510 if (
mode_ == LATIN1) {
511 __ Ldrb(w10,
MemOperand(capture_start_address, 1, PostIndex));
512 __ Ldrb(w11,
MemOperand(current_position_address, 1, PostIndex));
515 __ Ldrh(w10,
MemOperand(capture_start_address, 2, PostIndex));
516 __ Ldrh(w11,
MemOperand(current_position_address, 2, PostIndex));
519 BranchOrBacktrack(ne, on_no_match);
520 __ Cmp(capture_start_address, capture_end_address);
// Success: recompute current_input_offset from the final address.
524 __ Sub(current_input_offset().
X(), current_position_address, input_end());
526 __ Sub(current_input_offset().
X(), current_input_offset().
X(),
527 Operand(capture_length, SXTW));
// Debug-only: offset fits in 32 bits and is <= 0.
531 __ Cmp(current_input_offset().
X(), Operand(current_input_offset(), SXTW));
532 __ Ccmp(current_input_offset(), 0, NoFlag, eq);
534 __ Check(le, AbortReason::kOffsetOutOfRange);
536 __ Bind(&fallthrough);
// Single-character predicates over current_character(); each either branches
// to the given label or backtracks when it is null.
540void RegExpMacroAssemblerARM64::CheckNotCharacter(
unsigned c,
541 Label* on_not_equal) {
542 CompareAndBranchOrBacktrack(current_character(), c, ne, on_not_equal);
// (current_character() & mask) == c — the And into w10 is elided here.
546void RegExpMacroAssemblerARM64::CheckCharacterAfterAnd(uint32_t c,
550 CompareAndBranchOrBacktrack(w10, c, eq, on_equal);
// (current_character() & mask) != c.
554void RegExpMacroAssemblerARM64::CheckNotCharacterAfterAnd(
unsigned c,
556 Label* on_not_equal) {
558 CompareAndBranchOrBacktrack(w10, c, ne, on_not_equal);
// ((current_character() - minus) & mask) != c.
561void RegExpMacroAssemblerARM64::CheckNotCharacterAfterMinusAnd(
562 base::uc16 c, base::uc16 minus, base::uc16
mask, Label* on_not_equal) {
563 DCHECK_GT(String::kMaxUtf16CodeUnit, minus);
564 __ Sub(w10, current_character(), minus);
566 CompareAndBranchOrBacktrack(w10, c, ne, on_not_equal);
// from <= current_character() <= to, via unsigned (c - from) <= (to - from).
569void RegExpMacroAssemblerARM64::CheckCharacterInRange(base::uc16 from,
571 Label* on_in_range) {
572 __ Sub(w10, current_character(), from);
574 CompareAndBranchOrBacktrack(w10, to - from, ls, on_in_range);
// Inverse range check (hi = unsigned greater-than).
577void RegExpMacroAssemblerARM64::CheckCharacterNotInRange(
578 base::uc16 from, base::uc16 to, Label* on_not_in_range) {
579 __ Sub(w10, current_character(), from);
581 CompareAndBranchOrBacktrack(w10, to - from, hi, on_not_in_range);
// Calls the C helper that tests current_character() against a compiled range
// array; the two public wrappers spill/restore the cached registers around
// the call and branch on the helper's boolean result (in w0).
584void RegExpMacroAssemblerARM64::CallIsCharacterInRangeArray(
585 const ZoneList<CharacterRange>* ranges) {
586 static const int kNumArguments = 2;
587 __ Mov(w0, current_character());
588 __ Mov(x1, GetOrAddRangeArray(ranges));
592 FrameScope scope(
masm_.get(), StackFrame::MANUAL);
593 CallCFunctionFromIrregexpCode(
594 ExternalReference::re_is_character_in_range_array(), kNumArguments);
// The C call may clobber code_pointer(); restore it.
597 __ Mov(code_pointer(), Operand(
masm_->CodeObject()));
// Branch to on_in_range when the helper returned non-zero.
600bool RegExpMacroAssemblerARM64::CheckCharacterInRangeArray(
601 const ZoneList<CharacterRange>* ranges, Label* on_in_range) {
605 PushCachedRegisters();
606 CallIsCharacterInRangeArray(ranges);
608 PopCachedRegisters();
609 BranchOrBacktrack(ne, on_in_range);
// Branch to on_not_in_range when the helper returned zero.
613bool RegExpMacroAssemblerARM64::CheckCharacterNotInRangeArray(
614 const ZoneList<CharacterRange>* ranges, Label* on_not_in_range) {
618 PushCachedRegisters();
619 CallIsCharacterInRangeArray(ranges);
621 PopCachedRegisters();
622 BranchOrBacktrack(eq, on_not_in_range);
// Tests the bit/byte for current_character() in a 256-entry ByteArray table
// and branches to on_bit_set when it is non-zero. The index is masked unless
// one-byte mode already guarantees it is in range. (Load elided from view.)
626void RegExpMacroAssemblerARM64::CheckBitInTable(
627 Handle<ByteArray> table,
629 __ Mov(x11, Operand(table));
630 if ((
mode_ != LATIN1) || (kTableMask != String::kMaxOneByteCharCode)) {
631 __ And(w10, current_character(), kTableMask);
634 __ Add(w10, current_character(),
638 CompareAndBranchOrBacktrack(w11, 0, ne, on_bit_set);
// Skips forward through the input until a character whose table entry is set.
// Uses a NEON nibble-table lookup (16 bytes per iteration) when
// SkipUntilBitInTableUseSimd allows it, then falls back to a scalar loop.
// (Several lines are elided from this view.)
641void RegExpMacroAssemblerARM64::SkipUntilBitInTable(
642 int cp_offset, Handle<ByteArray> table,
643 Handle<ByteArray> nibble_table_array,
int advance_by) {
644 Label cont, scalar_repeat;
646 const bool use_simd = SkipUntilBitInTableUseSimd(advance_by);
648 DCHECK(!nibble_table_array.is_null());
649 Label simd_repeat, found, scalar;
650 static constexpr int kVectorSize = 16;
651 const int kCharsPerVector = kVectorSize / char_size();
// Need a full vector of input; otherwise use the scalar tail loop.
658 CheckPosition(cp_offset + kCharsPerVector - 1, &scalar);
// v0: per-low-nibble bit rows loaded from the nibble table.
663 VRegister nibble_table = v0;
664 __ Mov(x8, Operand(nibble_table_array));
// v1: 0x0f repeated — isolates low nibbles.
667 VRegister nibble_mask = v1;
668 const uint64_t nibble_mask_imm = 0x0f0f0f0f'0f0f0f0f;
669 __ Movi(nibble_mask.V16B(), nibble_mask_imm, nibble_mask_imm);
// v2: maps a high nibble (0-7) to its single-bit mask 1<<n.
670 VRegister hi_nibble_lookup_mask = v2;
671 const uint64_t hi_nibble_mask_imm = 0x80402010'08040201;
672 __ Movi(hi_nibble_lookup_mask.V16B(), hi_nibble_mask_imm,
// Load 16 input bytes at the current position + cp_offset.
677 VRegister input_vec = v3;
678 __ Add(x8, input_end(), Operand(current_input_offset(), SXTW));
679 __ Add(x8, x8, cp_offset * char_size());
684 VRegister lo_nibbles = v4;
685 __ And(lo_nibbles.V16B(), nibble_mask.V16B(), input_vec.V16B());
688 VRegister hi_nibbles = v5;
689 __ Ushr(hi_nibbles.V16B(), input_vec.V16B(), 4);
690 __ And(hi_nibbles.V16B(), hi_nibbles.V16B(), nibble_mask.V16B());
// row[i] = table row selected by the low nibble of byte i.
695 __ Tbl(row.V16B(), nibble_table.V16B(), lo_nibbles.V16B());
// bitmask[i] = bit corresponding to the high nibble of byte i.
702 VRegister bitmask = v7;
703 __ Tbl(bitmask.V16B(), hi_nibble_lookup_mask.V16B(), hi_nibbles.V16B());
// result[i] != 0 iff (row & bitmask) != 0, i.e. the character's bit is set.
707 __ Cmtst(
result.V16B(), row.V16B(), bitmask.V16B());
// Nothing found in this vector: advance a full vector and loop while there
// is enough input left.
716 AdvanceCurrentPosition(kCharsPerVector);
717 CheckPosition(cp_offset + kCharsPerVector - 1, &scalar);
// Found: convert the match bit index into a character offset; the mask
// clears the low bit for two-byte characters.
730 __ And(x8, x8, Immediate(0xfffe));
732 __ Add(current_input_offset(), current_input_offset(), w8);
// Scalar fallback: one character per iteration via the byte table.
739 __ Mov(table_reg, Operand(table));
741 Bind(&scalar_repeat);
742 CheckPosition(cp_offset, &cont);
743 LoadCurrentCharacterUnchecked(cp_offset, 1);
745 if ((
mode_ != LATIN1) || (kTableMask != String::kMaxOneByteCharCode)) {
746 __ And(index, current_character(), kTableMask);
749 __ Add(index, current_character(),
753 __ Ldrb(found_in_table,
MemOperand(table_reg, index, UXTW));
754 __ Cbnz(found_in_table, &cont);
755 AdvanceCurrentPosition(advance_by);
756 __ B(&scalar_repeat);
// SIMD path is only profitable when scanning one byte at a time and the
// regexp_simd flag is on.
761bool RegExpMacroAssemblerARM64::SkipUntilBitInTableUseSimd(
int advance_by) {
764 return v8_flags.regexp_simd && advance_by * char_size() == 1;
// Emits specialized code for standard character classes (\s, \d, \w, ., …).
// Returns true when a fast path was emitted; unsupported classes fall through
// to generic range code. (Several case bodies are elided from this view.)
767bool RegExpMacroAssemblerARM64::CheckSpecialClassRanges(
768 StandardCharacterSet type, Label* on_no_match) {
773 case StandardCharacterSet::kWhitespace:
775 if (
mode_ == LATIN1) {
// ' ' or 0x00A0 (non-breaking space) match directly…
779 __ Cmp(current_character(),
' ');
780 __ Ccmp(current_character(), 0x00A0, ZFlag, ne);
// …otherwise accept the range '\t'..'\r'.
783 __ Sub(w10, current_character(),
'\t');
784 CompareAndBranchOrBacktrack(w10,
'\r' -
'\t', hi, on_no_match);
789 case StandardCharacterSet::kNotWhitespace:
// \d: '0' <= c <= '9' via unsigned subtraction.
792 case StandardCharacterSet::kDigit:
794 __ Sub(w10, current_character(),
'0');
795 CompareAndBranchOrBacktrack(w10,
'9' -
'0', hi, on_no_match);
797 case StandardCharacterSet::kNotDigit:
799 __ Sub(w10, current_character(),
'0');
800 CompareAndBranchOrBacktrack(w10,
'9' -
'0', ls, on_no_match);
// '.': anything but 0x0A, 0x0D, 0x2028, 0x2029.
802 case StandardCharacterSet::kNotLineTerminator: {
807 __ Cmp(current_character(), 0x0A);
808 __ Ccmp(current_character(), 0x0D, ZFlag, ne);
// UC16 also excludes U+2028/U+2029 via a conditional compare chain.
810 __ Sub(w10, current_character(), 0x2028);
812 __ Ccmp(w10, 0x2029 - 0x2028, NoFlag, ne);
814 BranchOrBacktrack(ls, on_no_match);
816 BranchOrBacktrack(eq, on_no_match);
// Line terminators only: same comparisons, inverted branch conditions.
820 case StandardCharacterSet::kLineTerminator: {
824 __ Cmp(current_character(), 0x0A);
825 __ Ccmp(current_character(), 0x0D, ZFlag, ne);
827 __ Sub(w10, current_character(), 0x2028);
829 __ Ccmp(w10, 0x2029 - 0x2028, NoFlag, ne);
831 BranchOrBacktrack(hi, on_no_match);
833 BranchOrBacktrack(ne, on_no_match);
// \w: table lookup in the 128-entry word-character map; characters above
// 'z' fail outright.
837 case StandardCharacterSet::kWord: {
838 if (
mode_ != LATIN1) {
840 CompareAndBranchOrBacktrack(current_character(),
'z', hi, on_no_match);
842 ExternalReference map = ExternalReference::re_word_character_map();
844 __ Ldrb(w10,
MemOperand(x10, current_character(), UXTW));
845 CompareAndBranchOrBacktrack(w10, 0, eq, on_no_match);
// \W: complement of \w — zero table entry means a match.
848 case StandardCharacterSet::kNotWord: {
850 if (
mode_ != LATIN1) {
852 __ Cmp(current_character(),
'z');
855 ExternalReference map = ExternalReference::re_word_character_map();
857 __ Ldrb(w10,
MemOperand(x10, current_character(), UXTW));
858 CompareAndBranchOrBacktrack(w10, 0, ne, on_no_match);
// Everything matches — no check needed.
862 case StandardCharacterSet::kEverything:
// Fail: reports match failure (body elided from this view).
868void RegExpMacroAssemblerARM64::Fail() {
// Loads the regexp backtrack stack pointer from its isolate-global slot.
873void RegExpMacroAssemblerARM64::LoadRegExpStackPointerFromMemory(Register dst) {
874 ExternalReference ref =
875 ExternalReference::address_of_regexp_stack_stack_pointer(
isolate());
// Stores the regexp backtrack stack pointer back to the isolate-global slot.
880void RegExpMacroAssemblerARM64::StoreRegExpStackPointerToMemory(
881 Register src, Register scratch) {
882 ExternalReference ref =
883 ExternalReference::address_of_regexp_stack_stack_pointer(
isolate());
884 __ Mov(scratch, ref);
// Saves the backtrack stack pointer into the frame as an offset relative to
// the regexp stack's memory top, so it survives GC moving the stack.
888void RegExpMacroAssemblerARM64::PushRegExpBasePointer(Register stack_pointer,
890 ExternalReference ref =
891 ExternalReference::address_of_regexp_stack_memory_top_address(
isolate());
892 __ Mov(scratch, ref);
// Store (stack_pointer - memory_top) — a relocatable offset, not an address.
894 __ Sub(scratch, stack_pointer, scratch);
895 __ Str(scratch,
MemOperand(frame_pointer(), kRegExpStackBasePointerOffset));
// Inverse of PushRegExpBasePointer: rebuilds the absolute pointer from the
// saved offset plus the (possibly updated) memory top, and republishes it.
898void RegExpMacroAssemblerARM64::PopRegExpBasePointer(Register stack_pointer_out,
900 ExternalReference ref =
901 ExternalReference::address_of_regexp_stack_memory_top_address(
isolate());
902 __ Ldr(stack_pointer_out,
903 MemOperand(frame_pointer(), kRegExpStackBasePointerOffset));
904 __ Mov(scratch, ref);
906 __ Add(stack_pointer_out, stack_pointer_out, scratch);
907 StoreRegExpStackPointerToMemory(stack_pointer_out, scratch);
// Finalizes code generation: emits the entry sequence (frame setup, stack
// checks, register initialization), the success path (capture write-back and
// global-match restart), the exit path, and deferred slow paths (preemption,
// stack overflow, exception, fallback), then assembles everything into a Code
// object. (Many lines are elided from this view.)
910DirectHandle<HeapObject> RegExpMacroAssemblerARM64::GetCode(
911 DirectHandle<String> source, RegExpFlags flags) {
917 __ Bind(&entry_label_);
// --- Entry: build an IRREGEXP frame, save callee-saved regs and arguments.
933 FrameScope scope(
masm_.get(), StackFrame::MANUAL);
938 DCHECK_EQ(registers_to_retain.Count(), kNumCalleeSavedRegisters);
939 __ PushCPURegList(registers_to_retain);
941 __ EnterFrame(StackFrame::IRREGEXP);
// Frame layout assertions tie the pushed argument order to the offsets used
// elsewhere (kIsolateOffset etc.).
943 static_assert(kIsolateOffset ==
946 static_assert(kNumOutputRegistersOffset ==
948 static_assert(kInputStringOffset ==
950 __ PushCPURegList(CPURegList{x0, x5, x6, x7});
// Move incoming arguments into their dedicated registers.
953 __ Mov(start_offset(), w1);
954 __ Mov(input_start(), x2);
955 __ Mov(input_end(), x3);
956 __ Mov(output_array(), x4);
959 const int alignment =
masm_->ActivationFrameAlignment();
961 const int align_mask = (alignment /
kWRegSize) - 1;
// Reserve aligned space for the fixed stack locals.
965 DCHECK_EQ(kNumberOfStackLocals * kWRegPerXReg,
966 ((kNumberOfStackLocals * kWRegPerXReg) + align_mask) & ~align_mask);
967 __ Claim(kNumberOfStackLocals * kWRegPerXReg);
971 static_assert(backtrack_stackpointer() == x23);
972 LoadRegExpStackPointerFromMemory(backtrack_stackpointer());
// Save the relocatable backtrack-stack base into the frame.
976 PushRegExpBasePointer(backtrack_stackpointer(), x11);
// Registers beyond the cached ones live on the machine stack.
980 const int num_stack_registers =
981 std::max(0, num_registers_ - kNumCachedRegisters);
982 const int num_wreg_to_allocate =
983 (num_stack_registers + align_mask) & ~align_mask;
// --- Stack-limit check before allocating the register area.
987 Label stack_limit_hit, stack_ok;
989 ExternalReference stack_limit =
990 ExternalReference::address_of_jslimit(
isolate());
991 __ Mov(x10, stack_limit);
993 __ Subs(x10, sp, x10);
994 Operand extra_space_for_variables(num_wreg_to_allocate * kWRegSize);
// Already below the limit (or interrupt pending): take the slow path.
997 __ B(ls, &stack_limit_hit);
1001 __ Cmp(x10, extra_space_for_variables);
1002 __ B(hs, &stack_ok);
// Not enough headroom: bail out with EXCEPTION.
1006 __ Mov(w0, EXCEPTION);
1009 __ Bind(&stack_limit_hit);
1010 CallCheckStackGuardState(x10, extra_space_for_variables);
// Non-zero return means an exception/retry was requested.
1012 __ Cbnz(w0, &return_w0);
1018 __ Claim(num_wreg_to_allocate, kWRegSize);
// Zero the success-counter and backtrack-counter frame slots.
1021 __ Str(wzr,
MemOperand(frame_pointer(), kSuccessfulCapturesOffset));
1022 __ Str(wzr,
MemOperand(frame_pointer(), kBacktrackCountOffset));
// current_input_offset is the (negative) byte offset from input_end.
1025 __ Sub(x10, input_start(), input_end());
1029 __ Cmp(x11, SeqTwoByteString::kMaxCharsSize);
1030 __ Check(ls, AbortReason::kInputStringTooLong);
1032 __ Mov(current_input_offset(), w10);
// string_start_minus_one: offset of the character before the string start,
// accounting for the search start offset (scaled by 2 for UC16).
1037 __ Sub(string_start_minus_one(), current_input_offset(), char_size());
1038 __ Sub(string_start_minus_one(), string_start_minus_one(),
1039 Operand(start_offset(), LSL, (
mode_ == UC16) ? 1 : 0));
// twice_non_position_value packs the non-position marker in both halves.
1042 __ Orr(twice_non_position_value(), string_start_minus_one().
X(),
1043 Operand(string_start_minus_one().
X(), LSL, kWRegSizeInBits));
1046 __ Mov(code_pointer(), Operand(
masm_->CodeObject()));
1048 Label load_char_start_regexp;
// At the very start of input there is no previous character; pretend '\n'.
1052 __ Cbnz(start_offset(), &load_char_start_regexp);
1053 __ Mov(current_character(),
'\n');
1054 __ B(&start_regexp);
1057 __ Bind(&load_char_start_regexp);
1059 LoadCurrentCharacterUnchecked(-1, 1);
1060 __ Bind(&start_regexp);
// Initialize capture registers to the non-participating value.
1064 if (num_saved_registers_ > 0) {
1065 ClearRegisters(0, num_saved_registers_ - 1);
1069 __ B(&start_label_);
// --- Backtrack handler.
1071 if (backtrack_label_.is_linked()) {
1072 __ Bind(&backtrack_label_);
// --- Success: copy captures out to the output array.
1076 if (success_label_.is_linked()) {
1077 Register first_capture_start = w15;
1080 __ Bind(&success_label_);
1082 if (num_saved_registers_ > 0) {
1091 __ Sub(x10, input_end(), input_start());
1094 __ Cmp(x10, SeqTwoByteString::kMaxCharsSize);
1095 __ Check(ls, AbortReason::kInputStringTooLong);
// input_length in characters (bytes / 2 for UC16).
1099 if (
mode_ == UC16) {
1100 __ Add(input_length, start_offset(), Operand(w10, LSR, 1));
1102 __ Add(input_length, start_offset(), w10);
// First, captures held in cached registers.
1106 for (
int i = 0; (
i < num_saved_registers_) && (
i < kNumCachedRegisters);
1108 __ Mov(capture_start.X(), GetCachedRegister(
i));
1109 __ Lsr(capture_end.X(), capture_start.X(), kWRegSizeInBits);
// Remember match start for the empty-match check in global mode.
1110 if ((
i == 0) && global_with_zero_length_check()) {
1114 static_assert(kNumCachedRegisters > 0);
1115 __ Mov(first_capture_start, capture_start);
// Convert byte offsets to character indices relative to the string start.
1118 if (
mode_ == UC16) {
1119 __ Add(capture_start, input_length, Operand(capture_start, ASR, 1));
1120 __ Add(capture_end, input_length, Operand(capture_end, ASR, 1));
1122 __ Add(capture_start, input_length, capture_start);
1123 __ Add(capture_end, input_length, capture_end);
1126 __ Stp(capture_start, capture_end,
1127 MemOperand(output_array(), kSystemPointerSize, PostIndex));
// Then, captures spilled to the stack (unrolled when few, looped when many).
1132 int num_registers_left_on_stack =
1133 num_saved_registers_ - kNumCachedRegisters;
1134 if (num_registers_left_on_stack > 0) {
1138 DCHECK_EQ(0, num_registers_left_on_stack % 2);
1139 __ Add(
base, frame_pointer(), kFirstCaptureOnStackOffset);
1143 static_assert(kNumRegistersToUnroll > 2);
1144 if (num_registers_left_on_stack <= kNumRegistersToUnroll) {
1145 for (
int i = 0;
i < num_registers_left_on_stack / 2;
i++) {
1146 __ Ldp(capture_end, capture_start,
1149 if (
mode_ == UC16) {
1150 __ Add(capture_start, input_length,
1151 Operand(capture_start, ASR, 1));
1152 __ Add(capture_end, input_length, Operand(capture_end, ASR, 1));
1154 __ Add(capture_start, input_length, capture_start);
1155 __ Add(capture_end, input_length, capture_end);
1158 __ Stp(capture_start, capture_end,
1159 MemOperand(output_array(), kSystemPointerSize, PostIndex));
// Loop variant for many captures: x11 counts registers, two per iteration.
1163 __ Mov(x11, num_registers_left_on_stack);
1166 __ Ldp(capture_end, capture_start,
1168 if (
mode_ == UC16) {
1169 __ Add(capture_start, input_length, Operand(capture_start, ASR, 1));
1170 __ Add(capture_end, input_length, Operand(capture_end, ASR, 1));
1172 __ Add(capture_start, input_length, capture_start);
1173 __ Add(capture_end, input_length, capture_end);
1176 __ Stp(capture_start, capture_end,
1177 MemOperand(output_array(), kSystemPointerSize, PostIndex));
1178 __ Sub(x11, x11, 2);
1179 __ Cbnz(x11, &loop);
// --- Global mode: bump the success counter and restart if output remains.
1190 __ Ldr(success_counter,
1191 MemOperand(frame_pointer(), kSuccessfulCapturesOffset));
1192 __ Add(success_counter, success_counter, 1);
1193 __ Str(success_counter,
1194 MemOperand(frame_pointer(), kSuccessfulCapturesOffset));
1199 MemOperand(frame_pointer(), kNumOutputRegistersOffset));
1200 __ Sub(output_size, output_size, num_saved_registers_);
// Not enough room for another full set of captures: return.
1202 __ Cmp(output_size, num_saved_registers_);
1203 __ B(lt, &return_w0);
1209 MemOperand(frame_pointer(), kNumOutputRegistersOffset));
1213 PopRegExpBasePointer(backtrack_stackpointer(), x11);
1215 if (global_with_zero_length_check()) {
// Empty match: must advance one character to avoid an infinite loop.
1217 __ Cmp(current_input_offset(), first_capture_start);
1219 __ B(ne, &load_char_start_regexp);
// At the end of input (offset 0): done.
1221 __ Cbz(current_input_offset(), &return_w0);
1225 __ Add(current_input_offset(), current_input_offset(),
1226 Operand((
mode_ == UC16) ? 2 : 1));
// In unicode mode, don't restart in the middle of a surrogate pair.
1227 if (global_unicode()) CheckNotInSurrogatePair(0, &advance);
1230 __ B(&load_char_start_regexp);
1232 __ Mov(w0, SUCCESS);
// --- Exit: return the number of successful matches (global) or the status.
1236 if (exit_label_.is_linked()) {
1238 __ Bind(&exit_label_);
1240 __ Ldr(w0,
MemOperand(frame_pointer(), kSuccessfulCapturesOffset));
1244 __ Bind(&return_w0);
1247 PopRegExpBasePointer(backtrack_stackpointer(), x11);
1249 __ LeaveFrame(StackFrame::IRREGEXP);
1250 __ PopCPURegList(registers_to_retain);
// --- Deferred: preemption check (called from CheckPreemption).
1253 Label exit_with_exception;
1254 if (check_preempt_label_.is_linked()) {
1255 __ Bind(&check_preempt_label_);
1257 StoreRegExpStackPointerToMemory(backtrack_stackpointer(), x10);
1260 PushCachedRegisters();
1261 CallCheckStackGuardState(x10);
// Non-zero status aborts the match.
1264 __ Cbnz(w0, &return_w0);
1266 PopCachedRegisters();
// The regexp stack may have moved during the call; reload its pointer.
1268 LoadRegExpStackPointerFromMemory(backtrack_stackpointer());
1270 RestoreLinkRegister();
// --- Deferred: grow the backtrack stack on overflow.
1274 if (stack_overflow_label_.is_linked()) {
1275 __ Bind(&stack_overflow_label_);
1277 StoreRegExpStackPointerToMemory(backtrack_stackpointer(), x10);
1280 PushCachedRegisters();
1282 static constexpr int kNumArguments = 1;
1283 __ Mov(x0, ExternalReference::isolate_address(
isolate()));
1284 CallCFunctionFromIrregexpCode(ExternalReference::re_grow_stack(),
// Null result means the stack cannot grow: throw.
1290 __ Cbz(w0, &exit_with_exception);
1292 __ Mov(backtrack_stackpointer(), x0);
1293 PopCachedRegisters();
1294 RestoreLinkRegister();
1298 if (exit_with_exception.is_linked()) {
1299 __ Bind(&exit_with_exception);
1300 __ Mov(w0, EXCEPTION);
// --- Deferred: give up and retry with the experimental engine.
1304 if (fallback_label_.is_linked()) {
1305 __ Bind(&fallback_label_);
1306 __ Mov(w0, FALLBACK_TO_EXPERIMENTAL);
// Assemble into a Code object and log its creation.
1314 .set_self_reference(
masm_->CodeObject())
1315 .set_empty_source_position_table()
1318 RegExpCodeCreateEvent(Cast<AbstractCode>(code), source, flags));
1319 return Cast<HeapObject>(code);
// Unconditional jump (al = always), via the shared backtrack-aware helper.
1322void RegExpMacroAssemblerARM64::GoTo(Label* to) {
1323 BranchOrBacktrack(al, to);
// Branch to if_ge when register `reg` >= comparand (signed).
1326void RegExpMacroAssemblerARM64::IfRegisterGE(
int reg,
int comparand,
1329 CompareAndBranchOrBacktrack(to_compare, comparand, ge, if_ge);
// Branch to if_lt when register `reg` < comparand (signed).
1333void RegExpMacroAssemblerARM64::IfRegisterLT(
int reg,
int comparand,
1336 CompareAndBranchOrBacktrack(to_compare, comparand, lt, if_lt);
// Branch to if_eq when register `reg` equals the current input position.
1340void RegExpMacroAssemblerARM64::IfRegisterEqPos(
int reg, Label* if_eq) {
1342 __ Cmp(to_compare, current_input_offset());
1343 BranchOrBacktrack(eq, if_eq);
// Identifies this backend to the generic regexp machinery.
1346RegExpMacroAssembler::IrregexpImplementation
1347 RegExpMacroAssemblerARM64::Implementation() {
1348 return kARM64Implementation;
// Restores the current position from the backtrack stack.
1352void RegExpMacroAssemblerARM64::PopCurrentPosition() {
1353 Pop(current_input_offset());
// Pops a value off the backtrack stack into regexp register register_index.
1357void RegExpMacroAssemblerARM64::PopRegister(
int register_index) {
1359 StoreRegister(register_index, w10);
// Pushes the code offset of `label` onto the backtrack stack. Bound labels
// use their known position; unbound ones are resolved via a far ADR relative
// to the code start.
1363void RegExpMacroAssemblerARM64::PushBacktrack(Label*
label) {
1364 if (
label->is_bound()) {
1365 int target =
label->pos();
1366 __ Mov(w10, target + InstructionStream::kHeaderSize - kHeapObjectTag);
1368 __ Adr(x10,
label, MacroAssembler::kAdrFar);
1369 __ Sub(x10, x10, code_pointer());
// Debug-only: the offset must fit in 32 bits.
1371 __ Cmp(x10, kWRegMask);
1373 __ Check(ls, AbortReason::kOffsetOutOfRange);
// Saves the current position onto the backtrack stack.
1381void RegExpMacroAssemblerARM64::PushCurrentPosition() {
1382 Push(current_input_offset());
// Pushes regexp register register_index onto the backtrack stack, optionally
// verifying the stack limit afterwards.
1387void RegExpMacroAssemblerARM64::PushRegister(
int register_index,
1388 StackCheckFlag check_stack_limit) {
1389 Register to_push = GetRegister(register_index, w10);
1391 if (check_stack_limit) {
1394 AssertAboveStackLimitMinusSlack();
// Loads the current position from regexp register `reg`, handling the three
// storage states (stacked, cached LSW, cached MSW; case labels elided).
1399void RegExpMacroAssemblerARM64::ReadCurrentPositionFromRegister(
int reg) {
1401 switch (register_state) {
1403 __ Ldr(current_input_offset(), register_location(
reg));
1406 __ Mov(current_input_offset(), GetCachedRegister(
reg).
W());
// Cached MSW: shift the high half down.
1409 __ Lsr(current_input_offset().
X(), GetCachedRegister(
reg),
// Stores the backtrack stack pointer into regexp register `reg` as an offset
// from the regexp stack's memory top (relocatable across GC).
1417void RegExpMacroAssemblerARM64::WriteStackPointerToRegister(
int reg) {
1418 ExternalReference ref =
1419 ExternalReference::address_of_regexp_stack_memory_top_address(
isolate());
1422 __ Sub(x10, backtrack_stackpointer(), x10);
// Debug-only: the offset must fit in 32 bits.
1424 __ Cmp(x10, Operand(w10, SXTW));
1426 __ Check(eq, AbortReason::kOffsetOutOfRange);
1428 StoreRegister(
reg, w10);
// Inverse: rebuilds the absolute backtrack stack pointer from the offset
// stored in regexp register `reg` plus the current memory top.
1431void RegExpMacroAssemblerARM64::ReadStackPointerFromRegister(
int reg) {
1432 ExternalReference ref =
1433 ExternalReference::address_of_regexp_stack_memory_top_address(
isolate());
1437 __ Add(backtrack_stackpointer(), x11, Operand(read_from, SXTW));
// Sets the position to `by` characters before the end of input, unless the
// current position is already closer to the end; reloads the character cache.
1440void RegExpMacroAssemblerARM64::SetCurrentPositionFromEnd(
int by) {
1441 Label after_position;
1442 __ Cmp(current_input_offset(), -by * char_size());
1443 __ B(ge, &after_position);
1444 __ Mov(current_input_offset(), -by * char_size());
1448 LoadCurrentCharacterUnchecked(-1, 1);
1449 __ Bind(&after_position);
// Writes constant `to` into regexp register register_index (must not be a
// capture register, per the DCHECK).
1453void RegExpMacroAssemblerARM64::SetRegister(
int register_index,
int to) {
1454 DCHECK(register_index >= num_saved_registers_);
1460 StoreRegister(register_index, set_to);
// Jumps to the success handler; return value semantics elided from this view.
1464bool RegExpMacroAssemblerARM64::Succeed() {
1465 __ B(&success_label_);
// Stores the current position (+ optional cp_offset, in bytes) into regexp
// register `reg`.
1470void RegExpMacroAssemblerARM64::WriteCurrentPositionToRegister(
int reg,
1473 if (cp_offset != 0) {
1475 __ Add(
position, current_input_offset(), cp_offset * char_size());
// Resets registers reg_from..reg_to (inclusive) to the non-participating
// value (string start minus one). Cached registers are cleared two at a time
// with the packed twice_non_position_value; stack registers are cleared
// either unrolled or with a loop. (Several lines elided from this view.)
1481void RegExpMacroAssemblerARM64::ClearRegisters(
int reg_from,
int reg_to) {
1482 DCHECK(reg_from <= reg_to);
1483 int num_registers = reg_to - reg_from + 1;
// Odd start within the cached range: clear the single MSW half first.
1487 if ((reg_from < kNumCachedRegisters) && ((reg_from % 2) != 0)) {
1488 StoreRegister(reg_from, string_start_minus_one());
// Clear aligned cached pairs with one 64-bit move each.
1494 while ((num_registers >= 2) && (reg_from < kNumCachedRegisters)) {
1495 DCHECK(GetRegisterState(reg_from) == CACHED_LSW);
1496 __ Mov(GetCachedRegister(reg_from), twice_non_position_value());
// Odd leftover register.
1501 if ((num_registers % 2) == 1) {
1502 StoreRegister(reg_from, string_start_minus_one());
// Remaining registers live on the stack.
1507 if (num_registers > 0) {
1509 DCHECK_LE(kNumCachedRegisters, reg_from);
1513 reg_from -= kNumCachedRegisters;
1514 reg_to -= kNumCachedRegisters;
1516 static_assert(kNumRegistersToUnroll > 2);
// Loop variant for many registers, two 32-bit slots per 64-bit store.
1520 if (num_registers > kNumRegistersToUnroll) {
1522 __ Add(
base, frame_pointer(), base_offset);
1525 __ Mov(x11, num_registers);
1527 __ Str(twice_non_position_value(),
1529 __ Sub(x11, x11, 2);
1530 __ Cbnz(x11, &loop);
// Unrolled variant for a small count.
1532 for (
int i = reg_from;
i <= reg_to;
i += 2) {
1533 __ Str(twice_non_position_value(),
// Helpers used by CheckStackGuardState to read typed values out of the raw
// regexp frame: frame_entry returns a reference to the slot, and
// frame_entry_address returns its address.
1542template <
typename T>
1543static T& frame_entry(Address re_frame,
int frame_offset) {
1544 return *
reinterpret_cast<T*
>(re_frame + frame_offset);
1548template <
typename T>
1549static T* frame_entry_address(Address re_frame,
int frame_offset) {
1550 return reinterpret_cast<T*
>(re_frame + frame_offset);
// Static C entry point invoked from generated code when the stack guard
// triggers. Unpacks isolate/call-origin/input-string from the raw frame and
// delegates to the shared NativeRegExpMacroAssembler implementation, which
// may update *input_start/*input_end if the subject string moved.
1553int RegExpMacroAssemblerARM64::CheckStackGuardState(
1554 Address* return_address, Address raw_code, Address re_frame,
1555 int start_index,
const uint8_t** input_start,
const uint8_t** input_end,
1556 uintptr_t extra_space) {
1557 Tagged<InstructionStream> re_code =
1558 Cast<InstructionStream>(Tagged<Object>(raw_code));
1559 return NativeRegExpMacroAssembler::CheckStackGuardState(
1560 frame_entry<Isolate*>(re_frame, kIsolateOffset), start_index,
1561 static_cast<RegExp::CallOrigin
>(
1562 frame_entry<int>(re_frame, kDirectCallOffset)),
1563 return_address, re_code,
1564 frame_entry_address<Address>(re_frame, kInputStringOffset), input_start,
1565 input_end, extra_space);
// Branches to on_outside_input when position cp_offset lies outside the
// input. Forward offsets compare against the end (offset >= -cp_offset*size
// means past the end); backward offsets compare against the start marker.
1568void RegExpMacroAssemblerARM64::CheckPosition(
int cp_offset,
1569 Label* on_outside_input) {
1570 if (cp_offset >= 0) {
1571 CompareAndBranchOrBacktrack(current_input_offset(),
1572 -cp_offset * char_size(), ge, on_outside_input);
1574 __ Add(w12, current_input_offset(), Operand(cp_offset * char_size()));
1575 __ Cmp(w12, string_start_minus_one());
1576 BranchOrBacktrack(le, on_outside_input);
// Emits the call sequence into CheckStackGuardState above. Claims an
// ABI-aligned chunk of stack, spills input_start/input_end there (the C++
// side may rewrite them if the subject string moves), passes their stack
// addresses plus the frame pointer, start offset, code object and
// `extra_space` in x1..x6, and calls through the DirectCEntry builtin.
// NOTE(review): lossy extract — some interior lines are missing; code
// text is left untouched.
1583void RegExpMacroAssemblerARM64::CallCheckStackGuardState(Register scratch,
1584 Operand extra_space) {
// This sequence is not valid for isolate-independent (embedded) code.
1586 DCHECK(!
masm_->options().isolate_independent_code);
// Round the 3 slots we need up to the platform's activation-frame
// alignment.
1592 int alignment =
masm_->ActivationFrameAlignment();
1594 int align_mask = (alignment /
kXRegSize) - 1;
1595 int xreg_to_claim = (3 + align_mask) & ~align_mask;
1597 __ Claim(xreg_to_claim);
1599 __ Mov(x6, extra_space);
// Spill input_end / input_start and pass pointers to the spill slots so
// the callee can update them in place.
1601 __ Poke(input_end(), 2 * kSystemPointerSize);
1602 __ Add(x5, sp, 2 * kSystemPointerSize);
1603 __ Poke(input_start(), kSystemPointerSize);
1604 __ Add(x4, sp, kSystemPointerSize);
1606 __ Mov(w3, start_offset());
// The Irregexp frame pointer lets the callee read frame slots directly.
1608 __ Mov(x2, frame_pointer());
1610 __ Mov(x1, Operand(
masm_->CodeObject()));
1618 ExternalReference check_stack_guard_state =
1619 ExternalReference::re_check_stack_guard_state();
1620 __ Mov(scratch, check_stack_guard_state);
1622 __ CallBuiltin(Builtin::kDirectCEntry);
// Reload the (possibly relocated) input pointers and drop the frame.
1625 __ Peek(input_start(), kSystemPointerSize);
1626 __ Peek(input_end(), 2 * kSystemPointerSize);
1628 __ Drop(xreg_to_claim);
// The call may have clobbered the cached code pointer; restore it.
1631 __ Mov(code_pointer(), Operand(
masm_->CodeObject()));
// Branches on `condition` to `to`, treating a null label as "backtrack".
// NOTE(review): heavily lossy extract — the branch-emitting lines between
// the two null checks are missing; presumably the first handles the
// unconditional (al) case. Code text is left untouched.
1634void RegExpMacroAssemblerARM64::BranchOrBacktrack(Condition
condition,
1637 if (to ==
nullptr) {
1644 if (to ==
nullptr) {
// Null target means: jump to the shared backtrack sequence.
1645 to = &backtrack_label_;
// Compares `reg` against `immediate` and branches/backtracks on the
// resulting condition; null `to` means backtrack.
// NOTE(review): lossy extract — the fast-path (Cbz/Cbnz for zero
// immediates, presumably) and the final branch are missing; code text is
// left untouched.
1650void RegExpMacroAssemblerARM64::CompareAndBranchOrBacktrack(Register
reg,
1655 if (to ==
nullptr) {
1656 to = &backtrack_label_;
1664 __ Cmp(
reg, immediate);
// Calls a C function with `num_arguments` arguments from generated
// Irregexp code.
// NOTE(review): the entire body is missing from this extract; only the
// signature survives. Code text is left untouched.
1669void RegExpMacroAssemblerARM64::CallCFunctionFromIrregexpCode(
1670 ExternalReference function,
int num_arguments) {
// Checks the isolate's JS stack limit and calls the preemption handler
// when the stack pointer is at or below it (condition ls).
// NOTE(review): the Ldr/Cmp against sp between Mov and CallIf is missing
// from this extract; code text is left untouched.
1684void RegExpMacroAssemblerARM64::CheckPreemption() {
1686 ExternalReference stack_limit =
1687 ExternalReference::address_of_jslimit(
isolate());
1688 __ Mov(x10, stack_limit);
1691 CallIf(&check_preempt_label_, ls);
// Checks the dedicated regexp (backtrack) stack limit and calls the
// stack-overflow handler when the backtrack stack pointer is at or below
// it (unsigned lower-or-same).
1695void RegExpMacroAssemblerARM64::CheckStackLimit() {
1696 ExternalReference stack_limit =
1697 ExternalReference::address_of_regexp_stack_limit_address(
isolate());
1698 __ Mov(x10, stack_limit);
// NOTE(review): a Ldr dereferencing x10 presumably precedes this Cmp in
// the original; that line is missing from the extract.
1700 __ Cmp(backtrack_stackpointer(), x10);
1701 CallIf(&stack_overflow_label_, ls);
// Debug-only sanity check: asserts the backtrack stack pointer is still
// above (limit - kStackLimitSlackSize); falls through when it is.
// NOTE(review): lossy extract — the loads of the limit into x10 are
// missing; code text is left untouched.
1704void RegExpMacroAssemblerARM64::AssertAboveStackLimitMinusSlack() {
1706 Label no_stack_overflow;
1708 auto l = ExternalReference::address_of_regexp_stack_limit_address(
isolate());
// Allow the documented slack below the limit before flagging overflow.
1711 __ Sub(x10, x10, RegExpStack::kStackLimitSlackSize);
1712 __ Cmp(backtrack_stackpointer(), x10);
1713 __ B(hi, &no_stack_overflow);
1715 __ bind(&no_stack_overflow);
// Pushes a 32-bit value onto the backtrack stack, which grows downward.
// NOTE(review): the Str with pre-index addressing is only partially
// present in this extract (the -kWRegSize displacement survives); code
// text is left untouched.
1718void RegExpMacroAssemblerARM64::Push(Register source) {
1719 DCHECK(source.Is32Bits());
1720 DCHECK_NE(source, backtrack_stackpointer());
1723 -
static_cast<int>(kWRegSize),
// Pops a 32-bit value off the backtrack stack (post-index load bumps the
// backtrack stack pointer up by kWRegSize).
// NOTE(review): the `__ Ldr(target, ...)` opening of the load is missing
// from this extract; code text is left untouched.
1728void RegExpMacroAssemblerARM64::Pop(Register target) {
1729 DCHECK(target.Is32Bits());
1730 DCHECK_NE(target, backtrack_stackpointer());
1732 MemOperand(backtrack_stackpointer(), kWRegSize, PostIndex));
1736Register RegExpMacroAssemblerARM64::GetCachedRegister(
int register_index) {
1737 DCHECK_GT(kNumCachedRegisters, register_index);
1738 return Register::Create(register_index / 2, kXRegSizeInBits);
// Materializes regexp register `register_index` as a 32-bit value:
// stacked registers are loaded into `maybe_result`; cached ones come from
// the packed x register (low word directly, high word via Lsr).
// NOTE(review): the switch's case labels (STACKED / CACHED_LSW /
// CACHED_MSW, presumably) and the function tail are missing from this
// extract; code text is left untouched.
1742Register RegExpMacroAssemblerARM64::GetRegister(
int register_index,
1743 Register maybe_result) {
1744 DCHECK(maybe_result.Is32Bits());
// Track the high-water mark of registers actually used.
1746 if (num_registers_ <= register_index) {
1747 num_registers_ = register_index + 1;
1750 RegisterState register_state = GetRegisterState(register_index);
1751 switch (register_state) {
1753 __ Ldr(maybe_result, register_location(register_index));
1757 result = GetCachedRegister(register_index).W();
1760 __ Lsr(maybe_result.X(), GetCachedRegister(register_index),
// Stores a 32-bit value into regexp register `register_index`: stacked
// registers go to their frame slot; cached ones are bit-field-inserted
// into the low (Bfi at bit 0) or high (Bfi at bit kWRegSizeInBits) half
// of the packed x register.
// NOTE(review): the switch's case labels and closing braces are missing
// from this extract; code text is left untouched.
1772void RegExpMacroAssemblerARM64::StoreRegister(
int register_index,
1774 DCHECK(source.Is32Bits());
// Track the high-water mark of registers actually used.
1776 if (num_registers_ <= register_index) {
1777 num_registers_ = register_index + 1;
1780 RegisterState register_state = GetRegisterState(register_index);
1781 switch (register_state) {
1783 __ Str(source, register_location(register_index));
1786 Register cached_register = GetCachedRegister(register_index);
// Skip the insert when source already aliases the cache's low word.
1787 if (source != cached_register.W()) {
1788 __ Bfi(cached_register, source.X(), 0, kWRegSizeInBits);
1793 Register cached_register = GetCachedRegister(register_index);
1794 __ Bfi(cached_register, source.X(), kWRegSizeInBits, kWRegSizeInBits);
// Conditionally calls `to`: branches around the call when `condition`
// does not hold.
// NOTE(review): the inverted-condition branch and the call itself are
// missing from this extract; only the skip label bind survives. Code text
// is left untouched.
1803void RegExpMacroAssemblerARM64::CallIf(Label* to, Condition
condition) {
1807 __ Bind(&skip_call);
// Restores lr saved by SaveLinkRegister(): pops the (authenticated) lr
// and re-biases it by the code object address, undoing the Sub in
// SaveLinkRegister so the stored value was GC-safe (an offset, not a raw
// code pointer).
1811void RegExpMacroAssemblerARM64::RestoreLinkRegister() {
1814 __ Pop<MacroAssembler::kAuthLR>(padreg, lr);
1815 __ Add(lr, lr, Operand(
masm_->CodeObject()));
// Saves lr across a call: converts it to an offset from the code object
// (so a moving GC cannot invalidate it) and pushes it, signed, together
// with padreg to keep sp 16-byte aligned.
1819void RegExpMacroAssemblerARM64::SaveLinkRegister() {
1820 __ Sub(lr, lr, Operand(
masm_->CodeObject()));
1821 __ Push<MacroAssembler::kSignLR>(lr, padreg);
// Frame-slot address of a non-cached regexp register. Only valid for
// indices at or above kNumCachedRegisters; stacked registers occupy
// descending kWRegSize slots below kFirstRegisterOnStackOffset.
// NOTE(review): the final `return MemOperand(...)` and closing brace are
// missing from this extract; code text is left untouched.
1825MemOperand RegExpMacroAssemblerARM64::register_location(
int register_index) {
1826 DCHECK(register_index < (1<<30));
1827 DCHECK_LE(kNumCachedRegisters, register_index);
// Track the high-water mark of registers actually used.
1828 if (num_registers_ <= register_index) {
1829 num_registers_ = register_index + 1;
// Cached registers do not live on the stack, so rebase the index.
1831 register_index -= kNumCachedRegisters;
1832 int offset = kFirstRegisterOnStackOffset - register_index *
kWRegSize;
// Frame-slot address for a capture register (a saved register holding a
// match boundary). Like register_location() but restricted to indices
// below num_saved_registers_.
// NOTE(review): the offset computation and return are missing from this
// extract; code text is left untouched.
1836MemOperand RegExpMacroAssemblerARM64::capture_location(
int register_index,
1838 DCHECK(register_index < (1<<30));
1839 DCHECK(register_index < num_saved_registers_);
1840 DCHECK_LE(kNumCachedRegisters, register_index);
1842 register_index -= kNumCachedRegisters;
1854void RegExpMacroAssemblerARM64::LoadCurrentCharacterUnchecked(
int cp_offset,
1866 if (!CanReadUnaligned()) {
1870 if (cp_offset != 0) {
1872 __ Mov(x10, cp_offset * char_size());
1873 __ Add(x10, x10, Operand(current_input_offset(), SXTW));
1874 __ Cmp(x10, Operand(w10, SXTW));
1876 __ Check(eq, AbortReason::kOffsetOutOfRange);
1878 __ Add(w10, current_input_offset(), cp_offset * char_size());
1883 if (
mode_ == LATIN1) {
1884 if (characters == 4) {
1886 }
else if (characters == 2) {
1894 if (characters == 2) {
RegExpMacroAssemblerARM64(Isolate *isolate, Zone *zone, Mode mode, int registers_to_save)
#define PROFILE(the_isolate, Call)
RecordWriteMode const mode_
const CodeDesc * code_desc
#define ASM_CODE_COMMENT_STRING(asm,...)
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enable experimental feedback collection in generic lowering enable Turboshaft s WasmLoadElimination enable Turboshaft s low level load elimination for JS enable Turboshaft s escape analysis for string concatenation use enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in allocation gc speed threshold for starting incremental marking via a task in percent of available threshold for starting incremental marking immediately in percent of available Use a single schedule for determining a marking schedule between JS and C objects schedules the minor GC task with kUserVisible priority max worker number of concurrent for NumberOfWorkerThreads start background threads that allocate memory 
concurrent_array_buffer_sweeping use parallel threads to clear weak refs in the atomic pause trace progress of the incremental marking trace object counts and memory usage report a tick only when allocated zone memory changes by this amount TracingFlags::gc_stats TracingFlags::gc_stats track native contexts that are expected to be garbage collected verify heap pointers before and after GC memory reducer runs GC with ReduceMemoryFootprint flag Maximum number of memory reducer GCs scheduled Old gen GC speed is computed directly from gc tracer counters Perform compaction on full GCs based on V8 s default heuristics Perform compaction on every full GC Perform code space compaction when finalizing a full GC with stack Stress GC compaction to flush out bugs with moving objects flush of baseline code when it has not been executed recently Use time base code flushing instead of age Use a progress bar to scan large objects in increments when incremental marking is active force incremental marking for small heaps and run it more often force marking at random points between and X(inclusive) percent " "of the regular marking start limit") DEFINE_INT(stress_scavenge
ZoneVector< RpoNumber > & result
MaglevAssembler *const masm_
void Add(RWDigits Z, Digits X, Digits Y)
UntaggedUnion< WordPtr, Code, JSFunction, Word32 > CallTarget
base::PointerWithPayload< void, RegisterStateFlags, 2 > RegisterState
void And(LiftoffAssembler *lasm, Register dst, Register lhs, Register rhs)
void Sub(LiftoffAssembler *lasm, Register dst, Register lhs, Register rhs)
constexpr int kSystemPointerSize
std::unique_ptr< AssemblerBuffer > NewAssemblerBuffer(int size)
Condition NegateCondition(Condition cond)
V8_EXPORT_PRIVATE FlagValues v8_flags
Register ReassignRegister(Register &source)
#define DCHECK_LE(v1, v2)
#define DCHECK_NE(v1, v2)
#define DCHECK_GE(v1, v2)
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
#define DCHECK_GT(v1, v2)
#define OFFSET_OF_DATA_START(Type)
#define V8_UNLIKELY(condition)