// Check both {HOST_ARCH} and {TARGET_ARCH} so the inline assembly below is
// disabled when cross-compiling; each branch pulls in the matching register definitions.
#if V8_HOST_ARCH_ARM && V8_TARGET_ARCH_ARM
#include "src/codegen/arm/register-arm.h"
#elif V8_HOST_ARCH_ARM64 && V8_TARGET_ARCH_ARM64
#include "src/codegen/arm64/register-arm64.h"
#elif V8_HOST_ARCH_IA32 && V8_TARGET_ARCH_IA32
#include "src/codegen/ia32/register-ia32.h"
#elif V8_HOST_ARCH_X64 && V8_TARGET_ARCH_X64
#include "src/codegen/x64/register-x64.h"
#elif V8_HOST_ARCH_LOONG64 && V8_TARGET_ARCH_LOONG64
#include "src/codegen/loong64/register-loong64.h"
#elif V8_HOST_ARCH_MIPS64 && V8_TARGET_ARCH_MIPS64
#include "src/codegen/mips64/register-mips64.h"
#endif
#if V8_CC_MSVC
// MSVC supports inline assembly only on x86.
#if V8_HOST_ARCH_IA32 && V8_TARGET_ARCH_IA32
#define CLOBBER_REGISTER(R) __asm xorps R, R
#endif
#else  // !V8_CC_MSVC
#if (V8_HOST_ARCH_X64 && V8_TARGET_ARCH_X64) || \
    (V8_HOST_ARCH_IA32 && V8_TARGET_ARCH_IA32)
#define CLOBBER_REGISTER(R) __asm__ volatile("xorps %%" #R ",%%" #R :::);
#elif V8_HOST_ARCH_ARM64 && V8_TARGET_ARCH_ARM64
#define CLOBBER_REGISTER(R) __asm__ volatile("fmov " #R ",xzr" :::);
#elif V8_HOST_ARCH_LOONG64 && V8_TARGET_ARCH_LOONG64
#define CLOBBER_REGISTER(R) __asm__ volatile("movgr2fr.d $" #R ",$zero" :::);
#elif V8_HOST_ARCH_MIPS64 && V8_TARGET_ARCH_MIPS64
#define CLOBBER_USE_REGISTER(R) __asm__ volatile("dmtc1 $zero,$" #R :::);
#endif

#endif  // V8_CC_MSVC
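// For example, on ARM64 the macro stringizes the register name, so
// CLOBBER_REGISTER(d0) expands to
//   __asm__ volatile("fmov d0,xzr" :::);
// a GNU extended-asm statement with empty output, input, and clobber lists
// that copies the zero register xzr into the double register d0.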
double ClobberDoubleRegisters(double x1, double x2, double x3, double x4) {
  // Clobber all double registers. DOUBLE_REGISTERS(V) and
  // DOUBLE_USE_REGISTERS(V) come from the architecture-specific register
  // headers above and apply V to every double register name.
#if defined(CLOBBER_REGISTER)
  DOUBLE_REGISTERS(CLOBBER_REGISTER)
#undef CLOBBER_REGISTER
  return 0;
#elif defined(CLOBBER_USE_REGISTER)
  DOUBLE_USE_REGISTERS(CLOBBER_USE_REGISTER)
#undef CLOBBER_USE_REGISTER
  return 0;
#else
  // Fallback when no clobber macro is defined: route the arguments through
  // double arithmetic so at least some FP registers are touched.
  return x1 * 1.01 + x2 * 2.02 + x3 * 3.03 + x4 * 4.04;
#endif
}
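// Sketch of the x64 expansion (assuming register-x64.h defines
// DOUBLE_REGISTERS(V) as V(xmm0) V(xmm1) ... V(xmm15)):
// DOUBLE_REGISTERS(CLOBBER_REGISTER) unrolls into one xorps per register,
//   __asm__ volatile("xorps %%xmm0,%%xmm0" :::);
//   __asm__ volatile("xorps %%xmm1,%%xmm1" :::);
//   ...
//   __asm__ volatile("xorps %%xmm15,%%xmm15" :::);
// zeroing every XMM register before the function returns 0.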