5#ifndef V8_UTILS_MEMCOPY_H_
6#define V8_UTILS_MEMCOPY_H_
29#if defined(V8_TARGET_ARCH_IA32)
// IA32: memmove-compatible entry point, dispatched through a function
// pointer so an architecture-specific routine can be installed at runtime
// (presumably by init_memcopy_functions() — confirm against full header).
using MemMoveFunction = void (*)(void* dest, const void* src, size_t size);
43#elif defined(V8_HOST_ARCH_ARM)
// ARM: type of the runtime-selected byte-copy routine. The third parameter
// is the byte count (restored from MemCopyUint8Wrapper below, which forwards
// it to memcpy).
using MemCopyUint8Function = void (*)(uint8_t* dest, const uint8_t* src,
                                      size_t chars);
47V8_INLINE void MemCopyUint8Wrapper(uint8_t* dest,
const uint8_t* src,
49 memcpy(dest, src, chars);
54 (*memcopy_uint8_function)(
reinterpret_cast<uint8_t*
>(dest),
55 reinterpret_cast<const uint8_t*
>(src),
size);
59 memmove(dest, src, size);
// Threshold (in chars) below which converting copies use a simple loop
// instead of the complex copy path. NOTE(review): semantics inferred from
// the name — confirm against callers. constexpr: this is a compile-time
// constant; keeps it usable in constant expressions.
constexpr int kMinComplexConvertMemCopy = 12;
65#if defined(V8_OPTIMIZE_WITH_NEON)
// Copies |count| bytes with exactly two IntType-sized loads/stores: one at
// the start of the range and one flush with its end. The two writes may
// overlap, so the whole range is covered whenever
// sizeof(IntType) <= count <= 2 * sizeof(IntType).
// NOTE(review): the accesses may be misaligned; upstream annotates this
// helper with V8_CLANG_NO_SANITIZE("alignment"), which was lost from this
// excerpt — confirm before relying on sanitizer builds.
template <typename IntType>
inline void OverlappingWrites(void* dst, const void* src, size_t count) {
  *reinterpret_cast<IntType*>(dst) = *reinterpret_cast<const IntType*>(src);
  *reinterpret_cast<IntType*>(static_cast<uint8_t*>(dst) + count -
                              sizeof(IntType)) =
      *reinterpret_cast<const IntType*>(static_cast<const uint8_t*>(src) +
                                        count - sizeof(IntType));
}
81inline
void MemCopy(
void* dst, const
void* src,
size_t count) {
82 auto* dst_u =
static_cast<uint8_t*
>(dst);
83 const auto* src_u =
static_cast<const uint8_t*
>(src);
96 *
reinterpret_cast<uint16_t*
>(dst_u) =
97 *
reinterpret_cast<const uint16_t*
>(src_u);
100 OverlappingWrites<uint16_t>(dst_u, src_u, count);
103 OverlappingWrites<uint32_t>(dst_u, src_u, count);
106 OverlappingWrites<uint64_t>(dst_u, src_u, count);
109 vst1q_u8(dst_u, vld1q_u8(src_u));
110 vst1q_u8(dst_u + count -
sizeof(uint8x16_t),
111 vld1q_u8(src_u + count -
sizeof(uint8x16_t)));
114 vst1q_u8(dst_u, vld1q_u8(src_u));
115 for (
size_t i = count %
sizeof(uint8x16_t);
i <
count;
116 i +=
sizeof(uint8x16_t)) {
117 vst1q_u8(dst_u +
i, vld1q_u8(src_u +
i));
// Copies |size| bytes between non-overlapping areas. Small sizes dispatch to
// fixed-size memcpy calls, which the compiler expands into inline copies;
// everything else falls through to the generic memcpy.
inline void MemCopy(void* dest, const void* src, size_t size) {
  switch (size) {
#define CASE(N)           \
  case N:                 \
    memcpy(dest, src, N); \
    return;
    CASE(1)
    CASE(2)
    CASE(3)
    CASE(4)
    CASE(5)
    CASE(6)
    CASE(7)
    CASE(8)
    CASE(9)
    CASE(10)
    CASE(11)
    CASE(12)
    CASE(13)
    CASE(14)
    CASE(15)
    CASE(16)
#undef CASE
    default:
      memcpy(dest, src, size);
      return;
  }
}
156#if V8_TARGET_BIG_ENDIAN
157inline void MemCopyAndSwitchEndianness(
void* dst,
void* src,
159 size_t element_size) {
160#define COPY_LOOP(type, reverse) \
162 for (uint32_t i = 0; i < num_elements; i++) { \
164 type* s = reinterpret_cast<type*>(src) + i; \
165 type* d = reinterpret_cast<type*>(dst) + i; \
166 memcpy(&t, reinterpret_cast<void*>(s), element_size); \
168 memcpy(reinterpret_cast<void*>(d), &t, element_size); \
173 switch (element_size) {
175 MemCopy(dst, src, num_elements);
197 memmove(dest, src, N); \
217 memmove(dest, src, size);
// Copies |count| elements of type T from |src_ptr| to |dst_ptr|. The spans
// must not overlap (DCHECKed); |src_ptr| is not modified. Small counts are
// copied element-wise; larger ones are forwarded to MemCopy, where the
// call/setup overhead pays off.
template <size_t kBlockCopyLimit, typename T>
inline void CopyImpl(T* dst_ptr, const T* src_ptr, size_t count) {
  constexpr int kTWordSize = sizeof(T);
#ifdef DEBUG
  // Overlap check: one range must end before the other begins.
  Address dst = reinterpret_cast<Address>(dst_ptr);
  Address src = reinterpret_cast<Address>(src_ptr);
  DCHECK(((src <= dst) && ((src + count * kTWordSize) <= dst)) ||
         ((dst <= src) && ((dst + count * kTWordSize) <= src)));
#endif
  if (count == 0) return;

  // Element-wise loop for small segments; MemCopy for the rest.
  if (count < kBlockCopyLimit) {
    do {
      count--;
      *dst_ptr++ = *src_ptr++;
    } while (count > 0);
  } else {
    MemCopy(dst_ptr, src_ptr, count * kTWordSize);
  }
}
254 static const size_t kBlockCopyLimit = 16;
256 reinterpret_cast<const Address*
>(src), num_words);
261inline void CopyBytes(T* dst,
const T* src,
size_t num_bytes) {
262 static_assert(
sizeof(
T) == 1);
263 if (num_bytes == 0)
return;
// Fills |counter| uint32 slots starting at |dest| with |value|. On x86 GCC
// builds uses a `rep stosl` string store; everywhere else (including under
// MemorySanitizer, which cannot see through inline assembly) a plain loop.
inline void MemsetUint32(uint32_t* dest, uint32_t value, size_t counter) {
#if V8_HOST_ARCH_IA32 || V8_HOST_ARCH_X64
#define STOS "stosl"
#endif

#if defined(MEMORY_SANITIZER)
  // MemorySanitizer does not understand inline assembly.
#undef STOS
#endif

#if defined(__GNUC__) && defined(STOS)
  asm volatile(
      "cld;"
      "rep ; " STOS
      : "+&c"(counter), "+&D"(dest)
      : "a"(value)
      : "memory", "cc");
#else
  for (size_t i = 0; i < counter; i++) {
    dest[i] = value;
  }
#endif

#undef STOS
}
296#elif V8_HOST_ARCH_X64
300#if defined(MEMORY_SANITIZER)
305#if defined(__GNUC__) && defined(STOS)
309 :
"+&c"(counter),
"+&D"(dest)
313 for (
size_t i = 0;
i < counter;
i++) {
321template <
typename T,
typename U>
330 reinterpret_cast<Address>(value), counter);
342template <
typename SrcType,
typename DstType>
// Copies |count| chars from |src| to |dst|, zero-extending when widening
// (e.g. 8-bit -> 16-bit). The ranges must not overlap (DCHECKed), which is
// what permits std::copy_n. Small counts are dispatched to fixed-size
// std::copy_n calls the compiler can fully unroll.
template <typename SrcType, typename DstType>
void CopyChars(DstType* dst, const SrcType* src, size_t count) {
  static_assert(std::is_integral<SrcType>::value);
  static_assert(std::is_integral<DstType>::value);
  // Copy through the unsigned representations so narrowing/widening is
  // well-defined zero-extension rather than sign-extension.
  using SrcTypeUnsigned = typename std::make_unsigned<SrcType>::type;
  using DstTypeUnsigned = typename std::make_unsigned<DstType>::type;
#ifdef DEBUG
  // Check for no overlap, otherwise {std::copy_n} cannot be used.
  Address src_start = reinterpret_cast<Address>(src);
  Address src_end = src_start + count * sizeof(SrcType);
  Address dst_start = reinterpret_cast<Address>(dst);
  Address dst_end = dst_start + count * sizeof(DstType);
  DCHECK(src_end <= dst_start || dst_end <= src_start);
#endif

  auto* dst_u = reinterpret_cast<DstTypeUnsigned*>(dst);
  auto* src_u = reinterpret_cast<const SrcTypeUnsigned*>(src);

#if defined(V8_OPTIMIZE_WITH_NEON)
  if constexpr (sizeof(DstType) == 1 && sizeof(SrcType) == 1) {
    // Byte-to-byte copies can use the SIMD-optimized MemCopy directly.
    MemCopy(dst_u, src_u, count);
    return;
  }
#endif  // defined(V8_OPTIMIZE_WITH_NEON)

  // Explicit small-count instantiations let the compiler emit fixed-size
  // copies instead of a generic loop.
  switch (count) {
#define CASE(N)                   \
  case N:                         \
    std::copy_n(src_u, N, dst_u); \
    return;
    CASE(1)
    CASE(2)
    CASE(3)
    CASE(4)
    CASE(5)
    CASE(6)
    CASE(7)
    CASE(8)
    CASE(9)
    CASE(10)
    CASE(11)
    CASE(12)
    CASE(13)
    CASE(14)
    CASE(15)
    CASE(16)
#undef CASE
    default:
      std::copy_n(src_u, count, dst_u);
      return;
  }
}
constexpr unsigned CountLeadingZeros(T value)
void CopyBytes(T *dst, const T *src, size_t num_bytes)
static uint16_t ByteReverse16(uint16_t value)
static uint32_t ByteReverse32(uint32_t value)
void CopyImpl(T *dst_ptr, const T *src_ptr, size_t count)
V8_EXPORT_PRIVATE void MemMove(void *dest, const void *src, size_t size)
void CopyWords(Address dst, const Address src, size_t num_words)
void CopyChars(DstType *dst, const SrcType *src, size_t count) V8_NONNULL(1
void MemsetPointer(FullObjectSlot start, Tagged< Object > value, size_t counter)
void MemCopy(void *dest, const void *src, size_t size)
static uint64_t ByteReverse64(uint64_t value)
const size_t kMinComplexMemCopy
void init_memcopy_functions()
void MemsetUint32(uint32_t *dest, uint32_t value, size_t counter)
#define DCHECK(condition)
#define V8_EXPORT_PRIVATE
constexpr bool IsAligned(T value, U alignment)
#define V8_CLANG_NO_SANITIZE(what)
defined(V8_TRIVIAL_ABI)