/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_64_H
#define _ASM_X86_STRING_64_H

#ifdef __KERNEL__
#include <linux/jump_label.h>

/* Written 2002 by Andi Kleen */

/*
 * Even with the __builtin_ variants, the compiler may decide to use the
 * out-of-line function.
 */

#if defined(__SANITIZE_MEMORY__) && defined(__NO_FORTIFY)
#include <linux/kmsan_string.h>
#endif

#define __HAVE_ARCH_MEMCPY 1
extern void *memcpy(void *to, const void *from, size_t len);
extern void *__memcpy(void *to, const void *from, size_t len);

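/*
 * Illustrative sketch (not part of the original header, guarded out so it
 * has no effect): with a length that is not a small compile-time constant,
 * __builtin_memcpy typically lowers to a real call to the extern memcpy()
 * above; small constant sizes may be expanded inline instead. The function
 * name below is hypothetical.
 */
#if 0	/* example only */
static void example_copy(void *dst, const void *src, size_t len)
{
        memcpy(dst, src, len);  /* non-constant len: usually an out-of-line call */
}
#endif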
#define __HAVE_ARCH_MEMSET
void *memset(void *s, int c, size_t n);
void *__memset(void *s, int c, size_t n);

/*
 * KMSAN needs to instrument as much code as possible. Use C versions of
 * memsetXX() from lib/string.c under KMSAN.
 */
#if !defined(CONFIG_KMSAN)
#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
        long d0, d1;
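        /*
         * rep stosw stores %ax to (%rdi) n times: "0" (n) ties the count
         * to %rcx and "1" (s) ties the destination to %rdi; both registers
         * are modified by the instruction, so d0/d1 capture them as dummy
         * outputs. The 32- and 64-bit variants below follow the same
         * pattern with stosl/stosq.
         */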
        asm volatile("rep\n\t"
                     "stosw"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (v), "1" (s), "0" (n)
                     : "memory");
        return s;
}

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
        long d0, d1;
        asm volatile("rep\n\t"
                     "stosl"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (v), "1" (s), "0" (n)
                     : "memory");
        return s;
}

#define __HAVE_ARCH_MEMSET64
static inline void *memset64(uint64_t *s, uint64_t v, size_t n)
{
        long d0, d1;
        asm volatile("rep\n\t"
                     "stosq"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (v), "1" (s), "0" (n)
                     : "memory");
        return s;
}
#endif
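
/*
 * Usage sketch (illustrative, not part of the original header, guarded
 * out): the third argument counts elements, not bytes, so each variant
 * stores n 16-, 32- or 64-bit values. The pixel buffer is hypothetical.
 */
#if 0	/* example only */
static void example_fill(uint32_t *pixels)
{
        memset32(pixels, 0x00ff00ffu, 640);     /* 640 u32 stores = 2560 bytes */
}
#endif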

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t count);
void *__memmove(void *dest, const void *src, size_t count);

int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);

#ifdef CONFIG_ARCH_HAS_UACCESS_FLUSHCACHE
#define __HAVE_ARCH_MEMCPY_FLUSHCACHE 1
void __memcpy_flushcache(void *dst, const void *src, size_t cnt);
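/*
 * For small compile-time-constant sizes, the inline wrapper below emits
 * movnti non-temporal stores directly (they bypass the cache, so nothing
 * needs flushing afterwards) instead of calling __memcpy_flushcache().
 */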
static __always_inline void memcpy_flushcache(void *dst, const void *src, size_t cnt)
{
        if (__builtin_constant_p(cnt)) {
                switch (cnt) {
                        case 4:
                                asm ("movntil %1, %0" : "=m"(*(u32 *)dst) : "r"(*(u32 *)src));
                                return;
                        case 8:
                                asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
                                return;
                        case 16:
                                asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
                                asm ("movntiq %1, %0" : "=m"(*(u64 *)(dst + 8)) : "r"(*(u64 *)(src + 8)));
                                return;
                }
        }
        __memcpy_flushcache(dst, src, cnt);
}
#endif
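
/*
 * Usage sketch (illustrative, not part of the original header, guarded
 * out): a constant 8-byte copy takes the single-movntiq path above. The
 * pmem destination name is hypothetical.
 */
#if 0	/* example only */
static void example_flush_copy(u64 *pmem_dst, const u64 *src)
{
        memcpy_flushcache(pmem_dst, src, 8);    /* constant cnt == 8 */
}
#endif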

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_64_H */
