#ifndef _ASM_X86_STRING_64_H
#define _ASM_X86_STRING_64_H

#ifdef __KERNEL__

/* Written 2002 by Andi Kleen */

/* Only used for special circumstances. Stolen from i386/string.h */
static __always_inline void *__inline_memcpy(void *to, const void *from, size_t n)
{
	unsigned long d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "testb $2,%b4\n\t"
		     "je 1f\n\t"
		     "movsw\n"
		     "1:\ttestb $1,%b4\n\t"
		     "je 2f\n\t"
		     "movsb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "q" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
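
/*
 * For reference, a C sketch of what the asm above does (illustrative
 * only, kept out of the build; the helper name is hypothetical, and
 * it assumes the pointers may be accessed as 32-bit words): copy n/4
 * dwords, then bit 1 of n selects a 2-byte tail and bit 0 a final
 * byte, mirroring the testb $2/testb $1 checks.
 */
#if 0
static inline void *__inline_memcpy_c(void *to, const void *from, size_t n)
{
	unsigned int *d = to;
	const unsigned int *s = from;
	size_t words = n / 4;

	while (words--)				/* rep ; movsl */
		*d++ = *s++;
	if (n & 2) {				/* testb $2,%b4 ; movsw */
		*(unsigned short *)d = *(const unsigned short *)s;
		d = (void *)((char *)d + 2);
		s = (const void *)((const char *)s + 2);
	}
	if (n & 1)				/* testb $1,%b4 ; movsb */
		*(char *)d = *(const char *)s;
	return to;
}
#endif
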
/* Even with __builtin_ the compiler may decide to use the out of line
   function. */
#define __HAVE_ARCH_MEMCPY 1
extern void *__memcpy(void *to, const void *from, size_t len);

#ifndef CONFIG_KMEMCHECK
#if (__GNUC__ == 4 && __GNUC_MINOR__ >= 3) || __GNUC__ > 4
extern void *memcpy(void *to, const void *from, size_t len);
#else
#define memcpy(dst, src, len)					\
({								\
	size_t __len = (len);					\
	void *__ret;						\
	if (__builtin_constant_p(len) && __len >= 64)		\
		__ret = __memcpy((dst), (src), __len);		\
	else							\
		__ret = __builtin_memcpy((dst), (src), __len);	\
	__ret;							\
})
#endif
#else
/*
 * kmemcheck becomes very happy if we use the REP instructions unconditionally,
 * because it means that we know both memory operands in advance.
 */
#define memcpy(dst, src, len) __inline_memcpy((dst), (src), (len))
#endif
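
/*
 * How the old-GCC memcpy() macro above dispatches, shown on
 * hypothetical call sites (a sketch, not compiled in; buf's contents
 * are irrelevant for the illustration): a compile-time-constant
 * length >= 64 goes to the out-of-line __memcpy(), everything else
 * to __builtin_memcpy(), which the compiler may expand inline.
 */
#if 0
void memcpy_dispatch_example(void *dst, const void *src, size_t runtime_len)
{
	char buf[128];

	memcpy(dst, buf, sizeof(buf));	/* constant, >= 64: __memcpy() */
	memcpy(dst, buf, 16);		/* constant, < 64: __builtin_memcpy() */
	memcpy(dst, src, runtime_len);	/* not constant: __builtin_memcpy() */
}
#endif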

#define __HAVE_ARCH_MEMSET
void *memset(void *s, int c, size_t n);
void *__memset(void *s, int c, size_t n);

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t count);
void *__memmove(void *dest, const void *src, size_t count);

int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);

#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)
/*
 * For files that are not instrumented (e.g. mm/slub.c) we
 * should use the not-instrumented versions of the mem* functions.
 */
#undef memcpy
#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)
#endif
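
/*
 * Usage note (illustrative): a file is opted out of KASAN
 * instrumentation from its Makefile, e.g.
 *
 *	KASAN_SANITIZE_slub.o := n
 *
 * Such a file is built without -fsanitize=kernel-address, so the
 * compiler does not define __SANITIZE_ADDRESS__ and the defines
 * above route its mem* calls to the uninstrumented __mem* variants.
 */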

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_64_H */