@@ -10,126 +10,22 @@
# ifndef _ASM_STRING_H
# define _ASM_STRING_H
/*
 * Most of the inline functions are rather naive implementations so I just
 * didn't bother updating them for 64-bit...
 * We don't do inline string functions, since the
 * optimised inline asm versions are not small.
 */
# ifdef CONFIG_32BIT
# ifndef IN_STRING_C
#define __HAVE_ARCH_STRCPY
/*
 * Assembler strcpy(): copy the NUL-terminated string at @__src to
 * @__dest, including the terminating NUL, and return the original
 * @__dest pointer.
 *
 * Uses $1 (the assembler temporary) as scratch, hence .set noat.
 * With .set noreorder the pointer increments are placed in the branch
 * delay slots by hand; the byte is stored *before* the zero test so
 * the terminating NUL itself is copied too.
 */
static __inline__ char *strcpy(char *__dest, __const__ char *__src)
{
	char *__xdest = __dest;

	__asm__ __volatile__(
	".set\tnoreorder\n\t"
	".set\tnoat\n"
	"1:\tlbu\t$1,(%1)\n\t"
	"addiu\t%1,1\n\t"
	"sb\t$1,(%0)\n\t"
	"bnez\t$1,1b\n\t"
	"addiu\t%0,1\n\t"		/* branch delay slot */
	".set\tat\n\t"
	".set\treorder"
	: "=r" (__dest), "=r" (__src)
	: "0" (__dest), "1" (__src)	/* "0"/"1": tie inputs to output regs */
	: "memory");

	return __xdest;
}
#define __HAVE_ARCH_STRNCPY
/*
 * Assembler strncpy(): copy at most @__n bytes from @__src to @__dest,
 * stopping early once a NUL has been copied; returns the original
 * @__dest pointer.
 *
 * NOTE(review): unlike ISO strncpy() this version does not zero-pad
 * the remainder of the buffer after the NUL — the loop simply exits.
 * The explicit __n == 0 check is needed because the asm loop is
 * bottom-tested and would otherwise copy one byte.
 */
static __inline__ char *strncpy(char *__dest, __const__ char *__src, size_t __n)
{
	char *__xdest = __dest;

	if (__n == 0)
		return __xdest;

	__asm__ __volatile__(
	".set\tnoreorder\n\t"
	".set\tnoat\n"
	"1:\tlbu\t$1,(%1)\n\t"
	"subu\t%2,1\n\t"
	"sb\t$1,(%0)\n\t"
	"beqz\t$1,2f\n\t"
	"addiu\t%0,1\n\t"		/* delay slot: advance dest */
	"bnez\t%2,1b\n\t"
	"addiu\t%1,1\n"			/* delay slot: advance src */
	"2:\n\t"
	".set\tat\n\t"
	".set\treorder"
	: "=r" (__dest), "=r" (__src), "=r" (__n)
	: "0" (__dest), "1" (__src), "2" (__n)
	: "memory");

	return __xdest;
}
#define __HAVE_ARCH_STRCMP
/*
 * Assembler strcmp(): compare the NUL-terminated strings @__cs and
 * @__ct; returns 0 if equal, otherwise the difference between the
 * first pair of differing bytes (*__cs - *__ct, as unsigned loads).
 *
 * %2 holds the current byte of __cs, $1 the current byte of __ct.
 * On a mismatch the code branches straight to the final subu; when
 * both strings end together it falls through move %2,$1 so the subu
 * yields 0.  The extra nop under CONFIG_CPU_R3000 pads the load
 * delay slot — presumably for R3000-class cores; confirm against the
 * target errata if this is ever touched.
 */
static __inline__ int strcmp(__const__ char *__cs, __const__ char *__ct)
{
	int __res;

	__asm__ __volatile__(
	".set\tnoreorder\n\t"
	".set\tnoat\n\t"
	"lbu\t%2,(%0)\n"
	"1:\tlbu\t$1,(%1)\n\t"
	"addiu\t%0,1\n\t"
	"bne\t$1,%2,2f\n\t"
	"addiu\t%1,1\n\t"		/* delay slot: advance __ct */
	"bnez\t%2,1b\n\t"
	"lbu\t%2,(%0)\n\t"		/* delay slot: fetch next __cs byte */
#if defined(CONFIG_CPU_R3000)
	"nop\n\t"
#endif
	"move\t%2,$1\n"
	"2:\tsubu\t%2,$1\n"
	"3:\t.set\tat\n\t"
	".set\treorder"
	: "=r" (__cs), "=r" (__ct), "=r" (__res)
	: "0" (__cs), "1" (__ct));

	return __res;
}
# endif /* !defined(IN_STRING_C) */
# undef __HAVE_ARCH_STRCPY
extern char * strcpy ( char * __dest , __const__ char * __src ) ;
#define __HAVE_ARCH_STRNCMP
/*
 * Assembler strncmp(): compare at most @__count bytes of @__cs and
 * @__ct; returns 0 if the compared prefixes are equal (or __count is
 * exhausted), otherwise the difference between the first pair of
 * differing bytes.
 *
 * %3 holds the current byte of __cs, $1 that of __ct.  When the count
 * runs out (label 2) the result is forced to 0 via move %3,$1 followed
 * by the shared subu at label 3; a mismatch jumps straight to label 3.
 * The extra nop under CONFIG_CPU_R3000 pads the load delay slot —
 * presumably for R3000-class cores; confirm against the target errata.
 *
 * NOTE(review): the original paste had stray "#undef"/"extern" lines
 * from the surrounding diff spliced into this body; they belong to the
 * patch's replacement declarations, not to this function, and have been
 * dropped here.
 */
static __inline__ int
strncmp(__const__ char *__cs, __const__ char *__ct, size_t __count)
{
	int __res;

	__asm__ __volatile__(
	".set\tnoreorder\n\t"
	".set\tnoat\n"
	"1:\tlbu\t%3,(%0)\n\t"
	"beqz\t%2,2f\n\t"
	"lbu\t$1,(%1)\n\t"		/* delay slot: fetch __ct byte */
	"subu\t%2,1\n\t"
	"bne\t$1,%3,3f\n\t"
	"addiu\t%0,1\n\t"		/* delay slot: advance __cs */
	"bnez\t%3,1b\n\t"
	"addiu\t%1,1\n"			/* delay slot: advance __ct */
	"2:\n\t"
#if defined(CONFIG_CPU_R3000)
	"nop\n\t"
#endif
	"move\t%3,$1\n"
	"3:\tsubu\t%3,$1\n\t"
	".set\tat\n\t"
	".set\treorder"
	: "=r" (__cs), "=r" (__ct), "=r" (__count), "=r" (__res)
	: "0" (__cs), "1" (__ct), "2" (__count));

	return __res;
}
# endif /* CONFIG_32BIT */
# undef __HAVE_ARCH_STRNCMP
extern int strncmp ( __const__ char * __cs , __const__ char * __ct , size_t __count ) ;
# undef __HAVE_ARCH_MEMSET
extern void * memset ( void * __s , int __c , size_t __count ) ;