Commit 3c0be584 authored by Paul Burton

MIPS: Drop 32-bit asm string functions

We have assembly implementations of strcpy(), strncpy(), strcmp() &
strncmp() which:

 - Are simple byte-at-a-time loops with no particular optimizations. As
   a comment in the code describes, they're "rather naive".

 - Offer no clear performance advantage over the generic C
   implementations - in microbenchmarks performed by Alexander Lobakin
   the asm functions sometimes win & sometimes lose, but generally not
   by large margins in either direction.

 - Don't support 64-bit kernels, where we already make use of the
   generic C implementations.

 - Tend to bloat kernel code size due to inlining.

 - Don't support CONFIG_FORTIFY_SOURCE.

 - Won't support nanoMIPS without rework.

For all of these reasons, delete the asm implementations & make use of
the generic C implementations for 32-bit kernels just like we already do
for 64-bit kernels.
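
For reference, the generic implementations we switch to are themselves
simple byte-at-a-time loops; as a rough sketch, lib/string.c's strcpy()
reads:

  char *strcpy(char *dest, const char *src)
  {
          char *tmp = dest;

          while ((*dest++ = *src++) != '\0')
                  /* nothing */;
          return tmp;
  }

That is the same algorithm as the asm version below, but portable across
ISAs & visible to CONFIG_FORTIFY_SOURCE.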
Signed-off-by: Paul Burton <paul.burton@mips.com>
URL: https://lore.kernel.org/linux-mips/a2a35f1cf58d6db19eb4af9b4ae21e35@dlink.ru/
Cc: Alexander Lobakin <alobakin@dlink.ru>
Reviewed-by: Philippe Mathieu-Daudé <f4bug@amsat.org>
Cc: linux-mips@vger.kernel.org
parent 6baaeada
@@ -10,129 +10,6 @@
#ifndef _ASM_STRING_H
#define _ASM_STRING_H
#if !defined(__OPTIMIZE__) || !defined(CONFIG_FORTIFY_SOURCE)
/*
 * Most of the inline functions are rather naive implementations so I just
 * didn't bother updating them for 64-bit ...
 */
#ifdef CONFIG_32BIT

#ifndef IN_STRING_C

#define __HAVE_ARCH_STRCPY
static __inline__ char *strcpy(char *__dest, __const__ char *__src)
{
	char *__xdest = __dest;

	__asm__ __volatile__(
	".set\tnoreorder\n\t"
	".set\tnoat\n"
	"1:\tlbu\t$1,(%1)\n\t"
	"addiu\t%1,1\n\t"
	"sb\t$1,(%0)\n\t"
	"bnez\t$1,1b\n\t"
	"addiu\t%0,1\n\t"
	".set\tat\n\t"
	".set\treorder"
	: "=r" (__dest), "=r" (__src)
	: "0" (__dest), "1" (__src)
	: "memory");

	return __xdest;
}
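
/*
 * A rough C rendering of the loop above, for readers unfamiliar with
 * MIPS branch delay slots (the addiu following each branch executes
 * before the branch takes effect):
 *
 *	char c;
 *	do {
 *		c = *__src++;
 *		*__dest++ = c;
 *	} while (c != '\0');
 */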

#define __HAVE_ARCH_STRNCPY
static __inline__ char *strncpy(char *__dest, __const__ char *__src, size_t __n)
{
	char *__xdest = __dest;

	if (__n == 0)
		return __xdest;

	__asm__ __volatile__(
	".set\tnoreorder\n\t"
	".set\tnoat\n"
	"1:\tlbu\t$1,(%1)\n\t"
	"subu\t%2,1\n\t"
	"sb\t$1,(%0)\n\t"
	"beqz\t$1,2f\n\t"
	"addiu\t%0,1\n\t"
	"bnez\t%2,1b\n\t"
	"addiu\t%1,1\n"
	"2:\n\t"
	".set\tat\n\t"
	".set\treorder"
	: "=r" (__dest), "=r" (__src), "=r" (__n)
	: "0" (__dest), "1" (__src), "2" (__n)
	: "memory");

	return __xdest;
}
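
/*
 * Roughly equivalent C, for reference:
 *
 *	char c;
 *	do {
 *		c = *__src;
 *		__n--;
 *		*__dest++ = c;
 *		if (c == '\0')
 *			break;
 *		__src++;
 *	} while (__n != 0);
 *
 * Note the early exit once a NUL byte has been copied: the remainder
 * of the buffer is not zero-padded the way a standard strncpy() would
 * pad it.
 */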

#define __HAVE_ARCH_STRCMP
static __inline__ int strcmp(__const__ char *__cs, __const__ char *__ct)
{
	int __res;

	__asm__ __volatile__(
	".set\tnoreorder\n\t"
	".set\tnoat\n\t"
	"lbu\t%2,(%0)\n"
	"1:\tlbu\t$1,(%1)\n\t"
	"addiu\t%0,1\n\t"
	"bne\t$1,%2,2f\n\t"
	"addiu\t%1,1\n\t"
	"bnez\t%2,1b\n\t"
	"lbu\t%2,(%0)\n\t"
#if defined(CONFIG_CPU_R3000)
	"nop\n\t"
#endif
	"move\t%2,$1\n"
	"2:\tsubu\t%2,$1\n"
	"3:\t.set\tat\n\t"
	".set\treorder"
	: "=r" (__cs), "=r" (__ct), "=r" (__res)
	: "0" (__cs), "1" (__ct));

	return __res;
}
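
/*
 * Roughly equivalent C, for reference (the extra nop under
 * CONFIG_CPU_R3000 accounts for that CPU's load delay slot):
 *
 *	unsigned char c1 = *__cs++, c2;
 *	for (;;) {
 *		c2 = *__ct++;
 *		if (c2 != c1)
 *			break;
 *		if (c1 == '\0')
 *			return 0;
 *		c1 = *__cs++;
 *	}
 *	return c1 - c2;
 */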

#endif /* !defined(IN_STRING_C) */

#define __HAVE_ARCH_STRNCMP
static __inline__ int
strncmp(__const__ char *__cs, __const__ char *__ct, size_t __count)
{
	int __res;

	__asm__ __volatile__(
	".set\tnoreorder\n\t"
	".set\tnoat\n"
	"1:\tlbu\t%3,(%0)\n\t"
	"beqz\t%2,2f\n\t"
	"lbu\t$1,(%1)\n\t"
	"subu\t%2,1\n\t"
	"bne\t$1,%3,3f\n\t"
	"addiu\t%0,1\n\t"
	"bnez\t%3,1b\n\t"
	"addiu\t%1,1\n"
	"2:\n\t"
#if defined(CONFIG_CPU_R3000)
	"nop\n\t"
#endif
	"move\t%3,$1\n"
	"3:\tsubu\t%3,$1\n\t"
	".set\tat\n\t"
	".set\treorder"
	: "=r" (__cs), "=r" (__ct), "=r" (__count), "=r" (__res)
	: "0" (__cs), "1" (__ct), "2" (__count));

	return __res;
}
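
/*
 * Same shape as the strcmp() rendering above, with %3 holding the
 * current byte and %2 counting __count down to zero; once the count
 * is exhausted, the beqz at the top exits with a result of zero.
 */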
#endif /* CONFIG_32BIT */
#endif /* !defined(__OPTIMIZE__) || !defined(CONFIG_FORTIFY_SOURCE) */
#define __HAVE_ARCH_MEMSET
extern void *memset(void *__s, int __c, size_t __count);
......