summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorEric Blake <eblake@redhat.com>2008-05-22 02:31:46 +0000
committerEric Blake <eblake@redhat.com>2008-05-22 02:31:46 +0000
commitae47b14a12d21f7d4a506b5682308d2fe3f8a0ec (patch)
tree01313a8418bb7a339ce6bcb154433b643186bd1d
parent0b99028af40e2d8c75f21148bd8cd7ba28e170bf (diff)
downloadcygnal-ae47b14a12d21f7d4a506b5682308d2fe3f8a0ec.tar.gz
cygnal-ae47b14a12d21f7d4a506b5682308d2fe3f8a0ec.tar.bz2
cygnal-ae47b14a12d21f7d4a506b5682308d2fe3f8a0ec.zip
Optimize the generic strchr.
* libc/string/strchr.c (strchr) [!__OPTIMIZE_SIZE__]: Pre-align data so unaligned searches aren't penalized. Special-case searching for 0.
-rw-r--r--newlib/ChangeLog10
-rw-r--r--newlib/libc/string/strchr.c69
2 files changed, 50 insertions(+), 29 deletions(-)
diff --git a/newlib/ChangeLog b/newlib/ChangeLog
index 35a80e266..5876f2632 100644
--- a/newlib/ChangeLog
+++ b/newlib/ChangeLog
@@ -1,8 +1,14 @@
2008-05-21 Eric Blake <ebb9@byu.net>
+ Optimize the generic strchr.
+ * libc/string/strchr.c (strchr) [!__OPTIMIZE_SIZE__]: Pre-align
+ data so unaligned searches aren't penalized. Special-case
+ searching for 0.
+
Optimize strchr for x86.
- * libc/machine/i386/strchr.S (strchr): Pre-align data so unaligned
- searches aren't penalized. Special-case searching for 0.
+ * libc/machine/i386/strchr.S (strchr) [!__OPTIMIZE_SIZE__]:
+ Pre-align data so unaligned searches aren't penalized.
+ Special-case searching for 0.
2008-05-20 Nick Clifton <nickc@redhat.com>
diff --git a/newlib/libc/string/strchr.c b/newlib/libc/string/strchr.c
index 60b0fde8e..e921b5db7 100644
--- a/newlib/libc/string/strchr.c
+++ b/newlib/libc/string/strchr.c
@@ -53,7 +53,7 @@ QUICKREF
#endif
#endif
-/* DETECTCHAR returns nonzero if (long)X contains the byte used
+/* DETECTCHAR returns nonzero if (long)X contains the byte used
to fill (long)MASK. */
#define DETECTCHAR(X,MASK) (DETECTNULL(X ^ MASK))
@@ -63,46 +63,61 @@ _DEFUN (strchr, (s1, i),
int i)
{
_CONST unsigned char *s = (_CONST unsigned char *)s1;
-#if defined(PREFER_SIZE_OVER_SPEED) || defined(__OPTIMIZE_SIZE__)
- unsigned char c = (unsigned int)i;
+ unsigned char c = i;
- while (*s && *s != c)
+#if !defined(PREFER_SIZE_OVER_SPEED) && !defined(__OPTIMIZE_SIZE__)
+ unsigned long mask,j;
+ unsigned long *aligned_addr;
+
+ /* Special case for finding 0. */
+ if (!c)
{
- s++;
+ while (UNALIGNED (s))
+ {
+ if (!*s)
+ return (char *) s;
+ s++;
+ }
+ /* Operate a word at a time. */
+ aligned_addr = (unsigned long *) s;
+ while (!DETECTNULL (*aligned_addr))
+ aligned_addr++;
+ /* Found the end of string. */
+ s = (const unsigned char *) aligned_addr;
+ while (*s)
+ s++;
+ return (char *) s;
}
- if (*s != c)
+ /* All other bytes. Align the pointer, then search a long at a time. */
+ while (UNALIGNED (s))
{
- s = NULL;
+ if (!*s)
+ return NULL;
+ if (*s == c)
+ return (char *) s;
+ s++;
}
- return (char *) s;
-#else
- unsigned char c = (unsigned char)i;
- unsigned long mask,j;
- unsigned long *aligned_addr;
+ mask = c;
+ for (j = 8; j < LBLOCKSIZE * 8; j <<= 1)
+ mask = (mask << j) | mask;
- if (!UNALIGNED (s))
- {
- mask = 0;
- for (j = 0; j < LBLOCKSIZE; j++)
- mask = (mask << 8) | c;
+ aligned_addr = (unsigned long *) s;
+ while (!DETECTNULL (*aligned_addr) && !DETECTCHAR (*aligned_addr, mask))
+ aligned_addr++;
- aligned_addr = (unsigned long*)s;
- while (!DETECTNULL (*aligned_addr) && !DETECTCHAR (*aligned_addr, mask))
- aligned_addr++;
+ /* The block of bytes currently pointed to by aligned_addr
+ contains either a null or the target char, or both. We
+ catch it using the bytewise search. */
- /* The block of bytes currently pointed to by aligned_addr
- contains either a null or the target char, or both. We
- catch it using the bytewise search. */
+ s = (unsigned char *) aligned_addr;
- s = (unsigned char*)aligned_addr;
- }
+#endif /* not PREFER_SIZE_OVER_SPEED */
while (*s && *s != c)
- s++;
+ s++;
if (*s == c)
return (char *)s;
return NULL;
-#endif /* not PREFER_SIZE_OVER_SPEED */
}