summary | refs | log | tree | commit | diff | stats
path: root/newlib/libc/string/memmove.c
diff options
context:
space:
mode:
Diffstat (limited to 'newlib/libc/string/memmove.c')
-rw-r--r--newlib/libc/string/memmove.c21
1 file changed, 10 insertions(+), 11 deletions(-)
diff --git a/newlib/libc/string/memmove.c b/newlib/libc/string/memmove.c
index 2528e27d4..b03bb3821 100644
--- a/newlib/libc/string/memmove.c
+++ b/newlib/libc/string/memmove.c
@@ -88,14 +88,13 @@ _DEFUN (memmove, (dst_void, src_void, length),
_CONST char *src = src_void;
long *aligned_dst;
_CONST long *aligned_src;
- int len = length;
- if (src < dst && dst < src + len)
+ if (src < dst && dst < src + length)
{
/* Destructive overlap...have to copy backwards */
- src += len;
- dst += len;
- while (len--)
+ src += length;
+ dst += length;
+ while (length--)
{
*--dst = *--src;
}
@@ -105,26 +104,26 @@ _DEFUN (memmove, (dst_void, src_void, length),
/* Use optimizing algorithm for a non-destructive copy to closely
match memcpy. If the size is small or either SRC or DST is unaligned,
then punt into the byte copy loop. This should be rare. */
- if (!TOO_SMALL(len) && !UNALIGNED (src, dst))
+ if (!TOO_SMALL(length) && !UNALIGNED (src, dst))
{
aligned_dst = (long*)dst;
aligned_src = (long*)src;
/* Copy 4X long words at a time if possible. */
- while (len >= BIGBLOCKSIZE)
+ while (length >= BIGBLOCKSIZE)
{
*aligned_dst++ = *aligned_src++;
*aligned_dst++ = *aligned_src++;
*aligned_dst++ = *aligned_src++;
*aligned_dst++ = *aligned_src++;
- len -= BIGBLOCKSIZE;
+ length -= BIGBLOCKSIZE;
}
/* Copy one long word at a time if possible. */
- while (len >= LITTLEBLOCKSIZE)
+ while (length >= LITTLEBLOCKSIZE)
{
*aligned_dst++ = *aligned_src++;
- len -= LITTLEBLOCKSIZE;
+ length -= LITTLEBLOCKSIZE;
}
/* Pick up any residual with a byte copier. */
@@ -132,7 +131,7 @@ _DEFUN (memmove, (dst_void, src_void, length),
src = (char*)aligned_src;
}
- while (len--)
+ while (length--)
{
*dst++ = *src++;
}