bionic/libc/arch-x86/generic/string/memcmp.S
Varvara Rainchik 5a92284167 Add 32-bit Silvermont-optimized string/memory functions.
Add following functions:
bcopy, memcpy, memmove, memset, bzero, memcmp, wmemcmp, strlen,
strcpy, strncpy, stpcpy, stpncpy.
Create new directories inside arch-x86 to specify the architecture: atom,
silvermont, and generic (architectures that are neither Atom nor Silvermont are treated as generic).
Because optimized versions of stpcpy and stpncpy are introduced,
the C implementations of these functions are moved from the
architecture-common makefile to the arm- and mips-specific makefiles.

Change-Id: I990f8061c3e9bca1f154119303da9e781c5d086e
Signed-off-by: Varvara Rainchik <varvara.rainchik@intel.com>
2014-05-12 13:56:59 -07:00

45 lines
869 B
ArmAsm

/* $OpenBSD: memcmp.S,v 1.4 2005/08/07 11:30:38 espie Exp $ */
/*
* Written by J.T. Conklin <jtc@netbsd.org>.
* Public domain.
*/
#include <private/bionic_asm.h>
/*
 * int memcmp(const void *s1, const void *s2, size_t n)
 *
 * IA-32, cdecl: all arguments on the stack, result in %eax.
 * Compares n bytes; returns 0 on equality, otherwise the difference
 * (as unsigned bytes) between the first pair of mismatching bytes.
 *
 * Strategy: compare 32-bit words with repe/cmpsl, then the remaining
 * 0-3 bytes with repe/cmpsb.  On a word-level mismatch, back up one
 * word and re-scan it byte-wise to locate the differing byte.
 *
 * After the two pushes below, the arguments sit at:
 *   12(%esp) = s1, 16(%esp) = s2, 20(%esp) = n
 * %esi/%edi are callee-saved and restored before every return.
 */
ENTRY(memcmp)
pushl %edi /* save callee-saved; %edi will walk s1 */
pushl %esi /* save callee-saved; %esi will walk s2 */
movl 12(%esp),%edi /* %edi = s1 (offsets account for the two pushes) */
movl 16(%esp),%esi /* %esi = s2 */
cld /* set compare direction forward */
movl 20(%esp),%ecx /* compare by words */
shrl $2,%ecx /* %ecx = n / 4 whole 32-bit words */
repe
cmpsl /* scan word-wise until mismatch or %ecx hits 0 */
jne L5 /* do we match so far? (ZF clear => some word differed) */
movl 20(%esp),%ecx /* compare remainder by bytes */
andl $3,%ecx /* %ecx = n % 4 trailing bytes */
repe
cmpsb /* scan the tail byte-wise */
jne L6 /* do we match? (mismatching byte found) */
xorl %eax,%eax /* we match, return zero */
popl %esi
popl %edi
ret
L5: movl $4,%ecx /* A mismatch lies somewhere in the word */
subl %ecx,%edi /* just compared; cmpsl already advanced */
subl %ecx,%esi /* both pointers past it, so back up 4 */
repe
cmpsb /* and re-scan those 4 bytes to find it */
L6: movzbl -1(%edi),%eax /* cmpsb stopped one past the mismatch; */
movzbl -1(%esi),%edx /* zero-extend both bytes so the */
subl %edx,%eax /* subtraction is an unsigned comparison */
popl %esi
popl %edi
ret
END(memcmp)