am 84d90bf5: am aa5f32ea: Merge "Add Aarch64 optimized bzero based on memset"

* commit '84d90bf5114466bb62d0580ce524da98b6b45dd2':
  Add Aarch64 optimized bzero based on memset
Christopher Ferris 2014-05-29 21:27:03 +00:00 committed by Android Git Automerger
commit 4704238dd2
5 changed files with 49 additions and 41 deletions

View File

@@ -34,7 +34,6 @@ libc_common_src_files_arm64 += \
 ### CPU specific source files
 libc_bionic_src_files_arm64 := \
     arch-arm64/bionic/__bionic_clone.S \
-    arch-arm64/bionic/bzero_arm64.c \
     arch-arm64/bionic/_exit_with_stack_teardown.S \
     arch-arm64/bionic/__get_sp.S \
     arch-arm64/bionic/__rt_sigreturn.S \

View File

@@ -1,33 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- *  * Redistributions of source code must retain the above copyright
- *    notice, this list of conditions and the following disclaimer.
- *  * Redistributions in binary form must reproduce the above copyright
- *    notice, this list of conditions and the following disclaimer in
- *    the documentation and/or other materials provided with the
- *    distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
- * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
- * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
- * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
- * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
- * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
- * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
- * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
- * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
- * SUCH DAMAGE.
- */
-
-#include <string.h>
-
-void bzero(void* s, size_t n) {
-  memset(s, '\0', n);
-}

View File

@@ -0,0 +1,30 @@
+/* Copyright (c) 2014, Linaro Limited
+   All rights reserved.
+
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions are met:
+       * Redistributions of source code must retain the above copyright
+         notice, this list of conditions and the following disclaimer.
+       * Redistributions in binary form must reproduce the above copyright
+         notice, this list of conditions and the following disclaimer in the
+         documentation and/or other materials provided with the distribution.
+       * Neither the name of the Linaro nor the
+         names of its contributors may be used to endorse or promote products
+         derived from this software without specific prior written permission.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+   HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#define BZERO
+#include "memset.S"
+#undef BZERO
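
The new bzero.S contains no logic of its own: it defines BZERO and textually includes memset.S, so the assembler emits a second entry point from the single shared implementation. A minimal C sketch of the same one-source, two-symbols pattern (file and function names here are invented for illustration, not part of the commit):

/* fill_impl.c - hypothetical shared implementation, compiled twice with
 * different macros, mirroring the BZERO/#include trick in bzero.S above. */
#include <stddef.h>

#ifdef BZERO
void my_bzero(void *s, size_t n)          /* two-argument entry point */
#else
void *my_memset(void *s, int c, size_t n) /* three-argument entry point */
#endif
{
    unsigned char *p = (unsigned char *)s;
#ifdef BZERO
    const int c = 0;                      /* bzero always writes zeros */
#endif
    for (size_t i = 0; i < n; ++i)
        p[i] = (unsigned char)c;
#ifndef BZERO
    return s;                             /* memset returns its first argument */
#endif
}

Compiling this file as-is yields my_memset; a second translation unit consisting of "#define BZERO" followed by "#include \"fill_impl.c\"" yields my_bzero, just as bzero.S wraps memset.S.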

View File

@@ -38,15 +38,19 @@
    data blocks more efficiently. In some circumstances this might be
    unsafe, for example in an asymmetric multiprocessor environment with
    different DC clear lengths (neither the upper nor lower lengths are
-   safe to use). The feature can be disabled by defining DONT_USE_DC.
+   safe to use).
 
    If code may be run in a virtualized environment, then define
    MAYBE_VIRT. This will cause the code to cache the system register
    values rather than re-reading them each call. */
 
 #define dstin x0
-#define val w1
+#ifdef BZERO
+#define count x1
+#else
 #define count x2
+#endif
+#define val w1
 #define tmp1 x3
 #define tmp1w w3
 #define tmp2 x4
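
The #ifdef BZERO block above remaps count from x2 to x1 because bzero takes only two arguments: under the AArch64 procedure call standard the first integer and pointer arguments arrive in x0, x1, x2, and so on, so the length register differs between the two prototypes. A small stand-alone C example (not from the commit) that exercises both entry points, with the register assignments noted in comments:

/* Both calls clear the buffer; the comments show where the AArch64 calling
 * convention places each argument. */
#include <string.h>   /* memset */
#include <strings.h>  /* bzero */

int main(void) {
    char buf[64];

    memset(buf, 0, sizeof(buf));   /* buf -> x0, 0 -> w1, size -> x2 */
    bzero(buf, sizeof(buf));       /* buf -> x0,          size -> x1 */

    return 0;
}
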
@@ -60,13 +64,18 @@
 #define dst x8
 #define tmp3w w9
 
+#ifdef BZERO
+ENTRY(bzero)
+#else
 ENTRY(memset)
+#endif
 
     mov dst, dstin /* Preserve return value. */
-    ands A_lw, val, #255
-#ifndef DONT_USE_DC
-    b.eq .Lzero_mem
+#ifdef BZERO
+    b .Lzero_mem
 #endif
+    ands A_lw, val, #255
+    b.eq .Lzero_mem
     orr A_lw, A_lw, A_lw, lsl #8
     orr A_lw, A_lw, A_lw, lsl #16
     orr A_l, A_l, A_l, lsl #32
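
In the hunk above the same body is assembled under two names, ENTRY(bzero) or ENTRY(memset), and the bzero build branches straight to .Lzero_mem: the fill value is known to be zero, so both the zero test and the byte-splat (the orr chain) are skipped. A rough C rendering of that dispatch, with invented names and plain loops standing in for the optimized paths:

/* Sketch of the control flow the BZERO conditionals create; zero_memory()
 * stands in for the .Lzero_mem (DC ZVA) path of the real assembly. */
#include <stddef.h>

void zero_memory(unsigned char *p, size_t n) {
    for (size_t i = 0; i < n; ++i)    /* placeholder for the DC ZVA path */
        p[i] = 0;
}

void fill(unsigned char *p, size_t n, int c, int built_as_bzero) {
    if (built_as_bzero) {             /* #ifdef BZERO: b .Lzero_mem */
        zero_memory(p, n);
        return;
    }
    if ((c & 0xff) == 0) {            /* ands A_lw, val, #255; b.eq .Lzero_mem */
        zero_memory(p, n);
        return;
    }
    for (size_t i = 0; i < n; ++i)    /* placeholder for the splat/store loop */
        p[i] = (unsigned char)c;
}
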
@@ -143,7 +152,6 @@ ENTRY(memset)
     b.ne .Ltail63
     ret
 
-#ifndef DONT_USE_DC
     /* For zeroing memory, check to see if we can use the ZVA feature to
      * zero entire 'cache' lines. */
 .Lzero_mem:
@@ -225,7 +233,11 @@ ENTRY(memset)
     ands count, count, zva_bits_x
     b.ne .Ltail_maybe_long
     ret
+#ifdef BZERO
+END(bzero)
+#else
 END(memset)
+#endif
 
 #ifdef MAYBE_VIRT
     .bss
@@ -233,4 +245,3 @@ END(memset)
 .Lcache_clear:
     .space 4
 #endif
-#endif /* DONT_USE_DC */

View File

@@ -1,5 +1,6 @@
 libc_bionic_src_files_arm64 += \
     arch-arm64/generic/bionic/bcopy.S \
+    arch-arm64/generic/bionic/bzero.S \
     arch-arm64/generic/bionic/memcmp.S \
     arch-arm64/generic/bionic/memcpy.S \
     arch-arm64/generic/bionic/memmove.S \