e210488e0d
This is the first patch in a series that adds support for AArch64, the
new 64-bit execution state of the ARMv8 architecture. The series adds
support for the LP64 programming model.

This patch adds:
  * "arch-aarch64" to the architecture directories.
  * "arch-aarch64/include" - headers used by libc.
  * "arch-aarch64/bionic":
      - crtbegin, crtend support;
      - AArch64-specific syscall stubs;
      - setjmp, clone, vfork assembly files.

Change-Id: If72b859f81928d03ad05d4ccfcb54c2f5dbf99a5
Signed-off-by: Serban Constantinescu <serban.constantinescu@arm.com>
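As context for the syscall stubs the commit message mentions, the following is a minimal sketch (not taken from the patch itself) of the AArch64 Linux syscall convention those stubs wrap: the syscall number goes in x8, arguments in x0-x5, "svc #0" traps into the kernel, and the result comes back in x0. The helper name my_getpid is hypothetical.

#include <sys/syscall.h>

/* Hypothetical illustration of the AArch64 syscall convention. */
static long my_getpid(void) {
  register long nr  __asm__("x8") = SYS_getpid; /* syscall number in x8 */
  register long ret __asm__("x0");              /* result returned in x0 */
  __asm__ volatile("svc #0"                     /* enter the kernel */
                   : "=r"(ret)
                   : "r"(nr)
                   : "memory");
  return ret;
}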
54 lines | 2.3 KiB | C
/*
 * Copyright (C) 2008 The Android Open Source Project
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef __BIONIC_PRIVATE_GET_TLS_H_
#define __BIONIC_PRIVATE_GET_TLS_H_

#if defined(__aarch64__)
/* AArch64: TPIDR_EL0 holds the user-space thread pointer. */
# define __get_tls() ({ void** __val; __asm__("mrs %0, tpidr_el0" : "=r"(__val)); __val; })
#elif defined(__arm__)
/* ARM: read TPIDRURO, the user read-only thread ID register, via CP15. */
# define __get_tls() ({ void** __val; __asm__("mrc p15, 0, %0, c13, c0, 3" : "=r"(__val)); __val; })
#elif defined(__mips__)
# define __get_tls() \
    /* On mips32r1, this goes via a kernel illegal instruction trap that's optimized for v1. */ \
    ({ register void** __val asm("v1"); \
       __asm__(".set    push\n" \
               ".set    mips32r2\n" \
               "rdhwr   %0,$29\n" \
               ".set    pop\n" : "=r"(__val)); \
       __val; })
#elif defined(__i386__)
/* x86: %gs:0 holds a self-pointer to the thread's TLS area. */
# define __get_tls() ({ void** __val; __asm__("movl %%gs:0, %0" : "=r"(__val)); __val; })
#elif defined(__x86_64__)
/* x86-64: as on x86, but the thread pointer is based at %fs. */
# define __get_tls() ({ void** __val; __asm__("mov %%fs:0, %0" : "=r"(__val)); __val; })
#else
#error unsupported architecture
#endif

#endif /* __BIONIC_PRIVATE_GET_TLS_H_ */
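For context on how this macro is typically consumed, here is a hedged usage sketch: bionic-style code treats the pointer returned by __get_tls() as an array of per-thread slots and indexes it to reach the thread's control block. The slot index and the __get_thread name below are assumptions for illustration, not definitions made by this header.

#include "private/__get_tls.h"

/* Assumed slot layout: illustration only, not defined by this header. */
#define TLS_SLOT_THREAD_ID 1

struct pthread_internal_t;  /* opaque here; the real definition lives elsewhere in libc */

/* Returns the calling thread's control block by indexing the TLS slot array. */
static inline struct pthread_internal_t* __get_thread(void) {
  return (struct pthread_internal_t*)(__get_tls()[TLS_SLOT_THREAD_ID]);
}

Because each variant of __get_tls() is a GCC statement expression, it can be used directly as a value like this; every expansion costs a single register read (or, on mips32r1, one trapped rdhwr instruction).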