15a0456d0b
__set_errno returns -1 exactly so that callers don't need to bother.

The other architectures were already taking advantage of this, but no one had ever fixed x86 and x86_64.

Change-Id: Ie131494be664f6c4a1bbf8c61bbbed58eac56122
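As a rough illustration of that contract, here is a minimal C sketch (not bionic's actual source; the long return type is an assumption): __set_errno stores the error code and returns -1, so a syscall stub can hand that return value straight back to its caller instead of loading -1 itself.

    #include <errno.h>

    /* Sketch only: record the error code where the caller will look for it
     * and return libc's conventional failure value. */
    long __set_errno(int error) {
      errno = error;
      return -1;  /* syscall stubs can return this value as-is */
    }

With that guarantee, the error path in the x86 stub below simply calls __set_errno and returns whatever it left in %eax, which is what the other architectures were already relying on.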
37 lines | 766 B | ArmAsm
/* Generated by gensyscalls.py. Do not edit. */

#include <private/bionic_asm.h>

ENTRY(lgetxattr)
    pushl   %ebx
    .cfi_def_cfa_offset 8
    .cfi_rel_offset ebx, 0
    pushl   %ecx
    .cfi_adjust_cfa_offset 4
    .cfi_rel_offset ecx, 0
    pushl   %edx
    .cfi_adjust_cfa_offset 4
    .cfi_rel_offset edx, 0
    pushl   %esi
    .cfi_adjust_cfa_offset 4
    .cfi_rel_offset esi, 0

    mov     20(%esp), %ebx
    mov     24(%esp), %ecx
    mov     28(%esp), %edx
    mov     32(%esp), %esi
    movl    $__NR_lgetxattr, %eax
    int     $0x80
    cmpl    $-MAX_ERRNO, %eax
    jb      1f
    negl    %eax
    pushl   %eax
    call    __set_errno
    addl    $4, %esp
1:
    popl    %esi
    popl    %edx
    popl    %ecx
    popl    %ebx
    ret
END(lgetxattr)
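From the calling side, the stub behaves like any other libc syscall wrapper: on success lgetxattr returns the attribute's size, and on failure it returns -1 with errno set by the error path above. A short usage sketch (the file path and attribute name are made up purely for illustration):

    #include <errno.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/types.h>
    #include <sys/xattr.h>

    int main(void) {
      char value[256];
      /* Hypothetical file and attribute name, for illustration only. */
      ssize_t len = lgetxattr("/tmp/example", "user.comment", value, sizeof(value));
      if (len == -1) {
        /* The stub's error path ran: __set_errno stored the code and returned -1. */
        printf("lgetxattr failed: %s\n", strerror(errno));
        return 1;
      }
      printf("attribute value is %zd bytes\n", len);
      return 0;
    }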