Add a bunch more missing ENDs to assembler routines.
This isn't everything; I've missed out those x86 files that are

Change-Id: Idb7bb1a68796d6c0b70ea2b5c3300e49da6c62d2
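For readers skimming the diff: ENTRY()/END() are the usual asm.h-style macros that bracket an assembler routine. The sketch below is only an illustration of their general shape, not the header from this tree; the directive names and alignment are assumptions. The point that matters for this change is that END() is what emits the .size directive (and closes anything ENTRY() opened), so a routine with no END() is left with a zero-sized symbol, which confuses tools that attribute addresses by symbol size. (.type uses %function on ARM; x86 spells it @function.)

    /* Illustrative sketch only -- not the actual header from this tree. */
    #define ENTRY(f)            \
            .text;              \
            .globl f;           \
            .balign 4;          \
            .type f, %function; \
    f:                          \
            .cfi_startproc

    #define END(f)              \
            .cfi_endproc;       \
            .size f, .-f

Since END() emits only directives and no instructions, adding END(bzero) immediately before the fall-through into memset does not change the code that gets assembled.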
@@ -115,7 +115,7 @@ ENTRY(memcmp)
          * pointer somewhere else
          */
         mov        r4, r0
-        
+
         /* align first pointer to word boundary
          * offset = -src & 3
          */
@@ -151,7 +151,7 @@ ENTRY(memcmp)
         ldr         ip, [r1]
         subs        r2, r2, #(32 + 4)
         bmi         1f
-        
+
 0:      pld         [r4, #(CACHE_LINE_SIZE * 2)]
         pld         [r1, #(CACHE_LINE_SIZE * 2)]
         ldr         r0, [r4], #4
@@ -178,14 +178,14 @@ ENTRY(memcmp)
         ldreq       r0, [r4], #4
         ldreq       ip, [r1, #4]!
         eoreqs      r0, r0, lr
-        bne         2f        
+        bne         2f
         subs        r2, r2, #32
         bhs         0b

         /* do we have at least 4 bytes left? */
 1:      adds        r2, r2, #(32 - 4 + 4)
         bmi         4f
-        
+
         /* finish off 4 bytes at a time */
 3:      ldr         r0, [r4], #4
         ldr         ip, [r1], #4
@@ -233,17 +233,14 @@ ENTRY(memcmp)
         subs        r2, r2, #1
         bne         11b
         bx          lr
-END(memcmp)
-
-

 5:      /*************** non-congruent case ***************/
-        and         r0, r1, #3      
+        and         r0, r1, #3
         cmp         r0, #2
         bne         4f

         /* here, offset is 2 (16-bits aligned, special cased) */
-        
+
         /* make sure we have at least 16 bytes to process */
         subs        r2, r2, #16
         addmi       r2, r2, #16
@@ -341,3 +338,4 @@ END(memcmp)
         mov         r2, #4
 		ldmfd		sp!, {r5, r6, r7}
         b           8b
+END(memcmp)
@@ -42,6 +42,7 @@
 ENTRY(bzero)
         mov     r2, r1
         mov     r1, #0
         // Fall through to memset...
+END(bzero)

 ENTRY(memset)
@@ -54,6 +54,7 @@ ENTRY(_setjmp)
 	movl	%edi,20(%eax)
 	xorl	%eax,%eax
 	ret
+END(_setjmp)

 ENTRY(_longjmp)
 	movl	4(%esp),%edx
@@ -69,3 +70,4 @@ ENTRY(_longjmp)
 	incl	%eax
 1:	movl	%ecx,0(%esp)
 	ret
+END(_longjmp)
@@ -19,7 +19,6 @@ ENTRY(__futex_wait)
     ret
 END(__futex_wait)

-
 // int __futex_wake(volatile void *ftx, int count)
 ENTRY(__futex_wake)
     pushl   %ebx
@@ -52,7 +52,7 @@ ENTRY(setjmp)
 	call	_C_LABEL(sigblock)
 #endif
 	addl	$4,%esp
-	PIC_EPILOGUE 
+	PIC_EPILOGUE

 	movl	4(%esp),%ecx
 	movl	0(%esp),%edx
@@ -65,6 +65,7 @@ ENTRY(setjmp)
 	movl	%eax,24(%ecx)
 	xorl	%eax,%eax
 	ret
+END(setjmp)

 ENTRY(longjmp)
 	movl	4(%esp),%edx
@@ -76,7 +77,7 @@ ENTRY(longjmp)
 	call	_C_LABEL(sigsetmask)
 #endif
 	addl	$4,%esp
-	PIC_EPILOGUE 
+	PIC_EPILOGUE

 	movl	4(%esp),%edx
 	movl	8(%esp),%eax
@@ -91,3 +92,4 @@ ENTRY(longjmp)
 	incl	%eax
 1:	movl	%ecx,0(%esp)
 	ret
+END(longjmp)
@@ -61,6 +61,7 @@ ENTRY(sigsetjmp)
 	movl	%edi,20(%ecx)
 	xorl	%eax,%eax
 	ret
+END(sigsetjmp)

 ENTRY(siglongjmp)
 	movl	4(%esp),%edx
@@ -90,3 +91,4 @@ ENTRY(siglongjmp)
 	incl	%eax
 2:	movl	%ecx,0(%esp)
 	ret
+END(siglongjmp)
@@ -30,3 +30,4 @@ L1:	incl	%eax
 L2:	popl	%esi
 	popl	%edi
 	ret
+END(bcmp)
@@ -41,3 +41,4 @@ L1:	movl	%edx,%ecx		/* zero remainder by bytes */

 	popl	%edi
 	ret
+END(bzero)
@@ -15,3 +15,4 @@ ENTRY(ffs)
 	.align 2
 L1:	xorl	%eax,%eax		/* clear result */
 	ret
+END(ffs)
@@ -24,3 +24,4 @@ ENTRY(memchr)
 L1:	xorl	%eax,%eax
 	popl	%edi
 	ret
+END(memchr)
@@ -41,3 +41,4 @@ L6:	movzbl  -1(%edi),%eax		/* Perform unsigned comparison	*/
 	popl	%esi
 	popl	%edi
 	ret
+END(memcmp)
@@ -53,3 +53,4 @@ L1:	rep
 	popl	%ebx
 	popl	%edi
 	ret
+END(memset)
@@ -71,3 +71,4 @@ L1:	movb	(%edx),%al		/* unroll loop, but not too much */
 L2:	popl	%eax			/* pop destination address */
 	popl	%edi			/* restore edi */
 	ret
+END(strcat)
@@ -79,3 +79,4 @@ L3:	movzbl	(%eax),%eax		/* unsigned comparison */
 	movzbl	(%edx),%edx
 	subl	%edx,%eax
 	ret
+END(strcmp)
@@ -61,3 +61,4 @@ L1:	movb	(%edx),%al		/* unroll loop, but not too much */
 	jnz	L1
 L2:	popl	%eax			/* pop dst address */
 	ret
+END(strcpy)
@@ -18,3 +18,4 @@ ENTRY(strlen)
 	leal	-1(%ecx),%eax		/* and subtracting one */
 	popl	%edi
 	ret
+END(strlen)
@@ -111,3 +111,4 @@ L3:	movzbl	(%eax),%eax		/* unsigned comparision */
 L4:	xorl	%eax,%eax
 	popl	%ebx
 	ret
+END(strncmp)
@@ -65,3 +65,4 @@ L3:	lodsw
 L4:	popl	%edi
 	popl	%esi
 	ret
+END(swab)