diff options
author:    Denis Vlasenko <vda.linux@googlemail.com>  2007-07-29 14:45:26 +0000
committer: Denis Vlasenko <vda.linux@googlemail.com>  2007-07-29 14:45:26 +0000
commit: 519ca27d4a720c900ad74e8018cdd5cc08a8b3a6 (patch)
tree:   15e4700c597a8932ef07a70f09ebfb848980e46e /libc
parent: 2ea436fb13abd2793dc39cca24a8f90d8f3b6328 (diff)
Remove stray code alignment (.align 16 and .align 4 directives)
from i386 and x86_64.
Diffstat (limited to 'libc')
25 files changed, 16 insertions, 28 deletions
diff --git a/libc/string/sparc/_glibc_inc.h b/libc/string/sparc/_glibc_inc.h index 4eb4d755c..e0aef52c2 100644 --- a/libc/string/sparc/_glibc_inc.h +++ b/libc/string/sparc/_glibc_inc.h @@ -6,6 +6,8 @@ #include <features.h> #include <bits/wordsize.h> +/* Is alignment really needed? */ + #if __WORDSIZE == 32 # define ENTRY_ALIGN 4 #else diff --git a/libc/string/x86_64/_glibc_inc.h b/libc/string/x86_64/_glibc_inc.h index 88cef2ea3..415ce90a7 100644 --- a/libc/string/x86_64/_glibc_inc.h +++ b/libc/string/x86_64/_glibc_inc.h @@ -6,15 +6,8 @@ #include <features.h> #include <bits/wordsize.h> -#if __WORDSIZE == 32 -# define ENTRY_ALIGN 4 -#else -# define ENTRY_ALIGN 2 -#endif - #define ENTRY(sym) \ .global sym; \ - .align ENTRY_ALIGN; \ .type sym,%function; \ sym: diff --git a/libc/string/x86_64/memset.S b/libc/string/x86_64/memset.S index d72d74468..0f054ade6 100644 --- a/libc/string/x86_64/memset.S +++ b/libc/string/x86_64/memset.S @@ -71,7 +71,7 @@ ENTRY (memset) jae 11f .p2align 4 -3: /* Copy 64 bytes. */ +3: /* Fill 64 bytes. */ mov %r8,(%rcx) mov %r8,0x8(%rcx) mov %r8,0x10(%rcx) @@ -84,7 +84,7 @@ ENTRY (memset) dec %rax jne 3b -4: /* Copy final bytes. */ +4: /* Fill final bytes. */ and $0x3f,%edx mov %rdx,%rax shr $0x3,%rax @@ -107,7 +107,7 @@ ENTRY (memset) jne 8b 9: #if BZERO_P - nop + nop /* huh?? */ #else /* Load result (only if used as memset). */ mov %rdi,%rax /* start address of destination is result */ @@ -115,7 +115,7 @@ ENTRY (memset) retq .p2align 4 -11: /* Copy 64 bytes without polluting the cache. */ +11: /* Fill 64 bytes without polluting the cache. */ /* We could use movntdq %xmm0,(%rcx) here to further speed up for large cases but let's not use XMM registers. 
*/ movnti %r8,(%rcx) diff --git a/libc/string/x86_64/strcat.S b/libc/string/x86_64/strcat.S index 9b0068981..ddec8ee79 100644 --- a/libc/string/x86_64/strcat.S +++ b/libc/string/x86_64/strcat.S @@ -21,6 +21,7 @@ #include "_glibc_inc.h" +/* Seems to be unrolled too much */ .text ENTRY (BP_SYM (strcat)) diff --git a/libc/string/x86_64/strchr.S b/libc/string/x86_64/strchr.S index 8e59c4c19..c357bfd03 100644 --- a/libc/string/x86_64/strchr.S +++ b/libc/string/x86_64/strchr.S @@ -20,6 +20,7 @@ #include "_glibc_inc.h" +/* Seems to be unrolled too much */ .text ENTRY (BP_SYM (strchr)) diff --git a/libc/string/x86_64/strcpy.S b/libc/string/x86_64/strcpy.S index d9a51b0bb..920d0543a 100644 --- a/libc/string/x86_64/strcpy.S +++ b/libc/string/x86_64/strcpy.S @@ -20,6 +20,8 @@ #include "_glibc_inc.h" +/* Seems to be unrolled too much */ + #ifndef USE_AS_STPCPY # define STRCPY strcpy #endif diff --git a/libc/string/x86_64/strcspn.S b/libc/string/x86_64/strcspn.S index fed12b5f6..c02fbbb5f 100644 --- a/libc/string/x86_64/strcspn.S +++ b/libc/string/x86_64/strcspn.S @@ -25,6 +25,9 @@ #include "_glibc_inc.h" +/* Seems to be unrolled too much */ +/* (and why testb $0xff, %cl, not testb %cl, %cl?) */ + /* BEWARE: `#ifdef strcspn' means that strcspn is redefined as `strpbrk' */ #define STRPBRK_P (defined strcspn) diff --git a/libc/string/x86_64/strlen.S b/libc/string/x86_64/strlen.S index 0441dc46c..e79537f87 100644 --- a/libc/string/x86_64/strlen.S +++ b/libc/string/x86_64/strlen.S @@ -20,6 +20,7 @@ #include "_glibc_inc.h" +/* Seems to be unrolled too much */ .text ENTRY (strlen) diff --git a/libc/sysdeps/linux/i386/__longjmp.S b/libc/sysdeps/linux/i386/__longjmp.S index d73a7cda2..e2809c06c 100644 --- a/libc/sysdeps/linux/i386/__longjmp.S +++ b/libc/sysdeps/linux/i386/__longjmp.S @@ -24,7 +24,6 @@ .global __longjmp .type __longjmp,%function -.align 4 __longjmp: movl 4(%esp), %ecx /* User's jmp_buf in %ecx. */ movl 8(%esp), %eax /* Second argument is return value. 
*/ diff --git a/libc/sysdeps/linux/i386/bsd-_setjmp.S b/libc/sysdeps/linux/i386/bsd-_setjmp.S index ac2c8930d..f3cd6cbf6 100644 --- a/libc/sysdeps/linux/i386/bsd-_setjmp.S +++ b/libc/sysdeps/linux/i386/bsd-_setjmp.S @@ -27,7 +27,6 @@ .global _setjmp .type _setjmp,%function -.align 4 _setjmp: xorl %eax, %eax movl 4 (%esp), %edx diff --git a/libc/sysdeps/linux/i386/bsd-setjmp.S b/libc/sysdeps/linux/i386/bsd-setjmp.S index 532dfa1bd..df46997d9 100644 --- a/libc/sysdeps/linux/i386/bsd-setjmp.S +++ b/libc/sysdeps/linux/i386/bsd-setjmp.S @@ -23,7 +23,6 @@ .global setjmp .type setjmp,%function -.align 4 setjmp: movl 4 (%esp), %eax /* Save registers. */ diff --git a/libc/sysdeps/linux/i386/mmap64.S b/libc/sysdeps/linux/i386/mmap64.S index 5405c8a09..a6b4aa042 100644 --- a/libc/sysdeps/linux/i386/mmap64.S +++ b/libc/sysdeps/linux/i386/mmap64.S @@ -40,7 +40,6 @@ .text .global mmap64 .type mmap64,%function -.align 1<<4 mmap64: /* Save registers. */ diff --git a/libc/sysdeps/linux/i386/setjmp.S b/libc/sysdeps/linux/i386/setjmp.S index 917ee283b..20a6a0bcf 100644 --- a/libc/sysdeps/linux/i386/setjmp.S +++ b/libc/sysdeps/linux/i386/setjmp.S @@ -24,7 +24,6 @@ .global __sigsetjmp .type __sigsetjmp,%function -.align 4 __sigsetjmp: movl 4 (%esp), %eax diff --git a/libc/sysdeps/linux/i386/sigaction.c b/libc/sysdeps/linux/i386/sigaction.c index 341c0d16b..0b28ae522 100644 --- a/libc/sysdeps/linux/i386/sigaction.c +++ b/libc/sysdeps/linux/i386/sigaction.c @@ -145,7 +145,6 @@ libc_hidden_weak(sigaction) __asm__ \ ( \ ".text\n" \ - " .align 16\n" \ "__" #name ":\n" \ " movl $" #syscall ", %eax\n" \ " int $0x80" \ @@ -163,7 +162,6 @@ RESTORE (restore_rt, __NR_rt_sigreturn) __asm__ \ ( \ ".text\n" \ - " .align 8\n" \ "__" #name ":\n" \ " popl %eax\n" \ " movl $" #syscall ", %eax\n" \ diff --git a/libc/sysdeps/linux/i386/syscall.S b/libc/sysdeps/linux/i386/syscall.S index 19adf97a8..b9f65feb6 100644 --- a/libc/sysdeps/linux/i386/syscall.S +++ b/libc/sysdeps/linux/i386/syscall.S @@ -23,7 
+23,6 @@ .text .global syscall .type syscall,%function -.align 4 syscall: pushl %ebp pushl %edi diff --git a/libc/sysdeps/linux/i386/vfork.S b/libc/sysdeps/linux/i386/vfork.S index 543da6e90..8005ff1d2 100644 --- a/libc/sysdeps/linux/i386/vfork.S +++ b/libc/sysdeps/linux/i386/vfork.S @@ -15,7 +15,6 @@ .global __vfork .hidden __vfork .type __vfork,%function -.align 1<<4 __vfork: popl %ecx diff --git a/libc/sysdeps/linux/sparc/_math_inc.h b/libc/sysdeps/linux/sparc/_math_inc.h index 9139c06f0..f08aa5869 100644 --- a/libc/sysdeps/linux/sparc/_math_inc.h +++ b/libc/sysdeps/linux/sparc/_math_inc.h @@ -9,6 +9,8 @@ #define __ASSEMBLY__ #include <asm/traps.h> +/* Is alignment really needed? */ + #if __WORDSIZE == 32 # define ENTRY_ALIGN 4 #else diff --git a/libc/sysdeps/linux/x86_64/__longjmp.S b/libc/sysdeps/linux/x86_64/__longjmp.S index 1ff6e0169..db2928bf0 100644 --- a/libc/sysdeps/linux/x86_64/__longjmp.S +++ b/libc/sysdeps/linux/x86_64/__longjmp.S @@ -26,7 +26,6 @@ void __longjmp (__jmp_buf env, int val). */ .global __longjmp .type __longjmp,%function -.align 16 __longjmp: /* Restore registers. 
*/ movq (JB_RBX*8)(%rdi),%rbx diff --git a/libc/sysdeps/linux/x86_64/bsd-_setjmp.S b/libc/sysdeps/linux/x86_64/bsd-_setjmp.S index d5f9ae1b8..81a4352f3 100644 --- a/libc/sysdeps/linux/x86_64/bsd-_setjmp.S +++ b/libc/sysdeps/linux/x86_64/bsd-_setjmp.S @@ -23,7 +23,6 @@ .global _setjmp .type _setjmp,%function -.align 16 _setjmp: xorq %rsi, %rsi jmp __sigsetjmp@PLT diff --git a/libc/sysdeps/linux/x86_64/bsd-setjmp.S b/libc/sysdeps/linux/x86_64/bsd-setjmp.S index 21d0191d0..a906a6132 100644 --- a/libc/sysdeps/linux/x86_64/bsd-setjmp.S +++ b/libc/sysdeps/linux/x86_64/bsd-setjmp.S @@ -23,7 +23,6 @@ .global setjmp .type setjmp,%function -.align 16 setjmp: movq $1, %rsi jmp __sigsetjmp@PLT diff --git a/libc/sysdeps/linux/x86_64/clone.S b/libc/sysdeps/linux/x86_64/clone.S index b93c2d9a4..dc5eeb0a0 100644 --- a/libc/sysdeps/linux/x86_64/clone.S +++ b/libc/sysdeps/linux/x86_64/clone.S @@ -53,7 +53,6 @@ .text .global clone .type clone,%function -.align 4 clone: /* Sanity check arguments. */ movq $-EINVAL,%rax diff --git a/libc/sysdeps/linux/x86_64/setjmp.S b/libc/sysdeps/linux/x86_64/setjmp.S index a44f968c5..eb4b97363 100644 --- a/libc/sysdeps/linux/x86_64/setjmp.S +++ b/libc/sysdeps/linux/x86_64/setjmp.S @@ -23,7 +23,6 @@ .global __sigsetjmp .type __sigsetjmp,%function -.align 4 __sigsetjmp: /* Save registers. 
*/ movq %rbx, (JB_RBX*8)(%rdi) diff --git a/libc/sysdeps/linux/x86_64/sigaction.c b/libc/sysdeps/linux/x86_64/sigaction.c index dad45f69e..8d9cbaaae 100644 --- a/libc/sysdeps/linux/x86_64/sigaction.c +++ b/libc/sysdeps/linux/x86_64/sigaction.c @@ -138,7 +138,6 @@ libc_hidden_weak(sigaction) asm \ ( \ ".text\n" \ - ".align 16\n" \ "__" #name ":\n" \ " movq $" #syscall ", %rax\n" \ " syscall\n" \ diff --git a/libc/sysdeps/linux/x86_64/syscall.S b/libc/sysdeps/linux/x86_64/syscall.S index 86306906a..ee223e3e6 100644 --- a/libc/sysdeps/linux/x86_64/syscall.S +++ b/libc/sysdeps/linux/x86_64/syscall.S @@ -26,7 +26,6 @@ .text .globl syscall .type syscall,%function -.align 16 syscall: movq %rdi, %rax /* Syscall number -> rax. */ movq %rsi, %rdi /* shift arg1 - arg5. */ diff --git a/libc/sysdeps/linux/x86_64/vfork.S b/libc/sysdeps/linux/x86_64/vfork.S index b22b7b422..2dadbbfe0 100644 --- a/libc/sysdeps/linux/x86_64/vfork.S +++ b/libc/sysdeps/linux/x86_64/vfork.S @@ -32,7 +32,6 @@ .global __vfork .hidden __vfork .type __vfork,%function -.align 16 __vfork: |