Diffstat (limited to 'libc/sysdeps/linux/i386')
-rw-r--r--  libc/sysdeps/linux/i386/bits/syscalls.h  |  96
1 file changed, 78 insertions(+), 18 deletions(-)
diff --git a/libc/sysdeps/linux/i386/bits/syscalls.h b/libc/sysdeps/linux/i386/bits/syscalls.h
index 09d48a2ff..0427d91cd 100644
--- a/libc/sysdeps/linux/i386/bits/syscalls.h
+++ b/libc/sysdeps/linux/i386/bits/syscalls.h
@@ -14,13 +14,36 @@
#include <errno.h>
+#define INTERNAL_SYSCALL_NCS(name, err, nr, args...) \
+({ \
+ register unsigned int resultvar; \
+ __asm__ __volatile__ ( \
+ LOADARGS_##nr \
+ "movl %1, %%eax\n\t" \
+ "int $0x80\n\t" \
+ RESTOREARGS_##nr \
+ : "=a" (resultvar) \
+ : "i" (name) ASMFMT_##nr(args) : "memory", "cc" \
+ ); \
+ (int) resultvar; \
+})
+
+
+#if 1 /* defined __PIC__ || defined __pic__ */
+
+/* This code avoids pushing/popping ebx as much as possible.
+ * The main reason seems to have been that older GCCs had problems
+ * saving/restoring ebx properly when the "b" constraint was used,
+ * which broke -fPIC code badly.
+ * At least gcc 4.2.x no longer seems to need these tricks,
+ * but this code is still useful because it often avoids
+ * using the stack to save ebx.
+ * Keeping it unconditionally enabled for now.
+ */
+
/* We need some help from the assembler to generate optimal code.
* We define some macros here which later will be used. */
-#if defined __SUPPORT_LD_DEBUG__ && defined __DOMULTI__
-#error LD debugging and DOMULTI are incompatible
-#endif
-
__asm__ (
#ifdef __DOMULTI__
/* Protect against asm macro redefinition (happens in __DOMULTI__ mode).
@@ -74,20 +97,6 @@ __asm__ (
#endif
);
-#define INTERNAL_SYSCALL_NCS(name, err, nr, args...) \
-({ \
- register unsigned int resultvar; \
- __asm__ __volatile__ ( \
- LOADARGS_##nr \
- "movl %1, %%eax\n\t" \
- "int $0x80\n\t" \
- RESTOREARGS_##nr \
- : "=a" (resultvar) \
- : "i" (name) ASMFMT_##nr(args) : "memory", "cc" \
- ); \
- (int) resultvar; \
-})
-
#define LOADARGS_0
#define LOADARGS_1 "bpushl .L__X'%k2, %k2\n\t" "bmovl .L__X'%k2, %k2\n\t"
#define LOADARGS_2 LOADARGS_1
@@ -125,5 +134,56 @@ __asm__ (
#define ASMFMT_6(arg1, arg2, arg3, arg4, arg5, arg6) \
, "a" (arg1), "c" (arg2), "d" (arg3), "S" (arg4), "D" (arg5), "m" (arg6)
+#else /* !PIC */
+
+/* Simpler code which just uses the "b" constraint to load ebx.
+ * It seems to work with gcc 4.2.x, and generates slightly smaller,
+ * but slightly slower code. Example (time syscall):
+ *
+ * - 8b 4c 24 04 mov 0x4(%esp),%ecx
+ * - 87 cb xchg %ecx,%ebx
+ * + 53 push %ebx
+ * + 8b 5c 24 08 mov 0x8(%esp),%ebx
+ * b8 0d 00 00 00 mov $0xd,%eax
+ * cd 80 int $0x80
+ * - 87 cb xchg %ecx,%ebx
+ * + 5b pop %ebx
+ * c3 ret
+ *
+ * 2 bytes smaller, but uses the stack via "push/pop ebx"
+ */
+
+#define LOADARGS_0
+#define LOADARGS_1
+#define LOADARGS_2
+#define LOADARGS_3
+#define LOADARGS_4
+#define LOADARGS_5
+#define LOADARGS_6 "push %%ebp\n\t" "movl %7, %%ebp\n\t"
+
+#define RESTOREARGS_0
+#define RESTOREARGS_1
+#define RESTOREARGS_2
+#define RESTOREARGS_3
+#define RESTOREARGS_4
+#define RESTOREARGS_5
+#define RESTOREARGS_6 "pop %%ebp\n\t"
+
+#define ASMFMT_0()
+#define ASMFMT_1(arg1) \
+ , "b" (arg1)
+#define ASMFMT_2(arg1, arg2) \
+ , "b" (arg1), "c" (arg2)
+#define ASMFMT_3(arg1, arg2, arg3) \
+ , "b" (arg1), "c" (arg2), "d" (arg3)
+#define ASMFMT_4(arg1, arg2, arg3, arg4) \
+ , "b" (arg1), "c" (arg2), "d" (arg3), "S" (arg4)
+#define ASMFMT_5(arg1, arg2, arg3, arg4, arg5) \
+ , "b" (arg1), "c" (arg2), "d" (arg3), "S" (arg4), "D" (arg5)
+#define ASMFMT_6(arg1, arg2, arg3, arg4, arg5, arg6) \
+ , "b" (arg1), "c" (arg2), "d" (arg3), "S" (arg4), "D" (arg5), "m" (arg6)
+
+#endif /* !PIC */
+
#endif /* __ASSEMBLER__ */
#endif /* _BITS_SYSCALLS_H */
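
For reference, a minimal sketch of what the PIC-friendly path above boils down to for a one-argument call: the argument is handed to the asm in a scratch register and swapped into ebx only around the int $0x80, so the "b" constraint is never needed. The wrapper name, the choice of ecx as the scratch register, and the use of __NR_exit are illustrative assumptions, not code generated by this header; build with gcc -m32 on a Linux host.

/* Hedged sketch of the "avoid the b constraint" technique: the compiler
 * loads arg1 into ecx, and the asm swaps it into ebx only around the
 * trap, then restores ebx.  This mirrors the xchg pattern shown in the
 * comment above; it is not the header's generated code. */
#include <sys/syscall.h>        /* __NR_exit */

static inline int my_syscall1(int nr, int arg1)
{
        unsigned int resultvar;
        __asm__ __volatile__ (
                "xchgl %%ecx, %%ebx\n\t"        /* arg1 -> ebx, old ebx saved in ecx */
                "int $0x80\n\t"
                "xchgl %%ecx, %%ebx\n\t"        /* restore ebx (and ecx) */
                : "=a" (resultvar)
                : "a" (nr), "c" (arg1)
                : "memory", "cc"
        );
        return (int) resultvar;
}

int main(void)
{
        my_syscall1(__NR_exit, 42);     /* terminate with status 42 */
        return 0;                       /* not reached */
}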
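
And the same call shape for the #else branch: the first argument goes straight into the "b" constraint and GCC saves/restores ebx itself (the push/pop pair in the size comparison above). A three-argument write(2) to stdout is purely an illustration; the wrapper name is hypothetical and, again, this assumes gcc -m32 on Linux.

/* Hedged sketch of the plain "b"-constraint style matching ASMFMT_3:
 * ebx/ecx/edx carry the arguments and GCC handles saving ebx as needed. */
#include <sys/syscall.h>        /* __NR_write */

static inline int my_syscall3(int nr, long a1, long a2, long a3)
{
        unsigned int resultvar;
        __asm__ __volatile__ (
                "int $0x80\n\t"
                : "=a" (resultvar)
                : "a" (nr), "b" (a1), "c" (a2), "d" (a3)
                : "memory", "cc"
        );
        return (int) resultvar;
}

int main(void)
{
        static const char msg[] = "hello from int $0x80\n";
        /* fd 1, buffer, length */
        my_syscall3(__NR_write, 1, (long) msg, sizeof msg - 1);
        return 0;
}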