author     Eric Andersen <andersen@codepoet.org>  2002-08-19 10:22:38 +0000
committer  Eric Andersen <andersen@codepoet.org>  2002-08-19 10:22:38 +0000
commit     5a66fdf15990341c1732c77ef2d1fbc8510f6899 (patch)
tree       d41a4cb8a59ccccbde4da9286170cc17b1a3765d /ldso/ldso/sh/dl-sysdep.h
parent     0d86dfc46feaf8cce5a1d1e383e80f29d83ceaf1 (diff)
Some updates from Stefan Allius to make the SuperH shared lib
loader build properly. Mostly just sprinkling in casts, and making static some things that should be static.
Diffstat (limited to 'ldso/ldso/sh/dl-sysdep.h')
-rw-r--r--  ldso/ldso/sh/dl-sysdep.h  62
1 file changed, 61 insertions(+), 1 deletion(-)
diff --git a/ldso/ldso/sh/dl-sysdep.h b/ldso/ldso/sh/dl-sysdep.h
index 00938b655..a0ff05fdb 100644
--- a/ldso/ldso/sh/dl-sysdep.h
+++ b/ldso/ldso/sh/dl-sysdep.h
@@ -77,7 +77,67 @@
struct elf_resolve;
extern unsigned long _dl_linux_resolver(struct elf_resolve * tpnt, int reloc_entry);
-#define do_rem(result, n, base) result = (n % base)
+static __inline__ unsigned int
+_dl_urem(unsigned int n, unsigned int base)
+{
+register unsigned int __r0 __asm__ ("r0");
+register unsigned int __r4 __asm__ ("r4") = n;
+register unsigned int __r5 __asm__ ("r5") = base;
+
+ __asm__ ("
+ mov #0, r0
+ div0u
+
+ ! get one bit from the msb of the numerator into the T
+	! bit and divide it by what's in %2. Put the answer bit
+ ! into the T bit so it can come out again at the bottom
+
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4 ; div1 r5, r0
+ rotcl r4
+ mov r4, r0
+"
+ : "=r" (__r0)
+ : "r" (__r4), "r" (__r5)
+ : "r4", "cc");
+
+ return n - (base * __r0);
+}
+
+#define do_rem(result, n, base) ((result) = _dl_urem((n), (base)))
/* 4096 bytes alignment */
#define PAGE_ALIGN 0xfffff000
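
For readers unfamiliar with the SH step-division instructions: div0u clears the divider state, and each "rotcl r4 ; div1 r5, r0" pair rotates the next numerator bit out of r4 through the T bit, performs one division step against the divisor in r5, and rotates the resulting quotient bit back into r4. After the 32 unrolled steps and a final rotcl, r4 holds the quotient, and the C wrapper recovers the remainder as n - (base * quotient). The following plain-C sketch of the same shift-and-subtract idea is illustrative only and not part of the patch; the helper name urem_sketch is invented for the example, and base is assumed to be nonzero. (div1 itself uses a non-restoring scheme, but the quotient comes out the same.)

#include <stdio.h>

/* Illustrative only: a portable restoring-division loop that mirrors what the
   32 rotcl/div1 steps in _dl_urem compute.  Assumes base != 0. */
static unsigned int urem_sketch(unsigned int n, unsigned int base)
{
	unsigned int quot = 0;	/* quotient bits, like those rotated into r4 */
	unsigned int rem = 0;	/* running partial remainder */
	int i;

	for (i = 31; i >= 0; i--) {
		rem = (rem << 1) | ((n >> i) & 1);	/* bring down the next numerator bit */
		quot <<= 1;
		if (rem >= base) {	/* trial subtraction succeeds: quotient bit is 1 */
			rem -= base;
			quot |= 1;
		}
	}
	/* Same recovery step as the asm wrapper: n - base * quotient.
	   (rem already holds the remainder; the subtraction just shows the identity.) */
	return n - base * quot;
}

int main(void)
{
	unsigned int n = 19088743u, base = 17u;
	printf("%u %u\n", urem_sketch(n, base), n % base);	/* both print the same remainder */
	return 0;
}

The likely reason for open-coding the remainder instead of writing n % base is that, on SH, an ordinary modulo would call the compiler's division helper, and the loader cannot safely make such calls before it has relocated itself; do_rem() is the hook the rest of the loader uses when it needs a remainder, for example when reducing an ELF symbol hash modulo the bucket count.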
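
As a side note on the final context line, PAGE_ALIGN is a mask: AND-ing an address with 0xfffff000 rounds it down to the start of its 4096-byte page. A minimal, hypothetical usage sketch (the address value is made up):

#include <stdio.h>

#define PAGE_ALIGN 0xfffff000	/* same value as in the header: 4096-byte pages */

int main(void)
{
	unsigned long addr = 0x00403a2cUL;	/* arbitrary example address */
	unsigned long page = addr & PAGE_ALIGN;	/* rounds down to 0x00403000 */
	printf("%#lx -> %#lx\n", addr, page);
	return 0;
}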