diff options
-rw-r--r--  extra/Configs/Config.arm                              8
-rw-r--r--  extra/Configs/Config.in                               1
-rw-r--r--  libc/sysdeps/linux/arm/clone.S                       14
-rw-r--r--  libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h 56
4 files changed, 65 insertions(+), 14 deletions(-)
diff --git a/extra/Configs/Config.arm b/extra/Configs/Config.arm
index a16c9197a..00cf98281 100644
--- a/extra/Configs/Config.arm
+++ b/extra/Configs/Config.arm
@@ -28,6 +28,7 @@ config CONFIG_ARM_EABI
 config COMPILE_IN_THUMB_MODE
 	bool "Build using Thumb mode"
 	select USE_BX
+	select USE_LDREXSTREX
 	help
 	  Say 'y' here to force building uClibc in thumb mode.
 	  Say 'n' to use your compiler's default mode.
@@ -38,3 +39,10 @@ config USE_BX
 	  Say 'y' to use BX to return from functions on your thumb-aware
 	  processor. Say 'y' if you need to use interworking. Say 'n' if not.
 	  It is safe to say 'y' even if you're not doing interworking.
+
+config USE_LDREXSTREX
+	bool "Use load-store exclusive ASM ops (not supported in SmartFusion)"
+	depends on COMPILE_IN_THUMB_MODE
+	default n
+	help
+	  Say 'y' to use LDREX/STREX ASM ops.
diff --git a/extra/Configs/Config.in b/extra/Configs/Config.in
index c7a3b2dd2..31944bea1 100644
--- a/extra/Configs/Config.in
+++ b/extra/Configs/Config.in
@@ -524,7 +524,6 @@ config LINUXTHREADS_OLD
 	# linuxthreads and linuxthreads.old need nanosleep()
 	select UCLIBC_HAS_REALTIME
 	depends on !TARGET_arc && \
-		   !TARGET_arm && \
 		   !TARGET_i386 && \
 		   !TARGET_metag && \
 		   !TARGET_mips && \
diff --git a/libc/sysdeps/linux/arm/clone.S b/libc/sysdeps/linux/arm/clone.S
index 29045ef7b..d1c9239da 100644
--- a/libc/sysdeps/linux/arm/clone.S
+++ b/libc/sysdeps/linux/arm/clone.S
@@ -26,7 +26,10 @@
 #include <sys/syscall.h>
 #include <bits/arm_asm.h>
 #include <bits/arm_bx.h>
+
+#if defined __UCLIBC_HAS_THREADS__ && !defined __LINUXTHREADS_OLD__
 #include <sysdep-cancel.h>
+#endif
 
 #define CLONE_VM      0x00000100
 #define CLONE_THREAD  0x00010000
@@ -57,23 +60,22 @@ __clone:
 	@ get flags
 	mov	r0, r2
 	@ new sp is already in r1
-	@ load remaining arguments off the stack
-	stmfd	sp!, {r4}
-	ldr	r2, [sp, #4]
-	ldr	r3, [sp, #8]
-	ldr	r4, [sp, #12]
 	DO_CALL (clone)
 	movs	a1, a1
 	blt	__error
-	ldmnefd sp!, {r4}
 	beq	1f
 	bx	lr
 
 1:
 	@ pick the function arg and call address off the stack and execute
 	ldr	r0, [sp, #4]
+#if defined(__USE_BX__)
 	ldr	r1, [sp]
 	bl	2f	@ blx r1
+#else
+	mov     lr, pc
+	ldr     pc, [sp]
+#endif
 	@ and we are done, passing the return value through r0
 	bl	HIDDEN_JUMPTARGET(_exit)
diff --git a/libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h b/libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h
index 438c12ab9..2b877f980 100644
--- a/libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h
+++ b/libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h
@@ -21,12 +21,50 @@
 #ifndef _PT_MACHINE_H
 #define _PT_MACHINE_H   1
 
-#include <features.h>
+#include <sys/syscall.h>
+#include <unistd.h>
 
 #ifndef PT_EI
 # define PT_EI __extern_always_inline
 #endif
 
+#if defined(__thumb__)
+#if defined(__USE_LDREXSTREX__)
+PT_EI long int ldrex(int *spinlock)
+{
+	long int ret;
+	__asm__ __volatile__(
+		"ldrex %0, [%1]\n"
+		: "=r"(ret)
+		: "r"(spinlock) : "memory");
+	return ret;
+}
+
+PT_EI long int strex(int val, int *spinlock)
+{
+	long int ret;
+	__asm__ __volatile__(
+		"strex %0, %1, [%2]\n"
+		: "=r"(ret)
+		: "r" (val), "r"(spinlock) : "memory");
+	return ret;
+}
+
+/* Spinlock implementation; required.  */
+PT_EI long int
+testandset (int *spinlock)
+{
+  register unsigned int ret;
+
+  do {
+	  ret = ldrex(spinlock);
+  } while (strex(1, spinlock));
+
+  return ret;
+}
+
+#else /* __USE_LDREXSTREX__ */
+
 /* This will not work on ARM1 or ARM2 because SWP is lacking on those
    machines.  Unfortunately we have no way to detect this at compile
    time; let's hope nobody tries to use one.  */
@@ -36,8 +74,6 @@ PT_EI long int testandset (int *spinlock);
 PT_EI long int testandset (int *spinlock)
 {
   register unsigned int ret;
-
-#if defined(__thumb__)
   void *pc;
   __asm__ __volatile__(
 	".align 0\n"
@@ -50,15 +86,21 @@ PT_EI long int testandset (int *spinlock)
 	"\t.force_thumb"
 	: "=r"(ret), "=r"(pc)
 	: "0"(1), "r"(spinlock));
-#else
+  return ret;
+}
+#endif
+#else /* __thumb__ */
+
+PT_EI long int testandset (int *spinlock);
+PT_EI long int testandset (int *spinlock)
+{
+  register unsigned int ret;
   __asm__ __volatile__("swp %0, %1, [%2]"
 		       : "=r"(ret)
 		       : "0"(1), "r"(spinlock));
-#endif
-
   return ret;
 }
-
+#endif
 /* Get some notion of the current stack.  Need not be exactly the top
    of the stack, just something somewhere in the current frame.  */
