summaryrefslogtreecommitdiff
path: root/libc/stdlib/malloc-simple/alloc.c
diff options
context:
space:
mode:
authorEric Andersen <andersen@codepoet.org>2006-12-07 23:24:02 +0000
committerEric Andersen <andersen@codepoet.org>2006-12-07 23:24:02 +0000
commit1478c2de052374c6356db5513749a144c13791b1 (patch)
tree3b22a3f8361f94c99508c497e240ecb71acf8641 /libc/stdlib/malloc-simple/alloc.c
parent99d6c367c4820a072dc4ada51561df17e2093778 (diff)
Major cleanup of internal mutex locking. Be more consistent in how we do
things, and avoid potential deadlocks caused when a thread holding a uClibc internal lock gets canceled and terminates without releasing the lock. This change also provides a single place, bits/uClibc_mutex.h, for thread libraries to modify to change all instances of internal locking.
Diffstat (limited to 'libc/stdlib/malloc-simple/alloc.c')
-rw-r--r--libc/stdlib/malloc-simple/alloc.c27
1 file changed, 13 insertions, 14 deletions
diff --git a/libc/stdlib/malloc-simple/alloc.c b/libc/stdlib/malloc-simple/alloc.c
index e382cee55..0b842076d 100644
--- a/libc/stdlib/malloc-simple/alloc.c
+++ b/libc/stdlib/malloc-simple/alloc.c
@@ -113,12 +113,11 @@ void free(void *ptr)
#endif
#ifdef L_memalign
-#ifdef __UCLIBC_HAS_THREADS__
-# include <pthread.h>
-pthread_mutex_t __malloc_lock = PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP;
-#endif
-#define LOCK __pthread_mutex_lock(&__malloc_lock)
-#define UNLOCK __pthread_mutex_unlock(&__malloc_lock)
+
+#include <bits/uClibc_mutex.h>
+__UCLIBC_MUTEX_STATIC(__malloc_lock, PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP);
+#define __MALLOC_LOCK __UCLIBC_MUTEX_LOCK(__malloc_lock)
+#define __MALLOC_UNLOCK __UCLIBC_MUTEX_UNLOCK(__malloc_lock)
/* List of blocks allocated with memalign or valloc */
struct alignlist
@@ -137,7 +136,7 @@ int __libc_free_aligned(void *ptr)
if (ptr == NULL)
return 0;
- LOCK;
+ __MALLOC_LOCK;
for (l = _aligned_blocks; l != NULL; l = l->next) {
if (l->aligned == ptr) {
/* Mark the block as free */
@@ -148,7 +147,7 @@ int __libc_free_aligned(void *ptr)
return 1;
}
}
- UNLOCK;
+ __MALLOC_UNLOCK;
return 0;
}
void * memalign (size_t alignment, size_t size)
@@ -160,11 +159,10 @@ void * memalign (size_t alignment, size_t size)
if (result == NULL)
return NULL;
- adj = (unsigned long int) ((unsigned long int) ((char *) result -
- (char *) NULL)) % alignment;
+ adj = (unsigned long int) ((unsigned long int) ((char *) result - (char *) NULL)) % alignment;
if (adj != 0) {
struct alignlist *l;
- LOCK;
+ __MALLOC_LOCK;
for (l = _aligned_blocks; l != NULL; l = l->next)
if (l->aligned == NULL)
/* This slot is free. Use it. */
@@ -173,15 +171,16 @@ void * memalign (size_t alignment, size_t size)
l = (struct alignlist *) malloc (sizeof (struct alignlist));
if (l == NULL) {
free(result);
- UNLOCK;
- return NULL;
+ result = NULL;
+ goto DONE;
}
l->next = _aligned_blocks;
_aligned_blocks = l;
}
l->exact = result;
result = l->aligned = (char *) result + alignment - adj;
- UNLOCK;
+DONE:
+ __MALLOC_UNLOCK;
}
return result;