author     Eric Andersen <andersen@codepoet.org>  2006-12-07 23:24:02 +0000
committer  Eric Andersen <andersen@codepoet.org>  2006-12-07 23:24:02 +0000
commit     1478c2de052374c6356db5513749a144c13791b1 (patch)
tree       3b22a3f8361f94c99508c497e240ecb71acf8641  /libc/stdlib/malloc-standard/memalign.c
parent     99d6c367c4820a072dc4ada51561df17e2093778 (diff)
Major cleanup of internal mutex locking. Be more consistent in how we do things, and avoid potential deadlocks caused when a thread holding a uClibc internal lock gets canceled and terminates without releasing the lock. This change also provides a single place, bits/uClibc_mutex.h, for thread libraries to modify in order to change all instances of internal locking.
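
As a rough sketch of that idea (the macro and variable names below are placeholders for illustration, not the actual contents of bits/uClibc_mutex.h), a single shared header can define the lock/unlock macros that every subsystem uses, with per-subsystem wrappers such as the __MALLOC_LOCK/__MALLOC_UNLOCK seen in the diff built on top:

    /* Sketch only: a central header of lock macros. Because every
     * subsystem locks through these macros, a thread library can swap
     * the underlying primitive, add cancellation handling, or compile
     * the macros away for single-threaded builds in one place. */
    #include <pthread.h>

    #define EXAMPLE_MUTEX(name) \
            pthread_mutex_t name = PTHREAD_MUTEX_INITIALIZER

    #define EXAMPLE_MUTEX_LOCK(name)   pthread_mutex_lock(&(name))
    #define EXAMPLE_MUTEX_UNLOCK(name) pthread_mutex_unlock(&(name))

    /* A subsystem such as malloc then builds its own wrappers on top: */
    static EXAMPLE_MUTEX(__example_malloc_lock);
    #define EXAMPLE_MALLOC_LOCK   EXAMPLE_MUTEX_LOCK(__example_malloc_lock)
    #define EXAMPLE_MALLOC_UNLOCK EXAMPLE_MUTEX_UNLOCK(__example_malloc_lock)
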
Diffstat (limited to 'libc/stdlib/malloc-standard/memalign.c')
-rw-r--r--  libc/stdlib/malloc-standard/memalign.c | 18
1 file changed, 11 insertions(+), 7 deletions(-)
diff --git a/libc/stdlib/malloc-standard/memalign.c b/libc/stdlib/malloc-standard/memalign.c
index 27502893d..7e0674be5 100644
--- a/libc/stdlib/malloc-standard/memalign.c
+++ b/libc/stdlib/malloc-standard/memalign.c
@@ -35,6 +35,7 @@ void* memalign(size_t alignment, size_t bytes)
mchunkptr remainder; /* spare room at end to split off */
unsigned long remainder_size; /* its size */
size_t size;
+ void *retval;
/* If need less alignment than we give anyway, just relay to malloc */
@@ -51,7 +52,7 @@ void* memalign(size_t alignment, size_t bytes)
alignment = a;
}
- LOCK;
+ __MALLOC_LOCK;
checked_request2size(bytes, nb);
/* Strategy: find a spot within that chunk that meets the alignment
@@ -63,8 +64,8 @@ void* memalign(size_t alignment, size_t bytes)
m = (char*)(malloc(nb + alignment + MINSIZE));
if (m == 0) {
- UNLOCK;
- return 0; /* propagate failure */
+ retval = 0; /* propagate failure */
+ goto DONE;
}
p = mem2chunk(m);
@@ -92,8 +93,8 @@ void* memalign(size_t alignment, size_t bytes)
if (chunk_is_mmapped(p)) {
newp->prev_size = p->prev_size + leadsize;
set_head(newp, newsize|IS_MMAPPED);
- UNLOCK;
- return chunk2mem(newp);
+ retval = chunk2mem(newp);
+ goto DONE;
}
/* Otherwise, give back leader, use the rest */
@@ -120,7 +121,10 @@ void* memalign(size_t alignment, size_t bytes)
}
check_inuse_chunk(p);
- UNLOCK;
- return chunk2mem(p);
+ retval = chunk2mem(p);
+
+ DONE:
+ __MALLOC_UNLOCK;
+ return retval;
}
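
The restructuring above replaces each early return with a jump to a single DONE label, so the lock is released on exactly one path, including the failure path. A minimal standalone sketch of that pattern (function, macro, and mutex names here are placeholders, not uClibc code) looks like this:

    /* Sketch of the single-exit locking pattern used in this diff.
     * example_lock/example_unlock stand in for the roles played by
     * __MALLOC_LOCK/__MALLOC_UNLOCK in memalign.c. */
    #include <pthread.h>
    #include <stdlib.h>

    static pthread_mutex_t example_mutex = PTHREAD_MUTEX_INITIALIZER;
    #define example_lock()   pthread_mutex_lock(&example_mutex)
    #define example_unlock() pthread_mutex_unlock(&example_mutex)

    void *example_alloc(size_t bytes)
    {
        void *retval;

        example_lock();

        void *m = malloc(bytes);
        if (m == NULL) {
            retval = NULL;      /* propagate failure */
            goto DONE;          /* still releases the lock below */
        }

        retval = m;             /* success path falls through */

    DONE:
        example_unlock();       /* single unlock point for every path */
        return retval;
    }

Structuring the function this way means a later change to the unlock macro, or an extra cleanup step before returning, only has to be made in one place instead of at every return site.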