author    Florian Weimer <fweimer@redhat.com>  2016-09-06 12:49:54 +0200
committer Florian Weimer <fweimer@redhat.com>  2016-09-06 12:49:54 +0200
commit    4bf5f2224baa1590f92f7a26930928fe9f7e4b57 (patch)
tree      d8a3457318df3733dfefc9476eb210288beba6d0 /malloc/arena.c
parent    6d5336211d2e823d4d431a01e62a80d9be4cbc9d (diff)
malloc: Automated part of conversion to __libc_lock
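
For context: the __libc_lock macros take the lock object by name rather than by address, which is why this automated conversion also drops the leading "&" at every call site. Below is a minimal sketch of the convention, using a plain pthread mutex as a stand-in for glibc's internal low-level lock (the real definitions live in glibc's libc-lock.h headers and vary per port):

/* Illustrative approximation only -- glibc's real macros wrap its
   low-level lock, not pthread_mutex_t.  The point is the calling
   convention: the macros receive the lock by name, so call sites
   write __libc_lock_lock (ptr->mutex), not mutex_lock (&ptr->mutex).  */
#include <pthread.h>

typedef pthread_mutex_t __libc_lock_t;

#define __libc_lock_init(NAME)    pthread_mutex_init (&(NAME), NULL)
#define __libc_lock_lock(NAME)    pthread_mutex_lock (&(NAME))
#define __libc_lock_trylock(NAME) pthread_mutex_trylock (&(NAME))
#define __libc_lock_unlock(NAME)  pthread_mutex_unlock (&(NAME))

struct arena
{
  __libc_lock_t mutex;
};

static void
example (struct arena *ptr)
{
  __libc_lock_init (ptr->mutex);    /* was: mutex_init (&ptr->mutex); */
  __libc_lock_lock (ptr->mutex);    /* was: (void) mutex_lock (&ptr->mutex); */
  __libc_lock_unlock (ptr->mutex);  /* was: (void) mutex_unlock (&ptr->mutex); */
}

Note that __libc_lock_trylock, like pthread_mutex_trylock, returns zero on success, which matches the "!__libc_lock_trylock (result->mutex)" test in the reused_arena hunk below.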
Diffstat (limited to 'malloc/arena.c')
-rw-r--r--  malloc/arena.c | 52
1 file changed, 26 insertions(+), 26 deletions(-)
diff --git a/malloc/arena.c b/malloc/arena.c
index 4e16593d8b..922ae49e2f 100644
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -112,7 +112,7 @@ int __malloc_initialized = -1;
#define arena_lock(ptr, size) do { \
if (ptr && !arena_is_corrupt (ptr)) \
- (void) mutex_lock (&ptr->mutex); \
+ __libc_lock_lock (ptr->mutex); \
else \
ptr = arena_get2 ((size), NULL); \
} while (0)
@@ -145,11 +145,11 @@ __malloc_fork_lock_parent (void)
/* We do not acquire free_list_lock here because we completely
reconstruct free_list in __malloc_fork_unlock_child. */
- (void) mutex_lock (&list_lock);
+ __libc_lock_lock (list_lock);
for (mstate ar_ptr = &main_arena;; )
{
- (void) mutex_lock (&ar_ptr->mutex);
+ __libc_lock_lock (ar_ptr->mutex);
ar_ptr = ar_ptr->next;
if (ar_ptr == &main_arena)
break;
@@ -165,12 +165,12 @@ __malloc_fork_unlock_parent (void)
for (mstate ar_ptr = &main_arena;; )
{
- (void) mutex_unlock (&ar_ptr->mutex);
+ __libc_lock_unlock (ar_ptr->mutex);
ar_ptr = ar_ptr->next;
if (ar_ptr == &main_arena)
break;
}
- (void) mutex_unlock (&list_lock);
+ __libc_lock_unlock (list_lock);
}
void
@@ -182,13 +182,13 @@ __malloc_fork_unlock_child (void)
/* Push all arenas to the free list, except thread_arena, which is
attached to the current thread. */
- mutex_init (&free_list_lock);
+ __libc_lock_init (free_list_lock);
if (thread_arena != NULL)
thread_arena->attached_threads = 1;
free_list = NULL;
for (mstate ar_ptr = &main_arena;; )
{
- mutex_init (&ar_ptr->mutex);
+ __libc_lock_init (ar_ptr->mutex);
if (ar_ptr != thread_arena)
{
/* This arena is no longer attached to any thread. */
@@ -201,7 +201,7 @@ __malloc_fork_unlock_child (void)
break;
}
- mutex_init (&list_lock);
+ __libc_lock_init (list_lock);
}
/* Initialization routine. */
@@ -668,9 +668,9 @@ _int_new_arena (size_t size)
LIBC_PROBE (memory_arena_new, 2, a, size);
mstate replaced_arena = thread_arena;
thread_arena = a;
- mutex_init (&a->mutex);
+ __libc_lock_init (a->mutex);
- (void) mutex_lock (&list_lock);
+ __libc_lock_lock (list_lock);
/* Add the new arena to the global list. */
a->next = main_arena.next;
@@ -680,11 +680,11 @@ _int_new_arena (size_t size)
atomic_write_barrier ();
main_arena.next = a;
- (void) mutex_unlock (&list_lock);
+ __libc_lock_unlock (list_lock);
- (void) mutex_lock (&free_list_lock);
+ __libc_lock_lock (free_list_lock);
detach_arena (replaced_arena);
- (void) mutex_unlock (&free_list_lock);
+ __libc_lock_unlock (free_list_lock);
/* Lock this arena. NB: Another thread may have been attached to
this arena because the arena is now accessible from the
@@ -696,7 +696,7 @@ _int_new_arena (size_t size)
but this could result in a deadlock with
__malloc_fork_lock_parent. */
- (void) mutex_lock (&a->mutex);
+ __libc_lock_lock (a->mutex);
return a;
}
@@ -710,7 +710,7 @@ get_free_list (void)
mstate result = free_list;
if (result != NULL)
{
- (void) mutex_lock (&free_list_lock);
+ __libc_lock_lock (free_list_lock);
result = free_list;
if (result != NULL)
{
@@ -722,12 +722,12 @@ get_free_list (void)
detach_arena (replaced_arena);
}
- (void) mutex_unlock (&free_list_lock);
+ __libc_lock_unlock (free_list_lock);
if (result != NULL)
{
LIBC_PROBE (memory_arena_reuse_free_list, 1, result);
- (void) mutex_lock (&result->mutex);
+ __libc_lock_lock (result->mutex);
thread_arena = result;
}
}
@@ -772,7 +772,7 @@ reused_arena (mstate avoid_arena)
result = next_to_use;
do
{
- if (!arena_is_corrupt (result) && !mutex_trylock (&result->mutex))
+ if (!arena_is_corrupt (result) && !__libc_lock_trylock (result->mutex))
goto out;
/* FIXME: This is a data race, see _int_new_arena. */
@@ -799,14 +799,14 @@ reused_arena (mstate avoid_arena)
/* No arena available without contention. Wait for the next in line. */
LIBC_PROBE (memory_arena_reuse_wait, 3, &result->mutex, result, avoid_arena);
- (void) mutex_lock (&result->mutex);
+ __libc_lock_lock (result->mutex);
out:
/* Attach the arena to the current thread. */
{
/* Update the arena thread attachment counters. */
mstate replaced_arena = thread_arena;
- (void) mutex_lock (&free_list_lock);
+ __libc_lock_lock (free_list_lock);
detach_arena (replaced_arena);
/* We may have picked up an arena on the free list. We need to
@@ -821,7 +821,7 @@ out:
++result->attached_threads;
- (void) mutex_unlock (&free_list_lock);
+ __libc_lock_unlock (free_list_lock);
}
LIBC_PROBE (memory_arena_reuse, 2, result, avoid_arena);
@@ -892,17 +892,17 @@ arena_get_retry (mstate ar_ptr, size_t bytes)
LIBC_PROBE (memory_arena_retry, 2, bytes, ar_ptr);
if (ar_ptr != &main_arena)
{
- (void) mutex_unlock (&ar_ptr->mutex);
+ __libc_lock_unlock (ar_ptr->mutex);
/* Don't touch the main arena if it is corrupt. */
if (arena_is_corrupt (&main_arena))
return NULL;
ar_ptr = &main_arena;
- (void) mutex_lock (&ar_ptr->mutex);
+ __libc_lock_lock (ar_ptr->mutex);
}
else
{
- (void) mutex_unlock (&ar_ptr->mutex);
+ __libc_lock_unlock (ar_ptr->mutex);
ar_ptr = arena_get2 (bytes, ar_ptr);
}
@@ -917,7 +917,7 @@ arena_thread_freeres (void)
if (a != NULL)
{
- (void) mutex_lock (&free_list_lock);
+ __libc_lock_lock (free_list_lock);
/* If this was the last attached thread for this arena, put the
arena on the free list. */
assert (a->attached_threads > 0);
@@ -926,7 +926,7 @@ arena_thread_freeres (void)
a->next_free = free_list;
free_list = a;
}
- (void) mutex_unlock (&free_list_lock);
+ __libc_lock_unlock (free_list_lock);
}
}
text_set_element (__libc_thread_subfreeres, arena_thread_freeres);
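
The reused_arena hunks above preserve a trylock-first pattern: walk the circular arena list attempting a non-blocking lock, and only block on one arena once every arena has been found contended (the real code additionally skips corrupt arenas and honors its avoid_arena argument). A self-contained sketch of that pattern, with pthreads and a simplified arena type standing in for glibc's mstate:

#include <pthread.h>

/* Simplified stand-in for glibc's mstate: a circular singly linked
   list of arenas, each guarded by its own lock.  */
typedef struct arena
{
  pthread_mutex_t mutex;
  struct arena *next;
} arena_t;

/* Round-robin starting point; assumed to be initialized to some
   arena in the list before first use.  */
static arena_t *next_to_use;

/* Prefer any uncontended arena; if all are busy, block on the first
   candidate rather than spinning.  Returns the arena locked.  */
static arena_t *
reuse_arena (void)
{
  arena_t *result = next_to_use;
  do
    {
      if (pthread_mutex_trylock (&result->mutex) == 0)
        goto out;
      result = result->next;
    }
  while (result != next_to_use);

  /* No arena available without contention; wait for the next in line.  */
  result = next_to_use;
  pthread_mutex_lock (&result->mutex);

 out:
  next_to_use = result->next;  /* advance the round-robin cursor */
  return result;               /* caller unlocks when done */
}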