github.com/Unity-Technologies/bdwgc.git
author    Alessandro Bruni <alessandro.bruni@gmail.com>  2016-01-28 00:03:49 +0300
committer Ivan Maidanski <ivmai@mail.ru>                  2016-01-28 00:03:49 +0300
commit    989056833ff24691cc26c8bc8b9ba951a08b4a66 (patch)
tree      9c8309a5b911a2a7cae1580aeb35e543203c49c3 /malloc.c
parent    283e7fded73a8428f94fa0e0baa24e5ed2a1f78b (diff)
GC_malloc[_atomic] global and thread-local generalization with kind
* include/gc_inline.h (GC_malloc_kind, GC_malloc_kind_global): New public function declarations.
* include/gc_inline.h (GC_MALLOC_WORDS_KIND): New public macro.
* include/gc_inline.h (GC_MALLOC_WORDS, GC_MALLOC_ATOMIC_WORDS): Use GC_MALLOC_WORDS_KIND.
* include/gc_inline.h (GC_CONS): Use GC_malloc_kind (instead of GC_malloc); reformat code.
* include/private/gc_priv.h (MAXOBJKINDS): Allow user-defined values.
* include/private/gc_priv.h (GC_core_malloc, GC_core_malloc_atomic): Remove prototype.
* malloc.c: Include gc_inline.h (to get GC_malloc_kind prototype).
* mallocx.c: Likewise.
* malloc.c (GC_generic_malloc_inner, GC_generic_malloc_inner_ignore_off_page, GC_generic_malloc): Add assertion on "k" (kind) argument (should be less than MAXOBJKINDS).
* mallocx.c (GC_generic_malloc_ignore_off_page, GC_generic_malloc_many): Likewise.
* malloc.c (GC_generic_malloc_uncollectable): Add assertion on "k" argument (should be less than PREDEFINED_KINDS).
* malloc.c (GC_core_malloc_atomic, GC_core_malloc): Replace with GC_malloc_kind_global.
* malloc.c (GC_malloc_atomic, GC_malloc): Define as a wrapper around GC_malloc_kind_global.
* malloc.c (GC_malloc_kind): Redirect to GC_malloc_kind_global if not defined in gc_inline.h (as a macro) or in thread_local_alloc.c.
* mallocx.c (GC_generic_or_special_malloc): Call GC_malloc_kind instead of GC_malloc_atomic and GC_malloc.
* thread_local_alloc.c (GC_malloc, GC_malloc_atomic): Replace with GC_malloc_kind; remove tiny_fl local variable; call GC_malloc_kind_global instead of GC_core_malloc and GC_core_malloc_atomic.
* thread_local_alloc.c (GC_destroy_thread_local): Adjust static assert to guard against global _freelists overrun.
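
For orientation only (not part of the commit): after this change GC_malloc and GC_malloc_atomic become thin wrappers over GC_malloc_kind, so client code can select the object kind explicitly. A minimal usage sketch, assuming the GC_I_NORMAL and GC_I_PTRFREE kind indices exposed by gc_inline.h (1 and 0, respectively):

#include "gc.h"
#include "gc_inline.h"   /* declares GC_malloc_kind; kind indices assumed here */

int main(void)
{
    GC_INIT();

    /* Same as GC_malloc(64): collectable object that may contain pointers. */
    void *normal = GC_malloc_kind(64, GC_I_NORMAL);

    /* Same as GC_malloc_atomic(64): pointer-free object, never scanned for pointers. */
    void *atomic = GC_malloc_kind(64, GC_I_PTRFREE);

    return (normal != NULL && atomic != NULL) ? 0 : 1;
}
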
Diffstat (limited to 'malloc.c')
-rw-r--r--  malloc.c  90
1 file changed, 42 insertions(+), 48 deletions(-)
diff --git a/malloc.c b/malloc.c
index 72a98ec2..083704cf 100644
--- a/malloc.c
+++ b/malloc.c
@@ -14,6 +14,7 @@
*/
#include "private/gc_priv.h"
+#include "gc_inline.h" /* for GC_malloc_kind */
#include <stdio.h>
#include <string.h>
@@ -109,7 +110,8 @@ GC_INNER void * GC_generic_malloc_inner(size_t lb, int k)
void *op;
GC_ASSERT(I_HOLD_LOCK());
- if(SMALL_OBJ(lb)) {
+ GC_ASSERT(k < MAXOBJKINDS);
+ if (SMALL_OBJ(lb)) {
struct obj_kind * kind = GC_obj_kinds + k;
size_t lg = GC_size_map[lb];
void ** opp = &(kind -> ok_freelist[lg]);
@@ -162,7 +164,8 @@ GC_INNER void * GC_generic_malloc_inner_ignore_off_page(size_t lb, int k)
GC_ASSERT(I_HOLD_LOCK());
if (lb <= HBLKSIZE)
- return(GC_generic_malloc_inner(lb, k));
+ return GC_generic_malloc_inner(lb, k);
+ GC_ASSERT(k < MAXOBJKINDS);
lb_adjusted = ADD_SLOP(lb);
op = GC_alloc_large_and_clear(lb_adjusted, k, IGNORE_OFF_PAGE);
GC_bytes_allocd += lb_adjusted;
@@ -180,6 +183,7 @@ GC_API GC_ATTR_MALLOC void * GC_CALL GC_generic_malloc(size_t lb, int k)
void * result;
DCL_LOCK_STATE;
+ GC_ASSERT(k < MAXOBJKINDS);
if (EXPECT(GC_have_errors, FALSE))
GC_print_all_errors();
GC_INVOKE_FINALIZERS();
@@ -229,68 +233,57 @@ GC_API GC_ATTR_MALLOC void * GC_CALL GC_generic_malloc(size_t lb, int k)
}
}
-/* Allocate lb bytes of atomic (pointer-free) data. */
-#ifdef THREAD_LOCAL_ALLOC
- GC_INNER void * GC_core_malloc_atomic(size_t lb)
-#else
- GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc_atomic(size_t lb)
-#endif
+GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc_kind_global(size_t lb, int k)
{
void *op;
size_t lg;
DCL_LOCK_STATE;
- if(SMALL_OBJ(lb)) {
+ GC_STATIC_ASSERT(MAXOBJKINDS >= PREDEFINED_KINDS);
+ GC_ASSERT(k < PREDEFINED_KINDS);
+ if (SMALL_OBJ(lb)) {
GC_DBG_COLLECT_AT_MALLOC(lb);
lg = GC_size_map[lb];
LOCK();
- op = GC_freelists[PTRFREE][lg];
- if (EXPECT(0 == op, FALSE)) {
+ op = GC_freelists[k][lg];
+ if (EXPECT(op != NULL, TRUE)) {
+ if (k == PTRFREE) {
+ GC_freelists[k][lg] = obj_link(op);
+ } else {
+ GC_ASSERT(0 == obj_link(op)
+ || ((word)obj_link(op)
+ <= (word)GC_greatest_plausible_heap_addr
+ && (word)obj_link(op)
+ >= (word)GC_least_plausible_heap_addr));
+ GC_freelists[k][lg] = obj_link(op);
+ obj_link(op) = 0;
+ }
+ GC_bytes_allocd += GRANULES_TO_BYTES(lg);
UNLOCK();
- return(GENERAL_MALLOC((word)lb, PTRFREE));
+ return op;
}
- GC_freelists[PTRFREE][lg] = obj_link(op);
- GC_bytes_allocd += GRANULES_TO_BYTES(lg);
UNLOCK();
- return((void *) op);
- } else {
- return(GENERAL_MALLOC((word)lb, PTRFREE));
- }
+ }
+ return GENERAL_MALLOC(lb, k);
}
-/* Allocate lb bytes of composite (pointerful) data */
-#ifdef THREAD_LOCAL_ALLOC
- GC_INNER void * GC_core_malloc(size_t lb)
-#else
- GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc(size_t lb)
+#if defined(THREADS) && !defined(THREAD_LOCAL_ALLOC)
+ GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc_kind(size_t lb, int k)
+ {
+ return GC_malloc_kind_global(lb, k);
+ }
#endif
+
+/* Allocate lb bytes of atomic (pointer-free) data. */
+GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc_atomic(size_t lb)
{
- void *op;
- size_t lg;
- DCL_LOCK_STATE;
+ return GC_malloc_kind(lb, PTRFREE);
+}
- if(SMALL_OBJ(lb)) {
- GC_DBG_COLLECT_AT_MALLOC(lb);
- lg = GC_size_map[lb];
- LOCK();
- op = GC_freelists[NORMAL][lg];
- if (EXPECT(0 == op, FALSE)) {
- UNLOCK();
- return (GENERAL_MALLOC((word)lb, NORMAL));
- }
- GC_ASSERT(0 == obj_link(op)
- || ((word)obj_link(op)
- <= (word)GC_greatest_plausible_heap_addr
- && (word)obj_link(op)
- >= (word)GC_least_plausible_heap_addr));
- GC_freelists[NORMAL][lg] = obj_link(op);
- obj_link(op) = 0;
- GC_bytes_allocd += GRANULES_TO_BYTES(lg);
- UNLOCK();
- return op;
- } else {
- return(GENERAL_MALLOC(lb, NORMAL));
- }
+/* Allocate lb bytes of composite (pointerful) data. */
+GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc(size_t lb)
+{
+ return GC_malloc_kind(lb, NORMAL);
}
GC_API GC_ATTR_MALLOC void * GC_CALL GC_generic_malloc_uncollectable(
@@ -300,6 +293,7 @@ GC_API GC_ATTR_MALLOC void * GC_CALL GC_generic_malloc_uncollectable(
size_t lg;
DCL_LOCK_STATE;
+ GC_ASSERT(k < PREDEFINED_KINDS);
if (SMALL_OBJ(lb)) {
GC_DBG_COLLECT_AT_MALLOC(lb);
if (EXTRA_BYTES != 0 && lb != 0) lb--;