Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

git.blender.org/blender.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
Diffstat (limited to 'source/blender/blenlib/intern/BLI_heap.c')
-rw-r--r--source/blender/blenlib/intern/BLI_heap.c128
1 file changed, 103 insertions, 25 deletions
diff --git a/source/blender/blenlib/intern/BLI_heap.c b/source/blender/blenlib/intern/BLI_heap.c
index 4bd404e5d73..0a8dafc2dc1 100644
--- a/source/blender/blenlib/intern/BLI_heap.c
+++ b/source/blender/blenlib/intern/BLI_heap.c
@@ -32,7 +32,6 @@
#include "MEM_guardedalloc.h"
#include "BLI_utildefines.h"
-#include "BLI_memarena.h"
#include "BLI_heap.h"
#include "BLI_strict_flags.h"
@@ -44,15 +43,37 @@ struct HeapNode {
unsigned int index;
};
+struct HeapNode_Chunk {
+ struct HeapNode_Chunk *prev;
+ unsigned int size;
+ unsigned int bufsize;
+ struct HeapNode buf[0];
+};
+
+/**
+ * Number of nodes to include per #HeapNode_Chunk when no reserved size is passed,
+ * or we allocate past the reserved number.
+ *
+ * \note Optimize number for 64kb allocs.
+ */
+#define HEAP_CHUNK_DEFAULT_NUM \
+ ((MEM_SIZE_OPTIMAL((1 << 16) - sizeof(struct HeapNode_Chunk))) / sizeof(HeapNode))
+
struct Heap {
unsigned int size;
unsigned int bufsize;
- MemArena *arena;
- HeapNode *freenodes;
HeapNode **tree;
+
+ struct {
+ /* Always keep at least one chunk (never NULL) */
+ struct HeapNode_Chunk *chunk;
+ /* when NULL, allocate a new chunk */
+ HeapNode *free;
+ } nodes;
};
-/* internal functions */
+/** \name Internal Functions
+ * \{ */
#define HEAP_PARENT(i) (((i) - 1) >> 1)
#define HEAP_LEFT(i) (((i) << 1) + 1)
@@ -92,11 +113,13 @@ static void heap_down(Heap *heap, unsigned int i)
smallest = ((l < size) && HEAP_COMPARE(heap->tree[l], heap->tree[i])) ? l : i;
- if ((r < size) && HEAP_COMPARE(heap->tree[r], heap->tree[smallest]))
+ if ((r < size) && HEAP_COMPARE(heap->tree[r], heap->tree[smallest])) {
smallest = r;
+ }
- if (smallest == i)
+ if (smallest == i) {
break;
+ }
heap_swap(heap, i, smallest);
i = smallest;
@@ -108,25 +131,73 @@ static void heap_up(Heap *heap, unsigned int i)
while (i > 0) {
const unsigned int p = HEAP_PARENT(i);
- if (HEAP_COMPARE(heap->tree[p], heap->tree[i]))
+ if (HEAP_COMPARE(heap->tree[p], heap->tree[i])) {
break;
-
+ }
heap_swap(heap, p, i);
i = p;
}
}
+/** \} */
-/***/
+
+/** \name Internal Memory Management
+ * \{ */
+
+static struct HeapNode_Chunk *heap_node_alloc_chunk(
+ unsigned int tot_nodes, struct HeapNode_Chunk *chunk_prev)
+{
+ struct HeapNode_Chunk *chunk = MEM_mallocN(
+ sizeof(struct HeapNode_Chunk) + (sizeof(HeapNode) * tot_nodes), __func__);
+ chunk->prev = chunk_prev;
+ chunk->bufsize = tot_nodes;
+ chunk->size = 0;
+ return chunk;
+}
+
+static struct HeapNode *heap_node_alloc(Heap *heap)
+{
+ HeapNode *node;
+
+ if (heap->nodes.free) {
+ node = heap->nodes.free;
+ heap->nodes.free = heap->nodes.free->ptr;
+ }
+ else {
+ struct HeapNode_Chunk *chunk = heap->nodes.chunk;
+ if (UNLIKELY(chunk->size == chunk->bufsize)) {
+ chunk = heap->nodes.chunk = heap_node_alloc_chunk(HEAP_CHUNK_DEFAULT_NUM, chunk);
+ }
+ node = &chunk->buf[chunk->size++];
+ }
+
+ return node;
+}
+
+static void heap_node_free(Heap *heap, HeapNode *node)
+{
+ node->ptr = heap->nodes.free;
+ heap->nodes.free = node;
+}
+
+/** \} */
+
+
+/** \name Public Heap API
+ * \{ */
/* use when the size of the heap is known in advance */
Heap *BLI_heap_new_ex(unsigned int tot_reserve)
{
- Heap *heap = (Heap *)MEM_callocN(sizeof(Heap), __func__);
+ Heap *heap = MEM_mallocN(sizeof(Heap), __func__);
/* ensure we have at least one so we can keep doubling it */
+ heap->size = 0;
heap->bufsize = MAX2(1u, tot_reserve);
- heap->tree = (HeapNode **)MEM_mallocN(heap->bufsize * sizeof(HeapNode *), "BLIHeapTree");
- heap->arena = BLI_memarena_new(MEM_SIZE_OPTIMAL(1 << 16), "heap arena");
+ heap->tree = MEM_mallocN(heap->bufsize * sizeof(HeapNode *), "BLIHeapTree");
+
+ heap->nodes.chunk = heap_node_alloc_chunk((tot_reserve > 1) ? tot_reserve : HEAP_CHUNK_DEFAULT_NUM, NULL);
+ heap->nodes.free = NULL;
return heap;
}
@@ -146,8 +217,15 @@ void BLI_heap_free(Heap *heap, HeapFreeFP ptrfreefp)
}
}
+ struct HeapNode_Chunk *chunk = heap->nodes.chunk;
+ do {
+ struct HeapNode_Chunk *chunk_prev;
+ chunk_prev = chunk->prev;
+ MEM_freeN(chunk);
+ chunk = chunk_prev;
+ } while (chunk);
+
MEM_freeN(heap->tree);
- BLI_memarena_free(heap->arena);
MEM_freeN(heap);
}
@@ -160,10 +238,16 @@ void BLI_heap_clear(Heap *heap, HeapFreeFP ptrfreefp)
ptrfreefp(heap->tree[i]->ptr);
}
}
-
heap->size = 0;
- BLI_memarena_clear(heap->arena);
- heap->freenodes = NULL;
+
+ /* Remove all except the last chunk */
+ while (heap->nodes.chunk->prev) {
+ struct HeapNode_Chunk *chunk_prev = heap->nodes.chunk->prev;
+ MEM_freeN(heap->nodes.chunk);
+ heap->nodes.chunk = chunk_prev;
+ }
+ heap->nodes.chunk->size = 0;
+ heap->nodes.free = NULL;
}
HeapNode *BLI_heap_insert(Heap *heap, float value, void *ptr)
@@ -175,13 +259,7 @@ HeapNode *BLI_heap_insert(Heap *heap, float value, void *ptr)
heap->tree = MEM_reallocN(heap->tree, heap->bufsize * sizeof(*heap->tree));
}
- if (heap->freenodes) {
- node = heap->freenodes;
- heap->freenodes = heap->freenodes->ptr;
- }
- else {
- node = (HeapNode *)BLI_memarena_alloc(heap->arena, sizeof(*node));
- }
+ node = heap_node_alloc(heap);
node->ptr = ptr;
node->value = value;
@@ -217,8 +295,7 @@ void *BLI_heap_popmin(Heap *heap)
BLI_assert(heap->size != 0);
- heap->tree[0]->ptr = heap->freenodes;
- heap->freenodes = heap->tree[0];
+ heap_node_free(heap, heap->tree[0]);
if (--heap->size) {
heap_swap(heap, 0, heap->size);
@@ -254,3 +331,4 @@ void *BLI_heap_node_ptr(HeapNode *node)
return node->ptr;
}
+/** \} */