diff options
author | Sergey Sharybin <sergey.vfx@gmail.com> | 2015-02-18 23:48:59 +0300 |
---|---|---|
committer | Sergey Sharybin <sergey.vfx@gmail.com> | 2015-02-18 23:50:46 +0300 |
commit | 0f2adc081716da9f6f09608e762056f3143f91d2 (patch) | |
tree | 4d3738a2537c7833eafedef77b76c7f8e07a33da /intern/cycles/util | |
parent | ee9ac4e4fc495ea4a913cfff63650dfe07da35e2 (diff) |
Cycles: Make aligned allocation to respect WITH_BLENDER_GUARDEDALLOC
The title pretty much says it all. Worth briefly mentioning that we are
now at the point where, after applying some WIP patches, having much
fuller statistics about memory usage would help in measuring the exact
memory benefit.
Diffstat (limited to 'intern/cycles/util')
-rw-r--r-- | intern/cycles/util/util_aligned_malloc.cpp | 10 |
1 file changed, 9 insertions, 1 deletion
diff --git a/intern/cycles/util/util_aligned_malloc.cpp b/intern/cycles/util/util_aligned_malloc.cpp index 51f21db2dc3..3e825ce2e0f 100644 --- a/intern/cycles/util/util_aligned_malloc.cpp +++ b/intern/cycles/util/util_aligned_malloc.cpp @@ -15,6 +15,7 @@ */ #include "util_aligned_malloc.h" +#include "util_guarded_allocator.h" #include <cassert> @@ -41,6 +42,9 @@ CCL_NAMESPACE_BEGIN void *util_aligned_malloc(int size, int alignment) { +#ifdef WITH_BLENDER_GUARDEDALLOC + return MEM_mallocN_aligned(size, alignment, "Cycles Aligned Alloc"); +#endif #ifdef _WIN32 return _aligned_malloc(size, alignment); #elif defined(__APPLE__) @@ -65,7 +69,11 @@ void *util_aligned_malloc(int size, int alignment) void util_aligned_free(void *ptr) { -#ifdef _WIN32 +#if defined(WITH_BLENDER_GUARDEDALLOC) + if(ptr != NULL) { + MEM_freeN(ptr); + } +#elif defined(_WIN32) _aligned_free(ptr); #else free(ptr); |