diff options
author | Brecht Van Lommel <brechtvanlommel@pandora.be> | 2013-06-22 18:35:09 +0400 |
---|---|---|
committer | Brecht Van Lommel <brechtvanlommel@pandora.be> | 2013-06-22 18:35:09 +0400 |
commit | 240fb6fa26674b8fefeed2a226352ab4c31cfe74 (patch) | |
tree | 7ed01846c1b394af495dbf1d8c3a60b2ea9793da /intern/cycles/util/util_types.h | |
parent | 5da48f425fcfeb7b3eae67df4ac0d29e698809f6 (diff) |
Cycles: ensure any SSE data is allocated 16 byte aligned, happens automatically
on many platforms but is not assured everywhere.
Diffstat (limited to 'intern/cycles/util/util_types.h')
-rw-r--r-- | intern/cycles/util/util_types.h | 24 |
1 files changed, 24 insertions, 0 deletions
#ifndef __KERNEL_GPU__

/* Allocate `size` bytes whose address is a multiple of `alignment`.
 *
 * alignment must be a power of two (the rounding below relies on the
 * `& ~(alignment - 1)` mask). The actual allocation is padded by
 * sizeof(void*) + alignment - 1 bytes; the original malloc() pointer is
 * stashed in the word immediately before the returned address so that
 * free_aligned() can recover it.
 *
 * Returns NULL if the underlying allocation fails. */
static inline void *malloc_aligned(size_t size, size_t alignment)
{
	void *data = malloc(size + sizeof(void*) + alignment - 1);

	/* Bug fix: the original dereferenced through NULL on allocation
	 * failure; propagate the failure to the caller instead. */
	if(data == NULL)
		return NULL;

	/* Punning pointer <-> size_t through a union to do the alignment
	 * arithmetic; assumes size_t can hold a pointer value, which holds
	 * on the platforms Cycles targets (uintptr_t would be stricter). */
	union { void *ptr; size_t offset; } u;
	u.ptr = (char*)data + sizeof(void*);
	u.offset = (u.offset + alignment - 1) & ~(alignment - 1);
	*(((void**)u.ptr) - 1) = data;

	return u.ptr;
}

/* Free memory obtained from malloc_aligned(). NULL is accepted and
 * ignored; the guard is required here (not merely stylistic) because we
 * must dereference ptr to recover the original malloc() pointer. */
static inline void free_aligned(void *ptr)
{
	if(ptr) {
		void *data = *(((void**)ptr) - 1);
		free(data);
	}
}

#endif