diff --git a/indra/llcommon/llmemory.h b/indra/llcommon/llmemory.h
index 6f385304c510c4cfd54bee5601f67366afc4720b..8fe553c9b7b3641fb63e1ed102569aa843fcaf6c 100644
--- a/indra/llcommon/llmemory.h
+++ b/indra/llcommon/llmemory.h
@@ -41,13 +41,6 @@ class LLMutex ;
 #define LL_CHECK_MEMORY
 #endif
 
-
-#if LL_WINDOWS
-#define LL_ALIGN_OF __alignof
-#else
-#define LL_ALIGN_OF __align_of__
-#endif
-
 #if LL_WINDOWS
 #if ADDRESS_SIZE == 64
 #define LL_DEFAULT_HEAP_ALIGN 16
diff --git a/indra/llcommon/lltrace.h b/indra/llcommon/lltrace.h
index f668f2042765e357defe73f58181654b3e8279e9..c31f58d784b61619b68e77b9d343c21b966faf6d 100644
--- a/indra/llcommon/lltrace.h
+++ b/indra/llcommon/lltrace.h
@@ -387,41 +387,41 @@ public:
     S32 getMemFootprint() const { return mMemFootprint; }
 #endif
 
-    void* operator new(size_t size)
+    template<int CUSTOM_ALIGNMENT>
+    static void* aligned_new(size_t size)
     {
 #if LL_TRACE_ENABLED
         claim_alloc(sMemStat, size);
 #endif
-        return ll_aligned_malloc<ALIGNMENT>(size);
+        return ll_aligned_malloc<CUSTOM_ALIGNMENT>(size);
     }
 
     template<int CUSTOM_ALIGNMENT>
-    static void* aligned_new(size_t size)
+    static void aligned_delete(void* ptr, size_t size)
     {
 #if LL_TRACE_ENABLED
-        claim_alloc(sMemStat, size);
+        disclaim_alloc(sMemStat, size);
 #endif
-        return ll_aligned_malloc<CUSTOM_ALIGNMENT>(size);
+        ll_aligned_free<CUSTOM_ALIGNMENT>(ptr);
     }
 
-    void operator delete(void* ptr, size_t size)
+    void* operator new(std::size_t size)
     {
 #if LL_TRACE_ENABLED
-        disclaim_alloc(sMemStat, size);
+        claim_alloc(sMemStat, size);
 #endif
-        ll_aligned_free<ALIGNMENT>(ptr);
+        return ll_aligned_malloc<ALIGNMENT>(size);
     }
 
-    template<int CUSTOM_ALIGNMENT>
-    static void aligned_delete(void* ptr, size_t size)
+    void operator delete(void* ptr, std::size_t size)
     {
 #if LL_TRACE_ENABLED
         disclaim_alloc(sMemStat, size);
 #endif
-        ll_aligned_free<CUSTOM_ALIGNMENT>(ptr);
+        ll_aligned_free<ALIGNMENT>(ptr);
     }
 
-    void* operator new [](size_t size)
+    void* operator new[](std::size_t size)
     {
 #if LL_TRACE_ENABLED
         claim_alloc(sMemStat, size);
@@ -429,7 +429,7 @@ public:
         return ll_aligned_malloc<ALIGNMENT>(size);
     }
 
-    void operator delete[](void* ptr, size_t size)
+    void operator delete[](void* ptr, std::size_t size)
     {
 #if LL_TRACE_ENABLED
         disclaim_alloc(sMemStat, size);
diff --git a/indra/newview/llspatialpartition.h b/indra/newview/llspatialpartition.h
index f3806493252af9510582d7156e8e3b058479d123..5a541d222d9118147dc29b5da151c9ddcf4f4ee3 100644
--- a/indra/newview/llspatialpartition.h
+++ b/indra/newview/llspatialpartition.h
@@ -197,28 +197,30 @@ class LLSpatialGroup : public LLOcclusionCullingGroup
     friend class LLOctreeStateCheck;
 public:
 
+    LLSpatialGroup(const LLSpatialGroup& rhs) = delete;
+    const LLSpatialGroup& operator=(const LLSpatialGroup& rhs) = delete;
+
     // <alchemy>
-    void* operator new(size_t size)
+    void* operator new(std::size_t size)
     {
-        return ll_aligned_malloc<64>(size);
+        return aligned_new<64>(size);
     }
 
-    void operator delete(void* ptr)
+    void operator delete(void* ptr, std::size_t size)
     {
-        ll_aligned_free<64>(ptr);
+        aligned_delete<64>(ptr, size);
     }
-    // </alchemy>
 
-    LLSpatialGroup(const LLSpatialGroup& rhs) : LLOcclusionCullingGroup(rhs)
+    void* operator new[](std::size_t size)
     {
-        *this = rhs;
+        return aligned_new<64>(size);
     }
 
-    const LLSpatialGroup& operator=(const LLSpatialGroup& rhs)
+    void operator delete[](void* ptr, std::size_t size)
     {
-        LL_ERRS() << "Illegal operation!" << LL_ENDL;
-        return *this;
+        aligned_delete<64>(ptr, size);
     }
+    // </alchemy>
 
     static U32 sNodeCount;
     static BOOL sNoDelete; //deletion of spatial groups and draw info not allowed if TRUE