Index: ps/trunk/source/lib/allocators/DynamicArena.h
===================================================================
--- ps/trunk/source/lib/allocators/DynamicArena.h
+++ ps/trunk/source/lib/allocators/DynamicArena.h
@@ -0,0 +1,116 @@
+/* Copyright (C) 2021 Wildfire Games.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files (the
+ * "Software"), to deal in the Software without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Software, and to
+ * permit persons to whom the Software is furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included
+ * in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+#ifndef INCLUDED_ALLOCATORS_DYNAMIC_ARENA
+#define INCLUDED_ALLOCATORS_DYNAMIC_ARENA
+
+#include "lib/bits.h"
+
+#include <new>    // bad_alloc
+#include <memory> // unique_ptr
+#include <vector>
+
+namespace Allocators {
+
+/**
+ * 'Blind' memory allocator. Characteristics:
+ * - grows dynamically and linearly, in BLOCK_SIZE increments
+ * - no reallocations, pointers remain valid
+ * - can allocate objects up to BLOCK_SIZE in size
+ * - can handle any alignment (up to BLOCK_SIZE)
+ * - doesn't deallocate unless cleared or destroyed.
+ */
+template<size_t BLOCK_SIZE>
+class DynamicArena
+{
+protected:
+	struct Block
+	{
+		bool Available(size_t n, size_t alignment) const
+		{
+			return n + ROUND_UP(size, alignment) <= BLOCK_SIZE;
+		}
+
+		uint8_t* Allocate(size_t n, size_t alignment)
+		{
+			size = ROUND_UP(size, alignment);
+			uint8_t* ptr = &block[size];
+			size += n;
+			return ptr;
+		}
+
+		alignas(8) uint8_t block[BLOCK_SIZE];
+		size_t size = 0;
+	};
+
+	NONCOPYABLE(DynamicArena);
+public:
+	DynamicArena()
+	{
+		m_Blocks.reserve(16);
+		AllocateNewBlock();
+	}
+
+	void AllocateNewBlock()
+	{
+		m_Blocks.emplace_back(std::make_unique<Block>());
+		m_NewestBlock = m_Blocks.back().get();
+	}
+
+	void* allocate(size_t n, const void*, size_t alignment)
+	{
+		// Safely handle zero-sized allocations (happens with GCC STL - see ticket #909).
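+		// Bumping the request to one byte keeps the returned pointer valid and unique.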
+		if (n == 0)
+			n = 1;
+
+		if (n > BLOCK_SIZE)
+		{
+			debug_warn("DynamicArena cannot allocate more than chunk size");
+			throw std::bad_alloc();
+		}
+
+		if (!m_NewestBlock->Available(n, alignment))
+			AllocateNewBlock();
+
+		return reinterpret_cast<void*>(m_NewestBlock->Allocate(n, alignment));
+	}
+
+	void deallocate(void* UNUSED(p), size_t UNUSED(n))
+	{
+		// ignored
+	}
+
+	void clear()
+	{
+		m_NewestBlock = nullptr;
+		m_Blocks.clear();
+		AllocateNewBlock();
+	}
+
+protected:
+	std::vector<std::unique_ptr<Block>> m_Blocks;
+	Block* m_NewestBlock = nullptr;
+};
+
+} // namespace Allocators
+
+#endif // INCLUDED_ALLOCATORS_DYNAMIC_ARENA
Index: ps/trunk/source/lib/allocators/STLAllocators.h
===================================================================
--- ps/trunk/source/lib/allocators/STLAllocators.h
+++ ps/trunk/source/lib/allocators/STLAllocators.h
@@ -0,0 +1,116 @@
+/* Copyright (C) 2021 Wildfire Games.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files (the
+ * "Software"), to deal in the Software without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Software, and to
+ * permit persons to whom the Software is furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included
+ * in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+#ifndef INCLUDED_STL_ALLOCATORS
+#define INCLUDED_STL_ALLOCATORS
+
+#include <memory>
+#include <type_traits>
+
+/**
+ * Adapts a 0 A.D.-style allocator for use in STL containers.
+ * 'Backend' is the underlying allocator.
+ */
+template<typename T, class Backend>
+class STLAllocator
+{
+	template<typename A, class B>
+	friend class STLAllocator;
+public:
+	using value_type = T;
+	using pointer = T*;
+	using is_always_equal = std::false_type;
+
+	STLAllocator() : allocator(std::make_shared<Backend>())
+	{
+	}
+
+	template<typename V>
+	STLAllocator(const STLAllocator<V, Backend>& proxy) : allocator(proxy.allocator)
+	{
+	}
+
+	template<typename V>
+	struct rebind
+	{
+		using other = STLAllocator<V, Backend>;
+	};
+
+	T* allocate(size_t n)
+	{
+		return static_cast<T*>(allocator->allocate(n * sizeof(T), nullptr, alignof(T)));
+	}
+
+	void deallocate(T* ptr, const size_t n)
+	{
+		return allocator->deallocate(static_cast<void*>(ptr), n * sizeof(T));
+	}
+
+private:
+	std::shared_ptr<Backend> allocator;
+};
+
+
+/**
+ * Proxies allocation to another allocator.
+ * This allows a single allocator to serve multiple STL containers.
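+ *
+ * Illustrative use, mirroring ModelRenderer.cpp further down in this patch
+ * (the names below are local to the example):
+ *   using Arena = Allocators::DynamicArena<256 * KiB>;
+ *   Arena arena;
+ *   std::vector<CModel*, ProxyAllocator<CModel*, Arena>> models((ProxyAllocator<CModel*, Arena>(arena)));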
+ */
+template<typename T, class Backend>
+class ProxyAllocator
+{
+	template<typename A, class B>
+	friend class ProxyAllocator;
+public:
+	using value_type = T;
+	using pointer = T*;
+	using is_always_equal = std::false_type;
+
+	ProxyAllocator(Backend& alloc) : allocator(alloc)
+	{
+	}
+
+	template<typename V>
+	ProxyAllocator(const ProxyAllocator<V, Backend>& proxy) : allocator(proxy.allocator)
+	{
+	}
+
+	template<typename V>
+	struct rebind
+	{
+		using other = ProxyAllocator<V, Backend>;
+	};
+
+	T* allocate(size_t n)
+	{
+		return static_cast<T*>(allocator.allocate(n * sizeof(T), nullptr, alignof(T)));
+	}
+
+	void deallocate(T* ptr, const size_t n)
+	{
+		return allocator.deallocate(static_cast<void*>(ptr), n * sizeof(T));
+	}
+
+private:
+	Backend& allocator;
+};
+
+#endif // INCLUDED_STL_ALLOCATORS
Index: ps/trunk/source/lib/allocators/aligned_allocator.h
===================================================================
--- ps/trunk/source/lib/allocators/aligned_allocator.h
+++ ps/trunk/source/lib/allocators/aligned_allocator.h
@@ -1,148 +0,0 @@
-/* Copyright (C) 2010 Wildfire Games.
- *
- * Permission is hereby granted, free of charge, to any person obtaining
- * a copy of this software and associated documentation files (the
- * "Software"), to deal in the Software without restriction, including
- * without limitation the rights to use, copy, modify, merge, publish,
- * distribute, sublicense, and/or sell copies of the Software, and to
- * permit persons to whom the Software is furnished to do so, subject to
- * the following conditions:
- *
- * The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
- * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
- * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
- * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
- */
-
-/*
- * STL allocator for aligned memory
- */
-
-#ifndef ALIGNED_ALLOCATOR
-#define ALIGNED_ALLOCATOR
-
-#include "lib/bits.h"	// round_up
-#include "lib/sysdep/arch/x86_x64/cache.h"
-#include "lib/sysdep/rtl.h"	// rtl_AllocateAligned
-
-
-/**
- * stateless STL allocator that aligns elements to the L1 cache line size.
- *
- * note: the alignment is hard-coded to avoid any allocator state.
- * this avoids portability problems, which is important since allocators
- * are rather poorly specified.
- *
- * references:
- * http://www.tantalon.com/pete/customallocators.ppt
- * http://www.flipcode.com/archives/Aligned_Block_Allocation.shtml
- * http://www.josuttis.com/cppcode/allocator.html
- *
- * derived from code that bears the following copyright notice:
- * (C) Copyright Nicolai M. Josuttis 1999.
- * Permission to copy, use, modify, sell and distribute this software
- * is granted provided this copyright notice appears in all copies.
- * This software is provided "as is" without express or implied
- * warranty, and with no claim as to its suitability for any purpose.
- **/ -template -class AlignedAllocator -{ -public: - // type definitions - typedef T value_type; - typedef T* pointer; - typedef const T* const_pointer; - typedef T& reference; - typedef const T& const_reference; - typedef std::size_t size_type; - typedef std::ptrdiff_t difference_type; - - // rebind allocator to type U - template - struct rebind - { - typedef AlignedAllocator other; - }; - - pointer address(reference value) const - { - return &value; - } - - const_pointer address(const_reference value) const - { - return &value; - } - - NOTHROW_DEFINE AlignedAllocator() - { - } - - NOTHROW_DEFINE AlignedAllocator(const AlignedAllocator&) - { - } - - template - NOTHROW_DEFINE AlignedAllocator (const AlignedAllocator&) - { - } - - NOTHROW_DEFINE ~AlignedAllocator() - { - } - - NOTHROW_DEFINE size_type max_size() const - { - // maximum number of *elements* that can be allocated - return std::numeric_limits::max() / sizeof(T); - } - - // allocate uninitialized storage - pointer allocate(size_type numElements) - { - const size_type alignment = x86_x64::Caches(x86_x64::L1D)->entrySize; - const size_type elementSize = round_up(sizeof(T), alignment); - const size_type size = numElements * elementSize; - pointer p = (pointer)rtl_AllocateAligned(size, alignment); - return p; - } - - // deallocate storage of elements that have been destroyed - void deallocate(pointer p, size_type UNUSED(num)) - { - rtl_FreeAligned((void*)p); - } - - void construct(pointer p, const T& value) - { - new((void*)p) T(value); - } - - void destroy(pointer p) - { - p->~T(); - UNUSED2(p); // otherwise, warning is raised for reasons unknown - } -}; - -// indicate that all specializations of this allocator are interchangeable -template -NOTHROW_DEFINE bool operator==(const AlignedAllocator&, const AlignedAllocator&) -{ - return true; -} - -template -NOTHROW_DEFINE bool operator!=(const AlignedAllocator&, const AlignedAllocator&) -{ - return false; -} - -#endif // #ifndef ALIGNED_ALLOCATOR Index: ps/trunk/source/lib/allocators/allocator_adapters.h =================================================================== --- ps/trunk/source/lib/allocators/allocator_adapters.h +++ ps/trunk/source/lib/allocators/allocator_adapters.h @@ -1,187 +0,0 @@ -/* Copyright (C) 2019 Wildfire Games. - * - * Permission is hereby granted, free of charge, to any person obtaining - * a copy of this software and associated documentation files (the - * "Software"), to deal in the Software without restriction, including - * without limitation the rights to use, copy, modify, merge, publish, - * distribute, sublicense, and/or sell copies of the Software, and to - * permit persons to whom the Software is furnished to do so, subject to - * the following conditions: - * - * The above copyright notice and this permission notice shall be included - * in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, - * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE - * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - */ - -/* - * adapters for allocators; provides a minimal subset of the - * STL allocator interface. 
- */ - -#ifndef ALLOCATOR_ADAPTERS -#define ALLOCATOR_ADAPTERS - -#include "lib/sysdep/rtl.h" -#include "lib/sysdep/vm.h" - -#include - -// NB: STL allocators are parameterized on the object type and indicate -// the number of elements to [de]allocate. however, these adapters are -// only used for allocating storage and receive the number of bytes. - -struct Allocator_Heap -{ - void* allocate(size_t size) - { - return malloc(size); - } - - void deallocate(void* p, size_t UNUSED(size)) - { - return free(p); - } -}; - -template -struct Allocator_Aligned -{ - void* allocate(size_t size) - { - return rtl_AllocateAligned(size, alignment); - } - - void deallocate(void* p, size_t UNUSED(size)) - { - return rtl_FreeAligned(p); - } -}; - -template -struct Allocator_VM -{ - void* allocate(size_t size) - { - return vm::Allocate(size, pageType, prot); - } - - void deallocate(void* p, size_t size) - { - vm::Free(p, size); - } -}; - -template -struct Allocator_AddressSpace -{ - void* allocate(size_t size) - { - return vm::ReserveAddressSpace(size, commitSize, pageType, prot); - } - - void deallocate(void* p, size_t size) - { - vm::ReleaseAddressSpace(p, size); - } -}; - - -/** - * fully STL-compatible allocator that simply draws upon another Allocator. - * this allows a single allocator to serve multiple STL containers. - */ -template -class ProxyAllocator -{ -public: - typedef T value_type; - typedef T* pointer; - typedef const T* const_pointer; - typedef T& reference; - typedef const T& const_reference; - typedef std::size_t size_type; - typedef std::ptrdiff_t difference_type; - - template - struct rebind - { - typedef ProxyAllocator other; - }; - - explicit NOTHROW_DEFINE ProxyAllocator(Allocator& allocator) - : allocator(&allocator) - { - } - - template - NOTHROW_DEFINE ProxyAllocator(const ProxyAllocator& rhs) - : allocator(rhs.allocator) - { - } - - // (required by VC2010 std::vector) - bool operator==(const ProxyAllocator& rhs) const - { - return allocator == rhs.allocator; - } - bool operator!=(const ProxyAllocator& rhs) const - { - return !operator==(rhs); - } - - pointer address(reference r) - { - return &r; - } - - const_pointer address(const_reference s) - { - return &s; - } - - size_type max_size() const throw () - { - return std::numeric_limits::max() / sizeof(T); - } - - void construct(const pointer ptr, const value_type& t) - { - new(ptr) T(t); - } - - void destroy(pointer ptr) - { - ptr->~T(); - UNUSED2(ptr); // silence MSVC warnings - } - - pointer allocate(size_type n) - { - // safely handle zero-sized allocations (happens with GCC STL - see ticket #909). 
- if(n == 0) - n = 1; - return (pointer)allocator->allocate(n*sizeof(T)); - } - - pointer allocate(size_type n, const void* const) - { - return allocate(n); - } - - void deallocate(const pointer ptr, const size_type n) - { - return allocator->deallocate(ptr, n*sizeof(T)); - } - -//private: // otherwise copy ctor cannot access it - Allocator* allocator; -}; - -#endif // #ifndef ALLOCATOR_ADAPTERS Index: ps/trunk/source/lib/allocators/allocator_policies.h =================================================================== --- ps/trunk/source/lib/allocators/allocator_policies.h +++ ps/trunk/source/lib/allocators/allocator_policies.h @@ -28,7 +28,7 @@ #define ALLOCATOR_POLICIES #include "lib/alignment.h" // pageSize -#include "lib/allocators/allocator_adapters.h" +#include "lib/allocators/stateless_allocators.h" #include "lib/allocators/freelist.h" #include Index: ps/trunk/source/lib/allocators/arena.h =================================================================== --- ps/trunk/source/lib/allocators/arena.h +++ ps/trunk/source/lib/allocators/arena.h @@ -1,173 +0,0 @@ -/* Copyright (C) 2013 Wildfire Games. - * - * Permission is hereby granted, free of charge, to any person obtaining - * a copy of this software and associated documentation files (the - * "Software"), to deal in the Software without restriction, including - * without limitation the rights to use, copy, modify, merge, publish, - * distribute, sublicense, and/or sell copies of the Software, and to - * permit persons to whom the Software is furnished to do so, subject to - * the following conditions: - * - * The above copyright notice and this permission notice shall be included - * in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, - * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE - * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - */ - -/* - * arena allocator (variable-size blocks, no deallocation). - */ - -#ifndef INCLUDED_ALLOCATORS_ARENA -#define INCLUDED_ALLOCATORS_ARENA - -#include "lib/allocators/allocator_policies.h" - -namespace Allocators { - -/** - * allocator design parameters: - * - O(1) allocation; - * - variable-size blocks; - * - support for deallocating all objects; - * - consecutive allocations are back-to-back; - * - no extra alignment nor padding. - **/ -template > -class Arena -{ - NONCOPYABLE(Arena); -public: - Arena(size_t maxSize) - : storage(maxSize) - { - DeallocateAll(); - } - - size_t RemainingBytes() const - { - return storage.MaxCapacity() - end; - } - - void* allocate(size_t size) - { - return (void*)StorageAppend(storage, end, size); - } - - void deallocate(void* UNUSED(p), size_t UNUSED(size)) - { - // ignored - } - - void DeallocateAll() - { - end = 0; - } - - // @return whether the address lies within the previously allocated range. 
- bool Contains(uintptr_t address) const - { - return (address - storage.Address()) < end; - } - -private: - Storage storage; - size_t end; -}; - -LIB_API void TestArena(); - - -/** - * allocator design parameters: - * - grow dynamically with a fixed chunkSize - * - for frequent allocations of size << chunkSize - * - no reallocations, pointers remain valid - **/ -class DynamicArena -{ - struct ArenaChunk - { - bool Available(size_t size) const - { - return size <= (capacity - end); - } - - // Must check Available first or this may return an invalid address - uintptr_t Allocate(size_t size) - { - uintptr_t ptr = storage + end; - end += size; - return ptr; - } - - uintptr_t storage; - size_t end; - size_t capacity; - ArenaChunk* next; - }; - - NONCOPYABLE(DynamicArena); -public: - DynamicArena(size_t chunkSize) : chunkSize(chunkSize), head(NULL) - { - AllocateNewChunk(); - } - - ~DynamicArena() - { - ArenaChunk* chunk = head; - while (chunk != NULL) - { - ArenaChunk* next = chunk->next; - free(chunk); - chunk = next; - } - } - - void AllocateNewChunk() - { - // For efficiency, do a single allocation with the ArenaChunk and its storage - ArenaChunk* next = head; - head = (ArenaChunk*)malloc(sizeof(ArenaChunk) + chunkSize); - ENSURE(head); - head->storage = sizeof(ArenaChunk) + uintptr_t(head); - head->end = 0; - head->capacity = chunkSize; - head->next = next; - } - - void* allocate(size_t size) - { - if (size > chunkSize) - { - debug_warn(L"DynamicArena cannot allocate more than chunk size"); - throw std::bad_alloc(); - } - else if (!head->Available(size)) - AllocateNewChunk(); - - return (void*)head->Allocate(size); - } - - void deallocate(void* UNUSED(p), size_t UNUSED(size)) - { - // ignored - } - -private: - - const size_t chunkSize; - ArenaChunk *head; -}; - - -} // namespace Allocators - -#endif // #ifndef INCLUDED_ALLOCATORS_ARENA Index: ps/trunk/source/lib/allocators/arena.cpp =================================================================== --- ps/trunk/source/lib/allocators/arena.cpp +++ ps/trunk/source/lib/allocators/arena.cpp @@ -1,75 +0,0 @@ -/* Copyright (C) 2011 Wildfire Games. - * - * Permission is hereby granted, free of charge, to any person obtaining - * a copy of this software and associated documentation files (the - * "Software"), to deal in the Software without restriction, including - * without limitation the rights to use, copy, modify, merge, publish, - * distribute, sublicense, and/or sell copies of the Software, and to - * permit persons to whom the Software is furnished to do so, subject to - * the following conditions: - * - * The above copyright notice and this permission notice shall be included - * in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, - * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE - * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - */ - -/* - * arena allocator (variable-size blocks, no deallocation). 
- */ - -#include "precompiled.h" -#include "lib/allocators/arena.h" - -namespace Allocators { - -template -struct BasicArenaTest -{ - void operator()() const - { - Arena a(100); - const size_t initialSpace = a.RemainingBytes(); - void* p = a.allocate(100); - ENSURE(p != 0); - ENSURE(a.Contains(uintptr_t(p))); - ENSURE(a.RemainingBytes() == initialSpace-100); - ENSURE(a.Contains(uintptr_t(p)+1)); - ENSURE(a.Contains(uintptr_t(p)+99)); - ENSURE(!a.Contains(uintptr_t(p)-1)); - ENSURE(!a.Contains(uintptr_t(p)+100)); - if(a.RemainingBytes() == 0) - ENSURE(a.allocate(1) == 0); // full - else - ENSURE(a.allocate(1) != 0); // can still expand - a.DeallocateAll(); - ENSURE(!a.Contains(uintptr_t(p))); - - p = a.allocate(36); - ENSURE(p != 0); - ENSURE(a.Contains(uintptr_t(p))); - ENSURE(a.RemainingBytes() == initialSpace-36); - void* p2 = a.allocate(64); - ENSURE(p2 != 0); - ENSURE(a.Contains(uintptr_t(p2))); - ENSURE(a.RemainingBytes() == initialSpace-36-64); - ENSURE(p2 == (void*)(uintptr_t(p)+36)); - if(a.RemainingBytes() == 0) - ENSURE(a.allocate(1) == 0); // full - else - ENSURE(a.allocate(1) != 0); // can still expand - } -}; - -void TestArena() -{ - ForEachStorage(); -} - -} // namespace Allocators Index: ps/trunk/source/lib/allocators/headerless.h =================================================================== --- ps/trunk/source/lib/allocators/headerless.h +++ ps/trunk/source/lib/allocators/headerless.h @@ -1,99 +0,0 @@ -/* Copyright (C) 2010 Wildfire Games. - * - * Permission is hereby granted, free of charge, to any person obtaining - * a copy of this software and associated documentation files (the - * "Software"), to deal in the Software without restriction, including - * without limitation the rights to use, copy, modify, merge, publish, - * distribute, sublicense, and/or sell copies of the Software, and to - * permit persons to whom the Software is furnished to do so, subject to - * the following conditions: - * - * The above copyright notice and this permission notice shall be included - * in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, - * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE - * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - */ - -/* - * (header-less) pool-based heap allocator - */ - -#ifndef INCLUDED_ALLOCATORS_HEADERLESS -#define INCLUDED_ALLOCATORS_HEADERLESS - -/** - * (header-less) pool-based heap allocator - * provides Allocate and Deallocate without requiring in-band headers; - * this is useful when allocating page-aligned I/O buffers - * (headers would waste an entire page per buffer) - * - * policy: - * - allocation: first exhaust the freelist, then allocate more - * - freelist: address-ordered good fit, always split blocks - * - coalescing: immediate - * mechanism: - * - coalescing: boundary tags in freed memory with distinct bit patterns - * - freelist: segregated range lists of power-of-two size classes - * - * note: this module basically implements a (rather complex) freelist and - * could be made independent of the Pool allocation scheme. 
however, reading - * neighboring boundary tags may cause segmentation violations; knowing the - * bounds of valid committed memory (i.e. Pool extents) avoids this. - **/ -class HeaderlessAllocator -{ -public: - // allocators must 'naturally' align pointers, i.e. ensure they are - // multiples of the largest native type (currently __m128). - // since there are no headers, we can guarantee alignment by - // requiring sizes to be multiples of allocationAlignment. - static const size_t allocationAlignment = 16; - - // allocations must be large enough to hold our boundary tags - // when freed. (see rationale above BoundaryTagManager) - static const size_t minAllocationSize = 128; - - /** - * @param poolSize maximum amount of memory that can be allocated. - * this much virtual address space is reserved up-front (see Pool). - **/ - HeaderlessAllocator(size_t poolSize); - - /** - * restore the original state (as if newly constructed). - * this includes reclaiming all extant allocations. - **/ - void Reset(); - - /** - * @param size [bytes] (= minAllocationSize + i*allocationAlignment). - * (this allocator is designed for requests on the order of several KiB) - * @return allocated memory or 0 if the pool is too fragmented or full. - **/ - NOTHROW_DECLARE void* Allocate(size_t size); - - /** - * deallocate memory. - * @param p must be exactly as returned by Allocate (in particular, - * evenly divisible by allocationAlignment) - * @param size must be exactly as specified to Allocate. - **/ - void Deallocate(void* p, size_t size); - - /** - * perform sanity checks; ensure allocator state is consistent. - **/ - void Validate() const; - -private: - class Impl; - shared_ptr impl; -}; - -#endif // #ifndef INCLUDED_ALLOCATORS_HEADERLESS Index: ps/trunk/source/lib/allocators/headerless.cpp =================================================================== --- ps/trunk/source/lib/allocators/headerless.cpp +++ ps/trunk/source/lib/allocators/headerless.cpp @@ -1,773 +0,0 @@ -/* Copyright (C) 2010 Wildfire Games. - * - * Permission is hereby granted, free of charge, to any person obtaining - * a copy of this software and associated documentation files (the - * "Software"), to deal in the Software without restriction, including - * without limitation the rights to use, copy, modify, merge, publish, - * distribute, sublicense, and/or sell copies of the Software, and to - * permit persons to whom the Software is furnished to do so, subject to - * the following conditions: - * - * The above copyright notice and this permission notice shall be included - * in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, - * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE - * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- */ - -/* - * (header-less) pool-based heap allocator - */ - -#include "precompiled.h" -#include "lib/allocators/headerless.h" - -#include "lib/bits.h" -#include "lib/allocators/pool.h" - - -static const bool performSanityChecks = true; - -// shared by Impl::Allocate and FreedBlock::Validate -static bool IsValidSize(size_t size); - - -//----------------------------------------------------------------------------- - -// this combines the boundary tags and link fields into one structure, -// which is safer than direct pointer arithmetic. -// -// it is written to freed memory, which is fine because IsValidSize ensures -// the allocations are large enough. -class FreedBlock -{ - friend class RangeList; // manipulates link fields directly - -public: - // (required for RangeList::m_sentinel) - FreedBlock() - { - } - - void Setup(uintptr_t id, size_t size) - { - m_magic = s_magic; - m_size = size; - m_id = id; - } - - void Reset() - { - // clear all fields to prevent accidental reuse - prev = next = 0; - m_id = 0; - m_size = ~size_t(0); - m_magic = 0; - } - - size_t Size() const - { - return m_size; - } - - /** - * @return whether this appears to be a FreedBlock instance with the - * desired ID. for additional safety, also call Validate(). - **/ - bool IsFreedBlock(uintptr_t id) const - { - if(m_id != id) - return false; - if(m_magic != s_magic) - return false; - return true; - } - - /** - * warn if any invariant doesn't hold. - **/ - void Validate(uintptr_t id) const - { - if(!performSanityChecks) return; - - // note: RangeList::Validate implicitly checks the prev and next - // fields by iterating over the list. - - // note: we can't check for prev != next because we're called for - // footers as well, and they don't have valid pointers. - - ENSURE(IsValidSize(m_size)); - ENSURE(IsFreedBlock(id)); - } - -private: - // note: the magic and ID fields are stored at both ends of this - // class to increase the chance of detecting memory corruption. - static const u64 s_magic = 0xFF55AA00BBCCDDEEull; - u64 m_magic; - - FreedBlock* prev; - FreedBlock* next; - - // size [bytes] of the entire memory block, including header and footer - size_t m_size; - - // this differentiates between headers and footers. - uintptr_t m_id; -}; - - -static bool IsValidSize(size_t size) -{ - // note: we disallow the questionable practice of zero-byte allocations - // because they may be indicative of bugs. - if(size < HeaderlessAllocator::minAllocationSize) - return false; - - if(size % HeaderlessAllocator::allocationAlignment) - return false; - - return true; -} - - -//----------------------------------------------------------------------------- -// freelists -//----------------------------------------------------------------------------- - -// policy: address-ordered good fit -// mechanism: segregated range lists of power-of-two size classes - -struct AddressOrder -{ - static bool ShouldInsertBefore(FreedBlock* current, FreedBlock* successor) - { - return current < successor; - } -}; - -// "range list" is a freelist of similarly-sized blocks. 
-class RangeList -{ -public: - RangeList() - { - Reset(); - } - - void Reset() - { - m_sentinel.prev = &m_sentinel; - m_sentinel.next = &m_sentinel; - m_freeBlocks = 0; - m_freeBytes = 0; - } - - template - void Insert(FreedBlock* freedBlock) - { - // find freedBlock before which to insert - FreedBlock* successor; - for(successor = m_sentinel.next; successor != &m_sentinel; successor = successor->next) - { - if(InsertPolicy::ShouldInsertBefore(freedBlock, successor)) - break; - } - - freedBlock->prev = successor->prev; - freedBlock->next = successor; - successor->prev->next = freedBlock; - successor->prev = freedBlock; - - m_freeBlocks++; - m_freeBytes += freedBlock->Size(); - } - - /** - * @return the first freed block of size >= minSize or 0 if none exists. - **/ - FreedBlock* Find(size_t minSize) - { - for(FreedBlock* freedBlock = m_sentinel.next; freedBlock != &m_sentinel; freedBlock = freedBlock->next) - { - if(freedBlock->Size() >= minSize) - return freedBlock; - } - - // none found, so average block size is less than the desired size - ENSURE(m_freeBytes/m_freeBlocks < minSize); - return 0; - } - - void Remove(FreedBlock* freedBlock) - { - freedBlock->next->prev = freedBlock->prev; - freedBlock->prev->next = freedBlock->next; - - ENSURE(m_freeBlocks != 0); - ENSURE(m_freeBytes >= freedBlock->Size()); - m_freeBlocks--; - m_freeBytes -= freedBlock->Size(); - } - - void Validate(uintptr_t id) const - { - if(!performSanityChecks) return; - - size_t freeBlocks = 0, freeBytes = 0; - - for(FreedBlock* freedBlock = m_sentinel.next; freedBlock != &m_sentinel; freedBlock = freedBlock->next) - { - freedBlock->Validate(id); - freeBlocks++; - freeBytes += freedBlock->Size(); - } - - for(FreedBlock* freedBlock = m_sentinel.prev; freedBlock != &m_sentinel; freedBlock = freedBlock->prev) - { - freedBlock->Validate(id); - freeBlocks++; - freeBytes += freedBlock->Size(); - } - - // our idea of the number and size of free blocks is correct - ENSURE(freeBlocks == m_freeBlocks*2 && freeBytes == m_freeBytes*2); - // if empty, state must be as established by Reset - ENSURE(!IsEmpty() || (m_sentinel.next == &m_sentinel && m_sentinel.prev == &m_sentinel)); - } - - bool IsEmpty() const - { - return (m_freeBlocks == 0); - } - - size_t FreeBlocks() const - { - return m_freeBlocks; - } - - size_t FreeBytes() const - { - return m_freeBytes; - } - -private: - // a sentinel simplifies Insert and Remove. we store it here instead of - // in a separate array to improve locality (it is actually accessed). - mutable FreedBlock m_sentinel; - - size_t m_freeBlocks; - size_t m_freeBytes; -}; - - -//----------------------------------------------------------------------------- - -class SegregatedRangeLists -{ -public: - SegregatedRangeLists() - { - Reset(); - } - - void Reset() - { - m_bitmap = 0; - for(size_t i = 0; i < numRangeLists; i++) - m_rangeLists[i].Reset(); - } - - void Insert(FreedBlock* freedBlock) - { - const size_t sizeClass = SizeClass(freedBlock->Size()); - m_rangeLists[sizeClass].Insert(freedBlock); - - m_bitmap |= Bit(sizeClass); - } - - /** - * @return the first freed block of size >= minSize or 0 if none exists. 
- **/ - FreedBlock* Find(size_t minSize) - { - // iterate over all large enough, non-empty size classes - // (zero overhead for empty size classes) - const size_t minSizeClass = SizeClass(minSize); - size_t sizeClassBits = m_bitmap & ((~size_t(0)) << minSizeClass); - while(sizeClassBits) - { - const size_t size = ValueOfLeastSignificantOneBit(sizeClassBits); - sizeClassBits &= ~size; // remove from sizeClassBits - const size_t sizeClass = SizeClass(size); - - FreedBlock* freedBlock = m_rangeLists[sizeClass].Find(minSize); - if(freedBlock) - return freedBlock; - } - - // apparently all classes above minSizeClass are empty, - // or the above would have succeeded. - ENSURE(m_bitmap < Bit(minSizeClass+1)); - return 0; - } - - void Remove(FreedBlock* freedBlock) - { - const size_t sizeClass = SizeClass(freedBlock->Size()); - m_rangeLists[sizeClass].Remove(freedBlock); - - // (masking with !IsEmpty() << sizeClass would probably be faster) - if(m_rangeLists[sizeClass].IsEmpty()) - m_bitmap &= ~Bit(sizeClass); - } - - void Validate(uintptr_t id) const - { - for(size_t i = 0; i < numRangeLists; i++) - { - m_rangeLists[i].Validate(id); - - // both bitmap and list must agree on whether they are empty - ENSURE(((m_bitmap & Bit(i)) == 0) == m_rangeLists[i].IsEmpty()); - } - } - - size_t FreeBlocks() const - { - size_t freeBlocks = 0; - for(size_t i = 0; i < numRangeLists; i++) - freeBlocks += m_rangeLists[i].FreeBlocks(); - return freeBlocks; - } - - size_t FreeBytes() const - { - size_t freeBytes = 0; - for(size_t i = 0; i < numRangeLists; i++) - freeBytes += m_rangeLists[i].FreeBytes(); - return freeBytes; - } - -private: - /** - * @return "size class" of a given size. - * class i > 0 contains blocks of size (2**(i-1), 2**i]. - **/ - static size_t SizeClass(size_t size) - { - return ceil_log2(size); - } - - static uintptr_t ValueOfLeastSignificantOneBit(uintptr_t x) - { - return (x & -(intptr_t)x); - } - - // segregated, i.e. one list per size class. - static const size_t numRangeLists = sizeof(uintptr_t)*CHAR_BIT; - RangeList m_rangeLists[numRangeLists]; - - // bit i set <==> size class i's freelist is not empty. - // this allows finding a non-empty list in O(1). - uintptr_t m_bitmap; -}; - - -//----------------------------------------------------------------------------- -// coalescing -//----------------------------------------------------------------------------- - -// policy: immediately coalesce -// mechanism: boundary tags - -// note: the id and magic values are all that differentiates tags from -// user data. this isn't 100% reliable, but as with headers, we don't want -// to insert extra boundary tags into the allocated memory. - -// note: footers consist of Tag{magic, ID, size}, while headers also -// need prev/next pointers. this could comfortably fit in 64 bytes, -// but we don't want to inherit headers from a base class because its -// prev/next pointers should reside between the magic and ID fields. -// maintaining separate FreedBlock and Footer classes is also undesirable; -// we prefer to use FreedBlock for both, which increases the minimum -// allocation size to 64 + allocationAlignment, e.g. 128. -// that's not a problem because the allocator is designed for -// returning pages or IO buffers (4..256 KB). 
-cassert(HeaderlessAllocator::minAllocationSize >= 2*sizeof(FreedBlock)); - - -class BoundaryTagManager -{ -public: - BoundaryTagManager() - : m_freeBlocks(0), m_freeBytes(0) - { - } - - FreedBlock* WriteTags(u8* p, size_t size) - { - FreedBlock* freedBlock = (FreedBlock*)p; - freedBlock->Setup(s_headerId, size); - Footer(freedBlock)->Setup(s_footerId, size); - - m_freeBlocks++; - m_freeBytes += size; - - Validate(freedBlock); - return freedBlock; - } - - void RemoveTags(FreedBlock* freedBlock) - { - Validate(freedBlock); - - ENSURE(m_freeBlocks != 0); - ENSURE(m_freeBytes >= freedBlock->Size()); - m_freeBlocks--; - m_freeBytes -= freedBlock->Size(); - - FreedBlock* footer = Footer(freedBlock); - freedBlock->Reset(); - footer->Reset(); - } - - FreedBlock* PrecedingBlock(u8* p, u8* beginningOfPool) const - { - if(p == beginningOfPool) // avoid accessing invalid memory - return 0; - - FreedBlock* precedingBlock; - { - FreedBlock* const footer = (FreedBlock*)(p - sizeof(FreedBlock)); - if(!footer->IsFreedBlock(s_footerId)) - return 0; - footer->Validate(s_footerId); - precedingBlock = (FreedBlock*)(p - footer->Size()); - } - - Validate(precedingBlock); - return precedingBlock; - } - - FreedBlock* FollowingBlock(u8* p, size_t size, u8* endOfPool) const - { - if(p+size == endOfPool) // avoid accessing invalid memory - return 0; - - FreedBlock* const followingBlock = (FreedBlock*)(p + size); - if(!followingBlock->IsFreedBlock(s_headerId)) - return 0; - - Validate(followingBlock); - return followingBlock; - } - - size_t FreeBlocks() const - { - return m_freeBlocks; - } - - size_t FreeBytes() const - { - return m_freeBytes; - } - - // (generated via GUID) - static const uintptr_t s_headerId = 0x111E8E6Fu; - static const uintptr_t s_footerId = 0x4D745342u; - -private: - void Validate(FreedBlock* freedBlock) const - { - if(!performSanityChecks) return; - - // the existence of freedBlock means our bookkeeping better have - // records of at least that much memory. 
- ENSURE(m_freeBlocks != 0); - ENSURE(m_freeBytes >= freedBlock->Size()); - - freedBlock->Validate(s_headerId); - Footer(freedBlock)->Validate(s_footerId); - } - - static FreedBlock* Footer(FreedBlock* freedBlock) - { - u8* const p = (u8*)freedBlock; - return (FreedBlock*)(p + freedBlock->Size() - sizeof(FreedBlock)); - } - - size_t m_freeBlocks; - size_t m_freeBytes; -}; - - -//----------------------------------------------------------------------------- -// stats -//----------------------------------------------------------------------------- - -class Stats -{ -public: - void OnReset() - { - if(!performSanityChecks) return; - - m_totalAllocatedBlocks = m_totalAllocatedBytes = 0; - m_totalDeallocatedBlocks = m_totalDeallocatedBytes = 0; - m_currentExtantBlocks = m_currentExtantBytes = 0; - m_currentFreeBlocks = m_currentFreeBytes = 0; - } - - void OnAllocate(size_t size) - { - if(!performSanityChecks) return; - - m_totalAllocatedBlocks++; - m_totalAllocatedBytes += size; - - m_currentExtantBlocks++; - m_currentExtantBytes += size; - } - - void OnDeallocate(size_t size) - { - if(!performSanityChecks) return; - - m_totalDeallocatedBlocks++; - m_totalDeallocatedBytes += size; - ENSURE(m_totalDeallocatedBlocks <= m_totalAllocatedBlocks); - ENSURE(m_totalDeallocatedBytes <= m_totalAllocatedBytes); - - ENSURE(m_currentExtantBlocks != 0); - ENSURE(m_currentExtantBytes >= size); - m_currentExtantBlocks--; - m_currentExtantBytes -= size; - } - - void OnAddToFreelist(size_t size) - { - m_currentFreeBlocks++; - m_currentFreeBytes += size; - } - - void OnRemoveFromFreelist(size_t size) - { - if(!performSanityChecks) return; - - ENSURE(m_currentFreeBlocks != 0); - ENSURE(m_currentFreeBytes >= size); - m_currentFreeBlocks--; - m_currentFreeBytes -= size; - } - - void Validate() const - { - if(!performSanityChecks) return; - - ENSURE(m_totalDeallocatedBlocks <= m_totalAllocatedBlocks); - ENSURE(m_totalDeallocatedBytes <= m_totalAllocatedBytes); - - ENSURE(m_currentExtantBlocks == m_totalAllocatedBlocks-m_totalDeallocatedBlocks); - ENSURE(m_currentExtantBytes == m_totalAllocatedBytes-m_totalDeallocatedBytes); - } - - size_t FreeBlocks() const - { - return m_currentFreeBlocks; - } - - size_t FreeBytes() const - { - return m_currentFreeBytes; - } - -private: - u64 m_totalAllocatedBlocks, m_totalAllocatedBytes; - u64 m_totalDeallocatedBlocks, m_totalDeallocatedBytes; - u64 m_currentExtantBlocks, m_currentExtantBytes; - u64 m_currentFreeBlocks, m_currentFreeBytes; -}; - - -//----------------------------------------------------------------------------- -// HeaderlessAllocator::Impl -//----------------------------------------------------------------------------- - -static void AssertEqual(size_t x1, size_t x2, size_t x3) -{ - ENSURE(x1 == x2 && x2 == x3); -} - -class HeaderlessAllocator::Impl -{ -public: - Impl(size_t poolSize) - { - (void)pool_create(&m_pool, poolSize, 0); - - Reset(); - } - - ~Impl() - { - Validate(); - - (void)pool_destroy(&m_pool); - } - - void Reset() - { - pool_free_all(&m_pool); - m_segregatedRangeLists.Reset(); - m_stats.OnReset(); - - Validate(); - } - - NOTHROW_DEFINE void* Allocate(size_t size) - { - ENSURE(IsValidSize(size)); - Validate(); - - void* p = TakeAndSplitFreeBlock(size); - if(!p) - { - p = pool_alloc(&m_pool, size); - if(!p) // both failed; don't throw bad_alloc because - return 0; // this often happens with the file cache. 
- } - - // (NB: we must not update the statistics if allocation failed) - m_stats.OnAllocate(size); - - Validate(); - ENSURE((uintptr_t)p % allocationAlignment == 0); - return p; - } - - void Deallocate(u8* p, size_t size) - { - ENSURE((uintptr_t)p % allocationAlignment == 0); - ENSURE(IsValidSize(size)); - ENSURE(pool_contains(&m_pool, p)); - ENSURE(pool_contains(&m_pool, p+size-1)); - - Validate(); - - m_stats.OnDeallocate(size); - Coalesce(p, size); - AddToFreelist(p, size); - - Validate(); - } - - void Validate() const - { - if(!performSanityChecks) return; - - m_segregatedRangeLists.Validate(BoundaryTagManager::s_headerId); - m_stats.Validate(); - - AssertEqual(m_stats.FreeBlocks(), m_segregatedRangeLists.FreeBlocks(), m_boundaryTagManager.FreeBlocks()); - AssertEqual(m_stats.FreeBytes(), m_segregatedRangeLists.FreeBytes(), m_boundaryTagManager.FreeBytes()); - } - -private: - void AddToFreelist(u8* p, size_t size) - { - FreedBlock* freedBlock = m_boundaryTagManager.WriteTags(p, size); - m_segregatedRangeLists.Insert(freedBlock); - m_stats.OnAddToFreelist(size); - } - - void RemoveFromFreelist(FreedBlock* freedBlock) - { - m_stats.OnRemoveFromFreelist(freedBlock->Size()); - m_segregatedRangeLists.Remove(freedBlock); - m_boundaryTagManager.RemoveTags(freedBlock); - } - - /** - * expand a block by coalescing it with its free neighbor(s). - **/ - void Coalesce(u8*& p, size_t& size) - { - { - FreedBlock* precedingBlock = m_boundaryTagManager.PrecedingBlock(p, m_pool.da.base); - if(precedingBlock) - { - p -= precedingBlock->Size(); - size += precedingBlock->Size(); - RemoveFromFreelist(precedingBlock); - } - } - - { - FreedBlock* followingBlock = m_boundaryTagManager.FollowingBlock(p, size, m_pool.da.base+m_pool.da.pos); - if(followingBlock) - { - size += followingBlock->Size(); - RemoveFromFreelist(followingBlock); - } - } - } - - void* TakeAndSplitFreeBlock(size_t size) - { - u8* p; - size_t leftoverSize = 0; - { - FreedBlock* freedBlock = m_segregatedRangeLists.Find(size); - if(!freedBlock) - return 0; - - p = (u8*)freedBlock; - leftoverSize = freedBlock->Size() - size; - RemoveFromFreelist(freedBlock); - } - - if(IsValidSize(leftoverSize)) - AddToFreelist(p+size, leftoverSize); - - return p; - } - - Pool m_pool; - SegregatedRangeLists m_segregatedRangeLists; - BoundaryTagManager m_boundaryTagManager; - Stats m_stats; -}; - - -//----------------------------------------------------------------------------- - -HeaderlessAllocator::HeaderlessAllocator(size_t poolSize) - : impl(new Impl(poolSize)) -{ -} - -void HeaderlessAllocator::Reset() -{ - return impl->Reset(); -} - -NOTHROW_DEFINE void* HeaderlessAllocator::Allocate(size_t size) -{ - return impl->Allocate(size); -} - -void HeaderlessAllocator::Deallocate(void* p, size_t size) -{ - return impl->Deallocate((u8*)p, size); -} - -void HeaderlessAllocator::Validate() const -{ - return impl->Validate(); -} Index: ps/trunk/source/lib/allocators/pool.cpp =================================================================== --- ps/trunk/source/lib/allocators/pool.cpp +++ ps/trunk/source/lib/allocators/pool.cpp @@ -29,7 +29,6 @@ #include "lib/alignment.h" #include "lib/allocators/freelist.h" -#include "lib/allocators/allocator_adapters.h" #include "lib/timer.h" Index: ps/trunk/source/lib/allocators/stateless_allocators.h =================================================================== --- ps/trunk/source/lib/allocators/stateless_allocators.h +++ ps/trunk/source/lib/allocators/stateless_allocators.h @@ -0,0 +1,90 @@ +/* Copyright (C) 2021 
Wildfire Games. + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +#ifndef INCLUDED_STATELESS_ALLOCATORS +#define INCLUDED_STATELESS_ALLOCATORS + +#include "lib/sysdep/rtl.h" +#include "lib/sysdep/vm.h" + +#include + +// NB: STL allocators are parameterized on the object type and indicate +// the number of elements to [de]allocate. however, these adapters are +// only used for allocating storage and receive the number of bytes. + +struct Allocator_Heap +{ + void* allocate(size_t size) + { + return malloc(size); + } + + void deallocate(void* p, size_t UNUSED(size)) + { + return free(p); + } +}; + +template +struct Allocator_Aligned +{ + void* allocate(size_t size) + { + return rtl_AllocateAligned(size, alignment); + } + + void deallocate(void* p, size_t UNUSED(size)) + { + return rtl_FreeAligned(p); + } +}; + +template +struct Allocator_VM +{ + void* allocate(size_t size) + { + return vm::Allocate(size, pageType, prot); + } + + void deallocate(void* p, size_t size) + { + vm::Free(p, size); + } +}; + +template +struct Allocator_AddressSpace +{ + void* allocate(size_t size) + { + return vm::ReserveAddressSpace(size, commitSize, pageType, prot); + } + + void deallocate(void* p, size_t size) + { + vm::ReleaseAddressSpace(p, size); + } +}; + +#endif // INCLUDED_STATELESS_ALLOCATORS Index: ps/trunk/source/lib/allocators/tests/test_DynamicArena.h =================================================================== --- ps/trunk/source/lib/allocators/tests/test_DynamicArena.h +++ ps/trunk/source/lib/allocators/tests/test_DynamicArena.h @@ -0,0 +1,75 @@ +/* Copyright (C) 2021 Wildfire Games. + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +#include "lib/self_test.h" + +#include "lib/allocators/DynamicArena.h" + +class TestDynamicArena : public CxxTest::TestSuite +{ +public: + void test_allocate() + { + Allocators::DynamicArena<100> testArena; + u8* p = static_cast(testArena.allocate(10, nullptr, 1)); + TS_ASSERT(p != nullptr); + void* p2 = testArena.allocate(10, nullptr, 1); + TS_ASSERT(p + 10 == p2); + + void* p3 = testArena.allocate(80, nullptr, 1); + TS_ASSERT(p + 20 == p3); + + void* p4 = testArena.allocate(100, nullptr, 1); + TS_ASSERT(p4 != nullptr); + + void* p5 = testArena.allocate(1, nullptr, 1); + TS_ASSERT(p5 != nullptr); + + void* p6 = testArena.allocate(100, nullptr, 1); + TS_ASSERT(p6 != nullptr); + + void* p7 = testArena.allocate(0, nullptr, 1); + TS_ASSERT(p7 != nullptr); + } + + void test_alignment() + { + Allocators::DynamicArena<100> testArena; + u8* p = static_cast(testArena.allocate(4, nullptr, 1)); + TS_ASSERT(p != nullptr); + + u8* p2 = static_cast(testArena.allocate(1, nullptr, 8)); + TS_ASSERT_EQUALS(p + 8, p2); + + p2 = static_cast(testArena.allocate(1, nullptr, 8)); + TS_ASSERT_EQUALS(p + 16, p2); + + p2 = static_cast(testArena.allocate(1, nullptr, 8)); + TS_ASSERT_EQUALS(p + 24, p2); + + p2 = static_cast(testArena.allocate(1, nullptr, 2)); + TS_ASSERT_EQUALS(p + 26, p2); + + p2 = static_cast(testArena.allocate(1, nullptr, 8)); + TS_ASSERT_EQUALS(p + 32, p2); + } +}; Index: ps/trunk/source/lib/allocators/tests/test_adapters.h =================================================================== --- ps/trunk/source/lib/allocators/tests/test_adapters.h +++ ps/trunk/source/lib/allocators/tests/test_adapters.h @@ -0,0 +1,81 @@ +/* Copyright (C) 2021 Wildfire Games. + * + * Permission is hereby granted, free of charge, to any person obtaining + * a copy of this software and associated documentation files (the + * "Software"), to deal in the Software without restriction, including + * without limitation the rights to use, copy, modify, merge, publish, + * distribute, sublicense, and/or sell copies of the Software, and to + * permit persons to whom the Software is furnished to do so, subject to + * the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +#include "lib/self_test.h" + +#include "lib/allocators/DynamicArena.h" +#include "lib/allocators/STLAllocators.h" + +#include +#include +#include +#include + +class TestSTLAllocators : public CxxTest::TestSuite +{ +public: + struct SomeData + { + SomeData(int) {}; + int padding[10]; + }; + + template typename Alloc, typename D> + using Allocator = Alloc>; + + template typename Alloc> + using Map = std::map, Allocator::value_type>>; + + template typename Alloc> + using UMap = std::unordered_map, std::equal_to, Allocator::value_type>>; + + void test_CustomAllocator() + { + Map map; + UMap umap; + std::vector>> vec; + + map.emplace(4, 5); + umap.emplace(4, 5); + vec.emplace_back(5); + + map.clear(); + umap.clear(); + vec.clear(); + } + + void test_ProxyAllocator() + { + Allocators::DynamicArena<4096> arena; + Map map(arena); + UMap umap(arena); + std::vector> vec(arena); + + map.emplace(4, 5); + umap.emplace(4, 5); + vec.emplace_back(5); + + map.clear(); + umap.clear(); + vec.clear(); + } +}; Index: ps/trunk/source/lib/allocators/tests/test_headerless.h =================================================================== --- ps/trunk/source/lib/allocators/tests/test_headerless.h +++ ps/trunk/source/lib/allocators/tests/test_headerless.h @@ -1,148 +0,0 @@ -/* Copyright (C) 2010 Wildfire Games. - * - * Permission is hereby granted, free of charge, to any person obtaining - * a copy of this software and associated documentation files (the - * "Software"), to deal in the Software without restriction, including - * without limitation the rights to use, copy, modify, merge, publish, - * distribute, sublicense, and/or sell copies of the Software, and to - * permit persons to whom the Software is furnished to do so, subject to - * the following conditions: - * - * The above copyright notice and this permission notice shall be included - * in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. - * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY - * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, - * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE - * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- */ - -#include "lib/self_test.h" - -#include "lib/bits.h" // round_down -#include "lib/allocators/headerless.h" - -void* const null = 0; - -class TestHeaderless: public CxxTest::TestSuite -{ -public: - void test_Basic() - { - HeaderlessAllocator a(8192); - - // (these are disabled because they raise an assert) -#if 0 - // can't Allocate unaligned sizes - TS_ASSERT_EQUALS(a.Allocate(HeaderlessAllocator::minAllocationSize+1), null); - - // can't Allocate too small amounts - TS_ASSERT_EQUALS(a.Allocate(HeaderlessAllocator::minAllocationSize/2), null); -#endif - - // can Allocate the entire pool - char* p1 = (char*)a.Allocate(4096); - char* p2 = (char*)a.Allocate(4096); - TS_ASSERT_DIFFERS(p1, null); - TS_ASSERT_DIFFERS(p2, null); - - // back-to-back (non-freelist) allocations should be contiguous - TS_ASSERT_EQUALS(p1+4096, p2); - - // allocations are writable - p1[0] = 11; - p1[4095] = 12; - } - - void test_Free() - { - // Deallocate allows immediate reuse of the freed pointer - HeaderlessAllocator a(4096); - void* p1 = a.Allocate(1024); - a.Deallocate(p1, 1024); - void* p2 = a.Allocate(1024); - TS_ASSERT_EQUALS(p1, p2); - } - - void test_Coalesce() - { - HeaderlessAllocator a(0x10000); - - // can Allocate non-power-of-two sizes (note: the current - // implementation only allows sizes that are multiples of 0x10) - void* p1 = a.Allocate(0x5680); - void* p2 = a.Allocate(0x78A0); - void* p3 = a.Allocate(0x1240); - TS_ASSERT_DIFFERS(p1, null); - TS_ASSERT_DIFFERS(p2, null); - TS_ASSERT_DIFFERS(p3, null); - - // after freeing, must be able to allocate the total amount - // note: we don't insist on being able to fill the entire - // memory range. in this case, the problem is that the pool has some - // virtual address space left, but the allocator doesn't grab that - // and add it to the freelist. that feature is currently not - // implemented. - a.Deallocate(p1, 0x5680); - a.Deallocate(p2, 0x78A0); - a.Deallocate(p3, 0x1240); - void* p4 = a.Allocate(0xE140); - TS_ASSERT_DIFFERS(p4, null); - } - - void test_Reset() - { - // after Reset, must return the same pointer as a freshly constructed instance - HeaderlessAllocator a(4096); - void* p1 = a.Allocate(128); - a.Reset(); - void* p2 = a.Allocate(128); - TS_ASSERT_EQUALS(p1, p2); - } - - // will the allocator survive a series of random but valid Allocate/Deallocate? 
-	void test_Randomized()
-	{
-		const size_t poolSize = 1024*1024;
-		HeaderlessAllocator a(poolSize);
-
-		typedef std::map<void*, size_t> AllocMap;
-		AllocMap allocs;
-
-		srand(1);
-
-		for(int i = 0; i < 1000; i++)
-		{
-			// allocate
-			if(rand() >= RAND_MAX/2)
-			{
-				const size_t maxSize = (size_t)((rand() / (float)RAND_MAX) * poolSize);
-				const size_t size = std::max((size_t)HeaderlessAllocator::minAllocationSize, round_down(maxSize, HeaderlessAllocator::allocationAlignment));
-				// (the size_t cast on minAllocationSize prevents max taking a reference to the non-defined variable)
-				void* p = a.Allocate(size);
-				if(!p)
-					continue;
-				TS_ASSERT(allocs.find(p) == allocs.end());
-				allocs[p] = size;
-			}
-			// free
-			else
-			{
-				if(allocs.empty())
-					continue;
-				// find random allocation to deallocate
-				AllocMap::iterator it = allocs.begin();
-				const int numToSkip = rand() % (int)allocs.size();
-				for(int skip = 0; skip < numToSkip; skip++)
-					++it;
-				void* p = (*it).first;
-				size_t size = (*it).second;
-				allocs.erase(it);
-				a.Deallocate(p, size);
-			}
-		}
-	}
-};
Index: ps/trunk/source/renderer/ModelRenderer.cpp
===================================================================
--- ps/trunk/source/renderer/ModelRenderer.cpp
+++ ps/trunk/source/renderer/ModelRenderer.cpp
@@ -1,4 +1,4 @@
-/* Copyright (C) 2020 Wildfire Games.
+/* Copyright (C) 2021 Wildfire Games.
 * This file is part of 0 A.D.
 *
 * 0 A.D. is free software: you can redistribute it and/or modify
@@ -24,8 +24,8 @@
 #include "graphics/ModelDef.h"
 #include "graphics/ShaderManager.h"
 #include "graphics/TextureManager.h"
-#include "lib/allocators/allocator_adapters.h"
-#include "lib/allocators/arena.h"
+#include "lib/allocators/DynamicArena.h"
+#include "lib/allocators/STLAllocators.h"
 #include "lib/hash.h"
 #include "lib/ogl.h"
 #include "maths/Vector3D.h"
@@ -403,8 +403,10 @@
 * list in each, rebinding the GL state whenever it changes.
 */
-	Allocators::DynamicArena arena(256 * KiB);
-	using ModelListAllocator = ProxyAllocator<CModel*, Allocators::DynamicArena>;
+	using Arena = Allocators::DynamicArena<256 * KiB>;
+
+	Arena arena;
+	using ModelListAllocator = ProxyAllocator<CModel*, Arena>;
 	using ModelList_t = std::vector<CModel*, ModelListAllocator>;
 	using MaterialBuckets_t = std::unordered_map<
 		SMRMaterialBucketKey,
 		ModelList_t,
 		SMRMaterialBucketKeyHash,
 		std::equal_to<SMRMaterialBucketKey>,
 		ProxyAllocator<
 			std::pair<const SMRMaterialBucketKey, ModelList_t>,
-			Allocators::DynamicArena> >;
+			Arena> >;
 	MaterialBuckets_t materialBuckets((MaterialBuckets_t::allocator_type(arena)));
@@ -467,10 +469,10 @@
 		}
 	}
-	typedef ProxyAllocator<SMRSortByDistItem, Allocators::DynamicArena> SortByDistItemsAllocator;
+	using SortByDistItemsAllocator = ProxyAllocator<SMRSortByDistItem, Arena>;
 	std::vector<SMRSortByDistItem, SortByDistItemsAllocator> sortByDistItems((SortByDistItemsAllocator(arena)));
-	typedef ProxyAllocator<CShaderTechniquePtr, Allocators::DynamicArena> SortByTechItemsAllocator;
+	using SortByTechItemsAllocator = ProxyAllocator<CShaderTechniquePtr, Arena>;
 	std::vector<CShaderTechniquePtr, SortByTechItemsAllocator> sortByDistTechs((SortByTechItemsAllocator(arena))); // indexed by sortByDistItems[i].techIdx
 		// (which stores indexes instead of CShaderTechniquePtr directly
 		// if we just stored raw CShaderTechnique* and assumed the shader manager
 		// will keep it alive long enough)
-	typedef ProxyAllocator<SMRTechBucket, Allocators::DynamicArena> TechBucketsAllocator;
+	using TechBucketsAllocator = ProxyAllocator<SMRTechBucket, Arena>;
 	std::vector<SMRTechBucket, TechBucketsAllocator> techBuckets((TechBucketsAllocator(arena)));
 	{
@@ -588,18 +590,18 @@
 	// This vector keeps track of texture changes during rendering. It is kept outside the
 	// loops to avoid excessive reallocations. The token allocation of 64 elements
 	// should be plenty, though it is reallocated below (at a cost) if necessary.
-	typedef ProxyAllocator<CTexture*, Allocators::DynamicArena> TextureListAllocator;
+	using TextureListAllocator = ProxyAllocator<CTexture*, Arena>;
 	std::vector<CTexture*, TextureListAllocator> currentTexs((TextureListAllocator(arena)));
 	currentTexs.reserve(64);
 
 	// texBindings holds the identifier bindings in the shader, which can no longer be defined
 	// statically in the ShaderRenderModifier class. texBindingNames uses interned strings to
 	// keep track of when bindings need to be reevaluated.
-	typedef ProxyAllocator<CShaderProgram::Binding, Allocators::DynamicArena> BindingListAllocator;
+	using BindingListAllocator = ProxyAllocator<CShaderProgram::Binding, Arena>;
 	std::vector<CShaderProgram::Binding, BindingListAllocator> texBindings((BindingListAllocator(arena)));
 	texBindings.reserve(64);
 
-	typedef ProxyAllocator<CStrIntern, Allocators::DynamicArena> BindingNamesListAllocator;
+	using BindingNamesListAllocator = ProxyAllocator<CStrIntern, Arena>;
 	std::vector<CStrIntern, BindingNamesListAllocator> texBindingNames((BindingNamesListAllocator(arena)));
 	texBindingNames.reserve(64);
Index: ps/trunk/source/renderer/PatchRData.cpp
===================================================================
--- ps/trunk/source/renderer/PatchRData.cpp
+++ ps/trunk/source/renderer/PatchRData.cpp
@@ -1,4 +1,4 @@
-/* Copyright (C) 2020 Wildfire Games.
+/* Copyright (C) 2021 Wildfire Games.
 * This file is part of 0 A.D.
 *
 * 0 A.D. is free software: you can redistribute it and/or modify
@@ -27,8 +27,8 @@
 #include "graphics/Terrain.h"
 #include "graphics/TerrainTextureEntry.h"
 #include "graphics/TextRenderer.h"
-#include "lib/alignment.h"
-#include "lib/allocators/arena.h"
+#include "lib/allocators/DynamicArena.h"
+#include "lib/allocators/STLAllocators.h"
 #include "maths/MathUtil.h"
 #include "ps/CLogger.h"
 #include "ps/Game.h"
@@ -677,14 +677,16 @@
 // batches uses a arena allocator. (All allocations are short-lived so we can
 // just throw away the whole arena at the end of each frame.)
 
+using Arena = Allocators::DynamicArena<1 * MiB>;
+
 // std::map types with appropriate arena allocators and default comparison operator
-#define POOLED_BATCH_MAP(Key, Value) \
-	std::map<Key, Value, std::less<Key>, ProxyAllocator<std::pair<Key const, Value>, Allocators::DynamicArena > >
+template<typename Key, typename Value>
+using PooledBatchMap = std::map<Key, Value, std::less<Key>, ProxyAllocator<std::pair<Key const, Value>, Arena>>;
 
 // Equivalent to "m[k]", when it returns a arena-allocated std::map (since we can't
 // use the default constructor in that case)
 template<typename M>
-typename M::mapped_type& PooledMapGet(M& m, const typename M::key_type& k, Allocators::DynamicArena& arena)
+typename M::mapped_type& PooledMapGet(M& m, const typename M::key_type& k, Arena& arena)
 {
 	return m.insert(std::make_pair(k,
 		typename M::mapped_type(typename M::mapped_type::key_compare(), typename M::mapped_type::allocator_type(arena))
@@ -693,7 +695,7 @@
 
 // Equivalent to "m[k]", when it returns a std::pair of arena-allocated std::vectors
 template<typename M>
-typename M::mapped_type& PooledPairGet(M& m, const typename M::key_type& k, Allocators::DynamicArena& arena)
+typename M::mapped_type& PooledPairGet(M& m, const typename M::key_type& k, Arena& arena)
 {
 	return m.insert(std::make_pair(k, std::make_pair(
 		typename M::mapped_type::first_type(typename M::mapped_type::first_type::allocator_type(arena)),
@@ -702,23 +704,23 @@
 }
 
 // Each multidraw batch has a list of index counts, and a list of pointers-to-first-indexes
-typedef std::pair<std::vector<GLint, ProxyAllocator<GLint, Allocators::DynamicArena> >, std::vector<void*, ProxyAllocator<void*, Allocators::DynamicArena> > > BatchElements;
+using BatchElements = std::pair<std::vector<GLint, ProxyAllocator<GLint, Arena>>, std::vector<void*, ProxyAllocator<void*, Arena>>>;
 
 // Group batches by index buffer
-typedef POOLED_BATCH_MAP(CVertexBuffer*, BatchElements) IndexBufferBatches;
+using IndexBufferBatches = PooledBatchMap<CVertexBuffer*, BatchElements>;
 
 // Group batches by vertex buffer
-typedef POOLED_BATCH_MAP(CVertexBuffer*, IndexBufferBatches) VertexBufferBatches;
+using VertexBufferBatches = PooledBatchMap<CVertexBuffer*, IndexBufferBatches>;
 
 // Group batches by texture
-typedef POOLED_BATCH_MAP(CTerrainTextureEntry*, VertexBufferBatches) TextureBatches;
+using TextureBatches = PooledBatchMap<CTerrainTextureEntry*, VertexBufferBatches>;
 
 void CPatchRData::RenderBases(const std::vector<CPatchRData*>& patches, const CShaderDefines& context,
 	ShadowMap* shadow, bool isDummyShader, const CShaderProgramPtr& dummy)
 {
-	Allocators::DynamicArena arena(1 * MiB);
+	Arena arena;
 
-	TextureBatches batches (TextureBatches::key_compare(), (TextureBatches::allocator_type(arena)));
+	TextureBatches batches(TextureBatches::key_compare(), (TextureBatches::allocator_type(arena)));
 
 	PROFILE_START("compute batches");
@@ -863,7 +865,7 @@
 */
 struct SBlendBatch
 {
-	SBlendBatch(Allocators::DynamicArena& arena) :
+	SBlendBatch(Arena& arena) :
 		m_Batches(VertexBufferBatches::key_compare(), VertexBufferBatches::allocator_type(arena))
 	{
 	}
@@ -878,12 +880,12 @@
 struct SBlendStackItem
 {
 	SBlendStackItem(CVertexBuffer::VBChunk* v, CVertexBuffer::VBChunk* i,
-		const std::vector<SSplat>& s, Allocators::DynamicArena& arena) :
+		const std::vector<SSplat>& s, Arena& arena) :
 		vertices(v), indices(i), splats(s.begin(), s.end(), SplatStack::allocator_type(arena))
 	{
 	}
-	typedef std::vector<SSplat, ProxyAllocator<SSplat, Allocators::DynamicArena> > SplatStack;
+	using SplatStack = std::vector<SSplat, ProxyAllocator<SSplat, Arena>>;
 	CVertexBuffer::VBChunk* vertices;
 	CVertexBuffer::VBChunk* indices;
 	SplatStack splats;
@@ -892,9 +894,9 @@
 void CPatchRData::RenderBlends(const std::vector<CPatchRData*>& patches, const CShaderDefines& context,
 	ShadowMap* shadow, bool isDummyShader, const CShaderProgramPtr& dummy)
 {
-	Allocators::DynamicArena arena(1 * MiB);
+	Arena arena;
 
-	typedef std::vector<SBlendBatch, ProxyAllocator<SBlendBatch, Allocators::DynamicArena> > BatchesStack;
+	using BatchesStack = std::vector<SBlendBatch, ProxyAllocator<SBlendBatch, Arena>>;
 	BatchesStack batches((BatchesStack::allocator_type(arena)));
 
 	CShaderDefines contextBlend = context;
@@ -906,7 +908,7 @@
 	// to avoid heavy reallocations
 	batches.reserve(256);
 
-	typedef std::vector<SBlendStackItem, ProxyAllocator<SBlendStackItem, Allocators::DynamicArena> > BlendStacks;
+	using BlendStacks = std::vector<SBlendStackItem, ProxyAllocator<SBlendStackItem, Arena>>;
 	BlendStacks blendStacks((BlendStacks::allocator_type(arena)));
 	blendStacks.reserve(patches.size());
Index: ps/trunk/source/simulation2/serialization/BinarySerializer.h
===================================================================
--- ps/trunk/source/simulation2/serialization/BinarySerializer.h
+++ ps/trunk/source/simulation2/serialization/BinarySerializer.h
@@ -1,4 +1,4 @@
-/* Copyright (C) 2020 Wildfire Games.
+/* Copyright (C) 2021 Wildfire Games.
 * This file is part of 0 A.D.
 *
 * 0 A.D. is free software: you can redistribute it and/or modify
@@ -21,7 +21,6 @@
 #include "ISerializer.h"
 #include "lib/byte_order.h"
-#include "lib/allocators/arena.h"
 #include 
 #include 
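
Editor's note: the hunks above only migrate existing call sites to the new block-size-templated arena. As a quick orientation for reviewers, here is a minimal usage sketch of the intended pattern. It is not part of the patch; it assumes the headers land at the paths used above, that ProxyAllocator is provided by STLAllocators.h and keeps the reference-taking constructor seen at the call sites, and the function and container names are invented for illustration.

#include "lib/allocators/DynamicArena.h"
#include "lib/allocators/STLAllocators.h"

#include <vector>

// Hypothetical per-frame helper, for illustration only.
void BuildFrameScratchData()
{
	// One arena per scope: allocations are bumped linearly in 64 KiB blocks
	// (written as a plain constant here) and nothing is freed per element.
	using Arena = Allocators::DynamicArena<64 * 1024>;
	Arena arena;

	// ProxyAllocator lets several short-lived containers share the same arena.
	using IndexAllocator = ProxyAllocator<int, Arena>;
	std::vector<int, IndexAllocator> indices((IndexAllocator(arena)));
	indices.reserve(64);

	for (int i = 0; i < 64; ++i)
		indices.push_back(i);

	// All memory backing 'indices' is released in one go when 'arena'
	// is cleared or goes out of scope at the end of the function.
}

The extra parentheses around IndexAllocator(arena) mirror the call sites in the patch and avoid the most-vexing-parse; the design point is that every container proxied onto one arena shares its blocks, so a renderer can create one arena per frame and discard it wholesale instead of paying per-element deallocation costs.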