diff --git a/.gitmodules b/.gitmodules
index f20655bd..5fca29d9 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,4 +1,4 @@
 [submodule "cmake/comp"]
-	path = cmake/comp
-	url = https://github.com/foonathan/compatibility.git
-	branch = git-submodule
+	path = cmake/comp
+	url = https://github.com/foonathan/compatibility.git
+	branch = git-submodule
diff --git a/cmake/configuration.cmake b/cmake/configuration.cmake
index 0364fc8c..18dc22aa 100644
--- a/cmake/configuration.cmake
+++ b/cmake/configuration.cmake
@@ -68,6 +68,8 @@ else()
 endif()
 
 # other options
+option(FOONATHAN_MEMORY_CHECK_ALLOCATION_SIZE
+        "whether or not the size of the allocation will be checked" ON)
 set(FOONATHAN_MEMORY_DEFAULT_ALLOCATOR heap_allocator CACHE STRING
         "the default implementation allocator for higher-level ones")
 option(FOONATHAN_MEMORY_THREAD_SAFE_REFERENCE
diff --git a/include/foonathan/memory/config.hpp b/include/foonathan/memory/config.hpp
index bb5d772f..5bc11a59 100644
--- a/include/foonathan/memory/config.hpp
+++ b/include/foonathan/memory/config.hpp
@@ -70,6 +70,11 @@
     /// \ingroup memory core
     #define FOONATHAN_MEMORY_VERSION (FOONATHAN_MEMORY_VERSION_MAJOR * 100 + FOONATHAN_MEMORY_VERSION_MINOR)
 
+    /// Whether or not the allocation size will be checked,
+    /// i.e. the \ref bad_allocation_size thrown.
+    /// \ingroup memory core
+    #define FOONATHAN_MEMORY_CHECK_ALLOCATION_SIZE 1
+
     /// Whether or not the \ref foonathan::memory::default_mutex will be \c std::mutex or \ref foonathan::memory::no_mutex.
     /// \ingroup memory core
     #define FOONATHAN_MEMORY_THREAD_SAFE_REFERENCE 1
diff --git a/include/foonathan/memory/detail/align.hpp b/include/foonathan/memory/detail/align.hpp
index a9a0549d..1239693e 100644
--- a/include/foonathan/memory/detail/align.hpp
+++ b/include/foonathan/memory/detail/align.hpp
@@ -5,11 +5,14 @@
 #ifndef FOONATHAN_MEMORY_DETAIL_ALIGN_HPP_INCLUDED
 #define FOONATHAN_MEMORY_DETAIL_ALIGN_HPP_INCLUDED
 
+#include
+
 #include
 #include
 #include
 
 #include "../config.hpp"
+#include "assert.hpp"
 
 namespace foonathan { namespace memory
 {
@@ -23,7 +26,13 @@ namespace foonathan { namespace memory
 
         // returns the offset needed to align ptr for given alignment
         // alignment must be valid
-        std::size_t align_offset(void *ptr, std::size_t alignment) FOONATHAN_NOEXCEPT;
+        inline std::size_t align_offset(void *ptr, std::size_t alignment) FOONATHAN_NOEXCEPT
+        {
+            FOONATHAN_MEMORY_ASSERT(is_valid_alignment(alignment));
+            auto address = reinterpret_cast<std::uintptr_t>(ptr);
+            auto misaligned = address & (alignment - 1);
+            return misaligned != 0 ?
(alignment - misaligned) : 0; + } // whether or not the pointer is aligned for given alignment // alignment must be valid diff --git a/include/foonathan/memory/detail/assert.hpp b/include/foonathan/memory/detail/assert.hpp index a952f743..fb8dc314 100644 --- a/include/foonathan/memory/detail/assert.hpp +++ b/include/foonathan/memory/detail/assert.hpp @@ -5,6 +5,8 @@ #ifndef FOONATHAN_MEMORY_DETAIL_ASSERT_HPP_INCLUDED #define FOONATHAN_MEMORY_DETAIL_ASSERT_HPP_INCLUDED +#include + #include "../config.hpp" namespace foonathan { namespace memory @@ -29,9 +31,9 @@ namespace foonathan { namespace memory #define FOONATHAN_MEMORY_UNREACHABLE(Msg) \ detail::handle_failed_assert("Unreachable code reached: " Msg, __FILE__, __LINE__, __func__) #elif !defined(FOONATHAN_MEMORY_ASSERT) - #define FOONATHAN_MEMORY_ASSERT(Expr) static_cast(Expr) - #define FOONATHAN_MEMORY_ASSERT_MSG(Expr, Msg) static_cast(Expr) - #define FOONATHAN_MEMORY_UNREACHABLE(Msg) /* nothing */ + #define FOONATHAN_MEMORY_ASSERT(Expr) + #define FOONATHAN_MEMORY_ASSERT_MSG(Expr, Msg) + #define FOONATHAN_MEMORY_UNREACHABLE(Msg) std::abort() #endif } // namespace detail }} // namespace foonathan::memory diff --git a/include/foonathan/memory/detail/debug_helpers.hpp b/include/foonathan/memory/detail/debug_helpers.hpp index 6fc09962..b9892d60 100644 --- a/include/foonathan/memory/detail/debug_helpers.hpp +++ b/include/foonathan/memory/detail/debug_helpers.hpp @@ -23,6 +23,7 @@ namespace foonathan { namespace memory FOONATHAN_CONSTEXPR std::size_t debug_fence_size = FOONATHAN_MEMORY_DEBUG_FILL ? FOONATHAN_MEMORY_DEBUG_FENCE : 0u; + #if FOONATHAN_MEMORY_DEBUG_FILL // fills size bytes of memory with debug_magic void debug_fill(void *memory, std::size_t size, debug_magic m) FOONATHAN_NOEXCEPT; @@ -41,6 +42,26 @@ namespace foonathan { namespace memory // fills internal memory void debug_fill_internal(void *memory, std::size_t size, bool free) FOONATHAN_NOEXCEPT; + #else + inline void debug_fill(void *, std::size_t, debug_magic) FOONATHAN_NOEXCEPT {} + + inline void* debug_is_filled(void *, std::size_t, debug_magic) FOONATHAN_NOEXCEPT + { + return nullptr; + } + + inline void* debug_fill_new(void *memory, std::size_t, std::size_t) FOONATHAN_NOEXCEPT + { + return memory; + } + + inline void* debug_fill_free(void *memory, std::size_t, std::size_t) FOONATHAN_NOEXCEPT + { + return static_cast(memory); + } + + inline void debug_fill_internal(void *, std::size_t, bool) FOONATHAN_NOEXCEPT {} + #endif void debug_handle_invalid_ptr(const allocator_info &info, void *ptr); diff --git a/include/foonathan/memory/detail/free_list.hpp b/include/foonathan/memory/detail/free_list.hpp index 0bb6aa9f..5d53274a 100644 --- a/include/foonathan/memory/detail/free_list.hpp +++ b/include/foonathan/memory/detail/free_list.hpp @@ -6,6 +6,7 @@ #define FOONATHAN_MEMORY_DETAILL_FREE_LIST_HPP_INCLUDED #include +#include #include "align.hpp" #include "utility.hpp" @@ -51,7 +52,7 @@ namespace foonathan { namespace memory // pre: !empty() void* allocate() FOONATHAN_NOEXCEPT; - // returns a memory block big enough for n bytes (!, not nodes) + // returns a memory block big enough for n bytes // might fail even if capacity is sufficient void* allocate(std::size_t n) FOONATHAN_NOEXCEPT; @@ -62,7 +63,13 @@ namespace foonathan { namespace memory void deallocate(void *ptr, std::size_t n) FOONATHAN_NOEXCEPT; //=== getter ===// - std::size_t node_size() const FOONATHAN_NOEXCEPT; + std::size_t node_size() const FOONATHAN_NOEXCEPT + { + return node_size_; + } + + // alignment of all nodes 
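For reference, here is a standalone sketch of the alignment arithmetic that the now header-only align_offset/is_aligned helpers rely on. It is plain C++11 with assert standing in for FOONATHAN_MEMORY_ASSERT; the bare function names are assumptions for illustration, not the library's API.

    // align_sketch.cpp - illustrative only, not part of the diff
    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // a valid alignment is a non-zero power of two
    bool is_valid_alignment(std::size_t alignment)
    {
        return alignment != 0u && (alignment & (alignment - 1)) == 0u;
    }

    // offset that must be added to ptr so it becomes aligned
    std::size_t align_offset(void* ptr, std::size_t alignment)
    {
        assert(is_valid_alignment(alignment));
        auto address    = reinterpret_cast<std::uintptr_t>(ptr);
        auto misaligned = address & (alignment - 1); // address % alignment for powers of two
        return misaligned != 0 ? alignment - misaligned : 0;
    }

    bool is_aligned(void* ptr, std::size_t alignment)
    {
        assert(is_valid_alignment(alignment));
        return (reinterpret_cast<std::uintptr_t>(ptr) & (alignment - 1)) == 0u;
    }

    int main()
    {
        char buffer[64];
        void* p = buffer + 1; // very likely misaligned for 8
        std::printf("offset to 8-byte alignment: %zu\n", align_offset(p, 8));
        std::printf("aligned afterwards? %d\n",
                    is_aligned(static_cast<char*>(p) + align_offset(p, 8), 8));
    }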
+ std::size_t alignment() const FOONATHAN_NOEXCEPT; // number of nodes remaining std::size_t capacity() const FOONATHAN_NOEXCEPT @@ -70,108 +77,16 @@ namespace foonathan { namespace memory return capacity_; } - bool empty() const FOONATHAN_NOEXCEPT; - - // alignment of all nodes - std::size_t alignment() const FOONATHAN_NOEXCEPT; + bool empty() const FOONATHAN_NOEXCEPT + { + return first_ == nullptr; + } private: - // cache for new nodes that were never used - // it works like a stack and is continous, so supports arrays - class cache - { - public: - cache() FOONATHAN_NOEXCEPT - : cur_(nullptr), end_(nullptr) {} - - cache(void *memory, std::size_t size) FOONATHAN_NOEXCEPT - : cur_(static_cast(memory)), end_(cur_ + size) {} - - cache(cache &&other) FOONATHAN_NOEXCEPT - : cur_(other.cur_), end_(other.end_) - { - other.cur_ = other.end_ = nullptr; - } - - ~cache() FOONATHAN_NOEXCEPT = default; - - cache& operator=(cache &&other) FOONATHAN_NOEXCEPT - { - cur_ = other.cur_; - end_ = other.end_; - other.cur_ = other.end_ = nullptr; - return *this; - } - - // allocates memory of given size and alignment - // takes care of debug filling - // returns nullptr if no memory available - void* allocate(std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT; - - // tries to deallocate memory - // only works if deallocation in reversed order - // returns true if succesfully deallocated - bool try_deallocate(void *ptr, std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT; - - char* top() FOONATHAN_NOEXCEPT - { - return cur_; - } - - char* end() FOONATHAN_NOEXCEPT - { - return end_; - } - - // number of nodes that can be allocated from the cache - std::size_t no_nodes(std::size_t node_size) const FOONATHAN_NOEXCEPT; - - private: - char *cur_, *end_; - }; - - // intrusive list for unused memory nodes - // gives only a stack like interface - class list_impl - { - public: - list_impl() FOONATHAN_NOEXCEPT - : first_(nullptr) {} - - list_impl(list_impl &&other) FOONATHAN_NOEXCEPT - : first_(other.first_) - { - other.first_ = nullptr; - } - - ~list_impl() FOONATHAN_NOEXCEPT = default; - - list_impl& operator=(list_impl &&other) FOONATHAN_NOEXCEPT - { - first_ = other.first_; - other.first_ = nullptr; - return *this; - } - - // inserts all memory from an intervall into the list - // it will be inserted into the front - std::size_t insert(char *begin, char *end, std::size_t node_size) FOONATHAN_NOEXCEPT; - - // pushes a single node into the list - // it takes care of debug filling - void push(void *ptr, std::size_t node_size) FOONATHAN_NOEXCEPT; - - // pops the first node from the list - // it takes care of debug fillilng - // returns nullptr if empty - void* pop(std::size_t node_size) FOONATHAN_NOEXCEPT; - - private: - char *first_; - }; - - cache cache_; - list_impl list_; + std::size_t fence_size() const FOONATHAN_NOEXCEPT; + void insert_impl(void *mem, std::size_t size) FOONATHAN_NOEXCEPT; + + char *first_; std::size_t node_size_, capacity_; }; @@ -189,14 +104,23 @@ namespace foonathan { namespace memory //=== constructor ===// ordered_free_memory_list(std::size_t node_size) FOONATHAN_NOEXCEPT; - // calls other constructor plus insert ordered_free_memory_list(std::size_t node_size, - void *mem, std::size_t size) FOONATHAN_NOEXCEPT; + void *mem, std::size_t size) FOONATHAN_NOEXCEPT + : ordered_free_memory_list(node_size) + { + insert(mem, size); + } ordered_free_memory_list(ordered_free_memory_list &&other) FOONATHAN_NOEXCEPT; + ~ordered_free_memory_list() FOONATHAN_NOEXCEPT = default; - 
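The rewritten free_memory_list keeps nothing but a head pointer: the next pointer of every free node is stored inside the node itself. Below is a minimal, self-contained sketch of such an intrusive free list; the class and helper names are invented for illustration, and the real class additionally handles debug fences and array allocations.

    // intrusive_free_list_sketch.cpp - illustrative only
    #include <cassert>
    #include <cstddef>
    #include <cstring>
    #include <vector>

    class free_list
    {
    public:
        explicit free_list(std::size_t node_size)
        : first_(nullptr),
          node_size_(node_size < sizeof(char*) ? sizeof(char*) : node_size),
          capacity_(0) {}

        // carves a raw block into nodes and pushes them onto the list
        void insert(void* mem, std::size_t size)
        {
            auto block = static_cast<char*>(mem);
            for (std::size_t i = 0; i + node_size_ <= size; i += node_size_)
            {
                set_next(block + i, first_);
                first_ = block + i;
                ++capacity_;
            }
        }

        void* allocate()
        {
            assert(first_);
            auto node = first_;
            first_    = get_next(node); // pop the head
            --capacity_;
            return node;
        }

        void deallocate(void* ptr)
        {
            set_next(ptr, first_);      // push onto the head
            first_ = static_cast<char*>(ptr);
            ++capacity_;
        }

        std::size_t capacity() const { return capacity_; }

    private:
        // the next pointer lives in the first bytes of each free node
        static void set_next(void* node, char* next) { std::memcpy(node, &next, sizeof(char*)); }
        static char* get_next(void* node)
        {
            char* next;
            std::memcpy(&next, node, sizeof(char*));
            return next;
        }

        char*       first_;
        std::size_t node_size_, capacity_;
    };

    int main()
    {
        std::vector<char> block(1024);
        free_list list(32);
        list.insert(block.data(), block.size());
        void* a = list.allocate();
        void* b = list.allocate();
        list.deallocate(a);
        list.deallocate(b);
        assert(list.capacity() == 1024 / 32);
    }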
ordered_free_memory_list& operator=(ordered_free_memory_list &&other) FOONATHAN_NOEXCEPT; + ordered_free_memory_list& operator=(ordered_free_memory_list &&other) FOONATHAN_NOEXCEPT + { + ordered_free_memory_list tmp(detail::move(other)); + swap(*this, tmp); + return *this; + } friend void swap(ordered_free_memory_list &a, ordered_free_memory_list &b) FOONATHAN_NOEXCEPT; @@ -222,7 +146,13 @@ namespace foonathan { namespace memory void deallocate(void *ptr, std::size_t n) FOONATHAN_NOEXCEPT; //=== getter ===// - std::size_t node_size() const FOONATHAN_NOEXCEPT; + std::size_t node_size() const FOONATHAN_NOEXCEPT + { + return node_size_; + } + + // alignment of all nodes + std::size_t alignment() const FOONATHAN_NOEXCEPT; // number of nodes remaining std::size_t capacity() const FOONATHAN_NOEXCEPT @@ -232,80 +162,21 @@ namespace foonathan { namespace memory bool empty() const FOONATHAN_NOEXCEPT { - return list_.empty(); + return capacity_ == 0u; } - // alignment of all nodes - std::size_t alignment() const FOONATHAN_NOEXCEPT; - private: - // node size with fence - std::size_t node_fence_size() const FOONATHAN_NOEXCEPT; + std::size_t fence_size() const FOONATHAN_NOEXCEPT; - // xor linked list storing the free nodes - // keeps the list ordered to support arrays - class list_impl - { - public: - list_impl() FOONATHAN_NOEXCEPT - : first_(nullptr), last_(nullptr), - insert_(nullptr), insert_prev_(nullptr) {} - - list_impl(std::size_t node_size, - void *memory, std::size_t no_nodes) FOONATHAN_NOEXCEPT - : list_impl() - { - insert(node_size, memory, no_nodes, false); - } - - list_impl(list_impl &&other) FOONATHAN_NOEXCEPT - : first_(other.first_), last_(other.last_), - insert_(other.insert_), insert_prev_(other.insert_prev_) - { - other.first_ = other.last_ = nullptr; - other.insert_ = other.insert_prev_ = nullptr; - } - - ~list_impl() FOONATHAN_NOEXCEPT = default; - - list_impl& operator=(list_impl &&other) FOONATHAN_NOEXCEPT - { - list_impl tmp(detail::move(other)); - swap(*this, tmp); - return *this; - } - - friend void swap(list_impl &a, list_impl &b) FOONATHAN_NOEXCEPT - { - detail::adl_swap(a.first_, b.first_); - detail::adl_swap(a.last_, b.last_); - detail::adl_swap(a.insert_, b.insert_); - detail::adl_swap(a.insert_prev_, b.insert_prev_); - } - - // inserts nodes into the list - // node_size is the node_size_ member of the actual free list class - void insert(std::size_t node_size, - void* memory, std::size_t no_nodes, bool new_memory) FOONATHAN_NOEXCEPT; - - // erases nodes from the list - // node_size is the node_size_ member of the actual free list class - void* erase(std::size_t node_size) FOONATHAN_NOEXCEPT; - void* erase(std::size_t node_size, std::size_t bytes_needed) FOONATHAN_NOEXCEPT; - - bool empty() const FOONATHAN_NOEXCEPT; - - private: - struct pos {char *prev, *after;}; - - // finds the position to insert memory - pos find_pos(std::size_t node_size, char* memory) const FOONATHAN_NOEXCEPT; - - char *first_, *last_; - char *insert_, *insert_prev_; // pointer to last insert position - } list_; + // returns previous pointer + char* insert_impl(void *mem, std::size_t size) FOONATHAN_NOEXCEPT; + + char* begin_node() FOONATHAN_NOEXCEPT; + char* end_node() FOONATHAN_NOEXCEPT; + std::uintptr_t begin_proxy_, end_proxy_; std::size_t node_size_, capacity_; + char *last_dealloc_, *last_dealloc_prev_; }; #if FOONATHAN_MEMORY_DEBUG_DOUBLE_DEALLOC_CHECk diff --git a/include/foonathan/memory/detail/memory_stack.hpp b/include/foonathan/memory/detail/memory_stack.hpp index 86b5ea4d..17757e37 
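ordered_free_memory_list is now an XOR doubly linked list: each node stores prev ^ next in a single pointer-sized slot, and two proxy nodes (begin_proxy_/end_proxy_) bound the list. A standalone sketch of the XOR-link arithmetic follows; the node is simplified to a plain struct and all names are assumptions.

    // xor_list_sketch.cpp - illustrative only
    #include <cstdint>
    #include <cstdio>

    struct xor_node
    {
        std::uintptr_t both; // prev ^ next
    };

    std::uintptr_t to_int(xor_node* n)   { return reinterpret_cast<std::uintptr_t>(n); }
    xor_node*      from_int(std::uintptr_t i) { return reinterpret_cast<xor_node*>(i); }

    // given one neighbour, recover the other one
    xor_node* other(xor_node* node, xor_node* one_neighbor)
    {
        return from_int(node->both ^ to_int(one_neighbor));
    }

    void set(xor_node* node, xor_node* prev, xor_node* next)
    {
        node->both = to_int(prev) ^ to_int(next);
    }

    int main()
    {
        // begin/end proxies plus three real nodes: begin <-> a <-> b <-> c <-> end
        xor_node begin{}, a{}, b{}, c{}, end{};
        set(&begin, nullptr, &a);
        set(&a, &begin, &b);
        set(&b, &a, &c);
        set(&c, &b, &end);
        set(&end, &c, nullptr);

        // forward traversal only needs the current node and the previous one
        xor_node* prev = &begin;
        xor_node* cur  = other(&begin, nullptr);
        while (cur != &end)
        {
            std::printf("visiting node at %p\n", static_cast<void*>(cur));
            xor_node* next = other(cur, prev);
            prev = cur;
            cur  = next;
        }
    }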
100644 --- a/include/foonathan/memory/detail/memory_stack.hpp +++ b/include/foonathan/memory/detail/memory_stack.hpp @@ -8,6 +8,9 @@ #include #include "../config.hpp" +#include "align.hpp" +#include "debug_helpers.hpp" +#include "../debugging.hpp" namespace foonathan { namespace memory { @@ -24,20 +27,73 @@ namespace foonathan { namespace memory explicit fixed_memory_stack(void *memory) FOONATHAN_NOEXCEPT : cur_(static_cast(memory)) {} - fixed_memory_stack(fixed_memory_stack &&other) FOONATHAN_NOEXCEPT; + fixed_memory_stack(fixed_memory_stack &&other) FOONATHAN_NOEXCEPT + : cur_(other.cur_) + { + other.cur_ = nullptr; + } ~fixed_memory_stack() FOONATHAN_NOEXCEPT = default; - fixed_memory_stack& operator=(fixed_memory_stack &&other) FOONATHAN_NOEXCEPT; + fixed_memory_stack& operator=(fixed_memory_stack &&other) FOONATHAN_NOEXCEPT + { + cur_ = other.cur_; + other.cur_ = nullptr; + return *this; + } + + // bumps the top pointer by offset and fills + void bump(std::size_t offset, debug_magic m) FOONATHAN_NOEXCEPT + { + detail::debug_fill(cur_, offset, m); + cur_ += offset; + } + + // same as bump() but returns old value + void* bump_return(std::size_t offset, debug_magic m = debug_magic::new_memory) FOONATHAN_NOEXCEPT + { + auto memory = cur_; + detail::debug_fill(memory, offset, m); + cur_ += offset; + return memory; + } // allocates memory by advancing the stack, returns nullptr if insufficient // debug: mark memory as new_memory, put fence in front and back - void* allocate(const char *end, std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT; + void* allocate(const char *end, std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT + { + if (cur_ == nullptr) + return nullptr; + + auto remaining = std::size_t(end - cur_); + auto offset = align_offset(cur_ + debug_fence_size, alignment); + + if (debug_fence_size + offset + size + debug_fence_size > remaining) + return nullptr; + debug_fill(cur_, offset, debug_magic::alignment_memory); + cur_ += offset; + + debug_fill(cur_, debug_fence_size, debug_magic::fence_memory); + cur_ += debug_fence_size; + + auto memory = cur_; + debug_fill(cur_, size, debug_magic::new_memory); + cur_ += size; + + debug_fill(cur_, debug_fence_size, debug_magic::fence_memory); + cur_ += debug_fence_size; + + return memory; + } // unwindws the stack to a certain older position // debug: marks memory from new top to old top as freed // doesn't check for invalid pointer - void unwind(char *top) FOONATHAN_NOEXCEPT; + void unwind(char *top) FOONATHAN_NOEXCEPT + { + debug_fill(top, std::size_t(cur_ - top), debug_magic::freed_memory); + cur_ = top; + } // returns the current top char* top() const FOONATHAN_NOEXCEPT diff --git a/include/foonathan/memory/detail/small_free_list.hpp b/include/foonathan/memory/detail/small_free_list.hpp index b007e639..d7a0bd62 100644 --- a/include/foonathan/memory/detail/small_free_list.hpp +++ b/include/foonathan/memory/detail/small_free_list.hpp @@ -8,47 +8,29 @@ #include #include "../config.hpp" +#include "utility.hpp" namespace foonathan { namespace memory { namespace detail { - // a chunk in the free list - struct chunk; - - // a list of chunks - class chunk_list + struct chunk_base { - public: - chunk_list() FOONATHAN_NOEXCEPT = default; - chunk_list(chunk_list &&other) FOONATHAN_NOEXCEPT; - ~chunk_list() FOONATHAN_NOEXCEPT = default; - - chunk_list& operator=(chunk_list &&other) FOONATHAN_NOEXCEPT; + chunk_base *prev = this; + chunk_base *next = this; - friend void swap(chunk_list &a, chunk_list &b) FOONATHAN_NOEXCEPT; + 
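detail::fixed_memory_stack is a bump allocator over [cur, end): allocation skips an alignment gap, hands out the current top and advances it, and unwind() simply moves the top back. A simplified sketch of that behaviour, with the debug-fence and debug-fill steps left out and all names assumed:

    // bump_stack_sketch.cpp - illustrative only
    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    class bump_stack
    {
    public:
        bump_stack(char* memory, std::size_t size)
        : cur_(memory), end_(memory + size) {}

        // returns nullptr if the remaining space is too small
        void* allocate(std::size_t size, std::size_t alignment)
        {
            auto address    = reinterpret_cast<std::uintptr_t>(cur_);
            auto misaligned = address & (alignment - 1);
            auto offset     = misaligned ? alignment - misaligned : 0;

            if (offset + size > std::size_t(end_ - cur_))
                return nullptr;

            cur_ += offset;     // skip the alignment gap
            auto memory = cur_; // this is what the caller gets
            cur_ += size;       // bump past the payload
            return memory;
        }

        char* top() const { return cur_; }

        // roll back everything allocated after 'top' was taken
        void unwind(char* top)
        {
            assert(top <= cur_);
            cur_ = top;
        }

    private:
        char *cur_, *end_;
    };

    int main()
    {
        std::vector<char> arena(256);
        bump_stack stack(arena.data(), arena.size());

        auto marker = stack.top();
        void* a = stack.allocate(24, 8);
        void* b = stack.allocate(100, 16);
        assert(a && b);

        stack.unwind(marker); // frees a and b at once
        assert(stack.top() == marker);
    }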
unsigned char first_free = 0; // first free node for the linked list + unsigned char capacity = 0; // total number of free nodes available + unsigned char no_nodes = 0; // total number of nodes in memory - // inserts a new chunk into the list - void insert(chunk *c) FOONATHAN_NOEXCEPT; - - // inserts the next chunk from another list - chunk* insert(chunk_list &other) FOONATHAN_NOEXCEPT; - - // returns the next chunk - chunk* top() const FOONATHAN_NOEXCEPT - { - return first_; - } + chunk_base() FOONATHAN_NOEXCEPT = default; - bool empty() const FOONATHAN_NOEXCEPT - { - return first_ == nullptr; - } - - private: - chunk *first_ = nullptr; + chunk_base(unsigned char no) FOONATHAN_NOEXCEPT + : capacity(no), no_nodes(no) {} }; + struct chunk; + // the same as free_memory_list but optimized for small node sizes // it is slower and does not support arrays // but has very small overhead @@ -73,7 +55,12 @@ namespace foonathan { namespace memory ~small_free_memory_list() FOONATHAN_NOEXCEPT = default; - small_free_memory_list& operator=(small_free_memory_list &&other) FOONATHAN_NOEXCEPT; + small_free_memory_list& operator=(small_free_memory_list &&other) FOONATHAN_NOEXCEPT + { + small_free_memory_list tmp(detail::move(other)); + swap(*this, tmp); + return *this; + } friend void swap(small_free_memory_list &a, small_free_memory_list &b) FOONATHAN_NOEXCEPT; @@ -103,10 +90,19 @@ namespace foonathan { namespace memory // returns false, if there is none like that // never fails for n == 1 if not empty() // pre: capacity() >= n * node_size() - bool find_chunk(std::size_t n) FOONATHAN_NOEXCEPT; + bool find_chunk(std::size_t n) FOONATHAN_NOEXCEPT + { + return find_chunk_impl(n) != nullptr; + } //=== getter ===// - std::size_t node_size() const FOONATHAN_NOEXCEPT; + std::size_t node_size() const FOONATHAN_NOEXCEPT + { + return node_size_; + } + + // the alignment of all nodes + std::size_t alignment() const FOONATHAN_NOEXCEPT; // number of nodes remaining std::size_t capacity() const FOONATHAN_NOEXCEPT @@ -119,23 +115,20 @@ namespace foonathan { namespace memory return capacity_ == 0u; } - // the alignment of all nodes - std::size_t alignment() const FOONATHAN_NOEXCEPT; - private: - // finds the chunk from which memory is and returns it - // starts at dealloc_chunk_ and goes in both directions - // returns nullptr if no chunk - chunk* chunk_for(void *memory) FOONATHAN_NOEXCEPT; - - // node size with fence - std::size_t node_fence_size() const FOONATHAN_NOEXCEPT; + std::size_t fence_size() const FOONATHAN_NOEXCEPT; - chunk_list unused_chunks_, used_chunks_; - chunk *alloc_chunk_, *dealloc_chunk_; + chunk* find_chunk_impl(std::size_t n = 1) FOONATHAN_NOEXCEPT; + chunk* find_chunk_impl(unsigned char *node, chunk_base *first, chunk_base *last) FOONATHAN_NOEXCEPT; + chunk* find_chunk_impl(unsigned char *node) FOONATHAN_NOEXCEPT; + chunk_base base_; std::size_t node_size_, capacity_; + chunk_base *alloc_chunk_, *dealloc_chunk_; }; + + // for some reason, this is required in order to define it + void swap(small_free_memory_list &a, small_free_memory_list &b) FOONATHAN_NOEXCEPT; } // namespace detail }} // namespace foonathan::memory diff --git a/include/foonathan/memory/error.hpp b/include/foonathan/memory/error.hpp index 3134c69c..2d9e250c 100644 --- a/include/foonathan/memory/error.hpp +++ b/include/foonathan/memory/error.hpp @@ -139,6 +139,7 @@ namespace foonathan { namespace memory /// because the maximum functions return an upper bound and not the actual supported maximum size, /// since it always depends on 
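chunk_base describes a chunk of at most 255 tiny nodes, and the embedded free list stores one-byte indices instead of full pointers: the first byte of each free node holds the index of the next free node. A standalone sketch of that index-based list inside a single chunk; free functions replace the real member functions and all names are assumptions.

    // small_chunk_sketch.cpp - illustrative only
    #include <cassert>
    #include <cstddef>
    #include <vector>

    struct chunk
    {
        unsigned char first_free = 0; // index of the first free node
        unsigned char capacity   = 0; // free nodes left
        unsigned char no_nodes   = 0; // total nodes managed by this chunk
    };

    // builds the index-based free list inside 'memory'
    void chunk_init(chunk& c, unsigned char no_nodes, unsigned char* memory, std::size_t node_size)
    {
        c.first_free = 0;
        c.capacity   = no_nodes;
        c.no_nodes   = no_nodes;
        for (unsigned char i = 0; i != no_nodes; ++i)
            memory[i * node_size] = static_cast<unsigned char>(i + 1); // first byte = next free index
    }

    void* chunk_allocate(chunk& c, unsigned char* memory, std::size_t node_size)
    {
        assert(c.capacity > 0);
        auto node    = memory + c.first_free * node_size;
        c.first_free = *node; // pop: follow the stored index
        --c.capacity;
        return node;
    }

    void chunk_deallocate(chunk& c, unsigned char* memory, std::size_t node_size, void* ptr)
    {
        auto node    = static_cast<unsigned char*>(ptr);
        auto index   = static_cast<unsigned char>((node - memory) / node_size);
        *node        = c.first_free; // push: remember the old head
        c.first_free = index;
        ++c.capacity;
    }

    int main()
    {
        const std::size_t node_size = 8;
        std::vector<unsigned char> memory(64 * node_size);
        chunk c;
        chunk_init(c, 64, memory.data(), node_size);

        void* a = chunk_allocate(c, memory.data(), node_size);
        void* b = chunk_allocate(c, memory.data(), node_size);
        chunk_deallocate(c, memory.data(), node_size, a);
        chunk_deallocate(c, memory.data(), node_size, b);
        assert(c.capacity == 64);
    }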
fence memory, alignment buffer and the like. /// \note A user should only \c catch for \c bad_allocation_size, not the derived classes. + /// \note Most checks will only be done if \ref FOONATHAN_MEMORY_CHECK_ALLOCATION_SIZE is \c true. /// \ingroup memory core class bad_allocation_size : public std::bad_alloc { @@ -254,28 +255,24 @@ namespace foonathan { namespace memory namespace detail { - inline void check_allocation_size(std::size_t passed, std::size_t supported, const allocator_info &info) + template + void check_allocation_size(std::size_t passed, Func f, const allocator_info &info) { + #if FOONATHAN_MEMORY_CHECK_ALLOCATION_SIZE + auto supported = f(); if (passed > supported) - FOONATHAN_THROW(bad_allocation_size(info, passed, supported)); + FOONATHAN_THROW(Ex(info, passed, supported)); + #else + (void)passed; + (void)f; + (void)info; + #endif } - inline void check_node_size(std::size_t passed, std::size_t supported, const allocator_info &info) + template + void check_allocation_size(std::size_t passed, std::size_t supported, const allocator_info &info) { - if (passed > supported) - FOONATHAN_THROW(bad_node_size(info, passed, supported)); - } - - inline void check_array_size(std::size_t passed, std::size_t supported, const allocator_info &info) - { - if (passed > supported) - FOONATHAN_THROW(bad_array_size(info, passed, supported)); - } - - inline void check_alignment(std::size_t passed, std::size_t supported, const allocator_info &info) - { - if (passed > supported) - FOONATHAN_THROW(bad_alignment(info, passed, supported)); + check_allocation_size(passed, [&]{return supported;}, info); } } // namespace detail }} // namespace foonathan::memory diff --git a/include/foonathan/memory/memory_arena.hpp b/include/foonathan/memory/memory_arena.hpp index 060a8f2e..5ca461ae 100644 --- a/include/foonathan/memory/memory_arena.hpp +++ b/include/foonathan/memory/memory_arena.hpp @@ -121,7 +121,12 @@ namespace foonathan { namespace memory void steal_top(memory_block_stack &other) FOONATHAN_NOEXCEPT; // returns the last pushed() inserted memory block - inserted_mb top() const FOONATHAN_NOEXCEPT; + inserted_mb top() const FOONATHAN_NOEXCEPT + { + FOONATHAN_MEMORY_ASSERT(head_); + auto mem = static_cast(head_); + return {static_cast(mem) + node::offset, head_->usable_size}; + } bool empty() const FOONATHAN_NOEXCEPT { @@ -132,7 +137,19 @@ namespace foonathan { namespace memory std::size_t size() const FOONATHAN_NOEXCEPT; private: - struct node; + struct node + { + node *prev; + std::size_t usable_size; + + node(node *prev, std::size_t size) FOONATHAN_NOEXCEPT + : prev(prev), usable_size(size) {} + + static const std::size_t div_alignment; + static const std::size_t mod_offset; + static const std::size_t offset; + }; + node *head_; }; diff --git a/include/foonathan/memory/memory_pool.hpp b/include/foonathan/memory/memory_pool.hpp index 8f41a3b5..ea6d213f 100644 --- a/include/foonathan/memory/memory_pool.hpp +++ b/include/foonathan/memory/memory_pool.hpp @@ -114,7 +114,9 @@ namespace foonathan { namespace memory /// \requires \c n must be valid \concept{concept_array,array count}. void* allocate_array(std::size_t n) { - detail::check_array_size(n * node_size(), pool_type::value ? next_capacity() : 0, info()); + detail::check_allocation_size(n * node_size(), + [&]{return pool_type::value ? 
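The separate check_node_size/check_array_size/check_alignment helpers collapse into a single template: the exception type becomes a template parameter and the supported maximum is computed lazily by a functor, so builds with the check disabled pay nothing. A compilable approximation of that pattern, substituting std::length_error and a local macro for the library's bad_allocation_size hierarchy and CMake-generated option:

    // check_size_sketch.cpp - illustrative only
    #include <cstddef>
    #include <stdexcept>
    #include <string>

    #define CHECK_ALLOCATION_SIZE 1 // stand-in for the CMake-generated macro

    // Ex: exception to throw, Func: lazily computes the supported maximum
    template <class Ex, class Func>
    void check_allocation_size(std::size_t passed, Func max_supported)
    {
    #if CHECK_ALLOCATION_SIZE
        auto supported = max_supported();
        if (passed > supported)
            throw Ex("requested " + std::to_string(passed)
                     + " bytes, supported " + std::to_string(supported));
    #else
        (void)passed;
        (void)max_supported;
    #endif
    }

    // convenience overload when the maximum is already known
    template <class Ex>
    void check_allocation_size(std::size_t passed, std::size_t supported)
    {
        check_allocation_size<Ex>(passed, [&] { return supported; });
    }

    int main()
    {
        try
        {
            check_allocation_size<std::length_error>(1024, [] { return std::size_t(512); });
        }
        catch (const std::length_error&)
        {
            // reached: 1024 > 512
        }
        check_allocation_size<std::length_error>(64, std::size_t(512)); // fine, no throw
    }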
next_capacity() : 0;}, + info()); return allocate_array(n, node_size()); } @@ -187,7 +189,6 @@ namespace foonathan { namespace memory allocate_block(); mem = free_list_.allocate(n * node_size); if (!mem) - // generic: bad size FOONATHAN_THROW(bad_array_size(info(), n * node_size, capacity_left())); } return mem; @@ -233,8 +234,8 @@ namespace foonathan { namespace memory static void* allocate_node(allocator_type &state, std::size_t size, std::size_t alignment) { - detail::check_node_size(size, max_node_size(state), state.info()); - detail::check_alignment(alignment, max_alignment(state), state.info()); + detail::check_allocation_size(size, max_node_size(state), state.info()); + detail::check_allocation_size(alignment, [&]{return max_alignment(state);}, state.info()); auto mem = state.allocate_node(); state.on_allocate(size); return mem; @@ -249,9 +250,9 @@ namespace foonathan { namespace memory static void* allocate_array(allocator_type &state, std::size_t count, std::size_t size, std::size_t alignment) { - detail::check_node_size(size, max_node_size(state), state.info()); - detail::check_alignment(alignment, max_alignment(state), state.info()); - detail::check_array_size(count, max_array_size(state), state.info()); + detail::check_allocation_size(size, max_node_size(state), state.info()); + detail::check_allocation_size(alignment, [&]{return max_alignment(state);}, state.info()); + detail::check_allocation_size(count * size, max_array_size(state), state.info()); return allocate_array(PoolType{}, state, count, size); } diff --git a/include/foonathan/memory/memory_pool_collection.hpp b/include/foonathan/memory/memory_pool_collection.hpp index 6b824f28..69dca7fa 100644 --- a/include/foonathan/memory/memory_pool_collection.hpp +++ b/include/foonathan/memory/memory_pool_collection.hpp @@ -120,7 +120,7 @@ namespace foonathan { namespace memory /// \throws Anything thrown by the \concept{concept_blockallocator,BlockAllocator} if a growth is needed or a \ref bad_node_size exception if the node size is too big. void* allocate_node(std::size_t node_size) { - detail::check_node_size(node_size, max_node_size(), info()); + detail::check_allocation_size(node_size, [&]{return max_node_size();}, info()); auto& pool = pools_.get(node_size); if (pool.empty()) reserve_impl(pool, def_capacity()); @@ -138,9 +138,10 @@ namespace foonathan { namespace memory /// \c node_size must be valid \concept{concept_node,node size}. void* allocate_array(std::size_t count, std::size_t node_size) { - detail::check_node_size(node_size, max_node_size(), info()); - detail::check_allocation_size(count * node_size, PoolType::value ? next_capacity() : 0u, - info()); + detail::check_allocation_size(node_size, [&]{return max_node_size();}, info()); + detail::check_allocation_size(count * node_size, + [&]{return PoolType::value ? 
next_capacity() : 0u;}, + info()); auto& pool = pools_.get(node_size); if (pool.empty()) reserve_impl(pool, def_capacity()); @@ -323,8 +324,8 @@ namespace foonathan { namespace memory static void* allocate_node(allocator_type &state, std::size_t size, std::size_t alignment) { - detail::check_node_size(size, max_node_size(state), state.info()); - detail::check_alignment(alignment, detail::alignment_for(size), state.info()); + // node already checked + detail::check_allocation_size(alignment, [&]{return detail::alignment_for(size);}, state.info()); auto mem = state.allocate_node(size); state.on_allocate(size); return mem; @@ -337,7 +338,7 @@ namespace foonathan { namespace memory std::size_t size, std::size_t alignment) { // node and array already checked - detail::check_alignment(alignment, max_alignment(state), state.info()); + detail::check_allocation_size(alignment, [&]{return detail::alignment_for(size);}, state.info()); return allocate_array(Pool{}, state, count, size); } diff --git a/include/foonathan/memory/memory_stack.hpp b/include/foonathan/memory/memory_stack.hpp index b015c9be..2854c80c 100644 --- a/include/foonathan/memory/memory_stack.hpp +++ b/include/foonathan/memory/memory_stack.hpp @@ -63,10 +63,9 @@ namespace foonathan { namespace memory template explicit memory_stack(std::size_t block_size, Args&&... args) - : arena_(block_size, detail::forward(args)...) - { - allocate_block(); - } + : arena_(block_size, detail::forward(args)...), + stack_(arena_.allocate_block().memory) + {} /// \effects Allocates a memory block of given size and alignment. /// It simply moves the top marker. @@ -79,14 +78,29 @@ namespace foonathan { namespace memory /// \requires \c size and \c alignment must be valid. void* allocate(std::size_t size, std::size_t alignment) { - detail::check_allocation_size(size, next_capacity(), info()); - auto mem = stack_.allocate(block_end(), size, alignment); - if (!mem) + detail::check_allocation_size(size, next_capacity(), info()); + + auto fence = detail::debug_fence_size; + auto offset = detail::align_offset(stack_.top() + fence, alignment); + + if (fence + offset + size + fence <= std::size_t(block_end() - stack_.top())) { - allocate_block(); - mem = stack_.allocate(block_end(), size, alignment); - FOONATHAN_MEMORY_ASSERT(mem); + stack_.bump(fence, debug_magic::fence_memory); + stack_.bump(offset, debug_magic::alignment_memory); } + else + { + auto block = arena_.allocate_block(); + FOONATHAN_MEMORY_ASSERT_MSG(fence + size + fence <= block.size, "new block size not big enough"); + + stack_ = detail::fixed_memory_stack(block.memory); + // no need to align, block should be aligned for maximum + stack_.bump(fence, debug_magic::fence_memory); + } + + FOONATHAN_MEMORY_ASSERT(detail::is_aligned(stack_.top(), alignment)); + auto mem = stack_.bump_return(size); + stack_.bump(fence, debug_magic::fence_memory); return mem; } @@ -180,11 +194,6 @@ namespace foonathan { namespace memory return {FOONATHAN_MEMORY_LOG_PREFIX "::memory_stack", this}; } - void allocate_block() - { - stack_ = detail::fixed_memory_stack(arena_.allocate_block().memory); - } - const char* block_end() const FOONATHAN_NOEXCEPT { auto block = arena_.current_block(); diff --git a/include/foonathan/memory/temporary_allocator.hpp b/include/foonathan/memory/temporary_allocator.hpp index 03644ccd..7f84f622 100644 --- a/include/foonathan/memory/temporary_allocator.hpp +++ b/include/foonathan/memory/temporary_allocator.hpp @@ -110,7 +110,7 @@ namespace foonathan { namespace memory /// \returns The result of 
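memory_stack::allocate now does the fence and alignment arithmetic inline: if the current block has room it bumps past the front fence, the alignment gap and the payload, otherwise it takes a fresh block from the arena and allocates from its start. A much simplified sketch of that control flow, with a vector of heap blocks standing in for memory_arena and the debug fences left out; all names and the growth policy are assumptions.

    // growing_stack_sketch.cpp - illustrative only
    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <memory>
    #include <vector>

    class growing_stack
    {
    public:
        explicit growing_stack(std::size_t block_size)
        : block_size_(block_size)
        {
            new_block(); // like the rewritten ctor, grab the first block eagerly
        }

        void* allocate(std::size_t size, std::size_t alignment)
        {
            auto offset = align_offset(cur_, alignment);
            if (offset + size > std::size_t(end_ - cur_))
            {
                // not enough room: start a new block, assumed big enough and max-aligned
                new_block();
                offset = align_offset(cur_, alignment);
                assert(offset + size <= std::size_t(end_ - cur_));
            }
            cur_ += offset;
            auto memory = cur_;
            cur_ += size;
            return memory;
        }

    private:
        static std::size_t align_offset(char* ptr, std::size_t alignment)
        {
            auto misaligned = reinterpret_cast<std::uintptr_t>(ptr) & (alignment - 1);
            return misaligned ? std::size_t(alignment - misaligned) : 0u;
        }

        void new_block()
        {
            blocks_.emplace_back(new char[block_size_]);
            cur_ = blocks_.back().get();
            end_ = cur_ + block_size_;
        }

        std::vector<std::unique_ptr<char[]>> blocks_;
        std::size_t block_size_;
        char *cur_ = nullptr, *end_ = nullptr;
    };

    int main()
    {
        growing_stack stack(128);
        for (int i = 0; i != 10; ++i)
            assert(stack.allocate(48, 8) != nullptr); // spills into new blocks as needed
    }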
\ref temporary_allocator::allocate(). static void* allocate_node(allocator_type &state, std::size_t size, std::size_t alignment) { - detail::check_node_size(size, max_node_size(state), + detail::check_allocation_size(size, [&]{return max_node_size(state);}, {FOONATHAN_MEMORY_LOG_PREFIX "::temporary_allocator", &state}); return state.allocate(size, alignment); } diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 0dd1b270..d7568cb8 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -50,8 +50,8 @@ set(src detail/assert.cpp detail/free_list.cpp detail/free_list_array.cpp + detail/free_list_utils.hpp detail/ilog2.hpp - detail/memory_stack.cpp detail/small_free_list.cpp debugging.cpp error.cpp diff --git a/src/config.hpp.in b/src/config.hpp.in index 43150c20..34779980 100644 --- a/src/config.hpp.in +++ b/src/config.hpp.in @@ -9,6 +9,7 @@ #include //=== options ===// +#cmakedefine01 FOONATHAN_MEMORY_CHECK_ALLOCATION_SIZE #define FOONATHAN_MEMORY_IMPL_DEFAULT_ALLOCATOR ${FOONATHAN_MEMORY_DEFAULT_ALLOCATOR} #cmakedefine01 FOONATHAN_MEMORY_THREAD_SAFE_REFERENCE #cmakedefine01 FOONATHAN_MEMORY_DEBUG_ASSERT diff --git a/src/detail/align.cpp b/src/detail/align.cpp index 76f13505..aa8a3a9f 100644 --- a/src/detail/align.cpp +++ b/src/detail/align.cpp @@ -4,20 +4,11 @@ #include "detail/align.hpp" -#include "detail/assert.hpp" #include "ilog2.hpp" using namespace foonathan::memory; using namespace detail; -std::size_t foonathan::memory::detail::align_offset(void *ptr, std::size_t alignment) FOONATHAN_NOEXCEPT -{ - FOONATHAN_MEMORY_ASSERT(is_valid_alignment(alignment)); - auto address = reinterpret_cast(ptr); - auto misaligned = address & (alignment - 1); - return misaligned != 0 ? (alignment - misaligned) : 0; -} - bool foonathan::memory::detail::is_aligned(void *ptr, std::size_t alignment) FOONATHAN_NOEXCEPT { FOONATHAN_MEMORY_ASSERT(is_valid_alignment(alignment)); diff --git a/src/detail/debug_helpers.cpp b/src/detail/debug_helpers.cpp index f9c47b1e..e600985c 100644 --- a/src/detail/debug_helpers.cpp +++ b/src/detail/debug_helpers.cpp @@ -75,25 +75,6 @@ using namespace detail; { debug_fill(memory, size, free ? debug_magic::internal_freed_memory : debug_magic::internal_memory); } -#else - void detail::debug_fill(void *, std::size_t, debug_magic) FOONATHAN_NOEXCEPT {} - - void* detail::debug_is_filled(void *, std::size_t, debug_magic) FOONATHAN_NOEXCEPT - { - return nullptr; - } - - void* detail::debug_fill_new(void *memory, std::size_t, std::size_t) FOONATHAN_NOEXCEPT - { - return memory; - } - - void* detail::debug_fill_free(void *memory, std::size_t, std::size_t) FOONATHAN_NOEXCEPT - { - return static_cast(memory); - } - - void detail::debug_fill_internal(void *, std::size_t, bool) FOONATHAN_NOEXCEPT {} #endif void detail::debug_handle_invalid_ptr(const allocator_info &info, void *ptr) diff --git a/src/detail/free_list.cpp b/src/detail/free_list.cpp index e5c19d7a..8b19c03b 100644 --- a/src/detail/free_list.cpp +++ b/src/detail/free_list.cpp @@ -4,149 +4,129 @@ #include "detail/free_list.hpp" -#include - -#if FOONATHAN_HOSTED_IMPLEMENTATION - #include -#endif - #include "detail/align.hpp" #include "detail/debug_helpers.hpp" #include "detail/assert.hpp" #include "debugging.hpp" #include "error.hpp" +#include "free_list_utils.hpp" + using namespace foonathan::memory; using namespace detail; namespace { - // reads stored integer value - std::uintptr_t get_int(void *address) FOONATHAN_NOEXCEPT + // i.e. 
array + struct interval { - FOONATHAN_MEMORY_ASSERT(address); - return *static_cast(address); - } + char *prev; // last before + char *first; // first in + char *last; // last in + char *next; // first after - // sets stored integer value - void set_int(void *address, std::uintptr_t i) FOONATHAN_NOEXCEPT - { - FOONATHAN_MEMORY_ASSERT(address); - *static_cast(address) = i; - } + // number of nodes in the interval + std::size_t size(std::size_t node_size) const FOONATHAN_NOEXCEPT + { + // last is inclusive, so add actual_size to it + // note: cannot use next, might not be directly after + auto end = last + node_size; + FOONATHAN_MEMORY_ASSERT((end - first) % node_size == 0u); + return (end - first) / node_size; + } + }; - // pointer to integer - std::uintptr_t to_int(char *ptr) FOONATHAN_NOEXCEPT + // searches for n consecutive bytes + // begin and end are the proxy nodes + // assumes list is not empty + // similar to list_search_array() + interval list_search_array(char *first, + std::size_t bytes_needed, std::size_t node_size) FOONATHAN_NOEXCEPT { - return reinterpret_cast(ptr); - } + interval i; + i.prev = nullptr; + i.first = first; + // i.last/next are used as iterator for the end of the interval + i.last = first; + i.next = list_get_next(first); + + auto bytes_so_far = node_size; + while (i.next) + { + if (i.last + node_size != i.next) // not continous + { + // restart at next + i.prev = i.last; + i.first = i.next; + i.last = i.next; + i.next = list_get_next(i.last); - // integer to pointer - char* from_int(std::uintptr_t i) FOONATHAN_NOEXCEPT - { - return reinterpret_cast(i); + bytes_so_far = node_size; + } + else + { + // extend interval + auto new_next = list_get_next(i.next); + i.last = i.next; + i.next = new_next; + + bytes_so_far += node_size; + if (bytes_so_far >= bytes_needed) + return i; + } + } + // not enough continuous space + return {nullptr, nullptr, nullptr, nullptr}; } - // reads a stored pointer value - char* get_ptr(void *address) FOONATHAN_NOEXCEPT + // similar to list_search_array() + // begin/end are proxy nodes + interval xor_list_search_array(char *begin, char *end, + std::size_t bytes_needed, std::size_t node_size) FOONATHAN_NOEXCEPT { - return from_int(get_int(address)); - } + interval i; + i.prev = begin; + i.first = xor_list_get_other(begin, nullptr); + // i.last/next are used as iterator for the end of the interval + i.last = i.first; + i.next = xor_list_get_other(i.last, i.prev); + + auto bytes_so_far = node_size; + while (i.next != end) + { + if (i.last + node_size != i.next) // not continous + { + // restart at i.next + i.prev = i.last; + i.first = i.next; + i.last = i.next; + i.next = xor_list_get_other(i.first, i.prev); - // stores a pointer value - void set_ptr(void *address, char *ptr) FOONATHAN_NOEXCEPT - { - set_int(address, to_int(ptr)); + bytes_so_far = node_size; + } + else + { + // extend interval + auto new_next = xor_list_get_other(i.next, i.last); + i.last = i.next; + i.next = new_next; + + bytes_so_far += node_size; + if (bytes_so_far >= bytes_needed) + return i; + } + } + // not enough continuous space + return {nullptr, nullptr, nullptr, nullptr}; } } FOONATHAN_CONSTEXPR std::size_t free_memory_list::min_element_size; FOONATHAN_CONSTEXPR std::size_t free_memory_list::min_element_alignment; -void *free_memory_list::cache::allocate(std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT -{ - // use alignment as fence size - auto fence = debug_fence_size ? 
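Allocating an array from a free list means finding nodes that happen to be adjacent in memory; the search keeps an interval plus its neighbours and restarts whenever the next list entry is not exactly one node further. A standalone sketch of that scan over an address-ordered singly linked list; the helper names are assumptions, and the single-node case is assumed to be handled by the caller, as in the real allocate().

    // array_search_sketch.cpp - illustrative only
    #include <cassert>
    #include <cstddef>
    #include <cstring>
    #include <vector>

    char* get_next(void* node)
    {
        char* next;
        std::memcpy(&next, node, sizeof(char*));
        return next;
    }

    void set_next(void* node, char* next) { std::memcpy(node, &next, sizeof(char*)); }

    // walks the free list starting at 'first' and returns the start of a run of nodes
    // that are contiguous in memory and cover at least bytes_needed, or nullptr
    // (the list is assumed to be address-ordered)
    char* search_array(char* first, std::size_t bytes_needed, std::size_t node_size)
    {
        auto run_first = first;
        auto run_last  = first;
        auto bytes     = node_size;
        if (bytes >= bytes_needed)
            return run_first;
        for (auto next = get_next(first); next; next = get_next(run_last))
        {
            if (run_last + node_size != next) // not contiguous: restart behind the gap
            {
                run_first = next;
                run_last  = next;
                bytes     = node_size;
            }
            else                              // extend the current run
            {
                run_last = next;
                bytes += node_size;
            }
            if (bytes >= bytes_needed)
                return run_first;
        }
        return nullptr;
    }

    int main()
    {
        const std::size_t node_size = 16;
        std::vector<char> buffer(16 * node_size);

        // build an address-ordered free list over the buffer, skipping one node
        // in the middle so there is a "hole" the search has to jump over
        char* first = nullptr;
        char* last  = nullptr;
        for (std::size_t i = 0; i != 16; ++i)
        {
            if (i == 5)
                continue; // pretend this node is currently allocated
            auto node = buffer.data() + i * node_size;
            if (!first)
                first = node;
            else
                set_next(last, node);
            last = node;
        }
        set_next(last, nullptr);

        // needs 6 adjacent nodes, so the run 0..4 is too short and the scan restarts after the hole
        auto run = search_array(first, 6 * node_size, node_size);
        assert(run == buffer.data() + 6 * node_size);
    }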
alignment : 0u; - if (fence + size + fence > std::size_t(end_ - cur_)) - return nullptr; - - debug_fill(cur_, fence, debug_magic::fence_memory); - cur_ += fence; - - auto mem = cur_; - debug_fill(cur_, size, debug_magic::new_memory); - cur_ += size; - - debug_fill(cur_, fence, debug_magic::fence_memory); - cur_ += fence; - - return mem; -} - -bool free_memory_list::cache::try_deallocate(void *ptr, - std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT -{ - auto fence_size = debug_fence_size ? alignment : 0u; - auto node = static_cast(ptr); - if (node + size + fence_size != cur_) - // cannot be deallocated - return false; - debug_fill(node, size, debug_magic::freed_memory); - cur_ = node - fence_size; // shrink cur back - return true; -} - -std::size_t free_memory_list::cache::no_nodes(std::size_t node_size) const FOONATHAN_NOEXCEPT -{ - auto actual_size = node_size + (debug_fence_size ? 2 * alignment_for(node_size) : 0u); - return std::size_t(end_ - cur_) / actual_size; -} - -std::size_t free_memory_list::list_impl::insert(char *begin, char *end, - std::size_t node_size) FOONATHAN_NOEXCEPT -{ - // increase node size by fence, if necessary - // alignment is fence memory - auto actual_size = node_size + (debug_fence_size ? 2 * alignment_for(node_size) : 0u); - auto no_nodes = std::size_t(end - begin) / actual_size; - if (no_nodes == 0u) - return 0u; - - auto cur = begin; - for (std::size_t i = 0u; i != no_nodes - 1; ++i) - { - set_ptr(cur, cur + actual_size); - cur += actual_size; - } - set_ptr(cur, first_); - first_ = begin; - - return no_nodes; -} - -void free_memory_list::list_impl::push(void *ptr, std::size_t node_size) FOONATHAN_NOEXCEPT -{ - // alignment is fence memory - auto node = static_cast(debug_fill_free(ptr, node_size, alignment_for(node_size))); - - set_ptr(node, first_); - first_ = node; -} - -void *free_memory_list::list_impl::pop(std::size_t node_size) FOONATHAN_NOEXCEPT -{ - if (!first_) - return nullptr; - - auto mem = first_; - first_ = get_ptr(mem); - - // alignment is fence memory - return debug_fill_new(mem, node_size, alignment_for(node_size)); -} - free_memory_list::free_memory_list(std::size_t node_size) FOONATHAN_NOEXCEPT -: node_size_(node_size > min_element_size ? node_size : min_element_size), +: first_(nullptr), + node_size_(node_size > min_element_size ? 
node_size : min_element_size), capacity_(0u) {} @@ -158,9 +138,10 @@ free_memory_list::free_memory_list(std::size_t node_size, } free_memory_list::free_memory_list(free_memory_list &&other) FOONATHAN_NOEXCEPT -: cache_(detail::move(other.cache_)), list_(detail::move(other.list_)), +: first_(other.first_), node_size_(other.node_size_), capacity_(other.capacity_) { + other.first_ = nullptr; other.capacity_ = 0u; } @@ -173,21 +154,18 @@ free_memory_list& free_memory_list::operator=(free_memory_list &&other) FOONATHA void foonathan::memory::detail::swap(free_memory_list &a, free_memory_list &b) FOONATHAN_NOEXCEPT { - detail::adl_swap(a.cache_, b.cache_); - detail::adl_swap(a.list_, b.list_); + detail::adl_swap(a.first_, b.first_); detail::adl_swap(a.node_size_, b.node_size_); detail::adl_swap(a.capacity_, b.capacity_); } void free_memory_list::insert(void* mem, std::size_t size) FOONATHAN_NOEXCEPT { - // insert into cache and old cache into list + FOONATHAN_MEMORY_ASSERT(mem); FOONATHAN_MEMORY_ASSERT(is_aligned(mem, alignment())); + detail::debug_fill_internal(mem, size, false); - list_.insert(cache_.top(), cache_.end(), node_size_); // insert cache into list - cache_ = cache(mem, size); // insert new memory into cache - - capacity_ += cache_.no_nodes(node_size_); + insert_impl(mem, size); } void* free_memory_list::allocate() FOONATHAN_NOEXCEPT @@ -195,440 +173,391 @@ void* free_memory_list::allocate() FOONATHAN_NOEXCEPT FOONATHAN_MEMORY_ASSERT(!empty()); --capacity_; - // try to return from list, to reserve cache for arrays - auto mem = list_.pop(node_size_); - if (!mem) - // use cache - mem = cache_.allocate(node_size_, alignment()); - // mem must not be nullptr now - FOONATHAN_MEMORY_ASSERT(mem); - return mem; + auto mem = first_; + first_ = list_get_next(first_); + return debug_fill_new(mem, node_size_, fence_size()); } void* free_memory_list::allocate(std::size_t n) FOONATHAN_NOEXCEPT { - auto old_nodes = cache_.no_nodes(node_size_); + FOONATHAN_MEMORY_ASSERT(!empty()); + if (n <= node_size_) + return allocate(); + + auto actual_size = node_size_ + 2 * fence_size(); - // allocate from cache - auto mem = cache_.allocate(n, alignment()); - if (!mem) + auto i = list_search_array(first_, n + 2 * fence_size(), actual_size); + if (i.first == nullptr) return nullptr; - auto diff = old_nodes - cache_.no_nodes(node_size_); - capacity_ -= diff; - return mem; + if (i.prev) + list_set_next(i.prev, i.next); // change next from previous to first after + else + first_ = i.next; + capacity_ -= i.size(actual_size); + + return debug_fill_new(i.first, n, fence_size()); } void free_memory_list::deallocate(void* ptr) FOONATHAN_NOEXCEPT { - // try to insert into cache - if (!cache_.try_deallocate(ptr, node_size_, alignment())) - // insert into list if failed - list_.push(ptr, node_size_); ++capacity_; + + auto node = static_cast(debug_fill_free(ptr, node_size_, fence_size())); + list_set_next(node, first_); + first_ = node; } void free_memory_list::deallocate(void *ptr, std::size_t n) FOONATHAN_NOEXCEPT { - auto old_nodes = cache_.no_nodes(node_size_); - - // try to insert into cache - if (cache_.try_deallocate(ptr, n, alignment())) - { - auto diff = cache_.no_nodes(node_size_) - old_nodes; - capacity_ += diff; - } - else // insert into list otherwise + if (n <= node_size_) + deallocate(ptr); + else { - auto fence = (debug_fence_size ? 
alignment() : 0u); - auto node = static_cast(ptr) - fence; - capacity_ += list_.insert(node, node + fence + n + fence, node_size_); + auto mem = debug_fill_free(ptr, n, fence_size()); + insert_impl(mem, n + 2 * fence_size()); } } -std::size_t free_memory_list::node_size() const FOONATHAN_NOEXCEPT +std::size_t free_memory_list::alignment() const FOONATHAN_NOEXCEPT { - return node_size_; + return alignment_for(node_size_); } -bool free_memory_list::empty() const FOONATHAN_NOEXCEPT +std::size_t free_memory_list::fence_size() const FOONATHAN_NOEXCEPT { - return capacity() == 0u; + // node size is fence size + return debug_fence_size ? node_size_ : 0u; } -std::size_t free_memory_list::alignment() const FOONATHAN_NOEXCEPT +void free_memory_list::insert_impl(void *mem, std::size_t size) FOONATHAN_NOEXCEPT { - return alignment_for(node_size_); + auto actual_size = node_size_ + 2 * fence_size(); + auto no_nodes = size / actual_size; + FOONATHAN_MEMORY_ASSERT(no_nodes > 0); + + auto cur = static_cast(mem); + for (std::size_t i = 0u; i != no_nodes - 1; ++i) + { + list_set_next(cur, cur + actual_size); + cur += actual_size; + } + list_set_next(cur, first_); + first_ = static_cast(mem); + + capacity_ += no_nodes; } namespace { - - // returns the next pointer given the previous pointer - char* get_next(void *address, char *prev) FOONATHAN_NOEXCEPT + // converts a block into a linked list + void xor_link_block(void *memory, std::size_t node_size, std::size_t no_nodes, + char *prev, char *next) FOONATHAN_NOEXCEPT { - return from_int(get_int(address) ^ to_int(prev)); - } + auto cur = static_cast(memory); + xor_list_change(prev, next, cur); // change next pointer of prev - // returns the prev pointer given the next pointer - char* get_prev(char *address, char *next) FOONATHAN_NOEXCEPT - { - return from_int(get_int(address) ^ to_int(next)); + auto last_cur = prev; + for (std::size_t i = 0u; i != no_nodes - 1; ++i) + { + xor_list_set(cur, last_cur, cur + node_size); // cur gets last_cur and next node in continous memory + last_cur = cur; + cur += node_size; + } + xor_list_set(cur, last_cur, next); // last memory node gets next as next + xor_list_change(next, prev, cur); // change prev pointer of next } - // sets the next and previous pointer - void set_next_prev(char *address, char *prev, char *next) FOONATHAN_NOEXCEPT + struct pos { - set_int(address, to_int(prev) ^ to_int(next)); - } + char *prev, *next; + }; - // changes next pointer given the old next pointer - void change_next(char *address, char *old_next, char *new_next) FOONATHAN_NOEXCEPT + // finds position to insert memory to keep list ordered + // first_prev -> first -> ... (memory somewhere here) ... -> last -> last_next + pos find_pos_interval(const allocator_info &info, char *memory, + char *first_prev, char *first, + char *last, char *last_next) FOONATHAN_NOEXCEPT { - FOONATHAN_MEMORY_ASSERT(address); - // use old_next to get previous pointer - auto prev = get_prev(address, old_next); - // don't change previous pointer - set_next_prev(address, prev, new_next); - } + // note: first_prev/last_next can be the proxy nodes, then first_prev isn't necessarily less than first! 
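fence_size() now equals node_size_ when debug fills are enabled, so each free-list element occupies node_size + 2 * fence_size bytes and debug_fill_new/debug_fill_free shift the returned pointer across the front fence. A standalone sketch of that fencing scheme; the magic values and helper names are assumptions.

    // debug_fence_sketch.cpp - illustrative only
    #include <cassert>
    #include <cstddef>
    #include <cstring>
    #include <vector>

    const unsigned char fence_magic = 0xFD;
    const unsigned char new_magic   = 0xCD;
    const unsigned char freed_magic = 0xDD;

    // fills both fences and the payload, returns the payload pointer
    void* fill_new(void* memory, std::size_t node_size, std::size_t fence_size)
    {
        auto mem = static_cast<unsigned char*>(memory);
        std::memset(mem, fence_magic, fence_size);
        std::memset(mem + fence_size, new_magic, node_size);
        std::memset(mem + fence_size + node_size, fence_magic, fence_size);
        return mem + fence_size;
    }

    // marks the payload as freed and returns the start of the whole node (front fence)
    void* fill_free(void* payload, std::size_t node_size, std::size_t fence_size)
    {
        auto mem = static_cast<unsigned char*>(payload);
        std::memset(mem, freed_magic, node_size);
        return mem - fence_size;
    }

    int main()
    {
        const std::size_t node_size = 32, fence_size = node_size; // fence sized like the node
        std::vector<unsigned char> node(node_size + 2 * fence_size);

        auto payload = fill_new(node.data(), node_size, fence_size);
        assert(payload == node.data() + fence_size);
        assert(node[0] == fence_magic && node[node.size() - 1] == fence_magic);

        auto raw = fill_free(payload, node_size, fence_size);
        assert(raw == node.data());
    }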
+ FOONATHAN_MEMORY_ASSERT(less(first, memory) && less(memory, last)); - // same for prev - void change_prev(char *address, char *old_prev, char *new_prev) FOONATHAN_NOEXCEPT - { - FOONATHAN_MEMORY_ASSERT(address); - auto next = get_next(address, old_prev); - set_next_prev(address, new_prev, next); - } + // need to insert somewhere in the middle + // search through the entire list + // search from both ends at once + auto cur_forward = first; + auto prev_forward = first_prev; - // advances a pointer pair forward - void next(char* &cur, char* &prev) FOONATHAN_NOEXCEPT - { - auto next = get_next(cur, prev); - prev = cur; - cur = next; + auto cur_backward = last; + auto prev_backward = last_next; + + do + { + if (greater(cur_forward, memory)) + return {prev_forward, cur_forward}; + else if (less(cur_backward, memory)) + // the next position is the previous backwards pointer + return {cur_backward, prev_backward}; + debug_check_double_dealloc([&] + { + return cur_forward != memory + && cur_backward != memory; + }, info, memory); + xor_list_iter_next(cur_forward, prev_forward); + xor_list_iter_next(cur_backward, prev_backward); + } while (less(prev_forward, prev_backward)); + + // ran outside of list + debug_check_double_dealloc([]{return false;}, info, memory); + return {nullptr, nullptr}; } - // advances a pointer pair backward - void prev(char* &cur, char* &next) FOONATHAN_NOEXCEPT + // finds the position in the entire list + pos find_pos(const allocator_info &info, char *memory, + char *begin_node, char *end_node, + char *last_dealloc, char *last_dealloc_prev) FOONATHAN_NOEXCEPT { - auto prev = get_prev(cur, next); - next = cur; - cur = prev; + auto first = xor_list_get_other(begin_node, nullptr); + auto last = xor_list_get_other(end_node, nullptr); + + if (greater(first, memory)) + // insert at front + return {begin_node, first}; + else if (less(last, memory)) + // insert at the end + return {last, end_node}; + else if (less(last_dealloc_prev, memory) && less(memory, last_dealloc)) + // insert before last_dealloc + return {last_dealloc_prev, last_dealloc}; + else if (less(memory, last_dealloc)) + // insert into [first, last_dealloc_prev] + return find_pos_interval(info, memory, + begin_node, first, + last_dealloc_prev, last_dealloc); + else if (greater(memory, last_dealloc)) + // insert into (last_dealloc, last] + return find_pos_interval(info, memory, + last_dealloc_prev, last_dealloc, + last, end_node); + + FOONATHAN_MEMORY_UNREACHABLE("memory must be in some half or outside"); + return {nullptr, nullptr}; } } FOONATHAN_CONSTEXPR std::size_t ordered_free_memory_list::min_element_size; FOONATHAN_CONSTEXPR std::size_t ordered_free_memory_list::min_element_alignment; -void ordered_free_memory_list::list_impl::insert(std::size_t node_size, - void *memory, std::size_t no_nodes, bool new_memory) FOONATHAN_NOEXCEPT +ordered_free_memory_list::ordered_free_memory_list(std::size_t node_size) FOONATHAN_NOEXCEPT +: node_size_(node_size > min_element_size ? 
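Keeping the list address-ordered makes deallocation a search, so the code caches the last deallocation position and, when it does have to search, walks the relevant range from both ends at once. The same two-cursor idea on a sorted std::vector of node addresses, purely to illustrate the scan; the real code walks the XOR list and uses the last_dealloc_ hint to narrow the range first.

    // bidirectional_search_sketch.cpp - illustrative only
    #include <cassert>
    #include <cstddef>
    #include <vector>

    // returns the index at which 'value' must be inserted to keep 'sorted' ordered,
    // scanning from both ends at once
    std::size_t find_insert_pos(const std::vector<const char*>& sorted, const char* value)
    {
        std::size_t forward  = 0;
        std::size_t backward = sorted.size();
        while (forward < backward)
        {
            if (sorted[forward] > value)
                return forward;            // found from the front
            if (sorted[backward - 1] < value)
                return backward;           // found from the back
            // hitting the value itself would mean a double deallocation
            assert(sorted[forward] != value && sorted[backward - 1] != value);
            ++forward;
            --backward;
        }
        return forward;
    }

    int main()
    {
        char block[64];
        std::vector<const char*> free_nodes = {block + 0, block + 16, block + 48};
        assert(find_insert_pos(free_nodes, block + 32) == 2); // between block+16 and block+48
        assert(find_insert_pos(free_nodes, block + 56) == 3); // append at the end
    }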
node_size : min_element_size), + capacity_(0u), + last_dealloc_(end_node()), last_dealloc_prev_(begin_node()) +{ + xor_list_set(begin_node(), nullptr, end_node()); + xor_list_set(end_node(), begin_node(), nullptr); +} + +ordered_free_memory_list::ordered_free_memory_list(ordered_free_memory_list &&other) FOONATHAN_NOEXCEPT +: node_size_(other.node_size_), capacity_(other.capacity_) { - FOONATHAN_MEMORY_ASSERT(no_nodes > 0u); - auto pos = find_pos(node_size, static_cast(memory)); + if (!other.empty()) + { + auto first = xor_list_get_other(other.begin_node(), nullptr); + auto last = xor_list_get_other(other.end_node(), nullptr); - auto cur = static_cast(memory), prev = pos.prev; - if (pos.prev) - // update next pointer of preceding node from pos.after to cur - change_next(pos.prev, pos.after, cur); - else - // update first_ pointer - first_ = cur; + xor_list_set(begin_node(), nullptr, first); + xor_list_change(first, other.begin_node(), begin_node()); + xor_list_change(last, other.end_node(), end_node()); + xor_list_set(end_node(), last, nullptr); - for (std::size_t i = 0u; i != no_nodes - 1; ++i) - { - // previous node is old position of iterator, next node is node_size further - set_next_prev(cur, prev, cur + node_size); - next(cur, prev); + other.capacity_ = 0u; + xor_list_set(other.begin_node(), nullptr, other.end_node()); + xor_list_set(other.end_node(), other.begin_node(), nullptr); } - // from last node: prev is old position, next is calculated position after - // cur is now the last node - set_next_prev(cur, prev, pos.after); - - if (pos.after) - // update prev pointer of following node from pos.prev to cur - change_prev(pos.after, pos.prev, cur); else - // update last_ pointer - last_ = cur; - - // point insert to last inserted node, if not new memory - if (!new_memory) { - insert_ = cur; - insert_prev_ = prev; + xor_list_set(begin_node(), nullptr, end_node()); + xor_list_set(end_node(), begin_node(), nullptr); } + + // for programming convenience, last_dealloc is reset + last_dealloc_prev_ = begin_node(); + last_dealloc_ = xor_list_get_other(last_dealloc_prev_, nullptr); } -void* ordered_free_memory_list::list_impl::erase(std::size_t) FOONATHAN_NOEXCEPT +void foonathan::memory::detail::swap(ordered_free_memory_list &a, ordered_free_memory_list &b) FOONATHAN_NOEXCEPT { - FOONATHAN_MEMORY_ASSERT(!empty()); + auto a_first = xor_list_get_other(a.begin_node(), nullptr); + auto a_last = xor_list_get_other(a.end_node(), nullptr); - auto to_erase = first_; + auto b_first = xor_list_get_other(b.begin_node(), nullptr); + auto b_last = xor_list_get_other(b.end_node(), nullptr); - // first_ has no previous node - auto new_first = get_next(first_, nullptr); - if (new_first) - // change new_first previous node from first_ to nullptr - change_prev(new_first, first_, nullptr); + if (!a.empty()) + { + xor_list_set(b.begin_node(), nullptr, a_first); + xor_list_change(a_first, a.begin_node(), b.begin_node()); + xor_list_change(a_last, a.end_node(), b.end_node()); + xor_list_set(b.end_node(), a_last, nullptr); + } else - // update last_ pointer, list is now empty - last_ = nullptr; + { + xor_list_set(b.begin_node(), nullptr, b.end_node()); + xor_list_set(b.end_node(), b.begin_node(), nullptr); + } - // update insert pointer if needed - if (insert_ == first_) + if (!b.empty()) { - insert_ = new_first; - insert_prev_ = nullptr; + xor_list_set(a.begin_node(), nullptr, b_first); + xor_list_change(b_first, b.begin_node(), a.begin_node()); + xor_list_change(b_last, b.end_node(), a.end_node()); + 
xor_list_set(a.end_node(), b_last, nullptr); + } + else + { + xor_list_set(a.begin_node(), nullptr, a.end_node()); + xor_list_set(a.end_node(), a.begin_node(), nullptr); } - first_ = new_first; - return to_erase; -} + detail::adl_swap(a.node_size_, b.node_size_); + detail::adl_swap(a.capacity_, b.capacity_); -void* ordered_free_memory_list::list_impl:: - erase(std::size_t node_size, std::size_t bytes_needed) FOONATHAN_NOEXCEPT -{ - FOONATHAN_MEMORY_ASSERT(!empty()); - if (bytes_needed <= node_size) - return erase(node_size); + // for programming convenience, last_dealloc is reset + a.last_dealloc_prev_ = a.begin_node(); + a.last_dealloc_ = xor_list_get_other(a.last_dealloc_prev_, nullptr); - for (char* cur = last_, *next = nullptr; cur; prev(cur, next)) - { - // whether or not to update insert because it would be removed - auto update_insert = cur == insert_; + b.last_dealloc_prev_ = b.begin_node(); + b.last_dealloc_ = xor_list_get_other(b.last_dealloc_prev_, nullptr); +} - auto last = cur, end = next; - auto available = node_size; // we already have node_size bytes available - while (get_prev(cur, next) == cur - node_size) - { - prev(cur, next); - if (cur == insert_) - update_insert = true; +void ordered_free_memory_list::insert(void *mem, std::size_t size) FOONATHAN_NOEXCEPT +{ + FOONATHAN_MEMORY_ASSERT(mem); + FOONATHAN_MEMORY_ASSERT(is_aligned(mem, alignment())); + debug_fill_internal(mem, size, false); - available += node_size; - if (available >= bytes_needed) // found enough blocks - { - // begin_prev is node before array - // cur is first node in array - // last is last node in array - // end is one after last node - auto begin_prev = get_prev(cur, next); - - FOONATHAN_MEMORY_ASSERT(std::size_t(last - cur) % node_size == 0u); - - // update next - if (begin_prev) - // change next from cur to end - change_next(begin_prev, cur, end); - else - // update first_ - first_ = end; - - // update prev - if (end) - { - // change end prev from last to begin_prev - change_prev(end, last, begin_prev); - - // update insert position so that it points out of the array - if (end == insert_ || update_insert) - { - insert_prev_ = begin_prev; - insert_ = end; - } - } - else - { - // update last_ - last_ = begin_prev; - - // update insert position - if (update_insert) - { - insert_ = begin_prev; - insert_prev_ = begin_prev ? get_prev(begin_prev, end) : nullptr; - } - } - - return cur; - } - } - } - return nullptr; + insert_impl(mem, size); } -ordered_free_memory_list::list_impl::pos - ordered_free_memory_list::list_impl::find_pos(std::size_t, - char* memory) const FOONATHAN_NOEXCEPT +void* ordered_free_memory_list::allocate() FOONATHAN_NOEXCEPT { -#if FOONATHAN_HOSTED_IMPLEMENTATION - auto greater = std::greater(); - auto less = std::less(); -#else - // compare integral values and hope it works - auto greater = [](char *a, char *b) - { - return to_int(a) > to_int(b); - }; - auto less = [](char *a, char *b) - { - return to_int(a) < to_int(b); - }; -#endif + FOONATHAN_MEMORY_ASSERT(!empty()); - auto info = allocator_info(FOONATHAN_MEMORY_LOG_PREFIX "::detail::ordered_free_memory_list", this); + // remove first node + auto prev = begin_node(); + auto node = xor_list_get_other(prev, nullptr); + auto next = xor_list_get_other(node, prev); - // starting position is insert_, if set, otherwise first_ - // first_ might be null, too, but this is handled - // insert_prev_ is the previous node in either case - char* cur = insert_ ? 
insert_ : first_; + xor_list_set(prev, nullptr, next); // link prev to next + xor_list_change(next, node, prev); // change prev of next + --capacity_; - if (!cur) - // empty list - return {nullptr, nullptr}; - else if (less(cur, memory)) + if (node == last_dealloc_) { - // memory is greater, advance until greater - char *prev = insert_prev_; - next(cur, prev); - - while (cur) - { - if (greater(cur, memory)) - break; - detail::debug_check_pointer([&] - { - return cur != memory; - }, info, memory); - next(cur, prev); - } - - return {prev, cur}; + // move last_dealloc_ one further in + last_dealloc_ = next; + FOONATHAN_MEMORY_ASSERT(last_dealloc_prev_ == prev); } - else - { - // memory is smaller, go back until smaller - char* next = get_next(cur, insert_prev_); - while (cur) - { - if (less(cur, memory)) - break; - detail::debug_check_pointer([&] - { - return cur != memory; - }, info, memory); - prev(cur, next); - } - return {cur, next}; - } + return debug_fill_new(node, node_size_, fence_size()); } -bool ordered_free_memory_list::list_impl::empty() const FOONATHAN_NOEXCEPT +void* ordered_free_memory_list::allocate(std::size_t n) FOONATHAN_NOEXCEPT { - FOONATHAN_MEMORY_ASSERT(bool(first_) == bool(last_)); - return !bool(first_); -} + FOONATHAN_MEMORY_ASSERT(!empty()); -ordered_free_memory_list::ordered_free_memory_list(std::size_t node_size) FOONATHAN_NOEXCEPT -: node_size_(node_size > min_element_size ? node_size : min_element_size), - capacity_(0u) -{} + if (n <= node_size_) + return allocate(); -ordered_free_memory_list::ordered_free_memory_list(std::size_t node_size, - void *mem, std::size_t size) FOONATHAN_NOEXCEPT -: ordered_free_memory_list(node_size) -{ - insert(mem, size); -} + auto actual_size = node_size_ + 2 * fence_size(); -ordered_free_memory_list::ordered_free_memory_list( - ordered_free_memory_list &&other) FOONATHAN_NOEXCEPT -: list_(detail::move(other.list_)), - node_size_(other.node_size_), capacity_(other.capacity_) -{ - other.capacity_ = 0u; -} + auto i = xor_list_search_array(begin_node(), end_node(), n + 2 * fence_size(), actual_size); + if (i.first == nullptr) + return nullptr; -ordered_free_memory_list &ordered_free_memory_list::operator=( - ordered_free_memory_list &&other) FOONATHAN_NOEXCEPT -{ - ordered_free_memory_list tmp(detail::move(other)); - swap(*this, tmp); - return *this; -} + xor_list_change(i.prev, i.first, i.next); // change next pointer from i.prev to i.next + xor_list_change(i.next, i.last, i.prev); // change prev pointer from i.next to i.prev + capacity_ -= i.size(actual_size); -void foonathan::memory::detail::swap(ordered_free_memory_list &a, ordered_free_memory_list &b) FOONATHAN_NOEXCEPT -{ - detail::adl_swap(a.list_, b.list_); - detail::adl_swap(a.node_size_, b.node_size_); - detail::adl_swap(a.capacity_, b.capacity_); -} + if (less(i.prev, last_dealloc_) && less(last_dealloc_, i.next)) + { + // move last_dealloc just outside range + last_dealloc_ = i.next; + last_dealloc_prev_ = i.prev; + } -void ordered_free_memory_list::insert(void* mem, std::size_t size) FOONATHAN_NOEXCEPT -{ - FOONATHAN_MEMORY_ASSERT(is_aligned(mem, alignment())); - auto no_nodes = size / node_fence_size(); - list_.insert(node_fence_size(), mem, no_nodes, true); - capacity_ += no_nodes; + return debug_fill_new(i.first, n, fence_size()); } -void* ordered_free_memory_list::allocate() FOONATHAN_NOEXCEPT +void ordered_free_memory_list::deallocate(void *ptr) FOONATHAN_NOEXCEPT { - FOONATHAN_MEMORY_ASSERT(capacity_ > 0u); - --capacity_; + auto node = 
static_cast(debug_fill_free(ptr, node_size_, fence_size())); + + auto p = find_pos(allocator_info(FOONATHAN_MEMORY_LOG_PREFIX "::detail::ordered_free_memory_list", this), + node, begin_node(), end_node(), last_dealloc_, last_dealloc_prev_); - auto node = list_.erase(node_fence_size()); + xor_list_insert(node, p.prev, p.next); + ++capacity_; - return debug_fill_new(node, node_size(), alignment()); + last_dealloc_ = node; + last_dealloc_prev_ = p.prev; } -void* ordered_free_memory_list::allocate(std::size_t n) FOONATHAN_NOEXCEPT +void ordered_free_memory_list::deallocate(void *ptr, std::size_t n) FOONATHAN_NOEXCEPT { - auto fence = debug_fence_size ? alignment() : 0u; - auto bytes_needed = n + 2 * fence; - auto nodes = list_.erase(node_fence_size(), bytes_needed); - if (!nodes) - return nullptr; + if (n <= node_size_) + deallocate(ptr); + else + { + auto mem = debug_fill_free(ptr, n, fence_size()); + auto prev = insert_impl(mem, n + 2 * fence_size()); - auto no_nodes = bytes_needed / node_fence_size() + (bytes_needed % node_fence_size() != 0); - capacity_ -= no_nodes; - return debug_fill_new(nodes, n, fence); + last_dealloc_ = static_cast(mem); + last_dealloc_prev_ = prev; + } } -void ordered_free_memory_list::deallocate(void* ptr) FOONATHAN_NOEXCEPT +std::size_t ordered_free_memory_list::alignment() const FOONATHAN_NOEXCEPT { - auto node = debug_fill_free(ptr, node_size(), alignment()); - - list_.insert(node_fence_size(), node, 1, false); + return alignment_for(node_size_); +} - ++capacity_; +std::size_t ordered_free_memory_list::fence_size() const FOONATHAN_NOEXCEPT +{ + // node size is fence size + return debug_fence_size ? node_size_ : 0u; } -void ordered_free_memory_list:: - deallocate(void *ptr, std::size_t n) FOONATHAN_NOEXCEPT +char* ordered_free_memory_list::insert_impl(void *mem, std::size_t size) FOONATHAN_NOEXCEPT { - auto fence = debug_fence_size ? alignment() : 0u; - auto node = debug_fill_free(ptr, n, fence); + auto actual_size = node_size_ + 2 * fence_size(); + auto no_nodes = size / actual_size; + FOONATHAN_MEMORY_ASSERT(no_nodes > 0); - auto bytes = n + 2 * fence; - auto no_nodes = bytes / node_fence_size() + (bytes % node_fence_size() != 0); - list_.insert(node_fence_size(), node, no_nodes, false); + auto p = find_pos(allocator_info(FOONATHAN_MEMORY_LOG_PREFIX "::detail::ordered_free_memory_list", this), + static_cast(mem), + begin_node(), end_node(), + last_dealloc_, last_dealloc_prev_); + xor_link_block(mem, actual_size, no_nodes, p.prev, p.next); capacity_ += no_nodes; -} -std::size_t ordered_free_memory_list::node_size() const FOONATHAN_NOEXCEPT -{ - return node_size_; + if (p.prev == last_dealloc_prev_) + { + last_dealloc_ = static_cast(mem); + } + + return p.prev; } -std::size_t ordered_free_memory_list::alignment() const FOONATHAN_NOEXCEPT +char* ordered_free_memory_list::begin_node() FOONATHAN_NOEXCEPT { - return alignment_for(node_size_); + void* mem = &begin_proxy_; + return static_cast(mem); } -std::size_t ordered_free_memory_list::node_fence_size() const FOONATHAN_NOEXCEPT +char* ordered_free_memory_list::end_node() FOONATHAN_NOEXCEPT { - return node_size_ + (debug_fence_size ? 
2 * alignment() : 0u); + void* mem = &end_proxy_; + return static_cast(mem); } diff --git a/src/detail/free_list_utils.hpp b/src/detail/free_list_utils.hpp new file mode 100644 index 00000000..b094a813 --- /dev/null +++ b/src/detail/free_list_utils.hpp @@ -0,0 +1,136 @@ +// Copyright (C) 2015-2016 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. + +#ifndef FOONATHAN_MEMORY_SRC_DETAIL_FREE_LIST_UTILS_HPP_INCLUDED +#define FOONATHAN_MEMORY_SRC_DETAIL_FREE_LIST_UTILS_HPP_INCLUDED + +#include + +#include "config.hpp" +#include "detail/align.hpp" +#include "detail/assert.hpp" + +#if FOONATHAN_HOSTED_IMPLEMENTATION + #include + #include +#endif + +namespace foonathan { namespace memory +{ + namespace detail + { + //=== storage ===/// + // reads stored integer value + inline std::uintptr_t get_int(void *address) FOONATHAN_NOEXCEPT + { + FOONATHAN_MEMORY_ASSERT(address); + std::uintptr_t res; + #if FOONATHAN_HOSTED_IMPLEMENTATION + std::memcpy(&res, address, sizeof(std::uintptr_t)); + #else + auto mem = static_cast(static_cast(&res)); + for (auto i = 0u; i != sizeof(std::uintptr_t); ++i) + mem[i] = static_cast(address)[i]; + #endif + return res; + } + + // sets stored integer value + inline void set_int(void *address, std::uintptr_t i) FOONATHAN_NOEXCEPT + { + FOONATHAN_MEMORY_ASSERT(address); + #if FOONATHAN_HOSTED_IMPLEMENTATION + std::memcpy(address, &i, sizeof(std::uintptr_t)); + #else + auto mem = static_cast(static_cast(&i)); + for (auto i = 0u; i != sizeof(std::uintptr_t); ++i) + static_cast(address)[i] = mem[i]; + #endif + } + + // pointer to integer + inline std::uintptr_t to_int(char *ptr) FOONATHAN_NOEXCEPT + { + return reinterpret_cast(ptr); + } + + // integer to pointer + inline char *from_int(std::uintptr_t i) FOONATHAN_NOEXCEPT + { + return reinterpret_cast(i); + } + + //=== intrusive linked list ===// + // reads a stored pointer value + inline char *list_get_next(void *address) FOONATHAN_NOEXCEPT + { + return from_int(get_int(address)); + } + + // stores a pointer value + inline void list_set_next(void *address, char *ptr) FOONATHAN_NOEXCEPT + { + set_int(address, to_int(ptr)); + } + + //=== intrusive xor linked list ===// + // returns the other pointer given one pointer + inline char *xor_list_get_other(void *address, char *prev_or_next) FOONATHAN_NOEXCEPT + { + return from_int(get_int(address) ^ to_int(prev_or_next)); + } + + // sets the next and previous pointer (order actually does not matter) + inline void xor_list_set(void *address, char *prev, char *next) FOONATHAN_NOEXCEPT + { + set_int(address, to_int(prev) ^ to_int(next)); + } + + // changes other pointer given one pointer + inline void xor_list_change(void *address, char *old_ptr, char *new_ptr) FOONATHAN_NOEXCEPT + { + FOONATHAN_MEMORY_ASSERT(address); + auto other = xor_list_get_other(address, old_ptr); + xor_list_set(address, other, new_ptr); + } + + // advances a pointer pair forward/backward + inline void xor_list_iter_next(char *&cur, char *&prev) FOONATHAN_NOEXCEPT + { + auto next = xor_list_get_other(cur, prev); + prev = cur; + cur = next; + } + + // links new node between prev and next + inline void xor_list_insert(char *new_node, char *prev, char *next) FOONATHAN_NOEXCEPT + { + xor_list_set(new_node, prev, next); + xor_list_change(prev, next, new_node); // change prev's next to new_node + xor_list_change(next, prev, new_node); // change next's prev to new_node + } + + //=== sorted list utils ===// + // if 
std::less/std::greater are not available, compare the integer representation and hope it works
+        inline bool less(void *a, void *b) FOONATHAN_NOEXCEPT
+        {
+#if FOONATHAN_HOSTED_IMPLEMENTATION
+            return std::less<void*>()(a, b);
+#else
+            return to_int(a) < to_int(b);
+#endif
+        }
+
+        inline bool greater(void *a, void *b) FOONATHAN_NOEXCEPT
+        {
+#if FOONATHAN_HOSTED_IMPLEMENTATION
+            return std::greater<void*>()(a, b);
+#else
+            return to_int(a) > to_int(b);
+#endif
+        }
+    } // namespace detail
+}} // namespace foonathan::memory
+
+#endif // FOONATHAN_MEMORY_SRC_DETAIL_FREE_LIST_UTILS_HPP_INCLUDED
diff --git a/src/detail/memory_stack.cpp b/src/detail/memory_stack.cpp
deleted file mode 100644
index 9821c970..00000000
--- a/src/detail/memory_stack.cpp
+++ /dev/null
@@ -1,57 +0,0 @@
-// Copyright (C) 2015-2016 Jonathan Müller
-// This file is subject to the license terms in the LICENSE file
-// found in the top-level directory of this distribution.
-
-#include "detail/memory_stack.hpp"
-
-#include "detail/align.hpp"
-#include "detail/debug_helpers.hpp"
-#include "debugging.hpp"
-
-using namespace foonathan::memory;
-using namespace detail;
-
-fixed_memory_stack::fixed_memory_stack(fixed_memory_stack &&other) FOONATHAN_NOEXCEPT
-: cur_(other.cur_)
-{
-    other.cur_ = nullptr;
-}
-
-fixed_memory_stack& fixed_memory_stack::operator=(fixed_memory_stack &&other) FOONATHAN_NOEXCEPT
-{
-    cur_ = other.cur_;
-    other.cur_ = nullptr;
-    return *this;
-}
-
-void* fixed_memory_stack::allocate(const char *end, std::size_t size, std::size_t alignment) FOONATHAN_NOEXCEPT
-{
-    if (cur_ == nullptr)
-        return nullptr;
-
-    auto remaining = std::size_t(end - cur_);
-    auto offset = align_offset(cur_ + debug_fence_size, alignment);
-
-    if (debug_fence_size + offset + size + debug_fence_size > remaining)
-        return nullptr;
-    debug_fill(cur_, offset, debug_magic::alignment_memory);
-    cur_ += offset;
-
-    debug_fill(cur_, debug_fence_size, debug_magic::fence_memory);
-    cur_ += debug_fence_size;
-
-    auto memory = cur_;
-    debug_fill(cur_, size, debug_magic::new_memory);
-    cur_ += size;
-
-    debug_fill(cur_, debug_fence_size, debug_magic::fence_memory);
-    cur_ += debug_fence_size;
-
-    return memory;
-}
-
-void fixed_memory_stack::unwind(char *top) FOONATHAN_NOEXCEPT
-{
-    debug_fill(top, std::size_t(cur_ - top), debug_magic::freed_memory);
-    cur_ = top;
-}
diff --git a/src/detail/small_free_list.cpp b/src/detail/small_free_list.cpp
index 18046ea6..6a3e457b 100644
--- a/src/detail/small_free_list.cpp
+++ b/src/detail/small_free_list.cpp
@@ -10,337 +10,381 @@
 #include "detail/align.hpp"
 #include "detail/debug_helpers.hpp"
 #include "detail/assert.hpp"
-#include "detail/utility.hpp"
 #include "error.hpp"
+#include "free_list_utils.hpp"
+
 using namespace foonathan::memory;
 using namespace detail;
 
-struct detail::chunk
+struct foonathan::memory::detail::chunk
+: chunk_base
 {
-    chunk *next = this, *prev = this;
-    unsigned char first_node = 0u, capacity = 0u, no_nodes = 0u;
-};
+    static const std::size_t memory_offset;
+    static const std::size_t max_nodes;
 
-namespace
-{
-    static FOONATHAN_CONSTEXPR auto alignment_div = sizeof(chunk) / detail::max_alignment;
-    static FOONATHAN_CONSTEXPR auto alignment_mod = sizeof(chunk) % detail::max_alignment;
-    // offset from chunk to actual list
-    static FOONATHAN_CONSTEXPR auto chunk_memory_offset = alignment_mod == 0u ? 
sizeof(chunk) - : (alignment_div + 1) * detail::max_alignment; - // maximum nodes per chunk - static FOONATHAN_CONSTEXPR auto chunk_max_nodes = std::numeric_limits::max(); - - // returns the memory of the actual free list of a chunk - unsigned char* list_memory(void *c) FOONATHAN_NOEXCEPT + // gives it the size of the memory block it is created in and the size of a node + chunk(std::size_t total_memory, std::size_t node_size) FOONATHAN_NOEXCEPT + : chunk_base(static_cast((total_memory - memory_offset) / node_size)) { - return static_cast(c) + chunk_memory_offset; + FOONATHAN_MEMORY_ASSERT((total_memory - memory_offset) / node_size <= max_nodes); + FOONATHAN_MEMORY_ASSERT(capacity > 0); + auto p = list_memory(); + for (unsigned char i = 0u; i != no_nodes; p += node_size) + *p = ++i; } - // creates a chunk at mem - // mem must have at least the size chunk_memory_offset + no_nodes * node_size - chunk* create_chunk(void *mem, std::size_t node_size, unsigned char no_nodes) FOONATHAN_NOEXCEPT + // returns memory of the free list + unsigned char* list_memory() FOONATHAN_NOEXCEPT { - auto c = ::new(mem) chunk; - c->first_node = 0; - c->no_nodes = no_nodes; - c->capacity = no_nodes; - auto p = list_memory(c); - for (unsigned char i = 0u; i != no_nodes; p += node_size) - *p = ++i; - return c; + auto mem = static_cast(this); + return static_cast(mem) + memory_offset; + } + + // returns the nth node + unsigned char* node_memory(unsigned char i, std::size_t node_size) FOONATHAN_NOEXCEPT + { + FOONATHAN_MEMORY_ASSERT(i < no_nodes); + return list_memory() + i * node_size; } - // whether or not a pointer can be from a certain chunk - bool from_chunk(chunk *c, std::size_t node_size, void *mem) FOONATHAN_NOEXCEPT + // checks whether a node came from this chunk + bool from(unsigned char *node, std::size_t node_size) FOONATHAN_NOEXCEPT { - // comparision not strictly legal, but works - return list_memory(c) <= mem - && mem < list_memory(c) + node_size * c->no_nodes; + auto begin = list_memory(); + auto end = list_memory() + no_nodes * node_size; + return (begin <= node) & (node < end); } - // whether or not a pointer is in the list of a certain chunk - bool chunk_contains(chunk *c, std::size_t node_size, void *pointer) FOONATHAN_NOEXCEPT + // checks whether a node is already in this chunk + bool contains(unsigned char *node, std::size_t node_size) FOONATHAN_NOEXCEPT { - auto cur_index = c->first_node; - while (cur_index != c->no_nodes) + auto cur_index = first_free; + while (cur_index != no_nodes) { - auto cur_mem = list_memory(c) + cur_index * node_size; - if (cur_mem == pointer) + auto cur_mem = node_memory(cur_index, node_size); + if (cur_mem == node) return true; cur_index = *cur_mem; } return false; } - // advances a pointer to the next chunk - void next(chunk* &c) FOONATHAN_NOEXCEPT + // allocates a single node + // chunk most not be empty + unsigned char* allocate(std::size_t node_size) FOONATHAN_NOEXCEPT { - FOONATHAN_MEMORY_ASSERT(c); - c = c->next; - } + --capacity; - // advances a pointer to the previous chunk - void prev(chunk* &c) FOONATHAN_NOEXCEPT - { - FOONATHAN_MEMORY_ASSERT(c); - c = c->prev; + auto node = node_memory(first_free, node_size); + first_free = *node; + return node; } -} -chunk_list::chunk_list(chunk_list &&other) FOONATHAN_NOEXCEPT -: first_(other.first_) -{ - other.first_ = nullptr; -} + // deallocates a single node given its address and index + // it must be from this chunk + void deallocate(unsigned char *node, unsigned char node_index) FOONATHAN_NOEXCEPT + { + ++capacity; 
-chunk_list& chunk_list::operator=(chunk_list &&other) FOONATHAN_NOEXCEPT -{ - chunk_list tmp(detail::move(other)); - swap(*this, tmp); - return *this; -} + *node = first_free; + first_free = node_index; + } +}; -void foonathan::memory::detail::swap(chunk_list &a, chunk_list &b) FOONATHAN_NOEXCEPT -{ - detail::adl_swap(a.first_, b.first_); -} +const std::size_t chunk::memory_offset + = sizeof(chunk) % detail::max_alignment == 0 + ? sizeof(chunk) + : (sizeof(chunk) / detail::max_alignment + 1) * detail::max_alignment; +const std::size_t chunk::max_nodes + = std::numeric_limits::max(); -void chunk_list::insert(chunk *c) FOONATHAN_NOEXCEPT +namespace { - // insert at the front + // converts a chunk_base to a chunk (if it is one) + chunk *make_chunk(chunk_base *c) FOONATHAN_NOEXCEPT + { + return static_cast(c); + } - if (first_ == nullptr) + // same as above but also requires a certain size + chunk *make_chunk(chunk_base *c, unsigned char size_needed) FOONATHAN_NOEXCEPT { - c->next = c; - c->prev = c; - first_ = c; + return c->capacity >= size_needed ? make_chunk(c) : nullptr; } - else + + // checks if memory was from a chunk, assumes chunk isn't proxy + chunk *from_chunk(chunk_base *c, unsigned char *node, std::size_t node_size) FOONATHAN_NOEXCEPT { - c->next = first_; - c->prev = first_->prev; - first_->prev = c; - first_ = c; + auto res = make_chunk(c); + return res->from(node, node_size) ? res : nullptr; } -} -chunk* chunk_list::insert(chunk_list &other) FOONATHAN_NOEXCEPT -{ - FOONATHAN_MEMORY_ASSERT(!other.empty()); - auto c = other.first_; - if (other.first_ == other.first_->next) - // only element - other.first_ = nullptr; - else + // inserts already interconnected chunks into the list + // list will be kept ordered + void insert_chunks(chunk_base *list, chunk_base *begin, chunk_base *end) FOONATHAN_NOEXCEPT { - c->prev->next = c->next; - c->next->prev = c->prev; - other.first_ = other.first_->next; + FOONATHAN_MEMORY_ASSERT(begin && end); + + if (list->next == list) // empty + { + begin->prev = list; + end->next = list->next; + list->next = begin; + list->prev = end; + } + else if (less(list->prev, begin)) // insert at end + { + list->prev->next = begin; + begin->prev = list->prev; + end->next = list; + list->prev = end; + } + else + { + auto prev = list; + auto cur = list->next; + while (less(cur, begin)) + { + prev = cur; + cur = cur->next; + } + FOONATHAN_MEMORY_ASSERT(greater(cur, end)); + FOONATHAN_MEMORY_ASSERT(prev == list || less(prev, begin)); + prev->next = begin; + begin->prev = prev; + end->next = cur; + cur->prev = end; + } } - insert(c); - return c; } FOONATHAN_CONSTEXPR std::size_t small_free_memory_list::min_element_size; FOONATHAN_CONSTEXPR std::size_t small_free_memory_list::min_element_alignment; small_free_memory_list::small_free_memory_list(std::size_t node_size) FOONATHAN_NOEXCEPT -: alloc_chunk_(nullptr), dealloc_chunk_(nullptr), - node_size_(node_size), capacity_(0u) {} +: node_size_(node_size), capacity_(0u), + alloc_chunk_(&base_), dealloc_chunk_(&base_) {} small_free_memory_list::small_free_memory_list(std::size_t node_size, - void *mem, std::size_t size) FOONATHAN_NOEXCEPT + void *mem, std::size_t size) FOONATHAN_NOEXCEPT : small_free_memory_list(node_size) { insert(mem, size); } small_free_memory_list::small_free_memory_list(small_free_memory_list &&other) FOONATHAN_NOEXCEPT -: unused_chunks_(detail::move(other.unused_chunks_)), used_chunks_(detail::move(other.used_chunks_)), - alloc_chunk_(other.alloc_chunk_), dealloc_chunk_(other.dealloc_chunk_), - 
node_size_(other.node_size_), capacity_(other.capacity_) +: node_size_(other.node_size_), capacity_(other.capacity_), + // reset markers for simplicity + alloc_chunk_(&base_), dealloc_chunk_(&base_) { - other.alloc_chunk_ = other.dealloc_chunk_ = nullptr; - other.capacity_ = 0u; -} - -small_free_memory_list& small_free_memory_list::operator=(small_free_memory_list &&other) FOONATHAN_NOEXCEPT -{ - small_free_memory_list tmp(detail::move(other)); - swap(*this, tmp); - return *this; + if (!other.empty()) + { + base_.next = other.base_.next; + base_.prev = other.base_.prev; + other.base_.next->prev = &base_; + other.base_.prev->next = &base_; + + other.base_.next = &other.base_; + other.base_.prev = &other.base_; + other.capacity_ = 0u; + } + else + { + base_.next = &base_; + base_.prev = &base_; + } } void foonathan::memory::detail::swap(small_free_memory_list &a, small_free_memory_list &b) FOONATHAN_NOEXCEPT { - detail::adl_swap(a.unused_chunks_, b.unused_chunks_); - detail::adl_swap(a.used_chunks_, b.used_chunks_); - detail::adl_swap(a.alloc_chunk_, b.alloc_chunk_); - detail::adl_swap(a.dealloc_chunk_, b.dealloc_chunk_); + auto b_next = b.base_.next; + auto b_prev = b.base_.prev; + + if (!a.empty()) + { + b.base_.next = a.base_.next; + b.base_.prev = a.base_.prev; + b.base_.next->prev = &b.base_; + b.base_.prev->next = &b.base_; + } + else + { + b.base_.next = &b.base_; + b.base_.prev = &b.base_; + } + + if (!b.empty()) + { + a.base_.next = b_next; + a.base_.prev = b_prev; + a.base_.next->prev = &a.base_; + a.base_.prev->next = &a.base_; + } + else + { + a.base_.next = &a.base_; + a.base_.prev = &a.base_; + } + detail::adl_swap(a.node_size_, b.node_size_); detail::adl_swap(a.capacity_, b.capacity_); + + // reset markers for simplicity + a.alloc_chunk_ = a.dealloc_chunk_ = &a.base_; + b.alloc_chunk_ = b.dealloc_chunk_ = &b.base_; } -void small_free_memory_list::insert(void *memory, std::size_t size) FOONATHAN_NOEXCEPT +void small_free_memory_list::insert(void *mem, std::size_t size) FOONATHAN_NOEXCEPT { - FOONATHAN_MEMORY_ASSERT(is_aligned(memory, max_alignment)); - auto chunk_unit = chunk_memory_offset + node_fence_size() * chunk_max_nodes; - auto no_chunks = size / chunk_unit; - auto mem = static_cast(memory); - for (std::size_t i = 0; i != no_chunks; ++i) + FOONATHAN_MEMORY_ASSERT(mem); + FOONATHAN_MEMORY_ASSERT(is_aligned(mem, max_alignment)); + debug_fill_internal(mem, size, false); + + auto actual_size = node_size_ + 2 * fence_size(); + auto total_chunk_size = chunk::memory_offset + actual_size * chunk::max_nodes; + auto no_chunks = size / total_chunk_size; + auto remainder = size % total_chunk_size; + + auto memory = static_cast(mem); + auto prev = static_cast(nullptr); + for (auto i = std::size_t(0); i != no_chunks; ++i) { - auto c = create_chunk(mem, node_fence_size(), chunk_max_nodes); - unused_chunks_.insert(c); - mem += chunk_unit; + auto c = ::new(static_cast(memory)) chunk(total_chunk_size, actual_size); + + c->prev = prev; + if (prev) + prev->next = c; + prev = c; + + memory += total_chunk_size; } - std::size_t remaining = 0; - if (size % chunk_unit > chunk_memory_offset) + + auto new_nodes = no_chunks * chunk::max_nodes; + if (remainder >= chunk::memory_offset + actual_size) // at least one node { - remaining = size % chunk_unit - chunk_memory_offset; - if (remaining > node_fence_size()) - { - auto c = create_chunk(mem, node_fence_size(), - static_cast(remaining / node_fence_size())); - unused_chunks_.insert(c); - } + auto c = ::new(static_cast(memory)) chunk(remainder, 
actual_size); + c->prev = prev; + if (prev) + prev->next = c; + prev = c; + + new_nodes += c->no_nodes; } - auto inserted_memory = no_chunks * chunk_max_nodes + remaining / node_fence_size(); - FOONATHAN_MEMORY_ASSERT_MSG(inserted_memory > 0u, "too small memory size"); - capacity_ += inserted_memory; + + FOONATHAN_MEMORY_ASSERT_MSG(new_nodes > 0, "memory block too small"); + insert_chunks(&base_, static_cast(mem), prev); + capacity_ += new_nodes; } void* small_free_memory_list::allocate() FOONATHAN_NOEXCEPT { - if (!alloc_chunk_ || alloc_chunk_->capacity == 0u) - find_chunk(1); - FOONATHAN_MEMORY_ASSERT(alloc_chunk_ && alloc_chunk_->capacity != 0u); + auto chunk = find_chunk_impl(1); + alloc_chunk_ = chunk; + FOONATHAN_MEMORY_ASSERT(chunk && chunk->capacity >= 1); - auto node_memory = list_memory(alloc_chunk_) + alloc_chunk_->first_node * node_fence_size(); - alloc_chunk_->first_node = *node_memory; - --alloc_chunk_->capacity; --capacity_; - return debug_fill_new(node_memory, node_size(), alignment()); + auto mem = chunk->allocate(node_size_ + 2 * fence_size()); + FOONATHAN_MEMORY_ASSERT(mem); + return detail::debug_fill_new(mem, node_size_, fence_size()); } -void small_free_memory_list::deallocate(void *memory) FOONATHAN_NOEXCEPT +void small_free_memory_list::deallocate(void *mem) FOONATHAN_NOEXCEPT { - auto node_memory = static_cast(debug_fill_free(memory, node_size(), alignment())); - auto dealloc_chunk = chunk_for(node_memory); - auto info = allocator_info(FOONATHAN_MEMORY_LOG_PREFIX "::detail::small_free_memory_list", this); - // memory was never managed by this list - debug_check_pointer([&] - { - return bool(dealloc_chunk); - }, info, memory); + auto actual_size = node_size_ + 2 * fence_size(); + auto node = static_cast(detail::debug_fill_free(mem, node_size_, fence_size())); - auto offset = static_cast(node_memory - list_memory(dealloc_chunk)); + auto chunk = find_chunk_impl(node); + dealloc_chunk_ = chunk; + // memory was never allocated from list + detail::debug_check_pointer([&]{return chunk != nullptr;}, info, mem); + auto offset = node - chunk->list_memory(); // memory is not at the right position - debug_check_pointer([&] - { - return offset % node_fence_size() == 0u; - }, info, memory); + debug_check_pointer([&]{return offset % actual_size == 0u;}, info, mem); // double-free - debug_check_double_dealloc([&] - { - return !chunk_contains(dealloc_chunk, node_fence_size(), node_memory); - }, info, memory); - - *node_memory = dealloc_chunk->first_node; - dealloc_chunk->first_node = static_cast(offset / node_fence_size()); - ++dealloc_chunk->capacity; + debug_check_double_dealloc([&]{return !chunk->contains(node, actual_size);}, info, mem); + + auto index = offset / actual_size; + FOONATHAN_MEMORY_ASSERT(index < chunk->no_nodes); + chunk->deallocate(node, static_cast(index)); + ++capacity_; } -std::size_t small_free_memory_list::node_size() const FOONATHAN_NOEXCEPT +std::size_t small_free_memory_list::alignment() const FOONATHAN_NOEXCEPT { - return node_size_; + return alignment_for(node_size_); } -std::size_t small_free_memory_list::alignment() const FOONATHAN_NOEXCEPT +std::size_t small_free_memory_list::fence_size() const FOONATHAN_NOEXCEPT { - return alignment_for(node_size_); + // node size is fence size + return debug_fence_size ? 
node_size_ : 0u;
 }
 
-bool small_free_memory_list::find_chunk(std::size_t n) FOONATHAN_NOEXCEPT
+chunk* small_free_memory_list::find_chunk_impl(std::size_t n) FOONATHAN_NOEXCEPT
 {
-    FOONATHAN_MEMORY_ASSERT(capacity_ >= n && n <= chunk_max_nodes);
-    if (alloc_chunk_ && alloc_chunk_->capacity >= n)
-        return true;
-    else if (!unused_chunks_.empty())
-    {
-        alloc_chunk_ = used_chunks_.insert(unused_chunks_);
-        if (!dealloc_chunk_)
-            dealloc_chunk_ = alloc_chunk_;
-        return true;
-    }
-    FOONATHAN_MEMORY_ASSERT(dealloc_chunk_);
-    if (dealloc_chunk_->capacity >= n)
-    {
-        alloc_chunk_ = dealloc_chunk_;
-        return true;
-    }
+    if (auto c = make_chunk(alloc_chunk_, n))
+        return c;
+    else if (auto c = make_chunk(dealloc_chunk_, n))
+        return c;
+
+    auto cur_forward = alloc_chunk_->next;
+    auto cur_backward = alloc_chunk_->prev;
 
-    auto forward_iter = dealloc_chunk_, backward_iter = dealloc_chunk_;
     do
     {
-        forward_iter = forward_iter->next;
-        backward_iter = backward_iter->prev;
-
-        if (forward_iter->capacity >= n)
-        {
-            alloc_chunk_ = forward_iter;
-            return true;
-        }
-        else if (backward_iter->capacity >= n)
-        {
-            alloc_chunk_ = backward_iter;
-            return true;
-        }
-    } while (forward_iter != backward_iter);
-    return false;
+        if (auto c = make_chunk(cur_forward, n))
+            return c;
+        else if (auto c = make_chunk(cur_backward, n))
+            return c;
+
+        cur_forward = cur_forward->next;
+        cur_backward = cur_backward->prev;
+    } while (cur_forward != cur_backward);
+    FOONATHAN_MEMORY_UNREACHABLE("there is memory available somewhere...");
+    return nullptr;
 }
 
-chunk* small_free_memory_list::chunk_for(void *memory) FOONATHAN_NOEXCEPT
+chunk* small_free_memory_list::find_chunk_impl(unsigned char *node, chunk_base *first, chunk_base *last) FOONATHAN_NOEXCEPT
 {
-    FOONATHAN_MEMORY_ASSERT(dealloc_chunk_ && alloc_chunk_);
-    if (from_chunk(dealloc_chunk_, node_fence_size(), memory))
-        return dealloc_chunk_;
-    else if (from_chunk(alloc_chunk_, node_fence_size(), memory))
-    {
-        dealloc_chunk_ = alloc_chunk_;
-        return alloc_chunk_;
-    }
+    auto actual_size = node_size_ + 2 * fence_size();
 
-    auto i = 0u;
-    auto forward_iter = dealloc_chunk_, backward_iter = dealloc_chunk_;
     do
     {
-        next(forward_iter);
-        prev(backward_iter);
-        ++i;
-        if (from_chunk(forward_iter, node_fence_size(), memory))
-        {
-            dealloc_chunk_ = forward_iter;
-            return forward_iter;
-        }
-        else if (from_chunk(backward_iter, node_fence_size(), memory))
-        {
-            dealloc_chunk_ = backward_iter;
-            return backward_iter;
-        }
-    } while (forward_iter != backward_iter);
-    // at this point, both iterators point to the same chunk
-    // this only happens after the entire list has been searched
+        if (auto c = from_chunk(first, node, actual_size))
+            return c;
+        else if (auto c = from_chunk(last, node, actual_size))
+            return c;
+
+        first = first->next;
+        last = last->prev;
+    } while (!greater(first, last));
     return nullptr;
 }
 
-std::size_t small_free_memory_list::node_fence_size() const FOONATHAN_NOEXCEPT
+chunk* small_free_memory_list::find_chunk_impl(unsigned char *node) FOONATHAN_NOEXCEPT
 {
-    return node_size_ + (debug_fence_size ? 
2 * alignment() : 0u); + auto actual_size = node_size_ + 2 * fence_size(); + + if (auto c = from_chunk(dealloc_chunk_, node, actual_size)) + return c; + else if (auto c = from_chunk(alloc_chunk_, node, actual_size)) + return c; + else if (less(dealloc_chunk_, node)) + { + // node is in (dealloc_chunk_, base_.prev] + return find_chunk_impl(node, dealloc_chunk_->next, base_.prev); + } + else if (greater(dealloc_chunk_, node)) + { + // node is in [base.next, dealloc_chunk_) + return find_chunk_impl(node, base_.next, dealloc_chunk_->prev); + } + FOONATHAN_MEMORY_UNREACHABLE("must be in one half"); + return nullptr; } diff --git a/src/memory_arena.cpp b/src/memory_arena.cpp index b445612e..55950ad9 100644 --- a/src/memory_arena.cpp +++ b/src/memory_arena.cpp @@ -7,24 +7,10 @@ #include #include "detail/align.hpp" -#include "error.hpp" using namespace foonathan::memory; using namespace detail; -struct memory_block_stack::node -{ - node *prev; - std::size_t usable_size; - - node(node *prev, std::size_t size) FOONATHAN_NOEXCEPT - : prev(prev), usable_size(size) {} - - static const std::size_t div_alignment; - static const std::size_t mod_offset; - static const std::size_t offset; -}; - const std::size_t memory_block_stack::node::div_alignment = sizeof(memory_block_stack::node) / max_alignment; const std::size_t memory_block_stack::node::mod_offset = sizeof(memory_block_stack::node) % max_alignment != 0u; const std::size_t memory_block_stack::node::offset = (div_alignment + mod_offset) * max_alignment; @@ -54,13 +40,6 @@ void memory_block_stack::steal_top(memory_block_stack &other) FOONATHAN_NOEXCEPT head_ = to_steal; } -memory_block_stack::inserted_mb memory_block_stack::top() const FOONATHAN_NOEXCEPT -{ - FOONATHAN_MEMORY_ASSERT(head_); - auto mem = static_cast(head_); - return {static_cast(mem) + node::offset, head_->usable_size}; -} - std::size_t memory_block_stack::size() const FOONATHAN_NOEXCEPT { std::size_t res = 0u; @@ -69,7 +48,6 @@ std::size_t memory_block_stack::size() const FOONATHAN_NOEXCEPT return res; } - #if FOONATHAN_MEMORY_EXTERN_TEMPLATE template class foonathan::memory::memory_arena; template class foonathan::memory::memory_arena; diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt index 8fa35d1e..f59e7215 100644 --- a/test/CMakeLists.txt +++ b/test/CMakeLists.txt @@ -4,7 +4,7 @@ # builds test -add_executable(foonathan_memory_profiling profiling.cpp) +add_executable(foonathan_memory_profiling benchmark.hpp profiling.cpp) target_link_libraries(foonathan_memory_profiling foonathan_memory) target_include_directories(foonathan_memory_profiling PRIVATE ${FOONATHAN_MEMORY_SOURCE_DIR}/include/foonathan/memory) diff --git a/test/benchmark.hpp b/test/benchmark.hpp new file mode 100644 index 00000000..cc10c566 --- /dev/null +++ b/test/benchmark.hpp @@ -0,0 +1,172 @@ +// Copyright (C) 2016 Jonathan Müller +// This file is subject to the license terms in the LICENSE file +// found in the top-level directory of this distribution. + +#ifndef FOONATHAN_MEMORY_TEST_BENCHMARK_HPP_INCLUDED +#define FOONATHAN_MEMORY_TEST_BENCHMARK_HPP_INCLUDED + +// Benchmarking functions and allocator scenarios + +#include +#include +#include +#include + +#include "allocator_traits.hpp" + +using unit = std::chrono::nanoseconds; + +template +std::size_t measure(F func, Args&&... 
args) +{ + auto start = std::chrono::system_clock::now(); + func(std::forward(args)...); + auto duration = std::chrono::duration_cast + (std::chrono::system_clock::now() - start); + return std::size_t(duration.count()); +} + +const std::size_t sample_size = 1024u; + +template +std::size_t benchmark(F measure_func, Alloc make_alloc, Args&& ... args) +{ + auto min_time = std::size_t(-1); + for (std::size_t i = 0u; i != sample_size; ++i) + { + auto alloc = make_alloc(); + auto time = measure_func(alloc, std::forward(args)...); + if (time < min_time) + min_time = time; + } + return min_time; +} + +struct single +{ + std::size_t count; + + single(std::size_t c) + : count(c) {} + + template + std::size_t operator()(RawAllocator &alloc, std::size_t size) + { + using namespace foonathan::memory; + return measure([&]() + { + for (std::size_t i = 0u; i != count; ++i) + { + volatile auto ptr = allocator_traits::allocate_node(alloc, size, 1); + allocator_traits::deallocate_node(alloc, ptr, size, 1); + } + }); + } + + template + std::size_t operator()(RawAllocator &alloc, + std::size_t array_size, std::size_t node_size) + { + return measure([&]() + { + for (std::size_t i = 0u; i != count; ++i) + { + auto ptr = allocator_traits::allocate_array(alloc, array_size, node_size, 1); + allocator_traits::deallocate_array(alloc, ptr, + array_size, node_size, 1); + } + }); + } + + static const char* name() {return "single";} +}; + +struct basic_bulk +{ + using order_func = void(*)(std::vector&); + + order_func func; + std::size_t count; + + basic_bulk(order_func f, std::size_t c) + : func(f), count(c) {} + + template + std::size_t operator()(RawAllocator &alloc, std::size_t node_size) + { + using namespace foonathan::memory; + + std::vector ptrs; + ptrs.reserve(count); + + auto alloc_t = measure([&]() + { + for (std::size_t i = 0u; i != count; ++i) + ptrs.push_back(allocator_traits::allocate_node(alloc, node_size, 1)); + }); + func(ptrs); + auto dealloc_t = measure([&]() + { + for (auto ptr : ptrs) + allocator_traits::deallocate_node(alloc, ptr, node_size, 1); + }); + return alloc_t + dealloc_t; + } + + template + std::size_t operator()(RawAllocator& alloc, std::size_t array_size, std::size_t node_size) + { + using namespace foonathan::memory; + + std::vector ptrs; + ptrs.reserve(count); + + auto alloc_t = measure([&]() + { + for (std::size_t i = 0u; i != count; ++i) + ptrs.push_back(allocator_traits:: + allocate_array(alloc, array_size, node_size, 1)); + }); + func(ptrs); + auto dealloc_t = measure([&]() + { + for (auto ptr : ptrs) + allocator_traits::deallocate_array(alloc, ptr, + array_size, node_size, 1); + }); + return alloc_t + dealloc_t; + } +}; + +struct bulk : basic_bulk +{ + bulk(std::size_t c) + : basic_bulk([](std::vector &){}, c) {} + + static const char* name() {return "bulk";} +}; + +struct bulk_reversed : basic_bulk +{ + bulk_reversed(std::size_t c) + : basic_bulk([](std::vector& ptrs) + { + std::reverse(ptrs.begin(), ptrs.end()); + }, c) {} + + static const char* name() {return "bulk_reversed";} +}; + +struct butterfly : basic_bulk +{ + butterfly(std::size_t c) + : basic_bulk([](std::vector &ptrs) + { + std::shuffle(ptrs.begin(), ptrs.end(), std::mt19937{}); + }, c) + {} + + static const char* name() {return "butterfly\n";} +}; + +#endif // FOONATHAN_MEMORY_TEST_BENCHMARK_HPP_INCLUDED diff --git a/test/memory_stack.cpp b/test/memory_stack.cpp index efcd9ab4..07cc8428 100644 --- a/test/memory_stack.cpp +++ b/test/memory_stack.cpp @@ -35,7 +35,7 @@ TEST_CASE("memory_stack", "[stack]") auto m = 
stack.top(); auto memory = stack.allocate(10, 16); - REQUIRE(detail::align_offset(memory, 16) == 0u); + REQUIRE(detail::is_aligned(memory, 16)); stack.unwind(m); REQUIRE(stack.capacity_left() == @@ -45,7 +45,7 @@ TEST_CASE("memory_stack", "[stack]") REQUIRE(alloc.no_allocated() == 1u); REQUIRE(alloc.no_deallocated() == 0u); } - SECTION("multiple blok allocation/unwind") + SECTION("multiple block allocation/unwind") { // note: tests are mostly hoping not to get a segfault diff --git a/test/profiling.cpp b/test/profiling.cpp index 14f06dc8..d06fbafc 100644 --- a/test/profiling.cpp +++ b/test/profiling.cpp @@ -4,12 +4,9 @@ // Profiling code to check performance of allocators. -#include -#include #include #include -#include -#include +#include #include "allocator_storage.hpp" #include "heap_allocator.hpp" @@ -17,160 +14,14 @@ #include "memory_pool.hpp" #include "memory_stack.hpp" -namespace memory = foonathan::memory; +using namespace foonathan::memory; -using unit = std::chrono::microseconds; - -template -std::size_t measure(F func, Args&&... args) -{ - auto start = std::chrono::system_clock::now(); - func(std::forward(args)...); - auto duration = std::chrono::duration_cast - (std::chrono::system_clock::now() - start); - return std::size_t(duration.count()); -} - -struct single -{ - std::size_t count; - - single(std::size_t c) - : count(c) {} - - template - std::size_t operator()(RawAllocator &alloc, std::size_t size) - { - return measure([&]() - { - for (std::size_t i = 0u; i != count; ++i) - { - volatile auto ptr = alloc.allocate_node(size, 1); - alloc.deallocate_node(ptr, size, 1); - } - }); - } - - template - std::size_t operator()(RawAllocator &alloc, - std::size_t array_size, std::size_t node_size) - { - return measure([&]() - { - for (std::size_t i = 0u; i != count; ++i) - { - volatile auto ptr = alloc.allocate_array(array_size, node_size, 1); - alloc.deallocate_array(ptr, array_size, - node_size, 1); - } - }); - } - - static const char* name() {return "single";} -}; - -struct basic_bulk -{ - using order_func = void(*)(std::vector&); - - order_func func; - std::size_t count; - - basic_bulk(order_func f, std::size_t c) - : func(f), count(c) {} - - template - std::size_t operator()(RawAllocator &alloc, std::size_t node_size) - { - std::vector ptrs; - ptrs.reserve(count); - - auto alloc_t = measure([&]() - { - for (std::size_t i = 0u; i != count; ++i) - ptrs.push_back(alloc.allocate_node(node_size, 1)); - }); - func(ptrs); - auto dealloc_t = measure([&]() - { - for (auto ptr : ptrs) - alloc.deallocate_node(ptr, node_size, 1); - }); - return alloc_t + dealloc_t; - } - - template - std::size_t operator()(RawAllocator& alloc, std::size_t array_size, std::size_t node_size) - { - std::vector ptrs; - ptrs.reserve(count); - - auto alloc_t = measure([&]() - { - for (std::size_t i = 0u; i != count; ++i) - ptrs.push_back( - alloc.allocate_array(array_size, node_size, 1)); - }); - func(ptrs); - auto dealloc_t = measure([&]() - { - for (auto ptr : ptrs) - alloc.deallocate_array(ptr, array_size, node_size, 1); - }); - return alloc_t + dealloc_t; - } -}; - -struct bulk : basic_bulk -{ - bulk(std::size_t c) - : basic_bulk([](std::vector &){}, c) {} - - static const char* name() {return "bulk";} -}; - -struct bulk_reversed : basic_bulk -{ - bulk_reversed(std::size_t c) - : basic_bulk([](std::vector& ptrs) - { - std::reverse(ptrs.begin(), ptrs.end()); - }, c) {} - - static const char* name() {return "bulk_reversed";} -}; - -struct butterfly : basic_bulk -{ - butterfly(std::size_t c) - : 
basic_bulk([](std::vector &ptrs) - { - std::shuffle(ptrs.begin(), ptrs.end(), std::mt19937{}); - }, c) - {} - - static const char* name() {return "butterfly\n";} -}; - -const std::size_t sample_size = 1024u; - -template -std::size_t benchmark(F measure_func, Args&& ... args) -{ - auto min_time = std::size_t(-1); - for (std::size_t i = 0u; i != sample_size; ++i) - { - auto time = measure_func(std::forward(args)...); - if (time < min_time) - min_time = time; - } - return min_time; -} +#include "benchmark.hpp" template void benchmark_node(std::size_t count, std::size_t size, Allocators&... allocators) { - int dummy[] = {(std::cout << benchmark(Func{count}, allocators, size) << '\t', 0)...}; + int dummy[] = {(std::cout << benchmark(Func{count}, allocators, size) << '|', 0)...}; (void)dummy; std::cout << '\n'; } @@ -179,24 +30,25 @@ template void benchmark_node(std::initializer_list counts, std::initializer_list node_sizes) { - using namespace foonathan::memory; - std::cout << Func::name() << "\n\t\tHeap\tNew\tSmall\tNode\tArray\tStack\n"; + std::cout << "##" << Func::name() << "\n"; + std::cout << '\n'; + std::cout << "Size|Heap|New|Small|Node|Array|Stack\n"; + std::cout << "----|-----|---|-----|----|-----|-----\n"; for (auto count : counts) for (auto size : node_sizes) { - auto heap_alloc = make_allocator_adapter(heap_allocator{}); - auto new_alloc = make_allocator_adapter(new_allocator{}); - auto small_alloc = make_allocator_adapter( - memory_pool{size, count * size * 2}); - auto node_alloc = make_allocator_adapter( - memory_pool{size, count * size * 2}); - auto array_alloc = make_allocator_adapter( - memory_pool{size, count * size * 2}); - auto stack_alloc = make_allocator_adapter( - memory_stack<>{count * size * 2}); + auto heap_alloc = [&]{return heap_allocator{};}; + auto new_alloc = [&]{return new_allocator{};}; + + auto small_alloc = [&]{return memory_pool(size, count * size + 1024);}; + auto node_alloc = [&]{return memory_pool(size, count * std::max(size, sizeof(char*)) + 1024);}; + auto array_alloc = [&]{return memory_pool(size, count * std::max(size, sizeof(char*)) + 1024);}; + + auto stack_alloc = [&]{return memory_stack<>(count * size);}; - std::cout << count << '*' << std::setw(2) << size << ": \t"; - benchmark_node(count, size, heap_alloc, new_alloc, + std::cout << count << "\\*" << size << "|"; + benchmark_node(count, size, + heap_alloc, new_alloc, small_alloc, node_alloc, array_alloc, stack_alloc); } @@ -215,8 +67,7 @@ template void benchmark_array(std::size_t count, std::size_t array_size, std::size_t node_size, Allocators& ... 
allocators) { - int dummy[] = {(std::cout << benchmark(Func{count}, allocators, array_size, node_size) - << '\t', 0)...}; + int dummy[] = {(std::cout << benchmark(Func{count}, allocators, array_size, node_size) << '|', 0)...}; (void) dummy; std::cout << '\n'; } @@ -227,23 +78,29 @@ void benchmark_array(std::initializer_list counts, std::initializer_list array_sizes) { using namespace foonathan::memory; - std::cout << Func::name() << "\n\t\tHeap\tNew\tNode\tArray\tStack\n"; + std::cout << "##" << Func::name() << "\n"; + std::cout << '\n'; + std::cout << "Size|Heap|New|Node|Array|Stack\n"; + std::cout << "----|-----|---|----|-----|-----\n"; for (auto count : counts) for (auto node_size : node_sizes) for (auto array_size : array_sizes) { - auto mem_needed = count * node_size * array_size * 2; + auto mem_needed = count * std::max(node_size, sizeof(char*)) * array_size + 1024; + + auto heap_alloc = [&]{return heap_allocator{};}; + auto new_alloc = [&]{return new_allocator{};}; + + auto node_alloc = [&]{return memory_pool(node_size, mem_needed);}; + auto array_alloc = [&]{return memory_pool(node_size, mem_needed);}; - auto heap_alloc = make_allocator_adapter(heap_allocator{}); - auto new_alloc = make_allocator_adapter(new_allocator{}); - auto node_alloc = make_allocator_adapter(memory_pool{node_size, mem_needed}); - auto array_alloc = make_allocator_adapter(memory_pool{node_size, mem_needed}); - auto stack_alloc = make_allocator_adapter(memory_stack<>{mem_needed}); + auto stack_alloc = [&]{return memory_stack<>(count * mem_needed);}; - std::cout << count << '*' << std::setw(3) << node_size - << '*' << std::setw(3) << array_size<< ": \t"; - benchmark_array(count , array_size, node_size, - heap_alloc, new_alloc, node_alloc, array_alloc, stack_alloc); + std::cout << count << "\\*" << node_size << "\\*" << array_size<< "|"; + benchmark_array(count, array_size, node_size, + heap_alloc, new_alloc, + node_alloc, array_alloc, + stack_alloc); } std::cout << '\n'; } @@ -259,11 +116,24 @@ void benchmark_array(std::initializer_list counts, int main() { - using namespace foonathan::memory; + class comma_numpunct : public std::numpunct + { + protected: + virtual char do_thousands_sep() const + { + return ','; + } - std::cout << "Node\n\n"; - benchmark_node({256, 512, 1024}, {1, 4, 8, 256}); + virtual std::string do_grouping() const + { + return "\03"; + } + }; + + std::cout.imbue({std::locale(), new comma_numpunct}); - std::cout << "Array\n\n"; + std::cout << "#Node\n\n"; + benchmark_node({256, 512, 1024}, {1, 4, 8, 256}); + std::cout << "#Array\n\n"; benchmark_array({256, 512}, {1, 4, 8}, {1, 4, 8}); }
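
As a quick illustration of the storage scheme behind the new src/detail/free_list_utils.hpp helpers (each free node keeps prev ^ next in its first bytes, read and written via std::memcpy), here is a minimal standalone sketch; the names xor_set and xor_other are illustrative only and not part of the library:

    #include <cstdint>
    #include <cstring>

    // store prev ^ next into the first sizeof(std::uintptr_t) bytes of a free node
    inline void xor_set(void* node, char* prev, char* next)
    {
        auto value = reinterpret_cast<std::uintptr_t>(prev)
                     ^ reinterpret_cast<std::uintptr_t>(next);
        std::memcpy(node, &value, sizeof value);
    }

    // given one neighbor, recover the other one
    inline char* xor_other(void* node, char* prev_or_next)
    {
        std::uintptr_t value;
        std::memcpy(&value, node, sizeof value);
        return reinterpret_cast<char*>(value ^ reinterpret_cast<std::uintptr_t>(prev_or_next));
    }

    // forward traversal keeps a (prev, cur) pair, like xor_list_iter_next():
    //   auto next = xor_other(cur, prev); prev = cur; cur = next;

Using std::memcpy rather than a pointer cast keeps the load and store well-defined even when the node memory is not suitably aligned for std::uintptr_t, which is why the helpers in the diff take that route.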
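
The reworked benchmark() in test/benchmark.hpp now takes an allocator factory rather than a live allocator, so each of the sample_size runs starts from a fresh allocator and only the minimum time is reported. A hypothetical driver in the style of profiling.cpp could look like the following sketch; the pool sizes and output labels are made up for the example:

    #include <iostream>

    #include "memory_pool.hpp"
    #include "benchmark.hpp"

    int main()
    {
        using namespace foonathan::memory;

        // the factory is invoked once per sample, so no allocator state leaks between runs
        auto make_pool = [] { return memory_pool<>(16, 4096); };

        std::cout << "pool/single: " << benchmark(single(256), make_pool, 16) << "ns\n";
        std::cout << "pool/bulk:   " << benchmark(bulk(256), make_pool, 16) << "ns\n";
    }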