author    Vincent Ambo <mail@tazj.in>  2020-07-15T07·20+0100
committer Vincent Ambo <mail@tazj.in>  2020-07-15T07·23+0100
commit    1213b086a1015a662ab7ebd658f784534fd3116a (patch)
tree      d3bc8f3b7f40b8b60f0ef6fbd649cf765f4fdfb6 /third_party/immer/immer/heap/free_list_heap.hpp
parent    1390827b9ea1e04bc9863e48930bfd16db3b716e (diff)
parent    7f19d641647ac4ef313ed88d6b5c140983ce5436 (diff)
merge(3p/immer): Subtree merge at 'ad3e3556d' as 'third_party/immer' r/1299
Change-Id: I9636a41ad44b4218293833fd3e9456d9b07c731b
Diffstat (limited to 'third_party/immer/immer/heap/free_list_heap.hpp')
-rw-r--r--  third_party/immer/immer/heap/free_list_heap.hpp | 83
1 file changed, 83 insertions, 0 deletions
diff --git a/third_party/immer/immer/heap/free_list_heap.hpp b/third_party/immer/immer/heap/free_list_heap.hpp
new file mode 100644
index 0000000000..dc25b10184
--- /dev/null
+++ b/third_party/immer/immer/heap/free_list_heap.hpp
@@ -0,0 +1,83 @@
+//
+// immer: immutable data structures for C++
+// Copyright (C) 2016, 2017, 2018 Juan Pedro Bolivar Puente
+//
+// This software is distributed under the Boost Software License, Version 1.0.
+// See accompanying file LICENSE or copy at http://boost.org/LICENSE_1_0.txt
+//
+
+#pragma once
+
+#include <immer/heap/free_list_node.hpp>
+#include <immer/heap/with_data.hpp>
+
+#include <atomic>
+#include <cassert>
+
+namespace immer {
+
+/*!
+ * Adaptor that does not release memory back to the parent heap but
+ * instead keeps it in a thread-safe global free list. Must be
+ * preceded by a `with_data<free_list_node, ...>` heap adaptor.
+ *
+ * @tparam Size  Maximum size of the objects to be allocated.
+ * @tparam Limit Maximum number of freed objects kept in the free list
+ *               before further deallocations go back to the parent heap.
+ * @tparam Base  Type of the parent heap.
+ */
+template <std::size_t Size, std::size_t Limit, typename Base>
+struct free_list_heap : Base
+{
+    using base_t = Base;
+
+    template <typename... Tags>
+    static void* allocate(std::size_t size, Tags...)
+    {
+        assert(size <= sizeof(free_list_node) + Size);
+        assert(size >= sizeof(free_list_node));
+
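+        // lock-free pop: take the first node off the global free list,
+        // or fall back to the parent heap when the list is empty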
+        free_list_node* n;
+        do {
+            n = head().data;
+            if (!n) {
+                auto p = base_t::allocate(Size + sizeof(free_list_node));
+                return static_cast<free_list_node*>(p);
+            }
+        } while (!head().data.compare_exchange_weak(n, n->next));
+        head().count.fetch_sub(1u, std::memory_order_relaxed);
+        return n;
+    }
+
+    template <typename... Tags>
+    static void deallocate(std::size_t size, void* data, Tags...)
+    {
+        assert(size <= sizeof(free_list_node) + Size);
+        assert(size >= sizeof(free_list_node));
+
+        // we use relaxed ordering because we are fine with temporarily
+        // having a few more or fewer buffers in the free list than Limit
+        if (head().count.load(std::memory_order_relaxed) >= Limit) {
+            base_t::deallocate(Size + sizeof(free_list_node), data);
+        } else {
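+            // lock-free push of the node onto the global free list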
+            auto n = static_cast<free_list_node*>(data);
+            do {
+                n->next = head().data;
+            } while (!head().data.compare_exchange_weak(n->next, n));
+            head().count.fetch_add(1u, std::memory_order_relaxed);
+        }
+    }
+
+private:
+    struct head_t
+    {
+        std::atomic<free_list_node*> data;
+        std::atomic<std::size_t> count;
+    };
+
+    static head_t& head()
+    {
+        static head_t head_{{nullptr}, {0}};
+        return head_;
+    }
+};
+
+} // namespace immer
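
For reference, a minimal sketch of how this adaptor might be composed and used, assuming immer::cpp_heap (from <immer/heap/cpp_heap.hpp>) as the parent heap; the payload size 64 and free-list limit 1024 are illustrative values only:

    #include <immer/heap/cpp_heap.hpp>
    #include <immer/heap/free_list_heap.hpp>
    #include <immer/heap/with_data.hpp>

    // Keep up to 1024 freed blocks of up to 64 payload bytes on a global
    // free list instead of returning them to cpp_heap; with_data reserves
    // room for the free_list_node header in front of each allocation.
    using base_heap = immer::free_list_heap<64, 1024, immer::cpp_heap>;
    using heap      = immer::with_data<immer::free_list_node, base_heap>;

    int main()
    {
        void* p = heap::allocate(64); // taken from the free list when non-empty
        heap::deallocate(64, p);      // pushed onto the free list, not freed
        return 0;
    }

The free list itself is a global Treiber stack: allocate pops and deallocate pushes through a compare_exchange_weak loop on the atomic head pointer, so small allocations can bypass the parent heap entirely once the list is warm.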