lockedpool.h
// Copyright (c) 2016-2020 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.

#ifndef BITCOIN_SUPPORT_LOCKEDPOOL_H
#define BITCOIN_SUPPORT_LOCKEDPOOL_H

#include <cstddef>
#include <list>
#include <map>
#include <memory>
#include <mutex>
#include <unordered_map>

/** OS-dependent allocation and deallocation of locked/pinned memory pages. */
class LockedPageAllocator
{
public:
    virtual ~LockedPageAllocator() = default;
    /** Allocate and lock memory pages; if locking fails the memory is still
     * returned, but *lockingSuccess is set to false. */
    virtual void* AllocateLocked(size_t len, bool *lockingSuccess) = 0;

    /** Unlock and free memory pages. */
    virtual void FreeLocked(void* addr, size_t len) = 0;

    /** Get the total limit on the amount of memory that may be locked by this
     * process, in bytes. */
    virtual size_t GetLimit() = 0;
};
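The concrete LockedPageAllocator implementations are OS-specific and live in lockedpool.cpp. As a rough illustration of what the interface expects, here is a minimal, hypothetical POSIX/Linux-style allocator built on mmap/mlock; the class name is made up and the real implementation differs in detail (page-size rounding, limit detection via the OS, secure wiping).

// Hypothetical sketch only -- not the allocator used by Bitcoin Core.
#include <sys/mman.h>   // mmap, mlock, munlock, munmap (POSIX)
#include <cstring>
#include <limits>

class ExamplePosixLockedPageAllocator : public LockedPageAllocator
{
public:
    void* AllocateLocked(size_t len, bool* lockingSuccess) override
    {
        void* addr = mmap(nullptr, len, PROT_READ | PROT_WRITE,
                          MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        if (addr == MAP_FAILED) return nullptr;
        // Locking can fail (e.g. when the memlock limit is exceeded); the
        // memory is still returned, but the caller is told it is not pinned.
        *lockingSuccess = (mlock(addr, len) == 0);
        return addr;
    }
    void FreeLocked(void* addr, size_t len) override
    {
        std::memset(addr, 0, len); // wipe before handing pages back to the OS
        munlock(addr, len);
        munmap(addr, len);
    }
    size_t GetLimit() override
    {
        // Pretend there is no limit; a real implementation would query the OS.
        return std::numeric_limits<size_t>::max();
    }
};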

/* An arena manages a contiguous region of memory by dividing it into
 * chunks.
 */
class Arena
{
public:
    Arena(void *base, size_t size, size_t alignment);
    virtual ~Arena();

    Arena(const Arena& other) = delete; // non construction-copyable
    Arena& operator=(const Arena&) = delete; // non copyable

    /** Memory statistics. */
    struct Stats
    {
        size_t used;
        size_t free;
        size_t total;
        size_t chunks_used;
        size_t chunks_free;
    };

    /** Allocate size bytes from this arena.
     * Returns a pointer on success, or nullptr if the arena is full or the
     * requested size was 0.
     */
    void* alloc(size_t size);

    /** Free a previously allocated chunk of memory.
     * Freeing a null pointer has no effect.
     */
    void free(void *ptr);

    /** Get arena usage statistics. */
    Stats stats() const;

#ifdef ARENA_DEBUG
    void walk() const;
#endif

    /** Return whether a pointer points inside this arena. */
    bool addressInArena(void *ptr) const { return ptr >= base && ptr < end; }
private:
    typedef std::multimap<size_t, void*> SizeToChunkSortedMap;
    /** Map to enable O(log(n)) best-fit allocation, as it is sorted by size. */
    SizeToChunkSortedMap size_to_free_chunk;

    typedef std::unordered_map<void*, SizeToChunkSortedMap::const_iterator> ChunkToSizeMap;
    /** Map from begin of free chunk to its node in size_to_free_chunk. */
    ChunkToSizeMap chunks_free;
    /** Map from end of free chunk to its node in size_to_free_chunk. */
    ChunkToSizeMap chunks_free_end;

    /** Map from begin of used chunk to its size. */
    std::unordered_map<void*, size_t> chunks_used;

    /** Base address of arena. */
    void* base;
    /** End address of arena. */
    void* end;
    /** Minimum chunk alignment. */
    size_t alignment;
};
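Arena itself knows nothing about locked memory; it simply carves aligned chunks out of a caller-provided region, using the size-sorted multimap for best-fit allocation and the begin/end maps to merge adjacent free chunks. A small usage sketch (the backing buffer, sizes and include path are illustrative):

#include <support/lockedpool.h> // assumed include path within the Bitcoin Core tree
#include <cstdio>
#include <vector>

int main()
{
    std::vector<unsigned char> backing(4096);
    Arena arena(backing.data(), backing.size(), 16 /* minimum chunk alignment */);

    void* a = arena.alloc(100); // the request is rounded up to the alignment internally
    void* b = arena.alloc(32);
    if (!a || !b) return 1;     // alloc() returns nullptr when the arena is full

    Arena::Stats st = arena.stats();
    std::printf("used=%zu free=%zu total=%zu chunks_used=%zu\n",
                st.used, st.free, st.total, st.chunks_used);

    arena.free(a); // freed chunks are merged with free neighbours where possible
    arena.free(b);
    return 0;
}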

/** Pool for locked memory chunks.
 *
 * To avoid sensitive key data from being swapped to disk, the memory in this
 * pool is locked/pinned. The pool starts out with one arena of locked memory
 * and can grow to multiple arenas if the need arises.
 */
class LockedPool
{
public:
    /** Size of one arena of locked memory. */
    static const size_t ARENA_SIZE = 256*1024;
    /** Chunk alignment. */
    static const size_t ARENA_ALIGN = 16;

    /** Callback when allocation succeeds but locking fails. */
    typedef bool (*LockingFailed_Callback)();

    /** Memory statistics. */
    struct Stats
    {
        size_t used;
        size_t free;
        size_t total;
        size_t locked;
        size_t chunks_used;
        size_t chunks_free;
    };

    /** Create a new LockedPool. This takes ownership of the provided
     * LockedPageAllocator.
     */
    explicit LockedPool(std::unique_ptr<LockedPageAllocator> allocator, LockingFailed_Callback lf_cb_in = nullptr);
    ~LockedPool();

    LockedPool(const LockedPool& other) = delete; // non construction-copyable
    LockedPool& operator=(const LockedPool&) = delete; // non copyable

    /** Allocate size bytes from the pool.
     * Returns a pointer on success, or nullptr if memory is full or the
     * requested size was 0.
     */
    void* alloc(size_t size);

    /** Free a previously allocated chunk of memory.
     * Freeing a null pointer has no effect.
     */
    void free(void *ptr);

    /** Get pool usage statistics. */
    Stats stats() const;
private:
    std::unique_ptr<LockedPageAllocator> allocator;

    /** Create an arena from locked pages. */
    class LockedPageArena: public Arena
    {
    public:
        LockedPageArena(LockedPageAllocator *alloc_in, void *base_in, size_t size, size_t align);
        ~LockedPageArena();
    private:
        void *base;
        size_t size;
        LockedPageAllocator *allocator;
    };

    bool new_arena(size_t size, size_t align);

    std::list<LockedPageArena> arenas;
    LockingFailed_Callback lf_cb;
    size_t cumulative_bytes_locked;
    /** Mutex protects access to this pool's data structures, including arenas. */
    mutable std::mutex mutex;
};
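Putting the pieces together: a LockedPool owns one LockedPageAllocator and grows by ARENA_SIZE-byte arenas on demand. The sketch below reuses the hypothetical ExamplePosixLockedPageAllocator from the earlier example; OnLockingFailed is likewise an illustrative name. Returning true from the callback means "warn and keep going with the unlocked arena"; returning false makes the pool give up on that arena.

#include <support/lockedpool.h> // assumed include path within the Bitcoin Core tree
#include <cstdio>
#include <memory>

static bool OnLockingFailed()
{
    std::fputs("warning: could not lock memory pages, key data may be swapped to disk\n", stderr);
    return true; // proceed with the unlocked arena rather than failing the allocation
}

int main()
{
    LockedPool pool(std::make_unique<ExamplePosixLockedPageAllocator>(), OnLockingFailed);

    void* secret = pool.alloc(64); // carved out of a 256 KiB (ARENA_SIZE) locked arena
    if (!secret) return 1;

    LockedPool::Stats st = pool.stats();
    std::printf("pool: used=%zu locked=%zu total=%zu\n", st.used, st.locked, st.total);

    pool.free(secret);
    return 0;
}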

/** Singleton class to keep track of locked (ie, non-swappable) memory, for use
 * in std::allocator templates.
 */
class LockedPoolManager : public LockedPool
{
public:
    /** Return the current instance, or create it once. */
    static LockedPoolManager& Instance()
    {
        static std::once_flag init_flag;
        std::call_once(init_flag, LockedPoolManager::CreateInstance);
        return *LockedPoolManager::_instance;
    }

private:
    explicit LockedPoolManager(std::unique_ptr<LockedPageAllocator> allocator);

    /** Create a new LockedPoolManager specialized to the OS. */
    static void CreateInstance();
    /** Called when locking fails, warn the user here. */
    static bool LockingFailed();

    static LockedPoolManager* _instance;
};

#endif // BITCOIN_SUPPORT_LOCKEDPOOL_H
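In Bitcoin Core the singleton is normally reached through the secure_allocator template (support/allocators/secure.h), which routes std::allocator-style allocations into LockedPoolManager::Instance() rather than having callers use the pool directly. For illustration, a direct-use sketch follows; LoadKeyInto and UseSecretKey are made-up placeholder names.

#include <support/lockedpool.h> // assumed include path within the Bitcoin Core tree
#include <cstring>

// Placeholder for whatever fills the buffer with secret data.
void LoadKeyInto(unsigned char* buf, size_t len);

void UseSecretKey()
{
    LockedPool& pool = LockedPoolManager::Instance(); // lazily created, OS-specific allocator
    unsigned char* key = static_cast<unsigned char*>(pool.alloc(32));
    if (!key) return; // locked memory is scarce; the pool can run out

    LoadKeyInto(key, 32); // the key now lives in non-swappable memory
    // ... use the key ...

    std::memset(key, 0, 32); // wipe before returning the chunk to the pool
    pool.free(key);
}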