// GitHub Repository: stenzek/duckstation
// Path: blob/master/src/common/heap_array.h
// SPDX-FileCopyrightText: 2019-2024 Connor McLaughlin <[email protected]>
// SPDX-License-Identifier: CC-BY-NC-ND-4.0

#pragma once

#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdlib>
#include <cstring>
#include <span>
#include <type_traits>

// A fixed-size array whose SIZE elements live in a single heap allocation rather than inline in
// the object, optionally over-aligned to ALIGNMENT bytes. Storage is uninitialized: no element
// constructors or destructors are run.
template<typename T, std::size_t SIZE, std::size_t ALIGNMENT = 0>
class FixedHeapArray
{
public:
  using value_type = T;
  using size_type = std::size_t;
  using difference_type = std::ptrdiff_t;
  using reference = T&;
  using const_reference = const T&;
  using pointer = T*;
  using const_pointer = const T*;
  using this_type = FixedHeapArray<T, SIZE, ALIGNMENT>;

  FixedHeapArray() { allocate(); }

  FixedHeapArray(const this_type& copy)
  {
    allocate();
    std::copy(copy.cbegin(), copy.cend(), begin());
  }

  FixedHeapArray(this_type&& move)
  {
    m_data = move.m_data;
    move.m_data = nullptr;
  }

  ~FixedHeapArray() { deallocate(); }

  size_type size() const { return SIZE; }
  size_type size_bytes() const { return SIZE * sizeof(T); }
  size_type capacity() const { return SIZE; }
  bool empty() const { return false; }

  pointer begin() { return m_data; }
  pointer end() { return m_data + SIZE; }

  const_pointer data() const { return m_data; }
  pointer data() { return m_data; }

  const_pointer cbegin() const { return m_data; }
  const_pointer cend() const { return m_data + SIZE; }

  const_reference operator[](size_type index) const
  {
    assert(index < SIZE);
    return m_data[index];
  }
  reference operator[](size_type index)
  {
    assert(index < SIZE);
    return m_data[index];
  }

  const_reference front() const { return m_data[0]; }
  const_reference back() const { return m_data[SIZE - 1]; }
  reference front() { return m_data[0]; }
  reference back() { return m_data[SIZE - 1]; }

  void fill(const_reference value) { std::fill(begin(), end(), value); }

  void swap(this_type& move) { std::swap(m_data, move.m_data); }

  std::span<T, SIZE> span() { return std::span<T, SIZE>(m_data, SIZE); }
  std::span<const T, SIZE> cspan() const { return std::span<const T, SIZE>(m_data, SIZE); }

  this_type& operator=(const this_type& rhs)
  {
    std::copy(rhs.cbegin(), rhs.cend(), begin());
    return *this;
  }

  this_type& operator=(this_type&& move)
  {
    deallocate();
    m_data = move.m_data;
    move.m_data = nullptr;
    return *this;
  }

// Element-wise comparison: true only when every element pair satisfies op.
#define RELATIONAL_OPERATOR(op) \
  bool operator op(const this_type& rhs) const \
  { \
    for (size_type i = 0; i < SIZE; i++) \
    { \
      if (!(m_data[i] op rhs.m_data[i])) \
        return false; \
    } \
    return true; \
  }

  RELATIONAL_OPERATOR(==);
  RELATIONAL_OPERATOR(!=);
  RELATIONAL_OPERATOR(<);
  RELATIONAL_OPERATOR(<=);
  RELATIONAL_OPERATOR(>);
  RELATIONAL_OPERATOR(>=);

#undef RELATIONAL_OPERATOR

private:
  // Allocates uninitialized storage for SIZE elements, over-aligned when ALIGNMENT > 0.
  void allocate()
  {
    if constexpr (ALIGNMENT > 0)
    {
#ifdef _MSC_VER
      m_data = static_cast<T*>(_aligned_malloc(SIZE * sizeof(T), ALIGNMENT));
      assert(m_data);
      if (!m_data) [[unlikely]]
        std::abort();
#else
      if (posix_memalign(reinterpret_cast<void**>(&m_data), ALIGNMENT, SIZE * sizeof(T)) != 0) [[unlikely]]
        std::abort();
#endif
    }
    else
    {
      m_data = static_cast<T*>(std::malloc(SIZE * sizeof(T)));
      assert(m_data);
      if (!m_data) [[unlikely]]
        std::abort();
    }
  }
  void deallocate()
  {
    if constexpr (ALIGNMENT > 0)
    {
#ifdef _MSC_VER
      _aligned_free(m_data);
#else
      std::free(m_data);
#endif
    }
    else
    {
      std::free(m_data);
    }
  }

  T* m_data;
};
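
// Example usage (illustrative sketch only; the element count, 64-byte alignment, function name,
// and include path below are arbitrary assumptions, not part of this header). FixedHeapArray is
// useful when a large fixed-size scratch buffer would overflow the stack or bloat the enclosing
// object.
//
//   #include "common/heap_array.h" // adjust include path as needed
//
//   void fixed_heap_array_example()
//   {
//     FixedHeapArray<float, 4096, 64> samples; // 4096 floats in one 64-byte-aligned heap block
//     samples.fill(0.0f);                      // storage starts uninitialized, so fill it first
//     samples[0] = 1.0f;
//     std::span<float, 4096> view = samples.span(); // fixed-extent view over the whole buffer
//   }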

// A resizable array of trivially copyable elements stored in a single heap allocation, optionally
// over-aligned to alignment bytes. Resizing uses realloc-style semantics: the retained contents
// are preserved bitwise and no element constructors or destructors are run.
template<typename T, size_t alignment = 0>
class DynamicHeapArray
{
  static_assert(std::is_trivially_copyable_v<T>, "T must be trivially copyable");
  static_assert(std::is_standard_layout_v<T>, "T must be standard layout");

public:
  using value_type = T;
  using size_type = std::size_t;
  using difference_type = std::ptrdiff_t;
  using reference = T&;
  using const_reference = const T&;
  using pointer = T*;
  using const_pointer = const T*;
  using this_type = DynamicHeapArray<T, alignment>;

  DynamicHeapArray() : m_data(nullptr), m_size(0) {}
  explicit DynamicHeapArray(size_t size) { internal_resize(size, nullptr, 0); }
  DynamicHeapArray(const T* begin, const T* end)
  {
    const size_t size = reinterpret_cast<const char*>(end) - reinterpret_cast<const char*>(begin);
    if (size > 0)
    {
      internal_resize(size / sizeof(T), nullptr, 0);
      std::memcpy(m_data, begin, size);
    }
    else
    {
      m_data = nullptr;
      m_size = 0;
    }
  }
  DynamicHeapArray(const T* begin, size_t count)
  {
    if (count > 0)
    {
      internal_resize(count, nullptr, 0);
      std::memcpy(m_data, begin, sizeof(T) * count);
    }
    else
    {
      m_data = nullptr;
      m_size = 0;
    }
  }
  explicit DynamicHeapArray(const std::span<const T> data)
  {
    if (!data.empty())
    {
      internal_resize(data.size(), nullptr, 0);
      std::memcpy(m_data, data.data(), sizeof(T) * data.size());
    }
    else
    {
      m_data = nullptr;
      m_size = 0;
    }
  }

  DynamicHeapArray(const this_type& copy)
  {
    if (copy.m_size > 0)
    {
      internal_resize(copy.m_size, nullptr, 0);
      std::memcpy(m_data, copy.m_data, sizeof(T) * copy.m_size);
    }
    else
    {
      m_data = nullptr;
      m_size = 0;
    }
  }

  DynamicHeapArray(this_type&& move) noexcept
  {
    m_data = move.m_data;
    m_size = move.m_size;
    move.m_data = nullptr;
    move.m_size = 0;
  }

  ~DynamicHeapArray() { internal_deallocate(); }

  size_type size() const { return m_size; }
  size_type size_bytes() const { return m_size * sizeof(T); }
  size_type capacity() const { return m_size; }
  bool empty() const { return (m_size == 0); }

  pointer begin() { return m_data; }
  pointer end() { return m_data + m_size; }

  const_pointer data() const { return m_data; }
  pointer data() { return m_data; }

  const_pointer cbegin() const { return m_data; }
  const_pointer cend() const { return m_data + m_size; }

  const_reference operator[](size_type index) const
  {
    assert(index < m_size);
    return m_data[index];
  }
  reference operator[](size_type index)
  {
    assert(index < m_size);
    return m_data[index];
  }

  const_reference front() const { return m_data[0]; }
  const_reference back() const { return m_data[m_size - 1]; }
  reference front() { return m_data[0]; }
  reference back() { return m_data[m_size - 1]; }

  void fill(const_reference value) { std::fill(begin(), end(), value); }

  void swap(this_type& rhs)
  {
    std::swap(m_data, rhs.m_data);
    std::swap(m_size, rhs.m_size);
  }

  void resize(size_t new_size) { internal_resize(new_size, m_data, m_size); }

  void deallocate()
  {
    internal_deallocate();
    m_data = nullptr;
    m_size = 0;
  }

  void assign(const std::span<const T> data) { assign(data.data(), data.size()); }

  void assign(const T* begin, const T* end)
  {
    const size_t size = reinterpret_cast<const char*>(end) - reinterpret_cast<const char*>(begin);
    const size_t count = size / sizeof(T);
    if (count > 0)
    {
      if (m_size != count)
      {
        internal_deallocate();
        internal_resize(count, nullptr, 0);
      }

      std::memcpy(m_data, begin, size);
    }
    else
    {
      internal_deallocate();

      m_data = nullptr;
      m_size = 0;
    }
  }
  void assign(const T* begin, size_t count)
  {
    if (count > 0)
    {
      if (m_size != count)
      {
        internal_deallocate();
        internal_resize(count, nullptr, 0);
      }

      std::memcpy(m_data, begin, sizeof(T) * count);
    }
    else
    {
      internal_deallocate();

      m_data = nullptr;
      m_size = 0;
    }
  }
  void assign(const this_type& copy) { assign(copy.m_data, copy.m_size); }
  void assign(this_type&& move)
  {
    internal_deallocate();
    m_data = move.m_data;
    m_size = move.m_size;
    move.m_data = nullptr;
    move.m_size = 0;
  }

  std::span<T> span() { return std::span<T>(m_data, m_size); }
  std::span<const T> cspan() const { return std::span<const T>(m_data, m_size); }

  std::span<T> span(size_t offset, size_t size = static_cast<size_t>(-1))
  {
    std::span<T> ret;
    if (offset < m_size) [[likely]]
      ret = std::span<T>(m_data + offset, std::min(m_size - offset, size));
    return ret;
  }

  std::span<const T> cspan(size_t offset, size_t size = static_cast<size_t>(-1)) const
  {
    std::span<const T> ret;
    if (offset < m_size) [[likely]]
      ret = std::span<const T>(m_data + offset, std::min(m_size - offset, size));
    return ret;
  }

  this_type& operator=(const this_type& rhs)
  {
    assign(rhs);
    return *this;
  }

  this_type& operator=(this_type&& move) noexcept
  {
    assign(std::move(move));
    return *this;
  }

// Element-wise comparison: arrays of different sizes are compared by size first; otherwise true
// only when every element pair satisfies op.
#define RELATIONAL_OPERATOR(op, size_op) \
  bool operator op(const this_type& rhs) const \
  { \
    if (m_size != rhs.m_size) \
      return m_size size_op rhs.m_size; \
    for (size_type i = 0; i < m_size; i++) \
    { \
      if (!(m_data[i] op rhs.m_data[i])) \
        return false; \
    } \
    return true; \
  }

  RELATIONAL_OPERATOR(==, ==);
  RELATIONAL_OPERATOR(!=, !=);
  RELATIONAL_OPERATOR(<, <);
  RELATIONAL_OPERATOR(<=, <=);
  RELATIONAL_OPERATOR(>, >);
  RELATIONAL_OPERATOR(>=, >=);

#undef RELATIONAL_OPERATOR

private:
  // (Re)allocates storage for size elements, preserving the retained contents.
  void internal_resize(size_t size, T* prev_ptr, [[maybe_unused]] size_t prev_size)
  {
    if constexpr (alignment > 0)
    {
#ifdef _MSC_VER
      m_data = static_cast<T*>(_aligned_realloc(prev_ptr, size * sizeof(T), alignment));
      assert(m_data);
      if (!m_data) [[unlikely]]
        std::abort();
#else
      if (posix_memalign(reinterpret_cast<void**>(&m_data), alignment, size * sizeof(T)) != 0) [[unlikely]]
        std::abort();

      if (prev_ptr)
      {
        std::memcpy(m_data, prev_ptr, std::min(size, prev_size) * sizeof(T));
        std::free(prev_ptr);
      }
#endif
    }
    else
    {
      m_data = static_cast<T*>(std::realloc(prev_ptr, size * sizeof(T)));
      assert(m_data);
      if (!m_data) [[unlikely]]
        std::abort();
    }

    m_size = size;
  }
  void internal_deallocate()
  {
    if constexpr (alignment > 0)
    {
#ifdef _MSC_VER
      _aligned_free(m_data);
#else
      std::free(m_data);
#endif
    }
    else
    {
      std::free(m_data);
    }
  }

  T* m_data;
  size_t m_size;
};
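
// Example usage (illustrative sketch only; the sizes, values, function name, and include path
// below are arbitrary assumptions, not part of this header). DynamicHeapArray only accepts
// trivially copyable element types; copies and assignments are performed with memcpy.
//
//   #include <cstdint>
//   #include "common/heap_array.h" // adjust include path as needed
//
//   void dynamic_heap_array_example()
//   {
//     DynamicHeapArray<std::uint8_t> buffer(1024); // 1024 bytes, default (malloc) alignment
//     buffer.fill(0);
//     buffer.resize(2048);                         // realloc-style grow; existing bytes preserved
//     std::span<const std::uint8_t> tail = buffer.cspan(1024); // view starting at offset 1024
//
//     DynamicHeapArray<std::uint8_t> copy;
//     copy.assign(buffer.cspan());                 // deep copy via memcpy
//   }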