Skip to content

Commit d1aaef4

Browse files
committed
[NFC][sanitizer] Parametrize PersistentAllocator with type
1 parent 3a208c6 commit d1aaef4

File tree

3 files changed

+22
-17
lines changed

3 files changed

+22
-17
lines changed

compiler-rt/lib/sanitizer_common/sanitizer_chained_origin_depot.cpp

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ struct ChainedOriginDepotNode {
5959

6060
} // namespace
6161

62-
static PersistentAllocator allocator;
62+
static PersistentAllocator<ChainedOriginDepotNode> allocator;
6363

6464
static StackDepotBase<ChainedOriginDepotNode, 4, 20> depot;
6565

@@ -71,8 +71,7 @@ uptr ChainedOriginDepotNode::allocated() { return allocator.allocated(); }
7171

7272
ChainedOriginDepotNode *ChainedOriginDepotNode::allocate(
7373
const args_type &args) {
74-
return static_cast<ChainedOriginDepotNode *>(
75-
allocator.alloc(sizeof(ChainedOriginDepotNode)));
74+
return allocator.alloc();
7675
}
7776

7877
/* This is murmur2 hash for the 64->32 bit case.

compiler-rt/lib/sanitizer_common/sanitizer_persistent_allocator.h

Lines changed: 16 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -20,52 +20,58 @@
2020

2121
namespace __sanitizer {
2222

23+
template <typename T>
2324
class PersistentAllocator {
2425
public:
25-
void *alloc(uptr size);
26+
T *alloc(uptr count = 1);
2627
uptr allocated() const {
2728
SpinMutexLock l(&mtx);
2829
return atomic_load_relaxed(&mapped_size) +
2930
atomic_load_relaxed(&region_pos) - atomic_load_relaxed(&region_end);
3031
}
3132

3233
private:
33-
void *tryAlloc(uptr size);
34-
void *refillAndAlloc(uptr size);
34+
T *tryAlloc(uptr count);
35+
T *refillAndAlloc(uptr count);
3536
mutable StaticSpinMutex mtx; // Protects alloc of new blocks.
3637
atomic_uintptr_t region_pos; // Region allocator for Node's.
3738
atomic_uintptr_t region_end;
3839
atomic_uintptr_t mapped_size;
3940
};
4041

41-
inline void *PersistentAllocator::tryAlloc(uptr size) {
42+
template <typename T>
43+
inline T *PersistentAllocator<T>::tryAlloc(uptr count) {
4244
// Optimistic lock-free allocation, essentially try to bump the region ptr.
4345
for (;;) {
4446
uptr cmp = atomic_load(&region_pos, memory_order_acquire);
4547
uptr end = atomic_load(&region_end, memory_order_acquire);
48+
uptr size = count * sizeof(T);
4649
if (cmp == 0 || cmp + size > end) return nullptr;
4750
if (atomic_compare_exchange_weak(&region_pos, &cmp, cmp + size,
4851
memory_order_acquire))
49-
return (void *)cmp;
52+
return reinterpret_cast<T *>(cmp);
5053
}
5154
}
5255

53-
inline void *PersistentAllocator::alloc(uptr size) {
56+
template <typename T>
57+
inline T *PersistentAllocator<T>::alloc(uptr count) {
5458
// First, try to allocate optimistically.
55-
void *s = tryAlloc(size);
59+
T *s = tryAlloc(count);
5660
if (LIKELY(s))
5761
return s;
58-
return refillAndAlloc(size);
62+
return refillAndAlloc(count);
5963
}
6064

61-
inline void *PersistentAllocator::refillAndAlloc(uptr size) {
65+
template <typename T>
66+
inline T *PersistentAllocator<T>::refillAndAlloc(uptr count) {
6267
// If failed, lock, retry and alloc new superblock.
6368
SpinMutexLock l(&mtx);
6469
for (;;) {
65-
void *s = tryAlloc(size);
70+
T *s = tryAlloc(count);
6671
if (s)
6772
return s;
6873
atomic_store(&region_pos, 0, memory_order_relaxed);
74+
uptr size = count * sizeof(T);
6975
uptr allocsz = 64 * 1024;
7076
if (allocsz < size)
7177
allocsz = size;

compiler-rt/lib/sanitizer_common/sanitizer_stackdepot.cpp

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,8 +19,8 @@
1919

2020
namespace __sanitizer {
2121

22-
static PersistentAllocator allocator;
23-
static PersistentAllocator traceAllocator;
22+
static PersistentAllocator<StackDepotNode> allocator;
23+
static PersistentAllocator<uptr> traceAllocator;
2424

2525
struct StackDepotNode {
2626
using hash_type = u64;
@@ -43,7 +43,7 @@ struct StackDepotNode {
4343
return allocator.allocated() + traceAllocator.allocated();
4444
}
4545
static StackDepotNode *allocate(const args_type &args) {
46-
return (StackDepotNode *)allocator.alloc(sizeof(StackDepotNode));
46+
return allocator.alloc();
4747
}
4848
static hash_type hash(const args_type &args) {
4949
MurMur2Hash64Builder H(args.size * sizeof(uptr));
@@ -59,7 +59,7 @@ struct StackDepotNode {
5959
atomic_store(&tag_and_use_count, args.tag << kUseCountBits,
6060
memory_order_relaxed);
6161
stack_hash = hash;
62-
stack_trace = (uptr *)traceAllocator.alloc((args.size + 1) * sizeof(uptr));
62+
stack_trace = traceAllocator.alloc(args.size + 1);
6363
*stack_trace = args.size;
6464
internal_memcpy(stack_trace + 1, args.trace, args.size * sizeof(uptr));
6565
}

0 commit comments

Comments
 (0)