aboutsummaryrefslogtreecommitdiff
path: root/lib/sanitizer_common/sanitizer_allocator_primary32.h
diff options
context:
space:
mode:
Diffstat (limited to 'lib/sanitizer_common/sanitizer_allocator_primary32.h')
-rw-r--r-- lib/sanitizer_common/sanitizer_allocator_primary32.h | 72
1 file changed, 33 insertions, 39 deletions
diff --git a/lib/sanitizer_common/sanitizer_allocator_primary32.h b/lib/sanitizer_common/sanitizer_allocator_primary32.h
index 6c682aff64a2..67970e95b31e 100644
--- a/lib/sanitizer_common/sanitizer_allocator_primary32.h
+++ b/lib/sanitizer_common/sanitizer_allocator_primary32.h
@@ -63,9 +63,9 @@ class SizeClassAllocator32 {
struct TransferBatch {
static const uptr kMaxNumCached = SizeClassMap::kMaxNumCachedHint - 2;
- void SetFromArray(uptr region_beg_unused, void *batch[], uptr count) {
+ void SetFromArray(void *batch[], uptr count) {
+ DCHECK_LE(count, kMaxNumCached);
count_ = count;
- CHECK_LE(count_, kMaxNumCached);
for (uptr i = 0; i < count; i++)
batch_[i] = batch[i];
}
@@ -73,9 +73,9 @@ class SizeClassAllocator32 {
void Clear() { count_ = 0; }
void Add(void *ptr) {
batch_[count_++] = ptr;
- CHECK_LE(count_, kMaxNumCached);
+ DCHECK_LE(count_, kMaxNumCached);
}
- void CopyToArray(void *to_batch[]) {
+ void CopyToArray(void *to_batch[]) const {
for (uptr i = 0, n = Count(); i < n; i++)
to_batch[i] = batch_[i];
}
@@ -84,8 +84,8 @@ class SizeClassAllocator32 {
static uptr AllocationSizeRequiredForNElements(uptr n) {
return sizeof(uptr) * 2 + sizeof(void *) * n;
}
- static uptr MaxCached(uptr class_id) {
- return Min(kMaxNumCached, SizeClassMap::MaxCachedHint(class_id));
+ static uptr MaxCached(uptr size) {
+ return Min(kMaxNumCached, SizeClassMap::MaxCachedHint(size));
}
TransferBatch *next;
@@ -108,7 +108,7 @@ class SizeClassAllocator32 {
typedef SizeClassAllocator32LocalCache<ThisT> AllocatorCache;
void Init(s32 release_to_os_interval_ms) {
- possible_regions.TestOnlyInit();
+ possible_regions.Init();
internal_memset(size_class_info_array, 0, sizeof(size_class_info_array));
}
@@ -125,7 +125,7 @@ class SizeClassAllocator32 {
}
void *MapWithCallback(uptr size) {
- void *res = MmapOrDie(size, "SizeClassAllocator32");
+ void *res = MmapOrDie(size, PrimaryAllocatorName);
MapUnmapCallback().OnMap((uptr)res, size);
return res;
}
@@ -153,13 +153,14 @@ class SizeClassAllocator32 {
NOINLINE TransferBatch *AllocateBatch(AllocatorStats *stat, AllocatorCache *c,
uptr class_id) {
- CHECK_LT(class_id, kNumClasses);
+ DCHECK_LT(class_id, kNumClasses);
SizeClassInfo *sci = GetSizeClassInfo(class_id);
SpinMutexLock l(&sci->mutex);
- if (sci->free_list.empty() &&
- UNLIKELY(!PopulateFreeList(stat, c, sci, class_id)))
- return nullptr;
- CHECK(!sci->free_list.empty());
+ if (sci->free_list.empty()) {
+ if (UNLIKELY(!PopulateFreeList(stat, c, sci, class_id)))
+ return nullptr;
+ DCHECK(!sci->free_list.empty());
+ }
TransferBatch *b = sci->free_list.front();
sci->free_list.pop_front();
return b;
@@ -167,15 +168,13 @@ class SizeClassAllocator32 {
NOINLINE void DeallocateBatch(AllocatorStats *stat, uptr class_id,
TransferBatch *b) {
- CHECK_LT(class_id, kNumClasses);
+ DCHECK_LT(class_id, kNumClasses);
CHECK_GT(b->Count(), 0);
SizeClassInfo *sci = GetSizeClassInfo(class_id);
SpinMutexLock l(&sci->mutex);
sci->free_list.push_front(b);
}
- uptr GetRegionBeginBySizeClass(uptr class_id) { return 0; }
-
bool PointerIsMine(const void *p) {
uptr mem = reinterpret_cast<uptr>(p);
if (mem < kSpaceBeg || mem >= kSpaceBeg + kSpaceSize)
@@ -251,12 +250,9 @@ class SizeClassAllocator32 {
}
}
- void PrintStats() {
- }
+ void PrintStats() {}
- static uptr AdditionalSize() {
- return 0;
- }
+ static uptr AdditionalSize() { return 0; }
typedef SizeClassMap SizeClassMapT;
static const uptr kNumClasses = SizeClassMap::kNumClasses;
@@ -265,17 +261,15 @@ class SizeClassAllocator32 {
static const uptr kRegionSize = 1 << kRegionSizeLog;
static const uptr kNumPossibleRegions = kSpaceSize / kRegionSize;
- struct SizeClassInfo {
- SpinMutex mutex;
+ struct ALIGNED(SANITIZER_CACHE_LINE_SIZE) SizeClassInfo {
+ StaticSpinMutex mutex;
IntrusiveList<TransferBatch> free_list;
u32 rand_state;
- char padding[kCacheLineSize - 2 * sizeof(uptr) -
- sizeof(IntrusiveList<TransferBatch>)];
};
- COMPILER_CHECK(sizeof(SizeClassInfo) == kCacheLineSize);
+ COMPILER_CHECK(sizeof(SizeClassInfo) % kCacheLineSize == 0);
uptr ComputeRegionId(uptr mem) {
- uptr res = mem >> kRegionSizeLog;
+ const uptr res = mem >> kRegionSizeLog;
CHECK_LT(res, kNumPossibleRegions);
return res;
}
@@ -285,9 +279,9 @@ class SizeClassAllocator32 {
}
uptr AllocateRegion(AllocatorStats *stat, uptr class_id) {
- CHECK_LT(class_id, kNumClasses);
- uptr res = reinterpret_cast<uptr>(MmapAlignedOrDieOnFatalError(
- kRegionSize, kRegionSize, "SizeClassAllocator32"));
+ DCHECK_LT(class_id, kNumClasses);
+ const uptr res = reinterpret_cast<uptr>(MmapAlignedOrDieOnFatalError(
+ kRegionSize, kRegionSize, PrimaryAllocatorName));
if (UNLIKELY(!res))
return 0;
MapUnmapCallback().OnMap(res, kRegionSize);
@@ -298,7 +292,7 @@ class SizeClassAllocator32 {
}
SizeClassInfo *GetSizeClassInfo(uptr class_id) {
- CHECK_LT(class_id, kNumClasses);
+ DCHECK_LT(class_id, kNumClasses);
return &size_class_info_array[class_id];
}
@@ -329,22 +323,22 @@ class SizeClassAllocator32 {
bool PopulateFreeList(AllocatorStats *stat, AllocatorCache *c,
SizeClassInfo *sci, uptr class_id) {
- uptr size = ClassIdToSize(class_id);
- uptr reg = AllocateRegion(stat, class_id);
- if (UNLIKELY(!reg))
+ const uptr region = AllocateRegion(stat, class_id);
+ if (UNLIKELY(!region))
return false;
if (kRandomShuffleChunks)
if (UNLIKELY(sci->rand_state == 0))
// The random state is initialized from ASLR (PIE) and time.
sci->rand_state = reinterpret_cast<uptr>(sci) ^ NanoTime();
- uptr n_chunks = kRegionSize / (size + kMetadataSize);
- uptr max_count = TransferBatch::MaxCached(class_id);
- CHECK_GT(max_count, 0);
+ const uptr size = ClassIdToSize(class_id);
+ const uptr n_chunks = kRegionSize / (size + kMetadataSize);
+ const uptr max_count = TransferBatch::MaxCached(size);
+ DCHECK_GT(max_count, 0);
TransferBatch *b = nullptr;
- const uptr kShuffleArraySize = 48;
+ constexpr uptr kShuffleArraySize = 48;
uptr shuffle_array[kShuffleArraySize];
uptr count = 0;
- for (uptr i = reg; i < reg + n_chunks * size; i += size) {
+ for (uptr i = region; i < region + n_chunks * size; i += size) {
shuffle_array[count++] = i;
if (count == kShuffleArraySize) {
if (UNLIKELY(!PopulateBatches(c, sci, class_id, &b, max_count,