Diffstat (limited to 'compiler-rt/lib/hwasan/hwasan_allocator.cpp')
-rw-r--r--  compiler-rt/lib/hwasan/hwasan_allocator.cpp | 59
1 file changed, 50 insertions(+), 9 deletions(-)
diff --git a/compiler-rt/lib/hwasan/hwasan_allocator.cpp b/compiler-rt/lib/hwasan/hwasan_allocator.cpp
index 0b6b7347892e..ef6d4d6c7678 100644
--- a/compiler-rt/lib/hwasan/hwasan_allocator.cpp
+++ b/compiler-rt/lib/hwasan/hwasan_allocator.cpp
@@ -29,8 +29,8 @@ static AllocatorCache fallback_allocator_cache;
 static SpinMutex fallback_mutex;
 static atomic_uint8_t hwasan_allocator_tagging_enabled;
 
-static const tag_t kFallbackAllocTag = 0xBB;
-static const tag_t kFallbackFreeTag = 0xBC;
+static constexpr tag_t kFallbackAllocTag = 0xBB & kTagMask;
+static constexpr tag_t kFallbackFreeTag = 0xBC;
 
 enum RightAlignMode {
   kRightAlignNever,
@@ -80,11 +80,29 @@ void GetAllocatorStats(AllocatorStatCounters s) {
   allocator.GetStats(s);
 }
 
+uptr GetAliasRegionStart() {
+#if defined(HWASAN_ALIASING_MODE)
+  constexpr uptr kAliasRegionOffset = 1ULL << (kTaggableRegionCheckShift - 1);
+  uptr AliasRegionStart =
+      __hwasan_shadow_memory_dynamic_address + kAliasRegionOffset;
+
+  CHECK_EQ(AliasRegionStart >> kTaggableRegionCheckShift,
+           __hwasan_shadow_memory_dynamic_address >> kTaggableRegionCheckShift);
+  CHECK_EQ(
+      (AliasRegionStart + kAliasRegionOffset - 1) >> kTaggableRegionCheckShift,
+      __hwasan_shadow_memory_dynamic_address >> kTaggableRegionCheckShift);
+  return AliasRegionStart;
+#else
+  return 0;
+#endif
+}
+
 void HwasanAllocatorInit() {
   atomic_store_relaxed(&hwasan_allocator_tagging_enabled,
                        !flags()->disable_allocator_tagging);
   SetAllocatorMayReturnNull(common_flags()->allocator_may_return_null);
-  allocator.Init(common_flags()->allocator_release_to_os_interval_ms);
+  allocator.Init(common_flags()->allocator_release_to_os_interval_ms,
+                 GetAliasRegionStart());
   for (uptr i = 0; i < sizeof(tail_magic); i++)
     tail_magic[i] = GetCurrentThread()->GenerateRandomTag();
 }
@@ -148,7 +166,8 @@ static void *HwasanAllocate(StackTrace *stack, uptr orig_size, uptr alignment,
   // Tagging can only be skipped when both tag_in_malloc and tag_in_free are
   // false. When tag_in_malloc = false and tag_in_free = true malloc needs to
   // retag to 0.
-  if ((flags()->tag_in_malloc || flags()->tag_in_free) &&
+  if (InTaggableRegion(reinterpret_cast<uptr>(user_ptr)) &&
+      (flags()->tag_in_malloc || flags()->tag_in_free) &&
       atomic_load_relaxed(&hwasan_allocator_tagging_enabled)) {
     if (flags()->tag_in_malloc && malloc_bisect(stack, orig_size)) {
       tag_t tag = t ? t->GenerateRandomTag() : kFallbackAllocTag;
@@ -175,6 +194,8 @@ static void *HwasanAllocate(StackTrace *stack, uptr orig_size, uptr alignment,
 static bool PointerAndMemoryTagsMatch(void *tagged_ptr) {
   CHECK(tagged_ptr);
   uptr tagged_uptr = reinterpret_cast<uptr>(tagged_ptr);
+  if (!InTaggableRegion(tagged_uptr))
+    return true;
   tag_t mem_tag = *reinterpret_cast<tag_t *>(
       MemToShadow(reinterpret_cast<uptr>(UntagPtr(tagged_ptr))));
   return PossiblyShortTagMatches(mem_tag, tagged_uptr, 1);
@@ -187,9 +208,12 @@ static void HwasanDeallocate(StackTrace *stack, void *tagged_ptr) {
   if (!PointerAndMemoryTagsMatch(tagged_ptr))
     ReportInvalidFree(stack, reinterpret_cast<uptr>(tagged_ptr));
 
-  void *untagged_ptr = UntagPtr(tagged_ptr);
+  void *untagged_ptr = InTaggableRegion(reinterpret_cast<uptr>(tagged_ptr))
+                           ? UntagPtr(tagged_ptr)
+                           : tagged_ptr;
   void *aligned_ptr = reinterpret_cast<void *>(
       RoundDownTo(reinterpret_cast<uptr>(untagged_ptr), kShadowAlignment));
+  tag_t pointer_tag = GetTagFromPointer(reinterpret_cast<uptr>(tagged_ptr));
   Metadata *meta =
       reinterpret_cast<Metadata *>(allocator.GetMetaData(aligned_ptr));
   uptr orig_size = meta->get_requested_size();
@@ -219,10 +243,27 @@ static void HwasanDeallocate(StackTrace *stack, void *tagged_ptr) {
         Min(TaggedSize(orig_size), (uptr)flags()->max_free_fill_size);
     internal_memset(aligned_ptr, flags()->free_fill_byte, fill_size);
   }
-  if (flags()->tag_in_free && malloc_bisect(stack, 0) &&
-      atomic_load_relaxed(&hwasan_allocator_tagging_enabled))
+  if (InTaggableRegion(reinterpret_cast<uptr>(tagged_ptr)) &&
+      flags()->tag_in_free && malloc_bisect(stack, 0) &&
+      atomic_load_relaxed(&hwasan_allocator_tagging_enabled)) {
+    // Always store full 8-bit tags on free to maximize UAF detection.
+    tag_t tag;
+    if (t) {
+      // Make sure we are not using a short granule tag as a poison tag. This
+      // would make us attempt to read the memory on a UaF.
+      // The tag can be zero if tagging is disabled on this thread.
+      do {
+        tag = t->GenerateRandomTag(/*num_bits=*/8);
+      } while (
+          UNLIKELY((tag < kShadowAlignment || tag == pointer_tag) && tag != 0));
+    } else {
+      static_assert(kFallbackFreeTag >= kShadowAlignment,
+                    "fallback tag must not be a short granule tag.");
+      tag = kFallbackFreeTag;
+    }
     TagMemoryAligned(reinterpret_cast<uptr>(aligned_ptr), TaggedSize(orig_size),
-                     t ? t->GenerateRandomTag() : kFallbackFreeTag);
+                     tag);
+  }
   if (t) {
     allocator.Deallocate(t->allocator_cache(), aligned_ptr);
     if (auto *ha = t->heap_allocations())
@@ -365,7 +406,7 @@ int hwasan_posix_memalign(void **memptr, uptr alignment, uptr size,
     // OOM error is already taken care of by HwasanAllocate.
     return errno_ENOMEM;
   CHECK(IsAligned((uptr)ptr, alignment));
-  *(void **)UntagPtr(memptr) = ptr;
+  *memptr = ptr;
   return 0;
 }
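
Several hunks above gate tagging on InTaggableRegion(), which this file never defines; it comes from the allocator headers elsewhere in this patch. As a sketch only, consistent with the CHECK_EQs in GetAliasRegionStart() (and assuming hwasan's internal uptr and kTaggableRegionCheckShift are in scope), the predicate reduces to a single shift-and-compare against the shadow base:

// Sketch, not the header's actual definition: in aliasing mode an address is
// taggable iff its bits above kTaggableRegionCheckShift match the shadow
// base, which is exactly what GetAliasRegionStart() arranges for the alias
// region; on non-aliasing targets every address is taggable.
static inline bool InTaggableRegion(uptr addr) {
#if defined(HWASAN_ALIASING_MODE)
  return (addr >> kTaggableRegionCheckShift) ==
         (__hwasan_shadow_memory_dynamic_address >> kTaggableRegionCheckShift);
#else
  return true;
#endif
}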
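The two CHECK_EQs in GetAliasRegionStart() pin both the first and the last byte of the alias region into the same 2^kTaggableRegionCheckShift-byte window as the shadow base, which in effect asserts that the shadow base is aligned to that window. A standalone sketch of the arithmetic, using an illustrative shift value and shadow base (both are assumptions, not hwasan's real constants):

#include <cassert>
#include <cstdint>

int main() {
  using uptr = uint64_t;
  constexpr unsigned kShift = 44;                 // illustrative shift value
  constexpr uptr kOffset = 1ULL << (kShift - 1);  // half a 2^kShift window
  uptr shadow_base = 0x500000000000;              // any 2^kShift-aligned base
  assert((shadow_base & ((1ULL << kShift) - 1)) == 0);

  uptr alias_start = shadow_base + kOffset;
  // The patch's two CHECK_EQs: both ends of the alias region share the
  // shadow base's high bits, so the region fills the window's upper half.
  assert(alias_start >> kShift == shadow_base >> kShift);
  assert((alias_start + kOffset - 1) >> kShift == shadow_base >> kShift);
  return 0;
}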
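The do/while loop in HwasanDeallocate() regenerates the free ("poison") tag until it is neither the pointer's own tag nor below kShadowAlignment. The second condition matters because shadow values in [1, kShadowAlignment) denote short granules, where the real tag is stored in the granule's last byte, so a poison tag in that range would force a later tag check to read the freed granule itself. A self-contained illustration, with TagMatches() as a simplified stand-in for PossiblyShortTagMatches() (only the last-byte-tag convention is modeled):

#include <cstdint>
#include <cstdio>

using tag_t = uint8_t;
using uptr = uintptr_t;
constexpr uptr kShadowAlignment = 16;  // hwasan granule size

// Simplified check: a shadow value below kShadowAlignment is a short-granule
// size, and resolving it requires loading the granule's last byte, where the
// real tag lives. *read_mem records whether user memory had to be touched.
bool TagMatches(tag_t mem_tag, tag_t ptr_tag, uptr offset, uptr size,
                const uint8_t *granule, bool *read_mem) {
  *read_mem = false;
  if (ptr_tag == mem_tag)
    return true;   // ordinary full-granule match
  if (mem_tag >= kShadowAlignment)
    return false;  // ordinary mismatch: report without touching memory
  if (offset + size > mem_tag)
    return false;  // access runs past the short granule's valid bytes
  *read_mem = true;  // short granule: must read the granule to decide
  return granule[kShadowAlignment - 1] == ptr_tag;
}

int main() {
  uint8_t granule[16] = {0};
  bool read_mem;
  // A poison tag of 0x0C (< 16) looks like a 12-byte short granule, so the
  // check must dereference the freed granule -- what the loop above avoids.
  TagMatches(/*mem_tag=*/0x0C, /*ptr_tag=*/0xA7, /*offset=*/0, /*size=*/4,
             granule, &read_mem);
  printf("poison < kShadowAlignment forces a memory read: %d\n", read_mem);
  // kFallbackFreeTag = 0xBC (>= 16) is a plain mismatch: no memory read.
  TagMatches(/*mem_tag=*/0xBC, /*ptr_tag=*/0xA7, /*offset=*/0, /*size=*/4,
             granule, &read_mem);
  printf("full poison tag needs no memory read:          %d\n", read_mem);
  return 0;
}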