From 7b05e17db15879b486f3299a9a41ac17b87700f4 Mon Sep 17 00:00:00 2001
From: Mathieu Chartier <mathieuc@google.com>
Date: Thu, 15 Oct 2015 17:47:48 -0700
Subject: [PATCH] Add ArenaUniquePtr

Motivation is to use it for adding arenas in the verifier.

Also added arena tags to prevent double free errors.

Bug: 10921004

Change-Id: I545f3374eceb9a6a35e738cb899d1493098fb760
---
 runtime/base/allocator.h               | 24 +++++++++++++++---------
 runtime/base/scoped_arena_allocator.h  | 23 ++++++++++++++++++++++-
 runtime/base/scoped_arena_containers.h | 34 ++++++++++++++++++++++++++++++++++
 3 files changed, 71 insertions(+), 10 deletions(-)

diff --git a/runtime/base/allocator.h b/runtime/base/allocator.h
index f9960acfb..ad255b869 100644
--- a/runtime/base/allocator.h
+++ b/runtime/base/allocator.h
@@ -19,6 +19,7 @@
 
 #include <map>
 #include <set>
+#include <unordered_map>
 
 #include "atomic.h"
 #include "base/macros.h"
@@ -150,19 +151,24 @@ class TrackingAllocatorImpl : public std::allocator<T> {
 
 template<class T, AllocatorTag kTag>
 // C++ doesn't allow template typedefs. This is a workaround template typedef which is
 // TrackingAllocatorImpl<T> if kEnableTrackingAllocator is true, std::allocator<T> otherwise.
-class TrackingAllocator : public TypeStaticIf<kEnableTrackingAllocator,
-                                              TrackingAllocatorImpl<T, kTag>,
-                                              std::allocator<T>>::type {
-};
+using TrackingAllocator = typename TypeStaticIf<kEnableTrackingAllocator,
+                                                TrackingAllocatorImpl<T, kTag>,
+                                                std::allocator<T>>::type;
 
 template<class Key, class T, AllocatorTag kTag, class Compare = std::less<Key>>
-class AllocationTrackingMultiMap : public std::multimap<
-    Key, T, Compare, TrackingAllocator<std::pair<const Key, T>, kTag>> {
-};
+using AllocationTrackingMultiMap = std::multimap<
+    Key, T, Compare, TrackingAllocator<std::pair<const Key, T>, kTag>>;
 
 template<class Key, AllocatorTag kTag, class Compare = std::less<Key>>
-class AllocationTrackingSet : public std::set<Key, Compare, TrackingAllocator<Key, kTag>> {
-};
+using AllocationTrackingSet = std::set<Key, Compare, TrackingAllocator<Key, kTag>>;
+
+template<class Key,
+         class T,
+         AllocatorTag kTag,
+         class Hash = std::hash<Key>,
+         class Pred = std::equal_to<Key>>
+using AllocationTrackingUnorderedMap = std::unordered_map<
+    Key, T, Hash, Pred, TrackingAllocator<std::pair<const Key, T>, kTag>>;
 
 }  // namespace art
diff --git a/runtime/base/scoped_arena_allocator.h b/runtime/base/scoped_arena_allocator.h
index 2554fb075..a30c73d74 100644
--- a/runtime/base/scoped_arena_allocator.h
+++ b/runtime/base/scoped_arena_allocator.h
@@ -31,6 +31,16 @@ class ScopedArenaAllocator;
 template <typename T>
 class ScopedArenaAllocatorAdapter;
 
+// Tag associated with each allocation to help prevent double free.
+enum class ArenaFreeTag : uint8_t {
+  // Allocation is used and has not yet been destroyed.
+  kUsed,
+  // Allocation has been destroyed.
+  kFree,
+};
+
+static constexpr size_t kArenaAlignment = 8;
+
 // Holds a list of Arenas for use by ScopedArenaAllocator stack.
 class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
  public:
@@ -50,6 +60,12 @@ class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryToo
 
   MemStats GetPeakStats() const;
 
+  // Return the arena tag associated with a pointer.
+  static ArenaFreeTag& ArenaTagForAllocation(void* ptr) {
+    DCHECK(kIsDebugBuild) << "Only debug builds have tags";
+    return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1);
+  }
+
  private:
   struct Peak;
   struct Current;
@@ -72,13 +88,18 @@ class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryToo
     if (UNLIKELY(IsRunningOnMemoryTool())) {
       return AllocWithMemoryTool(bytes, kind);
     }
-    size_t rounded_bytes = RoundUp(bytes, 8);
+    // Add kArenaAlignment for the free or used tag. Required to preserve alignment.
+    size_t rounded_bytes = RoundUp(bytes + (kIsDebugBuild ? kArenaAlignment : 0u), kArenaAlignment);
     uint8_t* ptr = top_ptr_;
     if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
       ptr = AllocateFromNextArena(rounded_bytes);
     }
     CurrentStats()->RecordAlloc(bytes, kind);
     top_ptr_ = ptr + rounded_bytes;
+    if (kIsDebugBuild) {
+      ptr += kArenaAlignment;
+      ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed;
+    }
     return ptr;
   }
 
diff --git a/runtime/base/scoped_arena_containers.h b/runtime/base/scoped_arena_containers.h
index 562c2bf01..b74aef184 100644
--- a/runtime/base/scoped_arena_containers.h
+++ b/runtime/base/scoped_arena_containers.h
@@ -196,6 +196,40 @@ inline ScopedArenaAllocatorAdapter<void> ScopedArenaAllocator::Adapter(ArenaAllo
   return ScopedArenaAllocatorAdapter<void>(this, kind);
 }
 
+// Special deleter that only calls the destructor. Also checks for double free errors.
+template <typename T>
+class ArenaDelete {
+  static constexpr uint8_t kMagicFill = 0xCE;
+ public:
+  void operator()(T* ptr) const {
+    ptr->~T();
+    if (RUNNING_ON_MEMORY_TOOL > 0) {
+      // Writing to the memory will fail if it we already destroyed the pointer with
+      // DestroyOnlyDelete since we make it no access.
+      memset(ptr, kMagicFill, sizeof(T));
+      MEMORY_TOOL_MAKE_NOACCESS(ptr, sizeof(T));
+    } else if (kIsDebugBuild) {
+      CHECK(ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) == ArenaFreeTag::kUsed)
+          << "Freeing invalid object " << ptr;
+      ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) = ArenaFreeTag::kFree;
+      // Write a magic value to try and catch use after free error.
+      memset(ptr, kMagicFill, sizeof(T));
+    }
+  }
+};
+
+// Declare but do not define a partial specialization for T[].
+// This is to prevent accidental use of this unsupported use case.
+template <typename T>
+class ArenaDelete<T[]> {
+ public:
+  void operator()(T* ptr) const = delete;
+};
+
+// Arena unique ptr that only calls the destructor of the element.
+template <typename T>
+using ArenaUniquePtr = std::unique_ptr<T, ArenaDelete<T>>;
+
 }  // namespace art
 
 #endif  // ART_RUNTIME_BASE_SCOPED_ARENA_CONTAINERS_H_
-- 
2.11.0