From: Andreas Gampe
Date: Sat, 20 Aug 2016 03:22:19 +0000 (-0700)
Subject: ART: Enforce arena alignment in Realloc
X-Git-Url: http://git.osdn.net/view?a=commitdiff_plain;h=f6dd829292b451a0eaaf339964fcf9ebc804897b;p=android-x86%2Fart.git

ART: Enforce arena alignment in Realloc

Enforce the expected alignment in Realloc when the given buffer
can be expanded. Add debug checks to prove alignment guarantees.

Change-Id: I7dbaf3ca0896f714cff07ff08f067a7c088a6c8d
Test: m test-art-host
---

diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index b84e29f7c..aeb990cae 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -163,6 +163,7 @@ Arena::Arena() : bytes_allocated_(0), next_(nullptr) {
 MallocArena::MallocArena(size_t size) {
   memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
   CHECK(memory_ != nullptr);  // Abort on OOM.
+  DCHECK_ALIGNED(memory_, ArenaAllocator::kAlignment);
   size_ = size;
 }
 
@@ -370,6 +371,7 @@ uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
     arena_head_ = new_arena;
     // Update our internal data structures.
     begin_ = new_arena->Begin();
+    DCHECK_ALIGNED(begin_, kAlignment);
     ptr_ = begin_ + bytes;
     end_ = new_arena->End();
   }
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index 6c1a8984c..3fad96b39 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -310,6 +310,7 @@ class ArenaAllocator
       return AllocFromNewArena(bytes);
     }
     uint8_t* ret = ptr_;
+    DCHECK_ALIGNED(ret, kAlignment);
     ptr_ += bytes;
     return ret;
   }
@@ -319,20 +320,24 @@ class ArenaAllocator
                 ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
     DCHECK_GE(new_size, ptr_size);
     DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
-    auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
+    // We always allocate aligned.
+    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
+    auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
     // If we haven't allocated anything else, we can safely extend.
     if (end == ptr_) {
       DCHECK(!IsRunningOnMemoryTool());  // Red zone prevents end == ptr_.
-      const size_t size_delta = new_size - ptr_size;
+      const size_t aligned_new_size = RoundUp(new_size, kAlignment);
+      const size_t size_delta = aligned_new_size - aligned_ptr_size;
       // Check remain space.
       const size_t remain = end_ - ptr_;
       if (remain >= size_delta) {
         ptr_ += size_delta;
         ArenaAllocatorStats::RecordAlloc(size_delta, kind);
+        DCHECK_ALIGNED(ptr_, kAlignment);
         return ptr;
       }
     }
-    auto* new_ptr = Alloc(new_size, kind);
+    auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
     memcpy(new_ptr, ptr, ptr_size);
     // TODO: Call free on ptr if linear alloc supports free.
     return new_ptr;
@@ -362,11 +367,12 @@ class ArenaAllocator
 
   bool Contains(const void* ptr) const;
 
+  static constexpr size_t kAlignment = 8;
+
  private:
   void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
   uint8_t* AllocFromNewArena(size_t bytes);
 
-  static constexpr size_t kAlignment = 8;
-
   void UpdateBytesAllocated();
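
Background: below is a minimal, self-contained sketch (not part of the commit,
and not the ART sources) of the invariant this change restores. Alloc always
rounds requests up to kAlignment, so Realloc must compare and extend using
rounded sizes; otherwise the in-place path misses allocations whose size was
rounded up, and an unrounded size delta can leave ptr_ misaligned for the next
caller. ToyArena, its buffer size, and main() are hypothetical; kAlignment,
RoundUp, ptr_, and end_ mirror the names in the patch above.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

namespace {

constexpr size_t kAlignment = 8;

size_t RoundUp(size_t x, size_t n) {
  return (x + n - 1) & ~(n - 1);  // n must be a power of two.
}

class ToyArena {
 public:
  void* Alloc(size_t bytes) {
    bytes = RoundUp(bytes, kAlignment);  // Alloc always hands out aligned chunks.
    if (bytes > static_cast<size_t>(end_ - ptr_)) return nullptr;
    uint8_t* ret = ptr_;
    assert(reinterpret_cast<uintptr_t>(ret) % kAlignment == 0);
    ptr_ += bytes;
    return ret;
  }

  void* Realloc(void* ptr, size_t ptr_size, size_t new_size) {
    // Key point of the patch: compare against the *rounded* old size,
    // because that is what Alloc actually consumed from the arena.
    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
    uint8_t* end = static_cast<uint8_t*>(ptr) + aligned_ptr_size;
    if (end == ptr_) {  // ptr is the last allocation: extend in place.
      const size_t aligned_new_size = RoundUp(new_size, kAlignment);
      const size_t size_delta = aligned_new_size - aligned_ptr_size;
      if (static_cast<size_t>(end_ - ptr_) >= size_delta) {
        ptr_ += size_delta;  // ptr_ stays aligned: the delta is a multiple of kAlignment.
        assert(reinterpret_cast<uintptr_t>(ptr_) % kAlignment == 0);
        return ptr;
      }
    }
    void* new_ptr = Alloc(new_size);  // Alloc rounds new_size itself.
    if (new_ptr != nullptr) std::memcpy(new_ptr, ptr, ptr_size);
    return new_ptr;
  }

 private:
  alignas(kAlignment) uint8_t buffer_[1024] = {};
  uint8_t* ptr_ = buffer_;
  uint8_t* end_ = buffer_ + sizeof(buffer_);
};

}  // namespace

int main() {
  ToyArena arena;
  // 13 is not 8-aligned, so Alloc consumes 16 bytes for it. With the unrounded
  // old size, the in-place path would miss (ptr + 13 != ptr_) and, worse, an
  // unrounded delta could leave ptr_ misaligned for the next caller.
  void* p = arena.Alloc(13);
  void* q = arena.Realloc(p, 13, 20);
  assert(p == q);  // Extended in place thanks to the rounded comparison.
  assert(reinterpret_cast<uintptr_t>(arena.Alloc(1)) % kAlignment == 0);
  return 0;
}

Under these assumptions, Alloc(13) consumes 16 bytes, so Realloc(p, 13, 20)
only recognizes p as the last allocation when it compares against the rounded
old size, and the rounded delta (24 - 16 = 8 bytes, rather than 20 - 13 = 7)
keeps ptr_ 8-byte aligned, which is exactly what the new DCHECK_ALIGNED checks
in the patch assert.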