From 22a0ef8fbe78577ad4127e5becf20b3afa797478 Mon Sep 17 00:00:00 2001
From: Vladimir Marko
Date: Tue, 10 Jun 2014 14:47:51 +0100
Subject: [PATCH] Round up arena allocations to 8 bytes.

Prevent SIGBUS/BUS_ADRALN when we use the arena for classes
with 64-bit alignment.

Change-Id: I5382ed7072fcfb2349f61558e1fd8257315ee336
---
 compiler/utils/arena_allocator.cc        | 2 +-
 compiler/utils/arena_allocator.h         | 2 +-
 compiler/utils/scoped_arena_allocator.cc | 2 +-
 compiler/utils/scoped_arena_allocator.h  | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/compiler/utils/arena_allocator.cc b/compiler/utils/arena_allocator.cc
index ca4635d35..6a39641f3 100644
--- a/compiler/utils/arena_allocator.cc
+++ b/compiler/utils/arena_allocator.cc
@@ -215,7 +215,7 @@ void ArenaAllocator::UpdateBytesAllocated() {
 }
 
 void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
-  size_t rounded_bytes = (bytes + 3 + kValgrindRedZoneBytes) & ~3;
+  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
   if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
     // Obtain a new block.
     ObtainNewArenaForAllocation(rounded_bytes);
diff --git a/compiler/utils/arena_allocator.h b/compiler/utils/arena_allocator.h
index dbe482dae..ac3938ff2 100644
--- a/compiler/utils/arena_allocator.h
+++ b/compiler/utils/arena_allocator.h
@@ -156,7 +156,7 @@ class ArenaAllocator : private ArenaAllocatorStats {
     if (UNLIKELY(running_on_valgrind_)) {
       return AllocValgrind(bytes, kind);
     }
-    bytes = RoundUp(bytes, 4);
+    bytes = RoundUp(bytes, 8);
     if (UNLIKELY(ptr_ + bytes > end_)) {
       // Obtain a new block.
       ObtainNewArenaForAllocation(bytes);
diff --git a/compiler/utils/scoped_arena_allocator.cc b/compiler/utils/scoped_arena_allocator.cc
index b8b0e6ef7..aeb2f768d 100644
--- a/compiler/utils/scoped_arena_allocator.cc
+++ b/compiler/utils/scoped_arena_allocator.cc
@@ -92,7 +92,7 @@ void ArenaStack::UpdateBytesAllocated() {
 }
 
 void* ArenaStack::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
-  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 4);
+  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
   uint8_t* ptr = top_ptr_;
   if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
     ptr = AllocateFromNextArena(rounded_bytes);
diff --git a/compiler/utils/scoped_arena_allocator.h b/compiler/utils/scoped_arena_allocator.h
index c090062db..37799cb14 100644
--- a/compiler/utils/scoped_arena_allocator.h
+++ b/compiler/utils/scoped_arena_allocator.h
@@ -67,7 +67,7 @@ class ArenaStack : private DebugStackRefCounter {
     if (UNLIKELY(running_on_valgrind_)) {
       return AllocValgrind(bytes, kind);
     }
-    size_t rounded_bytes = RoundUp(bytes, 4);
+    size_t rounded_bytes = RoundUp(bytes, 8);
    uint8_t* ptr = top_ptr_;
     if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
       ptr = AllocateFromNextArena(rounded_bytes);
-- 
2.11.0
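
Note (not part of the patch): the change matters because rounding each request up to 4 bytes only guarantees 4-byte alignment of the bump pointer, so an object whose type requires 8-byte alignment (e.g. one containing a 64-bit field) can land on a 4-byte boundary and fault with BUS_ADRALN on strict-alignment targets. The sketch below is a minimal, self-contained illustration of that reasoning, not the ART sources; ToyArena, its buffer size, and the printed offsets are invented for the example, and only RoundUp mirrors the helper used in the patched files.

  // Toy bump allocator demonstrating why rounding to 8 keeps every
  // allocation 8-byte aligned (assuming the arena block itself starts
  // 8-byte aligned, which alignas(8) guarantees here).
  #include <cassert>
  #include <cstddef>
  #include <cstdint>
  #include <cstdio>

  // Same contract as the RoundUp helper used in the patch: n is a power of two.
  constexpr size_t RoundUp(size_t x, size_t n) {
    return (x + n - 1) & ~(n - 1);
  }

  struct ToyArena {
    alignas(8) uint8_t buffer[256];
    uint8_t* ptr = buffer;

    void* Alloc(size_t bytes) {
      bytes = RoundUp(bytes, 8);  // with 4 instead of 8, the next result could be only 4-byte aligned
      uint8_t* result = ptr;
      ptr += bytes;
      return result;
    }
  };

  int main() {
    ToyArena arena;
    // A 12-byte request rounded to 4 would put the next allocation at offset 12;
    // rounded to 8 it lands at offset 16, so a subsequent 8-byte-aligned object is safe.
    void* first = arena.Alloc(12);
    void* second = arena.Alloc(sizeof(uint64_t));
    assert(reinterpret_cast<uintptr_t>(first) % 8 == 0);
    assert(reinterpret_cast<uintptr_t>(second) % 8 == 0);
    printf("offsets: %zu, %zu\n",
           static_cast<size_t>(static_cast<uint8_t*>(first) - arena.buffer),
           static_cast<size_t>(static_cast<uint8_t*>(second) - arena.buffer));
    return 0;
  }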