author Vladimir Marko <vmarko@google.com> 2017-03-14 14:18:46 +0000
committer Vladimir Marko <vmarko@google.com> 2017-03-14 19:03:20 +0000
commit f44d36c8423f81cbb5e9f55d8813e26ffa1a7f3b (patch)
tree 324b41485ce6c414c1a006c72cbcc5ed9f466138 /runtime/base/arena_allocator.h
parent 8d6768d47b66a688d35399d524ad5a5450e9d9d4 (diff)
Revert^2 "Hash-based DexCache field array."
Test: testrunner.py --host --interpreter
Bug: 30627598

This reverts commit 6374c58f2ea403b3a05fb27376110fe4d0fc8e3f.

Change-Id: I275508e288a85d3aa08f7405a1a4f362af43b775
Diffstat (limited to 'runtime/base/arena_allocator.h')
-rw-r--r--  runtime/base/arena_allocator.h  |  55
1 file changed, 34 insertions, 21 deletions
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index f92fbea15d..c39429ce06 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -34,7 +34,6 @@ class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
-class MemMap;
class MemStats;
template <typename T>
@@ -244,22 +243,6 @@ class Arena {
DISALLOW_COPY_AND_ASSIGN(Arena);
};
-class MallocArena FINAL : public Arena {
- public:
- explicit MallocArena(size_t size = Arena::kDefaultSize);
- virtual ~MallocArena();
-};
-
-class MemMapArena FINAL : public Arena {
- public:
- MemMapArena(size_t size, bool low_4gb, const char* name);
- virtual ~MemMapArena();
- void Release() OVERRIDE;
-
- private:
- std::unique_ptr<MemMap> map_;
-};
-
class ArenaPool {
public:
explicit ArenaPool(bool use_malloc = true,
@@ -319,8 +302,31 @@ class ArenaAllocator
return ret;
}
+ // Returns zeroed memory.
+ void* AllocAlign16(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
+ // It is an error to request 16-byte aligned allocation of unaligned size.
+ DCHECK_ALIGNED(bytes, 16);
+ if (UNLIKELY(IsRunningOnMemoryTool())) {
+ return AllocWithMemoryToolAlign16(bytes, kind);
+ }
+ uintptr_t padding =
+     RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
+ ArenaAllocatorStats::RecordAlloc(bytes, kind);
+ if (UNLIKELY(padding + bytes > static_cast<size_t>(end_ - ptr_))) {
+ static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
+ return AllocFromNewArena(bytes);
+ }
+ ptr_ += padding;
+ uint8_t* ret = ptr_;
+ DCHECK_ALIGNED(ret, 16);
+ ptr_ += bytes;
+ return ret;
+ }
+
// Realloc never frees the input pointer, it is the caller's job to do this if necessary.
- void* Realloc(void* ptr, size_t ptr_size, size_t new_size,
+ void* Realloc(void* ptr,
+ size_t ptr_size,
+ size_t new_size,
ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
DCHECK_GE(new_size, ptr_size);
DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
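
The hunk above introduces ArenaAllocator::AllocAlign16(). Below is a minimal standalone sketch of the bump-pointer arithmetic it relies on; this is not ART code, and the RoundUp16() helper plus the buffer and variable names are made up for illustration. The idea: round the current pointer up to the next 16-byte boundary, count the gap as padding, and fall back to a fresh (16-byte aligned) arena when padding plus the requested size no longer fits.

// Standalone illustration (hypothetical names, not ART code).
#include <cassert>
#include <cstddef>
#include <cstdint>

// Round an address up to the next 16-byte boundary.
inline uintptr_t RoundUp16(uintptr_t value) {
  return (value + 15u) & ~static_cast<uintptr_t>(15u);
}

int main() {
  alignas(16) unsigned char arena[64];   // Stand-in for an Arena's memory.
  unsigned char* ptr = arena + 8;        // Bump pointer, currently only 8-byte aligned.
  unsigned char* end = arena + sizeof(arena);

  size_t bytes = 16;                     // Requested size, a multiple of 16.
  uintptr_t padding = RoundUp16(reinterpret_cast<uintptr_t>(ptr)) -
                      reinterpret_cast<uintptr_t>(ptr);
  // AllocAlign16() takes the AllocFromNewArena() path when this check fails.
  assert(padding + bytes <= static_cast<size_t>(end - ptr));

  ptr += padding;                        // Skip the padding bytes.
  unsigned char* ret = ptr;              // 16-byte aligned result.
  assert(reinterpret_cast<uintptr_t>(ret) % 16u == 0u);
  ptr += bytes;                          // Bump past the allocation.
  return 0;
}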
@@ -371,12 +377,17 @@ class ArenaAllocator
bool Contains(const void* ptr) const;
- static constexpr size_t kAlignment = 8;
+ // The alignment guaranteed for individual allocations.
+ static constexpr size_t kAlignment = 8u;
+
+ // The alignment required for the whole Arena rather than individual allocations.
+ static constexpr size_t kArenaAlignment = 16u;
private:
void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
+ void* AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind);
uint8_t* AllocFromNewArena(size_t bytes);
-
+ uint8_t* AllocFromNewArenaWithMemoryTool(size_t bytes);
void UpdateBytesAllocated();
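
The constants in the hunk above separate the 8-byte guarantee for individual allocations (kAlignment) from the 16-byte alignment required of a whole Arena (kArenaAlignment); the latter is what the static_assert inside AllocAlign16() checks before falling back to AllocFromNewArena(). A compile-time sketch of that relationship, using local constants that merely mirror the header's values (not ART code):

#include <cstddef>

constexpr std::size_t kAlignment = 8u;        // Guarantee for individual allocations.
constexpr std::size_t kArenaAlignment = 16u;  // Alignment of a whole new Arena.

// Mirrors the static_assert guarding the AllocFromNewArena() fallback:
// a fresh Arena can satisfy a 16-byte aligned request directly.
static_assert(kArenaAlignment >= 16u, "Expecting sufficient alignment for new Arena.");
// The stricter arena alignment also satisfies the ordinary per-allocation guarantee.
static_assert(kArenaAlignment % kAlignment == 0u, "16-byte aligned implies 8-byte aligned.");

int main() { return 0; }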
@@ -396,7 +407,9 @@ class ArenaAllocator
class MemStats {
public:
- MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
+ MemStats(const char* name,
+ const ArenaAllocatorStats* stats,
+ const Arena* first_arena,
ssize_t lost_bytes_adjustment = 0);
void Dump(std::ostream& os) const;