Inline RosAlloc::Alloc().

Move the small-object fast path of RosAlloc::Alloc() into a new
rosalloc-inl.h header so callers can inline it, and split the rarely
taken large-object path out into a separate, non-inlined
AllocLargeObject() helper in rosalloc.cc. The kTraceRosAlloc and
kCheckZeroMemory flags move from rosalloc.cc into the class declaration
so the inlined fast path can still use them.

Bug: 9986565
Change-Id: I9bc411b8ae39379f9d730f40974857a585405fde
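
For context, a minimal, self-contained sketch (assumed names, not ART code) of
the pattern this change applies: the small-size path is defined inline in a
header so every call site can inline it, while the rare large-size path stays
out of line in the .cc file.

  #include <cstddef>
  #include <cstdlib>

  class SketchAllocator {
   public:
    static constexpr size_t kLargeSizeThreshold = 2048;

    // Fast path: would live in sketch_allocator-inl.h so callers that
    // include that header can inline the common case.
    inline void* Alloc(size_t size, size_t* bytes_allocated) {
      if (size > kLargeSizeThreshold) {
        return AllocLargeObject(size, bytes_allocated);  // rare, not inlined
      }
      if (bytes_allocated != NULL) {
        *bytes_allocated = size;
      }
      return std::calloc(1, size);  // stand-in for AllocFromRun()
    }

   private:
    // Slow path: would live in sketch_allocator.cc.
    void* AllocLargeObject(size_t size, size_t* bytes_allocated);
  };

  void* SketchAllocator::AllocLargeObject(size_t size, size_t* bytes_allocated) {
    if (bytes_allocated != NULL) {
      *bytes_allocated = size;
    }
    return std::calloc(1, size);  // stand-in for AllocPages()
  }

Only the size check and bytes_allocated bookkeeping are modeled above; the
real allocator dispatches to runs and the page map as in the diff below.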
diff --git a/runtime/gc/allocator/rosalloc-inl.h b/runtime/gc/allocator/rosalloc-inl.h
new file mode 100644
index 0000000..f395314
--- /dev/null
+++ b/runtime/gc/allocator/rosalloc-inl.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_GC_ALLOCATOR_ROSALLOC_INL_H_
+#define ART_RUNTIME_GC_ALLOCATOR_ROSALLOC_INL_H_
+
+#include "rosalloc.h"
+
+namespace art {
+namespace gc {
+namespace allocator {
+
+inline ALWAYS_INLINE void* RosAlloc::Alloc(Thread* self, size_t size, size_t* bytes_allocated) {
+ if (UNLIKELY(size > kLargeSizeThreshold)) {
+ return AllocLargeObject(self, size, bytes_allocated);
+ }
+ void* m = AllocFromRun(self, size, bytes_allocated);
+ // Check if the returned memory is really all zero.
+ if (kCheckZeroMemory && m != NULL) {
+ byte* bytes = reinterpret_cast<byte*>(m);
+ for (size_t i = 0; i < size; ++i) {
+ DCHECK_EQ(bytes[i], 0);
+ }
+ }
+ return m;
+}
+
+} // namespace allocator
+} // namespace gc
+} // namespace art
+
+#endif // ART_RUNTIME_GC_ALLOCATOR_ROSALLOC_INL_H_
diff --git a/runtime/gc/allocator/rosalloc.cc b/runtime/gc/allocator/rosalloc.cc
index 9e65894..3030fa7 100644
--- a/runtime/gc/allocator/rosalloc.cc
+++ b/runtime/gc/allocator/rosalloc.cc
@@ -27,11 +27,6 @@
namespace gc {
namespace allocator {
-// If true, log verbose details of operations.
-static const bool kTraceRosAlloc = false;
-// If true, check that the returned memory is actually zero.
-static const bool kCheckZeroMemory = kIsDebugBuild;
-
extern "C" void* art_heap_rosalloc_morecore(RosAlloc* rosalloc, intptr_t increment);
size_t RosAlloc::bracketSizes[kNumOfSizeBrackets];
@@ -401,44 +396,34 @@
}
}
-void* RosAlloc::Alloc(Thread* self, size_t size, size_t* bytes_allocated) {
- if (UNLIKELY(size > kLargeSizeThreshold)) {
- size_t num_pages = RoundUp(size, kPageSize) / kPageSize;
- void* r;
- {
- MutexLock mu(self, lock_);
- r = AllocPages(self, num_pages, kPageMapLargeObject);
- }
- if (bytes_allocated != NULL) {
- *bytes_allocated = num_pages * kPageSize;
- }
- if (kTraceRosAlloc) {
- if (r != NULL) {
- LOG(INFO) << "RosAlloc::Alloc() : (large obj) 0x" << std::hex << reinterpret_cast<intptr_t>(r)
- << "-0x" << (reinterpret_cast<intptr_t>(r) + num_pages * kPageSize)
- << "(" << std::dec << (num_pages * kPageSize) << ")";
- } else {
- LOG(INFO) << "RosAlloc::Alloc() : (large obj) NULL";
- }
- }
- // Check if the returned memory is really all zero.
- if (kCheckZeroMemory && r != NULL) {
- byte* bytes = reinterpret_cast<byte*>(r);
- for (size_t i = 0; i < size; ++i) {
- DCHECK_EQ(bytes[i], 0);
- }
- }
- return r;
+void* RosAlloc::AllocLargeObject(Thread* self, size_t size, size_t* bytes_allocated) {
+  DCHECK_GT(size, kLargeSizeThreshold);
+ size_t num_pages = RoundUp(size, kPageSize) / kPageSize;
+ void* r;
+ {
+ MutexLock mu(self, lock_);
+ r = AllocPages(self, num_pages, kPageMapLargeObject);
}
- void* m = AllocFromRun(self, size, bytes_allocated);
+ if (bytes_allocated != NULL) {
+ *bytes_allocated = num_pages * kPageSize;
+ }
+ if (kTraceRosAlloc) {
+ if (r != NULL) {
+ LOG(INFO) << "RosAlloc::AllocLargeObject() : 0x" << std::hex << reinterpret_cast<intptr_t>(r)
+ << "-0x" << (reinterpret_cast<intptr_t>(r) + num_pages * kPageSize)
+ << "(" << std::dec << (num_pages * kPageSize) << ")";
+ } else {
+ LOG(INFO) << "RosAlloc::AllocLargeObject() : NULL";
+ }
+ }
// Check if the returned memory is really all zero.
- if (kCheckZeroMemory && m != NULL) {
- byte* bytes = reinterpret_cast<byte*>(m);
+ if (kCheckZeroMemory && r != NULL) {
+ byte* bytes = reinterpret_cast<byte*>(r);
for (size_t i = 0; i < size; ++i) {
DCHECK_EQ(bytes[i], 0);
}
}
- return m;
+ return r;
}
void RosAlloc::FreeInternal(Thread* self, void* ptr) {
diff --git a/runtime/gc/allocator/rosalloc.h b/runtime/gc/allocator/rosalloc.h
index c81306f..d5b6de1 100644
--- a/runtime/gc/allocator/rosalloc.h
+++ b/runtime/gc/allocator/rosalloc.h
@@ -345,6 +345,12 @@
// runs for the rest.
static const size_t kMaxThreadLocalSizeBracketIdx = 10;
+ // If true, check that the returned memory is actually zero.
+ static constexpr bool kCheckZeroMemory = kIsDebugBuild;
+
+ // If true, log verbose details of operations.
+ static constexpr bool kTraceRosAlloc = false;
+
struct hash_run {
size_t operator()(const RosAlloc::Run* r) const {
return reinterpret_cast<size_t>(r);
@@ -429,6 +435,9 @@
// The internal of non-bulk Free().
void FreeInternal(Thread* self, void* ptr) LOCKS_EXCLUDED(lock_);
+ // Allocates large objects.
+ void* AllocLargeObject(Thread* self, size_t size, size_t* bytes_allocated) LOCKS_EXCLUDED(lock_);
+
public:
RosAlloc(void* base, size_t capacity);
void* Alloc(Thread* self, size_t size, size_t* bytes_allocated)