Merge "Optimizing: Use 16-bit Thumb2 PUSH/POP when possible."
diff --git a/build/Android.common_build.mk b/build/Android.common_build.mk
index 7d34dae..c6e2b95 100644
--- a/build/Android.common_build.mk
+++ b/build/Android.common_build.mk
@@ -220,6 +220,14 @@
# -Wmissing-declarations \
+
+ifdef ART_IMT_SIZE
+ art_cflags += -DIMT_SIZE=$(ART_IMT_SIZE)
+else
+ # Default is 64
+ art_cflags += -DIMT_SIZE=64
+endif
+
ifeq ($(ART_SMALL_MODE),true)
art_cflags += -DART_SMALL_MODE=1
endif
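
Note: the -DIMT_SIZE flag must reach every translation unit with the same value, or classes compiled in different files would disagree about their own layout; the companion change to runtime/mirror/class.h (below) turns a missing definition into a hard compile error. A minimal sketch of the consuming side, assuming the build passes -DIMT_SIZE as above (the static_assert is illustrative, not part of this change):

    // Sketch: compile-time consumption of the IMT_SIZE macro, mirroring the
    // guard added to runtime/mirror/class.h in this change.
    #ifndef IMT_SIZE
    #error IMT_SIZE not defined
    #endif

    #include <cstddef>

    // Must be a compile-time constant: the table is embedded in the Class
    // object layout, so every file has to agree on its size.
    static constexpr std::size_t kImtSize = IMT_SIZE;
    static_assert(kImtSize > 0, "IMT needs at least one slot");  // illustrative check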
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 0cceaa4..7b679ea 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -977,6 +977,21 @@
Trim();
}
+class TrimIndirectReferenceTableClosure : public Closure {
+ public:
+ explicit TrimIndirectReferenceTableClosure(Barrier* barrier) : barrier_(barrier) {
+ }
+ virtual void Run(Thread* thread) OVERRIDE NO_THREAD_SAFETY_ANALYSIS {
+ ATRACE_BEGIN("Trimming reference table");
+ thread->GetJniEnv()->locals.Trim();
+ ATRACE_END();
+ barrier_->Pass(Thread::Current());
+ }
+
+ private:
+ Barrier* const barrier_;
+};
+
void Heap::Trim() {
Thread* self = Thread::Current();
{
@@ -998,6 +1013,19 @@
WaitForGcToCompleteLocked(kGcCauseTrim, self);
collector_type_running_ = kCollectorTypeHeapTrim;
}
+ // Trim reference tables.
+ {
+ ScopedObjectAccess soa(self);
+ JavaVMExt* vm = soa.Vm();
+ // Trim globals indirect reference table.
+ vm->TrimGlobals();
+ // Trim locals indirect reference tables.
+ Barrier barrier(0);
+ TrimIndirectReferenceTableClosure closure(&barrier);
+ ScopedThreadStateChange tsc(self, kWaitingForCheckPointsToRun);
+ size_t barrier_count = Runtime::Current()->GetThreadList()->RunCheckpoint(&closure);
+ barrier.Increment(self, barrier_count);
+ }
uint64_t start_ns = NanoTime();
// Trim the managed spaces.
uint64_t total_alloc_space_allocated = 0;
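
Note: locals tables are per-thread, so the heap cannot trim them directly; instead RunCheckpoint schedules the closure on every thread and returns how many threads will run it, and the Barrier blocks the requester until each closure has called Pass. A minimal sketch of that rendezvous, with a hypothetical CountBarrier standing in for ART's Barrier:

    #include <condition_variable>
    #include <mutex>

    // Hypothetical stand-in for ART's Barrier. Starting at zero and adding the
    // thread count afterwards lets threads that finish before RunCheckpoint()
    // returns drive the count negative without deadlocking anyone.
    class CountBarrier {
     public:
      void Pass() {  // called by each checkpointed thread when it is done
        std::lock_guard<std::mutex> lg(lock_);
        --count_;
        cv_.notify_all();
      }
      void Increment(int threads) {  // called by the requesting thread
        std::unique_lock<std::mutex> ul(lock_);
        count_ += threads;
        cv_.wait(ul, [this] { return count_ <= 0; });
      }

     private:
      std::mutex lock_;
      std::condition_variable cv_;
      int count_ = 0;  // may dip below zero; that just means threads passed early
    };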
diff --git a/runtime/indirect_reference_table.cc b/runtime/indirect_reference_table.cc
index 4d177a3..0d84a1e 100644
--- a/runtime/indirect_reference_table.cc
+++ b/runtime/indirect_reference_table.cc
@@ -162,13 +162,12 @@
DCHECK(table_ != NULL);
DCHECK_GE(segment_state_.parts.numHoles, prevState.parts.numHoles);
- int idx = ExtractIndex(iref);
-
if (GetIndirectRefKind(iref) == kHandleScopeOrInvalid &&
Thread::Current()->HandleScopeContains(reinterpret_cast<jobject>(iref))) {
LOG(WARNING) << "Attempt to remove local handle scope entry from IRT, ignoring";
return true;
}
+ const int idx = ExtractIndex(iref);
if (idx < bottomIndex) {
// Wrong segment.
LOG(WARNING) << "Attempt to remove index outside index area (" << idx
@@ -236,6 +235,13 @@
return true;
}
+void IndirectReferenceTable::Trim() {
+ const size_t top_index = Capacity();
+ auto* release_start = AlignUp(reinterpret_cast<uint8_t*>(&table_[top_index]), kPageSize);
+ uint8_t* release_end = table_mem_map_->End();
+ madvise(release_start, release_end - release_start, MADV_DONTNEED);
+}
+
void IndirectReferenceTable::VisitRoots(RootCallback* callback, void* arg, uint32_t tid,
RootType root_type) {
for (auto ref : *this) {
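
Note: Trim() deliberately leaves Capacity() and the mapping itself untouched; MADV_DONTNEED only tells the kernel the tail pages are disposable, and any later growth faults them back in as zero-filled pages. A sketch of the same idea against a raw anonymous mapping (function and parameter names are illustrative):

    #include <sys/mman.h>
    #include <unistd.h>
    #include <cstddef>
    #include <cstdint>

    // Release the tail of a mapping past live_bytes without unmapping it.
    // Rounding the start up to a page boundary guarantees no live entry is
    // discarded; the range stays valid and reads back as zeros if touched.
    void TrimTail(uint8_t* base, size_t live_bytes, size_t map_bytes) {
      const uintptr_t page = static_cast<uintptr_t>(sysconf(_SC_PAGESIZE));
      uintptr_t start = reinterpret_cast<uintptr_t>(base) + live_bytes;
      start = (start + page - 1) & ~(page - 1);  // round up, never down
      uint8_t* const end = base + map_bytes;
      if (start < reinterpret_cast<uintptr_t>(end)) {
        madvise(reinterpret_cast<void*>(start),
                reinterpret_cast<uintptr_t>(end) - start, MADV_DONTNEED);
      }
    }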
diff --git a/runtime/indirect_reference_table.h b/runtime/indirect_reference_table.h
index 168f9f2..fbd5714 100644
--- a/runtime/indirect_reference_table.h
+++ b/runtime/indirect_reference_table.h
@@ -331,6 +331,9 @@
return Offset(OFFSETOF_MEMBER(IndirectReferenceTable, segment_state_));
}
+ // Release pages past the end of the table that may have previously held references.
+ void Trim() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
private:
// Extract the table index from an indirect reference.
static uint32_t ExtractIndex(IndirectRef iref) {
diff --git a/runtime/java_vm_ext.cc b/runtime/java_vm_ext.cc
index a5abce6..5d04fac 100644
--- a/runtime/java_vm_ext.cc
+++ b/runtime/java_vm_ext.cc
@@ -756,6 +756,11 @@
}
}
+void JavaVMExt::TrimGlobals() {
+ WriterMutexLock mu(Thread::Current(), globals_lock_);
+ globals_.Trim();
+}
+
void JavaVMExt::VisitRoots(RootCallback* callback, void* arg) {
Thread* self = Thread::Current();
{
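
Note: the writer lock looks stronger than necessary for an operation that modifies no table entry, but it is presumably what keeps the madvise from racing a reader that is decoding a global reference in the tail being released. A reduced sketch of that discipline, using C++17 std::shared_mutex as a stand-in for ART's ReaderWriterMutex:

    #include <cstddef>
    #include <shared_mutex>

    // Hypothetical reduction of JavaVMExt: reads take the lock shared, Trim()
    // takes it exclusively so no reader can observe pages being discarded.
    class GlobalsTable {
     public:
      void* Decode(std::size_t index) {
        std::shared_lock<std::shared_mutex> lock(lock_);  // many concurrent readers
        return Get(index);
      }
      void Trim() {
        std::unique_lock<std::shared_mutex> lock(lock_);  // exclusive access
        ReleaseUnusedPages();
      }

     private:
      void* Get(std::size_t) { return nullptr; }  // illustrative lookup
      void ReleaseUnusedPages() {}                // illustrative madvise wrapper
      std::shared_mutex lock_;
    };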
diff --git a/runtime/java_vm_ext.h b/runtime/java_vm_ext.h
index 2957ba3..749b9fb 100644
--- a/runtime/java_vm_ext.h
+++ b/runtime/java_vm_ext.h
@@ -131,6 +131,9 @@
return unchecked_functions_;
}
+ void TrimGlobals() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
+ LOCKS_EXCLUDED(globals_lock_);
+
private:
Runtime* const runtime_;
diff --git a/runtime/jni_internal.cc b/runtime/jni_internal.cc
index 1dcfcab..4797e69 100644
--- a/runtime/jni_internal.cc
+++ b/runtime/jni_internal.cc
@@ -566,7 +566,8 @@
return soa.AddLocalReference<jobject>(decoded_obj);
}
- static void DeleteLocalRef(JNIEnv* env, jobject obj) {
+ static void DeleteLocalRef(JNIEnv* env, jobject obj)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
if (obj == nullptr) {
return;
}
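
Note: this adds documentation for the thread-safety analysis rather than new synchronization: the locals table may only be touched with the mutator lock held at least shared, and spelling that out lets Clang's -Wthread-safety check callers. A hedged sketch of roughly what the macro expands to, using the legacy Clang attribute names of this era (the real macros live in runtime/base/macros.h; the lock type here is illustrative):

    // Illustrative lock type; Clang's analysis tracks it via the attributes.
    struct __attribute__((lockable)) SketchMutatorLock {
      void SharedLock() __attribute__((shared_lock_function));
      void SharedUnlock() __attribute__((unlock_function));
    };

    SketchMutatorLock sketch_mutator_lock;

    // SHARED_LOCKS_REQUIRED(x) boils down to this: any caller that cannot be
    // proven to hold x (at least shared) gets a compile-time warning.
    void DeleteLocalRefSketch()
        __attribute__((shared_locks_required(sketch_mutator_lock)));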
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index a77972e..56867dd 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -29,6 +29,10 @@
#include "read_barrier_option.h"
#include "utils.h"
+#ifndef IMT_SIZE
+#error IMT_SIZE not defined
+#endif
+
namespace art {
struct ClassOffsets;
@@ -58,7 +62,7 @@
// Interface method table size. Increasing this value reduces the chance of two interface methods
// colliding in the interface method table but increases the size of classes that implement
// (non-marker) interfaces.
- static constexpr size_t kImtSize = 64;
+ static constexpr size_t kImtSize = IMT_SIZE;
// imtable entry embedded in class object.
struct MANAGED ImTableEntry {
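
Note: this is the trade-off the ART_IMT_SIZE knob exposes. Each class implementing a non-marker interface embeds kImtSize ImTableEntry slots, so shrinking the table saves per-class memory at the cost of more slot collisions, which fall back to a slower conflict path. A small sketch of the slot mapping, assuming the modulo-on-dex-method-index scheme ART used at this point (the function name is illustrative):

    #include <cstddef>
    #include <cstdint>

    static constexpr std::size_t kImtSize = 64;  // now supplied via -DIMT_SIZE

    // An interface method lands in imt[dex_method_index % kImtSize]; two
    // methods with congruent indices collide and need conflict resolution.
    constexpr std::size_t ImtSlot(uint32_t dex_method_index) {
      return dex_method_index % kImtSize;
    }

    static_assert(ImtSlot(3) == ImtSlot(3 + kImtSize), "congruent indices share a slot");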
diff --git a/runtime/trace.cc b/runtime/trace.cc
index 2cc50b3..b510844 100644
--- a/runtime/trace.cc
+++ b/runtime/trace.cc
@@ -735,7 +735,9 @@
if (the_trace_ != nullptr) {
std::string name;
thread->GetThreadName(name);
- the_trace_->exited_threads_.Put(thread->GetTid(), name);
+ // The same thread/tid may be used multiple times. As SafeMap::Put does not allow
+ // overwriting an existing mapping, use SafeMap::Overwrite instead.
+ the_trace_->exited_threads_.Overwrite(thread->GetTid(), name);
}
}
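
Note: SafeMap is ART's std::map wrapper that turns accidental duplicate-key insertion into a check failure, and Put()'s duplicate check is exactly what a kernel-recycled tid would trip, hence the switch to Overwrite(). A hypothetical reduction of the two calls involved:

    #include <cassert>
    #include <map>

    // Illustrative: Put() insists the key is new; Overwrite() replaces freely.
    template <typename K, typename V>
    class SafeMapSketch {
     public:
      void Put(const K& k, const V& v) {
        const bool inserted = map_.emplace(k, v).second;
        assert(inserted && "Put() requires the key to be absent");
        (void)inserted;  // silence unused-variable warning under NDEBUG
      }
      void Overwrite(const K& k, const V& v) {
        map_[k] = v;  // insert-or-replace, no check
      }

     private:
      std::map<K, V> map_;
    };

    // With tid reuse, the second exiting thread must call Overwrite(tid, name);
    // Put(tid, name) would abort a debug build.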