summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
-rw-r--r--  runtime/base/logging.cc                    | 5
-rw-r--r--  runtime/gc/collector/concurrent_copying.cc | 4
-rw-r--r--  runtime/gc/collector/concurrent_copying.h  | 2
-rw-r--r--  runtime/gc/collector/mark_sweep.cc         | 11
-rw-r--r--  runtime/mirror/class.cc                    | 2
-rw-r--r--  runtime/well_known_classes.cc              | 4
6 files changed, 18 insertions(+), 10 deletions(-)
diff --git a/runtime/base/logging.cc b/runtime/base/logging.cc
index 3ee15a2469..28352cb2c0 100644
--- a/runtime/base/logging.cc
+++ b/runtime/base/logging.cc
@@ -193,7 +193,10 @@ LogMessage::LogMessage(const char* file, unsigned int line, LogSeverity severity
}
}
LogMessage::~LogMessage() {
- if (!PrintDirectly(data_->GetSeverity()) && data_->GetSeverity() != LogSeverity::NONE) {
+ if (PrintDirectly(data_->GetSeverity())) {
+ // Add newline at the end to match the not printing directly behavior.
+ std::cerr << '\n';
+ } else if (data_->GetSeverity() != LogSeverity::NONE) {
if (data_->GetSeverity() < gMinimumLogSeverity) {
return; // No need to format something we're not going to output.
}
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 70faf4baf7..7afe6f9ab4 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -1508,7 +1508,9 @@ void ConcurrentCopying::ReclaimPhase() {
uint64_t unevac_from_bytes = region_space_->GetBytesAllocatedInUnevacFromSpace();
uint64_t unevac_from_objects = region_space_->GetObjectsAllocatedInUnevacFromSpace();
uint64_t to_bytes = bytes_moved_.LoadSequentiallyConsistent();
+ cumulative_bytes_moved_.FetchAndAddRelaxed(to_bytes);
uint64_t to_objects = objects_moved_.LoadSequentiallyConsistent();
+ cumulative_objects_moved_.FetchAndAddRelaxed(to_objects);
if (kEnableFromSpaceAccountingCheck) {
CHECK_EQ(from_space_num_objects_at_first_pause_, from_objects + unevac_from_objects);
CHECK_EQ(from_space_num_bytes_at_first_pause_, from_bytes + unevac_from_bytes);
@@ -2360,6 +2362,8 @@ void ConcurrentCopying::DumpPerformanceInfo(std::ostream& os) {
if (rb_slow_path_count_gc_total_ > 0) {
os << "GC slow path count " << rb_slow_path_count_gc_total_ << "\n";
}
+ os << "Cumulative bytes moved " << cumulative_bytes_moved_.LoadRelaxed() << "\n";
+ os << "Cumulative objects moved " << cumulative_objects_moved_.LoadRelaxed() << "\n";
}
} // namespace collector
diff --git a/runtime/gc/collector/concurrent_copying.h b/runtime/gc/collector/concurrent_copying.h
index 55c4570173..5b0e2d6274 100644
--- a/runtime/gc/collector/concurrent_copying.h
+++ b/runtime/gc/collector/concurrent_copying.h
@@ -272,6 +272,8 @@ class ConcurrentCopying : public GarbageCollector {
// How many objects and bytes we moved. Used for accounting.
Atomic<size_t> bytes_moved_;
Atomic<size_t> objects_moved_;
+ Atomic<uint64_t> cumulative_bytes_moved_;
+ Atomic<uint64_t> cumulative_objects_moved_;
// The skipped blocks are memory blocks/chucks that were copies of
// objects that were unused due to lost races (cas failures) at
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 1d15ee7334..3904160fd4 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -413,7 +413,7 @@ class MarkSweep::MarkObjectSlowPath {
if (UNLIKELY(obj == nullptr || !IsAligned<kPageSize>(obj) ||
(kIsDebugBuild && large_object_space != nullptr &&
!large_object_space->Contains(obj)))) {
- LOG(INTERNAL_FATAL) << "Tried to mark " << obj << " not contained by any spaces\n";
+ LOG(INTERNAL_FATAL) << "Tried to mark " << obj << " not contained by any spaces";
if (holder_ != nullptr) {
size_t holder_size = holder_->SizeOf();
ArtField* field = holder_->FindFieldByOffset(offset_);
@@ -436,18 +436,17 @@ class MarkSweep::MarkObjectSlowPath {
<< " num_of_ref_fields="
<< (holder_->IsClass()
? holder_->AsClass()->NumReferenceStaticFields()
- : holder_->GetClass()->NumReferenceInstanceFields())
- << "\n";
+ : holder_->GetClass()->NumReferenceInstanceFields());
// Print the memory content of the holder.
for (size_t i = 0; i < holder_size / sizeof(uint32_t); ++i) {
uint32_t* p = reinterpret_cast<uint32_t*>(holder_);
LOG(INTERNAL_FATAL) << &p[i] << ": " << "holder+" << (i * sizeof(uint32_t)) << " = "
- << std::hex << p[i] << "\n";
+ << std::hex << p[i];
}
}
PrintFileToLog("/proc/self/maps", LogSeverity::INTERNAL_FATAL);
MemMap::DumpMaps(LOG(INTERNAL_FATAL), true);
- LOG(INTERNAL_FATAL) << "Attempting see if it's a bad thread root\n";
+ LOG(INTERNAL_FATAL) << "Attempting see if it's a bad thread root";
mark_sweep_->VerifySuspendedThreadRoots();
LOG(FATAL) << "Can't mark invalid object";
}
@@ -575,7 +574,7 @@ class MarkSweep::VerifyRootVisitor : public SingleRootVisitor {
if (heap->GetLiveBitmap()->GetContinuousSpaceBitmap(root) == nullptr) {
space::LargeObjectSpace* large_object_space = heap->GetLargeObjectsSpace();
if (large_object_space != nullptr && !large_object_space->Contains(root)) {
- LOG(INTERNAL_FATAL) << "Found invalid root: " << root << " " << info << "\n";
+ LOG(INTERNAL_FATAL) << "Found invalid root: " << root << " " << info;
}
}
}
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 1cf9dd1612..f948be79c9 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -136,7 +136,7 @@ void Class::SetDexCache(DexCache* new_dex_cache) {
void Class::SetClassSize(uint32_t new_class_size) {
if (kIsDebugBuild && new_class_size < GetClassSize()) {
DumpClass(LOG(INTERNAL_FATAL), kDumpClassFullDetail);
- LOG(INTERNAL_FATAL) << new_class_size << " vs " << GetClassSize() << "\n";
+ LOG(INTERNAL_FATAL) << new_class_size << " vs " << GetClassSize();
LOG(FATAL) << " class=" << PrettyTypeOf(this);
}
// Not called within a transaction.
diff --git a/runtime/well_known_classes.cc b/runtime/well_known_classes.cc
index ddce344af3..2c992753fd 100644
--- a/runtime/well_known_classes.cc
+++ b/runtime/well_known_classes.cc
@@ -181,7 +181,7 @@ static jfieldID CacheField(JNIEnv* env, jclass c, bool is_static,
if (fid == nullptr) {
ScopedObjectAccess soa(env);
if (soa.Self()->IsExceptionPending()) {
- LOG(INTERNAL_FATAL) << soa.Self()->GetException()->Dump() << '\n';
+ LOG(INTERNAL_FATAL) << soa.Self()->GetException()->Dump();
}
std::ostringstream os;
WellKnownClasses::ToClass(c)->DumpClass(os, mirror::Class::kDumpClassFullDetail);
@@ -198,7 +198,7 @@ jmethodID CacheMethod(JNIEnv* env, jclass c, bool is_static,
if (mid == nullptr) {
ScopedObjectAccess soa(env);
if (soa.Self()->IsExceptionPending()) {
- LOG(INTERNAL_FATAL) << soa.Self()->GetException()->Dump() << '\n';
+ LOG(INTERNAL_FATAL) << soa.Self()->GetException()->Dump();
}
std::ostringstream os;
WellKnownClasses::ToClass(c)->DumpClass(os, mirror::Class::kDumpClassFullDetail);