-rw-r--r--  compiler/dex/mir_optimization.cc    10
-rw-r--r--  runtime/Android.mk                   4
-rw-r--r--  runtime/debugger.cc                  3
-rw-r--r--  runtime/gc/collector/semi_space.cc  62
-rw-r--r--  runtime/gc/collector/semi_space.h    9
-rw-r--r--  runtime/stack.cc                     1
-rw-r--r--  runtime/thread.cc                   11
7 files changed, 84 insertions(+), 16 deletions(-)
diff --git a/compiler/dex/mir_optimization.cc b/compiler/dex/mir_optimization.cc
index 635393796a..5d83991001 100644
--- a/compiler/dex/mir_optimization.cc
+++ b/compiler/dex/mir_optimization.cc
@@ -923,11 +923,11 @@ void MIRGraph::BasicBlockOptimization() {
       for (unsigned int i = 0; i < extended_basic_blocks_.size(); i++) {
         BasicBlockOpt(GetBasicBlock(extended_basic_blocks_[i]));
       }
-    }
-  } else {
-    PreOrderDfsIterator iter(this);
-    for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
-      BasicBlockOpt(bb);
+    } else {
+      PreOrderDfsIterator iter(this);
+      for (BasicBlock* bb = iter.Next(); bb != NULL; bb = iter.Next()) {
+        BasicBlockOpt(bb);
+      }
     }
   }
   if (cu_->enable_debug & (1 << kDebugDumpCFG)) {
diff --git a/runtime/Android.mk b/runtime/Android.mk
index 4e5afab4e7..2c13284d16 100644
--- a/runtime/Android.mk
+++ b/runtime/Android.mk
@@ -344,10 +344,10 @@ $$(ENUM_OPERATOR_OUT_GEN): $$(GENERATED_SRC_DIR)/%_operator_out.cc : $(LOCAL_PAT
   LOCAL_SHARED_LIBRARIES += liblog libnativehelper
   LOCAL_SHARED_LIBRARIES += libbacktrace # native stack trace support
   ifeq ($$(art_target_or_host),target)
-    LOCAL_SHARED_LIBRARIES += libcutils libdl libselinux
+    LOCAL_SHARED_LIBRARIES += libcutils libdl libselinux libutils
     LOCAL_STATIC_LIBRARIES := libziparchive libz
   else # host
-    LOCAL_STATIC_LIBRARIES += libcutils libziparchive-host libz
+    LOCAL_STATIC_LIBRARIES += libcutils libziparchive-host libz libutils
     LOCAL_LDLIBS += -ldl -lpthread
     ifeq ($(HOST_OS),linux)
       LOCAL_LDLIBS += -lrt
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index 52a2141a0b..e4b8a8a335 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -452,6 +452,7 @@ void Dbg::StartJdwp() {
 
 void Dbg::StopJdwp() {
   delete gJdwpState;
+  gJdwpState = NULL;
   delete gRegistry;
   gRegistry = NULL;
 }
@@ -1113,7 +1114,7 @@ bool Dbg::MatchType(JDWP::RefTypeId instance_class_id, JDWP::RefTypeId class_id)
   CHECK(c1 != NULL);
   mirror::Class* c2 = DecodeClass(class_id, status);
   CHECK(c2 != NULL);
-  return c1->IsAssignableFrom(c2);
+  return c2->IsAssignableFrom(c1);
 }
 
 static JDWP::FieldId ToFieldId(const mirror::ArtField* f)
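The MatchType fix is easy to misread: IsAssignableFrom is asymmetric, and the old code asked the question backwards. c2->IsAssignableFrom(c1) holds when an instance of c1 can be used where a c2 is expected, which is what matching an instance's class against a type filter requires. A minimal self-contained sketch of that direction, with a hypothetical Klass standing in for mirror::Class (illustration only, not ART code):

    #include <cassert>

    // Hypothetical stand-in for mirror::Class; illustration only.
    struct Klass {
      const Klass* super;  // superclass chain, nullptr at the root

      // True if a value of type src can be assigned to a variable of this
      // type, i.e. src is this class or one of its subclasses.
      bool IsAssignableFrom(const Klass* src) const {
        for (const Klass* k = src; k != nullptr; k = k->super) {
          if (k == this) return true;
        }
        return false;
      }
    };

    int main() {
      Klass object{nullptr};   // plays java.lang.Object
      Klass string{&object};   // plays java.lang.String
      assert(object.IsAssignableFrom(&string));   // fixed: c2->IsAssignableFrom(c1)
      assert(!string.IsAssignableFrom(&object));  // the old, reversed question
      return 0;
    }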
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index 393935474a..31a3f35dcb 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -78,6 +78,8 @@ namespace collector {
 
 static constexpr bool kProtectFromSpace = true;
 static constexpr bool kResetFromSpace = true;
+// TODO: move this to a new file as a new garbage collector?
+static constexpr bool kEnableSimplePromo = false;
 
 // TODO: Unduplicate logic.
 void SemiSpace::ImmuneSpace(space::ContinuousSpace* space) {
@@ -134,7 +136,9 @@ SemiSpace::SemiSpace(Heap* heap, const std::string& name_prefix)
       finalizer_reference_list_(nullptr),
       phantom_reference_list_(nullptr),
       cleared_reference_list_(nullptr),
-      self_(nullptr) {
+      self_(nullptr),
+      last_gc_to_space_end_(nullptr),
+      bytes_promoted_(0) {
 }
 
 void SemiSpace::InitializePhase() {
@@ -169,6 +173,17 @@ void SemiSpace::MarkingPhase() {
   // Need to do this with mutators paused so that somebody doesn't accidentally allocate into the
   // wrong space.
   heap_->SwapSemiSpaces();
+  if (kEnableSimplePromo) {
+    // If last_gc_to_space_end_ is out of the bounds of the from-space
+    // (the to-space from last GC), then point it to the beginning of
+    // the from-space. For example, the very first GC or the
+    // pre-zygote compaction.
+    if (!from_space_->HasAddress(reinterpret_cast<mirror::Object*>(last_gc_to_space_end_))) {
+      last_gc_to_space_end_ = from_space_->Begin();
+    }
+    // Reset this before the marking starts below.
+    bytes_promoted_ = 0;
+  }
   // Assume the cleared space is already empty.
   BindBitmaps();
   // Process dirty cards and add dirty cards to mod-union tables.
@@ -268,6 +283,13 @@ void SemiSpace::ReclaimPhase() {
   } else {
     mprotect(from_space_->Begin(), from_space_->Capacity(), PROT_READ);
   }
+
+  if (kEnableSimplePromo) {
+    // Record the end (top) of the to space so we can distinguish
+    // between objects that were allocated since the last GC and the
+    // older objects.
+    last_gc_to_space_end_ = to_space_->End();
+  }
 }
 
 void SemiSpace::ResizeMarkStack(size_t new_size) {
@@ -308,11 +330,38 @@ Object* SemiSpace::MarkObject(Object* obj) {
     if (from_space_->HasAddress(obj)) {
       mirror::Object* forward_address = GetForwardingAddressInFromSpace(obj);
       // If the object has already been moved, return the new forward address.
-      if (!to_space_->HasAddress(forward_address)) {
+      if (forward_address == nullptr) {
         // Otherwise, we need to move the object and add it to the markstack for processing.
         size_t object_size = obj->SizeOf();
         size_t dummy = 0;
-        forward_address = to_space_->Alloc(self_, object_size, &dummy);
+        if (kEnableSimplePromo && reinterpret_cast<byte*>(obj) < last_gc_to_space_end_) {
+          // If it's allocated before the last GC (older), move (pseudo-promote) it to
+          // the non-moving space (as sort of an old generation.)
+          size_t bytes_promoted;
+          space::MallocSpace* non_moving_space = GetHeap()->GetNonMovingSpace();
+          forward_address = non_moving_space->Alloc(self_, object_size, &bytes_promoted);
+          if (forward_address == nullptr) {
+            // If out of space, fall back to the to-space.
+            forward_address = to_space_->Alloc(self_, object_size, &dummy);
+          } else {
+            GetHeap()->num_bytes_allocated_.fetch_add(bytes_promoted);
+            bytes_promoted_ += bytes_promoted;
+            // Mark forward_address on the live bit map.
+            accounting::SpaceBitmap* live_bitmap = non_moving_space->GetLiveBitmap();
+            DCHECK(live_bitmap != nullptr);
+            DCHECK(!live_bitmap->Test(forward_address));
+            live_bitmap->Set(forward_address);
+            // Mark forward_address on the mark bit map.
+            accounting::SpaceBitmap* mark_bitmap = non_moving_space->GetMarkBitmap();
+            DCHECK(mark_bitmap != nullptr);
+            DCHECK(!mark_bitmap->Test(forward_address));
+            mark_bitmap->Set(forward_address);
+          }
+          DCHECK(forward_address != nullptr);
+        } else {
+          // If it's allocated after the last GC (younger), copy it to the to-space.
+          forward_address = to_space_->Alloc(self_, object_size, &dummy);
+        }
         // Copy over the object and add it to the mark stack since we still need to update it's
         // references.
         memcpy(reinterpret_cast<void*>(forward_address), obj, object_size);
@@ -322,6 +371,9 @@ Object* SemiSpace::MarkObject(Object* obj) {
                        monitor_size_must_be_same_as_object);
         obj->SetLockWord(LockWord::FromForwardingAddress(reinterpret_cast<size_t>(forward_address)));
         MarkStackPush(forward_address);
+      } else {
+        DCHECK(to_space_->HasAddress(forward_address) ||
+               (kEnableSimplePromo && GetHeap()->GetNonMovingSpace()->HasAddress(forward_address)));
       }
       ret = forward_address;
       // TODO: Do we need this if in the else statement?
@@ -535,7 +587,9 @@ inline Object* SemiSpace::GetMarkedForwardAddress(mirror::Object* obj) const
   if (from_space_->HasAddress(obj)) {
     mirror::Object* forwarding_address = GetForwardingAddressInFromSpace(const_cast<Object*>(obj));
     // If the object is forwarded then it MUST be marked.
-    if (to_space_->HasAddress(forwarding_address)) {
+    DCHECK(forwarding_address == nullptr || to_space_->HasAddress(forwarding_address) ||
+           (kEnableSimplePromo && GetHeap()->GetNonMovingSpace()->HasAddress(forwarding_address)));
+    if (forwarding_address != nullptr) {
       return forwarding_address;
     }
     // Must not be marked, return nullptr;
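Taken together, the semi_space.cc changes sketch a simple generational scheme: a single address comparison against last_gc_to_space_end_ classifies an object as having survived the previous collection, and survivors are pseudo-promoted into the non-moving space instead of being copied to the to-space. A condensed sketch of that decision, assuming a simplified Space::Alloc (ART's Alloc also takes the allocating thread and reports the allocated byte count through an out-parameter):

    #include <cstddef>
    #include <cstdint>

    // Minimal stand-in for ART's space interface; illustration only.
    struct Space {
      virtual void* Alloc(size_t num_bytes) = 0;
      virtual ~Space() = default;
    };

    // Sketch of the kEnableSimplePromo decision in SemiSpace::MarkObject.
    // Anything allocated below last_gc_to_space_end survived the previous
    // collection and gets pseudo-promoted into the non-moving space; younger
    // objects are copied into the to-space as before.
    void* CopyOrPromote(const void* obj, size_t object_size,
                        const uint8_t* last_gc_to_space_end,
                        Space* to_space, Space* non_moving_space) {
      void* forward_address = nullptr;
      if (static_cast<const uint8_t*>(obj) < last_gc_to_space_end) {
        // Old object: try to promote. On success the caller must also set the
        // non-moving space's live and mark bitmaps, as in the hunk above.
        forward_address = non_moving_space->Alloc(object_size);
      }
      if (forward_address == nullptr) {
        // Young object, or the non-moving space is full: copy to the to-space.
        forward_address = to_space->Alloc(object_size);
      }
      return forward_address;
    }

The nullptr fall-through is deliberate: if the non-moving space is exhausted, the object is copied to the to-space as before, so a failed promotion never fails the collection.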
diff --git a/runtime/gc/collector/semi_space.h b/runtime/gc/collector/semi_space.h
index 0f0cae1966..b0724f9c0c 100644
--- a/runtime/gc/collector/semi_space.h
+++ b/runtime/gc/collector/semi_space.h
@@ -281,6 +281,15 @@ class SemiSpace : public GarbageCollector {
 
   Thread* self_;
 
+  // Used for kEnableSimplePromo. The end/top of the bump pointer
+  // space at the end of the last collection.
+  byte* last_gc_to_space_end_;
+
+  // Used for kEnableSimplePromo. During a collection, keeps track of
+  // how many bytes of objects have been copied so far from the bump
+  // pointer space to the non-moving space.
+  uint64_t bytes_promoted_;
+
  private:
   DISALLOW_COPY_AND_ASSIGN(SemiSpace);
 };
diff --git a/runtime/stack.cc b/runtime/stack.cc
index a50538399c..4e3fb4a307 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -259,6 +259,7 @@ std::string StackVisitor::DescribeLocation() const {
 }
 
 instrumentation::InstrumentationStackFrame& StackVisitor::GetInstrumentationStackFrame(uint32_t depth) const {
+  CHECK_LT(depth, thread_->GetInstrumentationStack()->size());
   return thread_->GetInstrumentationStack()->at(depth);
 }
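The stack.cc hunk adds a fail-fast bounds check: CHECK_LT logs both operands and aborts cleanly, rather than leaving an out-of-range depth to deque::at. The same pattern in plain C++, with assert standing in for ART's CHECK_LT macro and a hypothetical Frame type (illustration, not the ART signature):

    #include <cassert>
    #include <cstdint>
    #include <deque>

    // Hypothetical frame type; ART uses instrumentation::InstrumentationStackFrame.
    struct Frame { uintptr_t method; };

    Frame& GetFrame(std::deque<Frame>& stack, uint32_t depth) {
      // Fail fast with a clear message rather than indexing out of range.
      assert(depth < stack.size() && "instrumentation stack too shallow");
      return stack.at(depth);
    }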
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 715be99942..66e164d298 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -1828,6 +1828,12 @@ class CatchBlockStackVisitor : public StackVisitor {
     self_->EndAssertNoThreadSuspension(last_no_assert_suspension_cause_);
     // Do instrumentation events after allowing thread suspension again.
     instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
+    if (!is_deoptimization_) {
+      // The debugger may suspend this thread and walk its stack. Let's do this before popping
+      // instrumentation frames.
+      instrumentation->ExceptionCaughtEvent(self_, throw_location_, catch_method, handler_dex_pc_,
+                                            exception_);
+    }
     for (size_t i = 0; i < instrumentation_frames_to_pop_; ++i) {
       // We pop the instrumentation stack here so as not to corrupt it during the stack walk.
       if (i != instrumentation_frames_to_pop_ - 1 || self_->GetInstrumentationStack()->front().method_ != catch_method) {
@@ -1835,10 +1841,7 @@ class CatchBlockStackVisitor : public StackVisitor {
         instrumentation->PopMethodForUnwind(self_, is_deoptimization_);
       }
     }
-    if (!is_deoptimization_) {
-      instrumentation->ExceptionCaughtEvent(self_, throw_location_, catch_method, handler_dex_pc_,
-                                            exception_);
-    } else {
+    if (is_deoptimization_) {
       // TODO: proper return value.
       self_->SetDeoptimizationShadowFrame(top_shadow_frame_);
     }
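The thread.cc reordering is the subtle fix here: ExceptionCaughtEvent may suspend this thread and let the debugger walk its stack, so it has to fire before the loop that pops instrumentation frames, while the instrumentation stack still mirrors the real frames. A condensed sketch of the corrected ordering, with placeholder types (only the call order reflects the hunk above):

    #include <cstddef>

    // Placeholder for instrumentation::Instrumentation; illustration only.
    struct Instrumentation {
      void ExceptionCaughtEvent() {}  // may suspend this thread for the debugger
      void PopMethodForUnwind() {}
    };

    void OnExceptionCaught(Instrumentation* instrumentation, bool is_deoptimization,
                           size_t frames_to_pop) {
      if (!is_deoptimization) {
        // Report the catch while the instrumentation stack still matches the
        // real stack; the debugger may inspect it during this call.
        instrumentation->ExceptionCaughtEvent();
      }
      for (size_t i = 0; i < frames_to_pop; ++i) {
        instrumentation->PopMethodForUnwind();  // safe to mutate afterwards
      }
    }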