-rw-r--r--  compiler/debug/elf_debug_frame_writer.h |  3
-rw-r--r--  compiler/debug/elf_debug_writer.cc      |  3
-rw-r--r--  dex2oat/dex2oat.cc                      | 29
-rw-r--r--  dex2oat/dex2oat_options.cc              |  3
-rw-r--r--  dex2oat/dex2oat_options.def             |  1
-rw-r--r--  openjdkjvm/OpenjdkJvm.cc                |  1
-rw-r--r--  runtime/gc/heap.h                       |  4
-rw-r--r--  runtime/gc/space/region_space-inl.h     | 60
-rw-r--r--  runtime/gc/space/region_space.cc        | 29
-rw-r--r--  runtime/gc/space/region_space.h         | 15
10 files changed, 121 insertions(+), 27 deletions(-)
diff --git a/compiler/debug/elf_debug_frame_writer.h b/compiler/debug/elf_debug_frame_writer.h
index e0116c6f41..27b70c8caa 100644
--- a/compiler/debug/elf_debug_frame_writer.h
+++ b/compiler/debug/elf_debug_frame_writer.h
@@ -182,7 +182,7 @@ void WriteCFISection(linker::ElfBuilder<ElfTypes>* builder,
   std::vector<const MethodDebugInfo*> sorted_method_infos;
   sorted_method_infos.reserve(method_infos.size());
   for (size_t i = 0; i < method_infos.size(); i++) {
-    if (!method_infos[i].deduped) {
+    if (!method_infos[i].cfi.empty() && !method_infos[i].deduped) {
       sorted_method_infos.push_back(&method_infos[i]);
     }
   }
@@ -222,6 +222,7 @@ void WriteCFISection(linker::ElfBuilder<ElfTypes>* builder,
   buffer.clear();
   for (const MethodDebugInfo* mi : sorted_method_infos) {
     DCHECK(!mi->deduped);
+    DCHECK(!mi->cfi.empty());
     const Elf_Addr code_address = mi->code_address +
         (mi->is_code_address_text_relative ? builder->GetText()->GetAddress() : 0);
     if (format == dwarf::DW_EH_FRAME_FORMAT) {
diff --git a/compiler/debug/elf_debug_writer.cc b/compiler/debug/elf_debug_writer.cc
index 16f163b085..393db3dccb 100644
--- a/compiler/debug/elf_debug_writer.cc
+++ b/compiler/debug/elf_debug_writer.cc
@@ -226,7 +226,8 @@ std::vector<uint8_t> MakeElfFileForJIT(
       num_cfis++;
     });
     DCHECK_EQ(num_syms, 1u);
-    DCHECK_EQ(num_cfis, 1u);
+    // CFI might be missing. TODO: Ensure we have CFI for all methods.
+    DCHECK_LE(num_cfis, 1u);
   }
   return buffer;
 }
diff --git a/dex2oat/dex2oat.cc b/dex2oat/dex2oat.cc
index ea4158abb0..35af918757 100644
--- a/dex2oat/dex2oat.cc
+++ b/dex2oat/dex2oat.cc
@@ -382,6 +382,9 @@ NO_RETURN static void Usage(const char* fmt, ...) {
   UsageError("  --avoid-storing-invocation: Avoid storing the invocation args in the key value");
   UsageError("      store. Used to test determinism with different args.");
   UsageError("");
+  UsageError("  --write-invocation-to=<file>: Write the invocation commandline to the given file");
+  UsageError("      for later use. Used to test determinism with different host architectures.");
+  UsageError("");
   UsageError("  --runtime-arg <argument>: used to specify various arguments for the runtime,");
   UsageError("      such as initial heap size, maximum heap size, and verbose output.");
   UsageError("      Use a separate --runtime-arg switch for each argument.");
@@ -1001,6 +1004,21 @@ class Dex2Oat final {
                           CompilerFilter::NameOfFilter(compiler_options_->GetCompilerFilter()));
     key_value_store_->Put(OatHeader::kConcurrentCopying,
                           kUseReadBarrier ? OatHeader::kTrueValue : OatHeader::kFalseValue);
+    if (invocation_file_.get() != -1) {
+      std::ostringstream oss;
+      for (int i = 0; i < argc; ++i) {
+        if (i > 0) {
+          oss << std::endl;
+        }
+        oss << argv[i];
+      }
+      std::string invocation(oss.str());
+      if (TEMP_FAILURE_RETRY(write(invocation_file_.get(),
+                                   invocation.c_str(),
+                                   invocation.size())) == -1) {
+        Usage("Unable to write invocation file");
+      }
+    }
   }

   // This simple forward is here so the string specializations below don't look out of place.
@@ -1116,6 +1134,16 @@ class Dex2Oat final {
     AssignTrueIfExists(args, M::Host, &is_host_);
     AssignTrueIfExists(args, M::AvoidStoringInvocation, &avoid_storing_invocation_);
+    if (args.Exists(M::InvocationFile)) {
+      invocation_file_.reset(open(args.Get(M::InvocationFile)->c_str(),
+                                  O_CREAT|O_WRONLY|O_TRUNC|O_CLOEXEC,
+                                  S_IRUSR|S_IWUSR));
+      if (invocation_file_.get() == -1) {
+        int err = errno;
+        Usage("Unable to open invocation file '%s' for writing due to %s.",
+              args.Get(M::InvocationFile)->c_str(), strerror(err));
+      }
+    }
     AssignIfExists(args, M::CopyDexFiles, &copy_dex_files_);

     if (args.Exists(M::ForceDeterminism)) {
@@ -2712,6 +2740,7 @@ class Dex2Oat final {
   std::vector<std::unique_ptr<const DexFile>> opened_dex_files_;

   bool avoid_storing_invocation_;
+  android::base::unique_fd invocation_file_;
   std::string swap_file_name_;
   int swap_fd_;
   size_t min_dex_files_for_swap_ = kDefaultMinDexFilesForSwap;
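For illustration, a minimal standalone sketch of the serialization the new invocation_file_ block performs: argv entries are joined with newline separators and written in one write() call, retrying on EINTR (the hand-rolled loop below stands in for TEMP_FAILURE_RETRY; WriteInvocation is a hypothetical helper, not ART code).

    #include <unistd.h>
    #include <cerrno>
    #include <sstream>
    #include <string>

    // Hypothetical helper: serialize argv[] one argument per line and write it
    // to the given file descriptor. Newline separators avoid the shell-quoting
    // ambiguity a space-joined command line would have.
    static bool WriteInvocation(int fd, int argc, char** argv) {
      std::ostringstream oss;
      for (int i = 0; i < argc; ++i) {
        if (i > 0) {
          oss << '\n';
        }
        oss << argv[i];
      }
      const std::string invocation = oss.str();
      // Retry the write if it is interrupted by a signal (EINTR).
      ssize_t written;
      do {
        written = write(fd, invocation.c_str(), invocation.size());
      } while (written == -1 && errno == EINTR);
      return written != -1;
    }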
diff --git a/dex2oat/dex2oat_options.cc b/dex2oat/dex2oat_options.cc
index 236c1fc894..783b326881 100644
--- a/dex2oat/dex2oat_options.cc
+++ b/dex2oat/dex2oat_options.cc
@@ -229,6 +229,9 @@ static Parser CreateArgumentParser() {
                          {"false", linker::CopyOption::kNever},
                          {"always", linker::CopyOption::kAlways}})
           .IntoKey(M::CopyDexFiles)
+      .Define("--write-invocation-to=_")
+          .WithType<std::string>()
+          .IntoKey(M::InvocationFile)
       .Define("--classpath-dir=_")
           .WithType<std::string>()
           .IntoKey(M::ClasspathDir)
diff --git a/dex2oat/dex2oat_options.def b/dex2oat/dex2oat_options.def
index c8cb7e7b72..0b77859a40 100644
--- a/dex2oat/dex2oat_options.def
+++ b/dex2oat/dex2oat_options.def
@@ -84,6 +84,7 @@ DEX2OAT_OPTIONS_KEY (Unit, MultiImage)
 DEX2OAT_OPTIONS_KEY (std::string, NoInlineFrom)
 DEX2OAT_OPTIONS_KEY (Unit, ForceDeterminism)
 DEX2OAT_OPTIONS_KEY (std::string, ClasspathDir)
+DEX2OAT_OPTIONS_KEY (std::string, InvocationFile)
 DEX2OAT_OPTIONS_KEY (std::string, ClassLoaderContext)
 DEX2OAT_OPTIONS_KEY (std::string, StoredClassLoaderContext)
 DEX2OAT_OPTIONS_KEY (std::string, DirtyImageObjects)
diff --git a/openjdkjvm/OpenjdkJvm.cc b/openjdkjvm/OpenjdkJvm.cc
index 7a388c9583..b2c4bb7236 100644
--- a/openjdkjvm/OpenjdkJvm.cc
+++ b/openjdkjvm/OpenjdkJvm.cc
@@ -77,6 +77,7 @@ JNIEXPORT jint JVM_Open(const char* fname, jint flags, jint mode) {
                << fname << "')";
     }

+    flags |= O_CLOEXEC;
    int fd = TEMP_FAILURE_RETRY(open(fname, flags & ~JVM_O_DELETE, mode));
    if (fd < 0) {
        int err = errno;
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index aa09cbed5c..6696cc1f06 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -543,6 +543,10 @@ class Heap {
     return total_bytes_freed_ever_;
   }

+  space::RegionSpace* GetRegionSpace() const {
+    return region_space_;
+  }
+
   // Implements java.lang.Runtime.maxMemory, returning the maximum amount of memory a program can
   // consume. For a regular VM this would relate to the -Xmx option and would return -1 if no Xmx
   // were specified. Android apps start with a growth limit (small heap size) which is
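The JVM_Open change above forces O_CLOEXEC so that descriptors opened through the JVM interface are not inherited by exec'd child processes. A minimal standalone demonstration of the effect (hypothetical example, not ART code):

    #include <fcntl.h>
    #include <unistd.h>
    #include <cstdio>

    int main() {
      // O_CLOEXEC at open() sets the FD_CLOEXEC flag atomically, so there is
      // no window where a concurrent fork()+exec() could leak the descriptor.
      int fd = open("/tmp/example", O_CREAT | O_WRONLY | O_CLOEXEC, 0600);
      if (fd < 0) {
        perror("open");
        return 1;
      }
      // After a successful execve(), this descriptor is closed automatically
      // in the new program image.
      int fd_flags = fcntl(fd, F_GETFD);
      std::printf("FD_CLOEXEC set: %d\n", (fd_flags & FD_CLOEXEC) != 0);
      close(fd);
      return 0;
    }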
diff --git a/runtime/gc/space/region_space-inl.h b/runtime/gc/space/region_space-inl.h
index 9f5c1179e5..86a0a6e418 100644
--- a/runtime/gc/space/region_space-inl.h
+++ b/runtime/gc/space/region_space-inl.h
@@ -249,27 +249,47 @@ inline void RegionSpace::WalkInternal(Visitor&& visitor) {
     } else if (r->IsLargeTail()) {
       // Do nothing.
     } else {
-      // For newly allocated and evacuated regions, live bytes will be -1.
-      uint8_t* pos = r->Begin();
-      uint8_t* top = r->Top();
-      const bool need_bitmap =
-          r->LiveBytes() != static_cast<size_t>(-1) &&
-          r->LiveBytes() != static_cast<size_t>(top - pos);
-      if (need_bitmap) {
-        GetLiveBitmap()->VisitMarkedRange(
-            reinterpret_cast<uintptr_t>(pos),
-            reinterpret_cast<uintptr_t>(top),
-            visitor);
+      WalkNonLargeRegion(visitor, r);
+    }
+  }
+}
+
+template<typename Visitor>
+inline void RegionSpace::WalkNonLargeRegion(Visitor&& visitor, const Region* r) {
+  DCHECK(!r->IsLarge() && !r->IsLargeTail());
+  // For newly allocated and evacuated regions, live bytes will be -1.
+  uint8_t* pos = r->Begin();
+  uint8_t* top = r->Top();
+  // We need the region space bitmap to iterate over a region's objects
+  // if
+  // - its live bytes count is invalid (i.e. -1); or
+  // - its live bytes count is lower than the allocated bytes count.
+  //
+  // In both of the previous cases, we do not have the guarantee that
+  // all allocated objects are "alive" (i.e. valid), so we depend on
+  // the region space bitmap to identify which ones to visit.
+  //
+  // On the other hand, when all allocated bytes are known to be alive,
+  // we know that they form a range of consecutive objects (modulo
+  // object alignment constraints) that can be visited iteratively: we
+  // can compute the next object's location by using the current
+  // object's address and size (and object alignment constraints).
+  const bool need_bitmap =
+      r->LiveBytes() != static_cast<size_t>(-1) &&
+      r->LiveBytes() != static_cast<size_t>(top - pos);
+  if (need_bitmap) {
+    GetLiveBitmap()->VisitMarkedRange(
+        reinterpret_cast<uintptr_t>(pos),
+        reinterpret_cast<uintptr_t>(top),
+        visitor);
+  } else {
+    while (pos < top) {
+      mirror::Object* obj = reinterpret_cast<mirror::Object*>(pos);
+      if (obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
+        visitor(obj);
+        pos = reinterpret_cast<uint8_t*>(GetNextObject(obj));
       } else {
-        while (pos < top) {
-          mirror::Object* obj = reinterpret_cast<mirror::Object*>(pos);
-          if (obj->GetClass<kDefaultVerifyFlags, kWithoutReadBarrier>() != nullptr) {
-            visitor(obj);
-            pos = reinterpret_cast<uint8_t*>(GetNextObject(obj));
-          } else {
-            break;
-          }
-        }
+        break;
       }
     }
   }
 }
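The iterative branch of WalkNonLargeRegion relies on objects being laid out back to back in a bump-pointer region. A simplified standalone sketch of that walk, with a hypothetical Object type standing in for mirror::Object and an assumed 8-byte allocation alignment:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kAlignment = 8;  // Assumed allocation alignment.

    struct Object {
      size_t size;
      size_t SizeOf() const { return size; }
    };

    // Visit every object in [pos, top): each object's end, rounded up to the
    // allocation alignment, is the start of the next object.
    template <typename Visitor>
    void WalkRange(uint8_t* pos, uint8_t* top, Visitor&& visitor) {
      while (pos < top) {
        Object* obj = reinterpret_cast<Object*>(pos);
        visitor(obj);
        uintptr_t end = reinterpret_cast<uintptr_t>(pos) + obj->SizeOf();
        pos = reinterpret_cast<uint8_t*>((end + kAlignment - 1) & ~(kAlignment - 1));
      }
    }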
diff --git a/runtime/gc/space/region_space.cc b/runtime/gc/space/region_space.cc
index 07783bacfe..a5ba1dcd37 100644
--- a/runtime/gc/space/region_space.cc
+++ b/runtime/gc/space/region_space.cc
@@ -616,7 +616,7 @@ void RegionSpace::PoisonDeadObjectsInUnevacuatedRegion(Region* r) {
   // Functor poisoning the space between `obj` and the previously
   // visited (live) object (or the beginning of the region), if any.
-  auto maybe_poison = [this, &prev_obj_end](mirror::Object* obj) REQUIRES(Locks::mutator_lock_) {
+  auto maybe_poison = [&prev_obj_end](mirror::Object* obj) REQUIRES(Locks::mutator_lock_) {
     DCHECK_ALIGNED(obj, kAlignment);
     uint8_t* cur_obj_begin = reinterpret_cast<uint8_t*>(obj);
     if (cur_obj_begin != prev_obj_end) {
@@ -840,6 +840,9 @@ void RegionSpace::Region::Dump(std::ostream& os) const {
   if (live_bytes_ != static_cast<size_t>(-1)) {
     os << " ratio over allocated bytes="
        << (static_cast<float>(live_bytes_) / RoundUp(BytesAllocated(), kRegionSize));
+    uint64_t longest_consecutive_free_bytes = GetLongestConsecutiveFreeBytes();
+    os << " longest_consecutive_free_bytes=" << longest_consecutive_free_bytes
+       << " (" << PrettySize(longest_consecutive_free_bytes) << ")";
   }
   os << " is_newly_allocated=" << std::boolalpha << is_newly_allocated_ << std::noboolalpha
@@ -847,6 +850,30 @@ void RegionSpace::Region::Dump(std::ostream& os) const {
      << " thread=" << thread_ << '\n';
 }

+uint64_t RegionSpace::Region::GetLongestConsecutiveFreeBytes() const {
+  if (IsFree()) {
+    return kRegionSize;
+  }
+  if (IsLarge() || IsLargeTail()) {
+    return 0u;
+  }
+  uintptr_t max_gap = 0u;
+  uintptr_t prev_object_end = reinterpret_cast<uintptr_t>(Begin());
+  // Iterate through all live objects and find the largest free gap.
+  auto visitor = [&max_gap, &prev_object_end](mirror::Object* obj)
+      REQUIRES_SHARED(Locks::mutator_lock_) {
+    uintptr_t current = reinterpret_cast<uintptr_t>(obj);
+    uintptr_t diff = current - prev_object_end;
+    max_gap = std::max(diff, max_gap);
+    uintptr_t object_end = reinterpret_cast<uintptr_t>(obj) + obj->SizeOf();
+    prev_object_end = RoundUp(object_end, kAlignment);
+  };
+  space::RegionSpace* region_space = art::Runtime::Current()->GetHeap()->GetRegionSpace();
+  region_space->WalkNonLargeRegion(visitor, this);
+  return static_cast<uint64_t>(max_gap);
+}
+
 size_t RegionSpace::AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size) {
   size_t num_bytes = obj->SizeOf();
   if (usable_size != nullptr) {
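GetLongestConsecutiveFreeBytes above computes, via the object walk, the largest gap between one live object's aligned end and the next live object's start. The same scan, sketched standalone over pre-collected object intervals (hypothetical helper, not ART code):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct Interval {
      uintptr_t begin;
      uintptr_t end;  // Exclusive.
    };

    // Given live-object intervals sorted by address, return the longest free
    // run between consecutive objects within the region.
    uintptr_t LongestGap(uintptr_t region_begin, const std::vector<Interval>& live) {
      uintptr_t max_gap = 0;
      uintptr_t prev_end = region_begin;
      for (const Interval& obj : live) {
        max_gap = std::max(max_gap, obj.begin - prev_end);
        prev_end = obj.end;  // ART additionally rounds this up to kAlignment.
      }
      return max_gap;
    }

Note that, like the visitor-based version above, this does not count any free space after the last live object up to the region's top.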
diff --git a/runtime/gc/space/region_space.h b/runtime/gc/space/region_space.h
index 75c99ec964..a6f501becb 100644
--- a/runtime/gc/space/region_space.h
+++ b/runtime/gc/space/region_space.h
@@ -370,9 +370,6 @@ class RegionSpace final : public ContinuousMemMapAllocSpace {
  private:
   RegionSpace(const std::string& name, MemMap&& mem_map);

-  template<bool kToSpaceOnly, typename Visitor>
-  ALWAYS_INLINE void WalkInternal(Visitor&& visitor) NO_THREAD_SAFETY_ANALYSIS;
-
   class Region {
    public:
     Region()
@@ -616,6 +613,8 @@ class RegionSpace final : public ContinuousMemMapAllocSpace {
       DCHECK_LE(Top(), end_);
     }

+    uint64_t GetLongestConsecutiveFreeBytes() const;
+
    private:
     size_t idx_;         // The region's index in the region space.
     size_t live_bytes_;  // The live bytes. Used to compute the live percent.
@@ -640,6 +639,14 @@ class RegionSpace final : public ContinuousMemMapAllocSpace {
     friend class RegionSpace;
   };

+  template<bool kToSpaceOnly, typename Visitor>
+  ALWAYS_INLINE void WalkInternal(Visitor&& visitor) NO_THREAD_SAFETY_ANALYSIS;
+
+  // Visitor will be iterating on objects in increasing address order.
+  template<typename Visitor>
+  ALWAYS_INLINE void WalkNonLargeRegion(Visitor&& visitor, const Region* r)
+      NO_THREAD_SAFETY_ANALYSIS;
+
   Region* RefToRegion(mirror::Object* ref) REQUIRES(!region_lock_) {
     MutexLock mu(Thread::Current(), region_lock_);
     return RefToRegionLocked(ref);
@@ -674,7 +681,7 @@ class RegionSpace final : public ContinuousMemMapAllocSpace {
   // - the region containing `obj` is fully used; and
   // - `obj` is not the last object of that region;
   // the returned location is not guaranteed to be a valid object.
-  mirror::Object* GetNextObject(mirror::Object* obj)
+  static mirror::Object* GetNextObject(mirror::Object* obj)
       REQUIRES_SHARED(Locks::mutator_lock_);

   void AdjustNonFreeRegionLimit(size_t new_non_free_region_index) REQUIRES(region_lock_) {
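GetNextObject can become static because the next object's location depends only on the current object's address and size, as the WalkNonLargeRegion comment notes; no RegionSpace state is involved. A sketch of that address arithmetic (assumed 8-byte alignment; simplified illustration, not the exact ART source):

    #include <cstddef>
    #include <cstdint>

    constexpr uintptr_t kAlignment = 8;  // Assumed allocation alignment.

    // next = RoundUp(obj_address + obj_size, kAlignment)
    uintptr_t NextObjectAddress(uintptr_t obj_address, size_t obj_size) {
      uintptr_t end = obj_address + obj_size;
      return (end + kAlignment - 1) & ~(kAlignment - 1);
    }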