ARM: Try to emit branches early to save memory.

Try to emit unexpanded branches when binding a Label to
reduce the memory needed for Fixup dependencies. We use
a simple heuristic to limit the time we spend trying,
but this heuristic is enough to resolve all branches in
the emitted String.equals() intrinsic.

Test: Run ART test suite on host and Nexus 6.
Test: Manually check memory usage for a certain apk.
Bug: 28603175
Change-Id: I362e07c2c1285ff609cf8df0a00b6b420729b9ce
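
For illustration only (not part of this patch; the `__` shorthand, the
registers and the exact assembler calls below are assumptions), a tiny loop
shows when the early emit fires:

  Label loop, done;
  __ Bind(&loop);                    // `loop` is bound; no fixups yet.
  __ cmp(R0, ShifterOperand(0));
  __ b(&done, EQ);                   // Forward branch: unresolved, stays a Fixup.
  __ sub(R0, R0, ShifterOperand(1));
  __ b(&loop);                       // Backward branch: resolved, but it jumps
                                     // over the still-unemitted b(&done, EQ).
  __ Bind(&done);                    // Both branches fit their 16-bit encodings,
                                     // so Bind() emits b(&loop), then b(&done, EQ),
                                     // and pops both from fixups_.

Without the early emit, both Fixups would stay in fixups_ until FinalizeCode()
and keep their dependency data alive for the whole method.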
diff --git a/compiler/utils/arm/assembler_thumb2.cc b/compiler/utils/arm/assembler_thumb2.cc
index ebdfc98..f5ccf40 100644
--- a/compiler/utils/arm/assembler_thumb2.cc
+++ b/compiler/utils/arm/assembler_thumb2.cc
@@ -2018,6 +2018,51 @@
   return adjustment;
 }
 
+bool Thumb2Assembler::Fixup::IsCandidateForEmitEarly() const {
+  DCHECK(size_ == original_size_);
+  if (target_ == kUnresolved) {
+    return false;
+  }
+  // GetOffset() does not depend on current_code_size for branches, only for literals.
+  constexpr uint32_t current_code_size = 0u;
+  switch (GetSize()) {
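+    // A 16-bit conditional branch (Bcc) encodes a signed 9-bit byte offset (imm8:'0');
+    // a 16-bit unconditional branch (B) encodes a signed 12-bit offset (imm11:'0').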
+    case kBranch16Bit:
+      return IsInt(cond_ != AL ? 9 : 12, GetOffset(current_code_size));
+    case kBranch32Bit:
+      // We don't support conditional branches beyond +-1MiB
+      // or unconditional branches beyond +-16MiB.
+      return true;
+
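+    // CBZ/CBNZ encode an unsigned 7-bit byte offset (i:imm5:'0') and can only
+    // branch forward; the 32-bit form is CMP + 16-bit Bcc (signed 9-bit offset).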
+    case kCbxz16Bit:
+      return IsUint<7>(GetOffset(current_code_size));
+    case kCbxz32Bit:
+      return IsInt<9>(GetOffset(current_code_size));
+    case kCbxz48Bit:
+      // We don't support conditional branches beyond +-1MiB.
+      return true;
+
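+    // Literal loads cannot be emitted early: their GetOffset() depends on the
+    // final code size (see the current_code_size comment above).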
+    case kLiteral1KiB:
+    case kLiteral4KiB:
+    case kLiteral64KiB:
+    case kLiteral1MiB:
+    case kLiteralFar:
+    case kLiteralAddr1KiB:
+    case kLiteralAddr4KiB:
+    case kLiteralAddr64KiB:
+    case kLiteralAddrFar:
+    case kLongOrFPLiteral1KiB:
+    case kLongOrFPLiteral64KiB:
+    case kLongOrFPLiteralFar:
+      return false;
+  }
+}
+
 uint32_t Thumb2Assembler::Fixup::AdjustSizeIfNeeded(uint32_t current_code_size) {
   uint32_t old_code_size = current_code_size;
   switch (GetSize()) {
@@ -3343,6 +3382,32 @@
 void Thumb2Assembler::Bind(Label* label) {
   BindLabel(label, buffer_.Size());
+
+  // Try to emit some Fixups now to reduce the memory needed during the branch fixup later.
+  while (!fixups_.empty() && fixups_.back().IsCandidateForEmitEarly()) {
+    const Fixup& last_fixup = fixups_.back();
+    // Fixups are ordered by location, so the candidate can surely be emitted if it is
+    // a forward branch. If it's a backward branch, it may go over any number of other
+    // fixups. We could check for any number of emit-early candidates, but we want this
+    // heuristic to be quick, so check just one.
+    uint32_t target = last_fixup.GetTarget();
+    if (target < last_fixup.GetLocation() &&
+        fixups_.size() >= 2u &&
+        fixups_[fixups_.size() - 2u].GetLocation() >= target) {
+      const Fixup& prev_fixup = fixups_[fixups_.size() - 2u];
+      if (!prev_fixup.IsCandidateForEmitEarly()) {
+        break;
+      }
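+      // prev_fixup will be emitted unexpanded right after last_fixup; bail out if
+      // a third fixup, which might still expand, lies within the span jumped over.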
+      uint32_t min_target = std::min(target, prev_fixup.GetTarget());
+      if (fixups_.size() >= 3u && fixups_[fixups_.size() - 3u].GetLocation() >= min_target) {
+        break;
+      }
+    }
+    last_fixup.Emit(&buffer_, buffer_.Size());
+    fixups_.pop_back();
+  }
 }