diff options
author | 2016-11-16 10:17:46 -0800 | |
---|---|---|
committer | 2016-11-30 13:21:21 -0800 | |
commit | 71bf7b43380eb445973f32a7f789d9670f8cc97d (patch) | |
tree | 76425f8578b247a845fe61724d71efc63059760f /compiler | |
parent | 52f52361c2ea37941fbda71e3c653bb8096bf516 (diff) |
Optimizations around escape analysis. With tests.
Details:
(1) added new intrinsics
(2) implemented optimizations
more !can-be-null information
more null check removals
replace return-this uses with incoming parameter
remove dead StringBuffer/Builder calls (with escape analysis)
(3) Fixed exposed bug in CanBeMoved()
Performance gain:
This improves CaffeineString by about 360%
(removes null check from first loop, eliminates second loop completely)
Test: test-art-host
Change-Id: Iaf16a1b9cab6a7386f43d71c6b51dd59600e81c1
Diffstat (limited to 'compiler')
-rw-r--r-- | compiler/intrinsics_list.h | 6 | ||||
-rw-r--r-- | compiler/optimizing/escape.cc | 49 | ||||
-rw-r--r-- | compiler/optimizing/escape.h | 23 | ||||
-rw-r--r-- | compiler/optimizing/instruction_simplifier.cc | 63 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_arm.cc | 6 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_arm64.cc | 6 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_arm_vixl.cc | 6 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_mips.cc | 6 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_mips64.cc | 6 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_x86.cc | 6 | ||||
-rw-r--r-- | compiler/optimizing/intrinsics_x86_64.cc | 6 | ||||
-rw-r--r-- | compiler/optimizing/load_store_elimination.cc | 29 | ||||
-rw-r--r-- | compiler/optimizing/nodes.h | 17 |
13 files changed, 184 insertions, 45 deletions
diff --git a/compiler/intrinsics_list.h b/compiler/intrinsics_list.h index 555baf6de9..9bd25d8484 100644 --- a/compiler/intrinsics_list.h +++ b/compiler/intrinsics_list.h @@ -117,6 +117,12 @@ V(StringNewStringFromBytes, kStatic, kNeedsEnvironmentOrCache, kAllSideEffects, kCanThrow, "Ljava/lang/StringFactory;", "newStringFromBytes", "([BIII)Ljava/lang/String;") \ V(StringNewStringFromChars, kStatic, kNeedsEnvironmentOrCache, kAllSideEffects, kCanThrow, "Ljava/lang/StringFactory;", "newStringFromChars", "(II[C)Ljava/lang/String;") \ V(StringNewStringFromString, kStatic, kNeedsEnvironmentOrCache, kAllSideEffects, kCanThrow, "Ljava/lang/StringFactory;", "newStringFromString", "(Ljava/lang/String;)Ljava/lang/String;") \ + V(StringBufferAppend, kVirtual, kNeedsEnvironmentOrCache, kAllSideEffects, kCanThrow, "Ljava/lang/StringBuffer;", "append", "(Ljava/lang/String;)Ljava/lang/StringBuffer;") \ + V(StringBufferLength, kVirtual, kNeedsEnvironmentOrCache, kAllSideEffects, kNoThrow, "Ljava/lang/StringBuffer;", "length", "()I") \ + V(StringBufferToString, kVirtual, kNeedsEnvironmentOrCache, kAllSideEffects, kCanThrow, "Ljava/lang/StringBuffer;", "toString", "()Ljava/lang/String;") \ + V(StringBuilderAppend, kVirtual, kNeedsEnvironmentOrCache, kAllSideEffects, kCanThrow, "Ljava/lang/StringBuilder;", "append", "(Ljava/lang/String;)Ljava/lang/StringBuilder;") \ + V(StringBuilderLength, kVirtual, kNeedsEnvironmentOrCache, kReadSideEffects, kNoThrow, "Ljava/lang/StringBuilder;", "length", "()I") \ + V(StringBuilderToString, kVirtual, kNeedsEnvironmentOrCache, kAllSideEffects, kCanThrow, "Ljava/lang/StringBuilder;", "toString", "()Ljava/lang/String;") \ V(UnsafeCASInt, kVirtual, kNeedsEnvironmentOrCache, kAllSideEffects, kCanThrow, "Lsun/misc/Unsafe;", "compareAndSwapInt", "(Ljava/lang/Object;JII)Z") \ V(UnsafeCASLong, kVirtual, kNeedsEnvironmentOrCache, kAllSideEffects, kCanThrow, "Lsun/misc/Unsafe;", "compareAndSwapLong", "(Ljava/lang/Object;JJJ)Z") \ V(UnsafeCASObject, kVirtual, 
kNeedsEnvironmentOrCache, kAllSideEffects, kCanThrow, "Lsun/misc/Unsafe;", "compareAndSwapObject", "(Ljava/lang/Object;JLjava/lang/Object;Ljava/lang/Object;)Z") \ diff --git a/compiler/optimizing/escape.cc b/compiler/optimizing/escape.cc index c80e19ef15..9df5bf1017 100644 --- a/compiler/optimizing/escape.cc +++ b/compiler/optimizing/escape.cc @@ -23,16 +23,19 @@ namespace art { void CalculateEscape(HInstruction* reference, bool (*no_escape)(HInstruction*, HInstruction*), /*out*/ bool* is_singleton, - /*out*/ bool* is_singleton_and_non_escaping) { + /*out*/ bool* is_singleton_and_not_returned, + /*out*/ bool* is_singleton_and_not_deopt_visible) { // For references not allocated in the method, don't assume anything. if (!reference->IsNewInstance() && !reference->IsNewArray()) { *is_singleton = false; - *is_singleton_and_non_escaping = false; + *is_singleton_and_not_returned = false; + *is_singleton_and_not_deopt_visible = false; return; } // Assume the best until proven otherwise. *is_singleton = true; - *is_singleton_and_non_escaping = true; + *is_singleton_and_not_returned = true; + *is_singleton_and_not_deopt_visible = true; // Visit all uses to determine if this reference can escape into the heap, // a method call, an alias, etc. for (const HUseListNode<HInstruction*>& use : reference->GetUses()) { @@ -45,7 +48,8 @@ void CalculateEscape(HInstruction* reference, // for the uncommon cases. Similarly, null checks are eventually eliminated for explicit // allocations, but if we see one before it is simplified, assume an alias. *is_singleton = false; - *is_singleton_and_non_escaping = false; + *is_singleton_and_not_returned = false; + *is_singleton_and_not_deopt_visible = false; return; } else if (user->IsPhi() || user->IsSelect() || user->IsInvoke() || (user->IsInstanceFieldSet() && (reference == user->InputAt(1))) || @@ -56,7 +60,8 @@ void CalculateEscape(HInstruction* reference, // The reference is merged to HPhi/HSelect, passed to a callee, or stored to heap. 
// Hence, the reference is no longer the only name that can refer to its value. *is_singleton = false; - *is_singleton_and_non_escaping = false; + *is_singleton_and_not_returned = false; + *is_singleton_and_not_deopt_visible = false; return; } else if ((user->IsUnresolvedInstanceFieldGet() && (reference == user->InputAt(0))) || (user->IsUnresolvedInstanceFieldSet() && (reference == user->InputAt(0)))) { @@ -64,37 +69,35 @@ void CalculateEscape(HInstruction* reference, // Note that we could optimize this case and still perform some optimizations until // we hit the unresolved access, but the conservative assumption is the simplest. *is_singleton = false; - *is_singleton_and_non_escaping = false; + *is_singleton_and_not_returned = false; + *is_singleton_and_not_deopt_visible = false; return; } else if (user->IsReturn()) { - *is_singleton_and_non_escaping = false; + *is_singleton_and_not_returned = false; } } - // Need for further analysis? - if (!*is_singleton_and_non_escaping) { - return; - } - - // Look at the environment uses and if it's for HDeoptimize, it's treated the - // same as a return which escapes at the end of executing the compiled code. - // Other environment uses are fine, as long as all client optimizations that - // rely on this informations are disabled for debuggable. + // Look at the environment uses if it's for HDeoptimize. Other environment uses are fine, + // as long as client optimizations that rely on this information are disabled for debuggable. 
for (const HUseListNode<HEnvironment*>& use : reference->GetEnvUses()) { HEnvironment* user = use.GetUser(); if (user->GetHolder()->IsDeoptimize()) { - *is_singleton_and_non_escaping = false; + *is_singleton_and_not_deopt_visible = false; break; } } } -bool IsNonEscapingSingleton(HInstruction* reference, - bool (*no_escape)(HInstruction*, HInstruction*)) { - bool is_singleton = true; - bool is_singleton_and_non_escaping = true; - CalculateEscape(reference, no_escape, &is_singleton, &is_singleton_and_non_escaping); - return is_singleton_and_non_escaping; +bool DoesNotEscape(HInstruction* reference, bool (*no_escape)(HInstruction*, HInstruction*)) { + bool is_singleton = false; + bool is_singleton_and_not_returned = false; + bool is_singleton_and_not_deopt_visible = false; // not relevant for escape + CalculateEscape(reference, + no_escape, + &is_singleton, + &is_singleton_and_not_returned, + &is_singleton_and_not_deopt_visible); + return is_singleton_and_not_returned; } } // namespace art diff --git a/compiler/optimizing/escape.h b/compiler/optimizing/escape.h index 6514843247..75e37b0551 100644 --- a/compiler/optimizing/escape.h +++ b/compiler/optimizing/escape.h @@ -31,9 +31,18 @@ class HInstruction; * allocation. The method assigns true to parameter 'is_singleton' if the reference * is the only name that can refer to its value during the lifetime of the method, * meaning that the reference is not aliased with something else, is not stored to - * heap memory, and not passed to another method. The method assigns true to parameter - * 'is_singleton_and_non_escaping' if the reference is a singleton and is not returned - * to the caller or used as an environment local of an HDeoptimize instruction. + * heap memory, and not passed to another method. 
In addition, the method assigns + * true to parameter 'is_singleton_and_not_returned' if the reference is a singleton + * and not returned to the caller and to parameter 'is_singleton_and_not_deopt_visible' + * if the reference is a singleton and not used as an environment local of an + * HDeoptimize instruction (clients of the final value must run after BCE to ensure + * all such instructions have been introduced already). + * + * Note that being visible to a HDeoptimize instruction does not count for ordinary + * escape analysis, since switching between compiled code and interpreted code keeps + * non escaping references restricted to the lifetime of the method and the thread + * executing it. This property only concerns optimizations that are interested in + * escape analysis with respect to the *compiled* code (such as LSE). * * When set, the no_escape function is applied to any use of the allocation instruction * prior to any built-in escape analysis. This allows clients to define better escape @@ -45,14 +54,14 @@ class HInstruction; void CalculateEscape(HInstruction* reference, bool (*no_escape)(HInstruction*, HInstruction*), /*out*/ bool* is_singleton, - /*out*/ bool* is_singleton_and_non_escaping); + /*out*/ bool* is_singleton_and_not_returned, + /*out*/ bool* is_singleton_and_not_deopt_visible); /* - * Convenience method for testing singleton and non-escaping property at once. + * Convenience method for testing the singleton and not returned properties at once. * Callers should be aware that this method invokes the full analysis at each call. 
*/ -bool IsNonEscapingSingleton(HInstruction* reference, - bool (*no_escape)(HInstruction*, HInstruction*)); +bool DoesNotEscape(HInstruction* reference, bool (*no_escape)(HInstruction*, HInstruction*)); } // namespace art diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc index 85b461dcf6..658b80468e 100644 --- a/compiler/optimizing/instruction_simplifier.cc +++ b/compiler/optimizing/instruction_simplifier.cc @@ -16,6 +16,7 @@ #include "instruction_simplifier.h" +#include "escape.h" #include "intrinsics.h" #include "mirror/class-inl.h" #include "scoped_thread_state_change-inl.h" @@ -107,6 +108,8 @@ class InstructionSimplifierVisitor : public HGraphDelegateVisitor { void SimplifyStringCharAt(HInvoke* invoke); void SimplifyStringIsEmptyOrLength(HInvoke* invoke); void SimplifyNPEOnArgN(HInvoke* invoke, size_t); + void SimplifyReturnThis(HInvoke* invoke); + void SimplifyAllocationIntrinsic(HInvoke* invoke); void SimplifyMemBarrier(HInvoke* invoke, MemBarrierKind barrier_kind); OptimizingCompilerStats* stats_; @@ -1864,11 +1867,61 @@ void InstructionSimplifierVisitor::SimplifyStringIsEmptyOrLength(HInvoke* invoke // is provably non-null, we can clear the flag. void InstructionSimplifierVisitor::SimplifyNPEOnArgN(HInvoke* invoke, size_t n) { HInstruction* arg = invoke->InputAt(n); - if (!arg->CanBeNull()) { + if (invoke->CanThrow() && !arg->CanBeNull()) { invoke->SetCanThrow(false); } } +// Methods that return "this" can replace the returned value with the receiver. +void InstructionSimplifierVisitor::SimplifyReturnThis(HInvoke* invoke) { + if (invoke->HasUses()) { + HInstruction* receiver = invoke->InputAt(0); + invoke->ReplaceWith(receiver); + RecordSimplification(); + } +} + +// Helper method for StringBuffer escape analysis. +static bool NoEscapeForStringBufferReference(HInstruction* reference, HInstruction* user) { + if (user->IsInvokeStaticOrDirect()) { + // Any constructor on StringBuffer is okay. 
+ return user->AsInvokeStaticOrDirect()->GetResolvedMethod()->IsConstructor() && + user->InputAt(0) == reference; + } else if (user->IsInvokeVirtual()) { + switch (user->AsInvokeVirtual()->GetIntrinsic()) { + case Intrinsics::kStringBufferLength: + case Intrinsics::kStringBufferToString: + DCHECK_EQ(user->InputAt(0), reference); + return true; + case Intrinsics::kStringBufferAppend: + // Returns "this", so only okay if no further uses. + DCHECK_EQ(user->InputAt(0), reference); + DCHECK_NE(user->InputAt(1), reference); + return !user->HasUses(); + default: + break; + } + } + return false; +} + +// Certain allocation intrinsics are not removed by dead code elimination +// because of potentially throwing an OOM exception or other side effects. +// This method removes such intrinsics when special circumstances allow. +void InstructionSimplifierVisitor::SimplifyAllocationIntrinsic(HInvoke* invoke) { + if (!invoke->HasUses()) { + // Instruction has no uses. If unsynchronized, we can remove right away, safely ignoring + // the potential OOM of course. Otherwise, we must ensure the receiver object of this + // call does not escape since only thread-local synchronization may be removed. 
+ bool is_synchronized = invoke->GetIntrinsic() == Intrinsics::kStringBufferToString; + HInstruction* receiver = invoke->InputAt(0); + if (!is_synchronized || DoesNotEscape(receiver, NoEscapeForStringBufferReference)) { + invoke->GetBlock()->RemoveInstruction(invoke); + RecordSimplification(); + } + } +} + void InstructionSimplifierVisitor::SimplifyMemBarrier(HInvoke* invoke, MemBarrierKind barrier_kind) { uint32_t dex_pc = invoke->GetDexPc(); HMemoryBarrier* mem_barrier = new (GetGraph()->GetArena()) HMemoryBarrier(barrier_kind, dex_pc); @@ -1926,6 +1979,14 @@ void InstructionSimplifierVisitor::VisitInvoke(HInvoke* instruction) { case Intrinsics::kStringStringIndexOfAfter: SimplifyNPEOnArgN(instruction, 1); // 0th has own NullCheck break; + case Intrinsics::kStringBufferAppend: + case Intrinsics::kStringBuilderAppend: + SimplifyReturnThis(instruction); + break; + case Intrinsics::kStringBufferToString: + case Intrinsics::kStringBuilderToString: + SimplifyAllocationIntrinsic(instruction); + break; case Intrinsics::kUnsafeLoadFence: SimplifyMemBarrier(instruction, MemBarrierKind::kLoadAny); break; diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc index 8234b2467d..8f64faeac0 100644 --- a/compiler/optimizing/intrinsics_arm.cc +++ b/compiler/optimizing/intrinsics_arm.cc @@ -2613,6 +2613,12 @@ UNIMPLEMENTED_INTRINSIC(ARM, LongLowestOneBit) UNIMPLEMENTED_INTRINSIC(ARM, StringStringIndexOf); UNIMPLEMENTED_INTRINSIC(ARM, StringStringIndexOfAfter); +UNIMPLEMENTED_INTRINSIC(ARM, StringBufferAppend); +UNIMPLEMENTED_INTRINSIC(ARM, StringBufferLength); +UNIMPLEMENTED_INTRINSIC(ARM, StringBufferToString); +UNIMPLEMENTED_INTRINSIC(ARM, StringBuilderAppend); +UNIMPLEMENTED_INTRINSIC(ARM, StringBuilderLength); +UNIMPLEMENTED_INTRINSIC(ARM, StringBuilderToString); // 1.8. 
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeGetAndAddInt) diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc index 17a97da6cc..d8a896e926 100644 --- a/compiler/optimizing/intrinsics_arm64.cc +++ b/compiler/optimizing/intrinsics_arm64.cc @@ -2781,6 +2781,12 @@ UNIMPLEMENTED_INTRINSIC(ARM64, LongLowestOneBit) UNIMPLEMENTED_INTRINSIC(ARM64, StringStringIndexOf); UNIMPLEMENTED_INTRINSIC(ARM64, StringStringIndexOfAfter); +UNIMPLEMENTED_INTRINSIC(ARM64, StringBufferAppend); +UNIMPLEMENTED_INTRINSIC(ARM64, StringBufferLength); +UNIMPLEMENTED_INTRINSIC(ARM64, StringBufferToString); +UNIMPLEMENTED_INTRINSIC(ARM64, StringBuilderAppend); +UNIMPLEMENTED_INTRINSIC(ARM64, StringBuilderLength); +UNIMPLEMENTED_INTRINSIC(ARM64, StringBuilderToString); // 1.8. UNIMPLEMENTED_INTRINSIC(ARM64, UnsafeGetAndAddInt) diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc index c8e3534164..8059dd66fb 100644 --- a/compiler/optimizing/intrinsics_arm_vixl.cc +++ b/compiler/optimizing/intrinsics_arm_vixl.cc @@ -2703,6 +2703,12 @@ UNIMPLEMENTED_INTRINSIC(ARMVIXL, LongLowestOneBit) UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringStringIndexOf); UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringStringIndexOfAfter); +UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringBufferAppend); +UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringBufferLength); +UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringBufferToString); +UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringBuilderAppend); +UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringBuilderLength); +UNIMPLEMENTED_INTRINSIC(ARMVIXL, StringBuilderToString); // 1.8. 
UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeGetAndAddInt) diff --git a/compiler/optimizing/intrinsics_mips.cc b/compiler/optimizing/intrinsics_mips.cc index 7c81588cda..9b5d7a02dd 100644 --- a/compiler/optimizing/intrinsics_mips.cc +++ b/compiler/optimizing/intrinsics_mips.cc @@ -2497,6 +2497,12 @@ UNIMPLEMENTED_INTRINSIC(MIPS, MathTanh) UNIMPLEMENTED_INTRINSIC(MIPS, StringStringIndexOf); UNIMPLEMENTED_INTRINSIC(MIPS, StringStringIndexOfAfter); +UNIMPLEMENTED_INTRINSIC(MIPS, StringBufferAppend); +UNIMPLEMENTED_INTRINSIC(MIPS, StringBufferLength); +UNIMPLEMENTED_INTRINSIC(MIPS, StringBufferToString); +UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderAppend); +UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderLength); +UNIMPLEMENTED_INTRINSIC(MIPS, StringBuilderToString); // 1.8. UNIMPLEMENTED_INTRINSIC(MIPS, UnsafeGetAndAddInt) diff --git a/compiler/optimizing/intrinsics_mips64.cc b/compiler/optimizing/intrinsics_mips64.cc index 2d4f417b14..5a998861eb 100644 --- a/compiler/optimizing/intrinsics_mips64.cc +++ b/compiler/optimizing/intrinsics_mips64.cc @@ -1949,6 +1949,12 @@ UNIMPLEMENTED_INTRINSIC(MIPS64, MathTanh) UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOf); UNIMPLEMENTED_INTRINSIC(MIPS64, StringStringIndexOfAfter); +UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferAppend); +UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferLength); +UNIMPLEMENTED_INTRINSIC(MIPS64, StringBufferToString); +UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderAppend); +UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderLength); +UNIMPLEMENTED_INTRINSIC(MIPS64, StringBuilderToString); // 1.8. 
UNIMPLEMENTED_INTRINSIC(MIPS64, UnsafeGetAndAddInt) diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc index 06ab46f536..922c3bcac9 100644 --- a/compiler/optimizing/intrinsics_x86.cc +++ b/compiler/optimizing/intrinsics_x86.cc @@ -3331,6 +3331,12 @@ UNIMPLEMENTED_INTRINSIC(X86, LongLowestOneBit) UNIMPLEMENTED_INTRINSIC(X86, StringStringIndexOf); UNIMPLEMENTED_INTRINSIC(X86, StringStringIndexOfAfter); +UNIMPLEMENTED_INTRINSIC(X86, StringBufferAppend); +UNIMPLEMENTED_INTRINSIC(X86, StringBufferLength); +UNIMPLEMENTED_INTRINSIC(X86, StringBufferToString); +UNIMPLEMENTED_INTRINSIC(X86, StringBuilderAppend); +UNIMPLEMENTED_INTRINSIC(X86, StringBuilderLength); +UNIMPLEMENTED_INTRINSIC(X86, StringBuilderToString); // 1.8. UNIMPLEMENTED_INTRINSIC(X86, UnsafeGetAndAddInt) diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc index 2ea8670100..05d270a4e6 100644 --- a/compiler/optimizing/intrinsics_x86_64.cc +++ b/compiler/optimizing/intrinsics_x86_64.cc @@ -3000,6 +3000,12 @@ UNIMPLEMENTED_INTRINSIC(X86_64, DoubleIsInfinite) UNIMPLEMENTED_INTRINSIC(X86_64, StringStringIndexOf); UNIMPLEMENTED_INTRINSIC(X86_64, StringStringIndexOfAfter); +UNIMPLEMENTED_INTRINSIC(X86_64, StringBufferAppend); +UNIMPLEMENTED_INTRINSIC(X86_64, StringBufferLength); +UNIMPLEMENTED_INTRINSIC(X86_64, StringBufferToString); +UNIMPLEMENTED_INTRINSIC(X86_64, StringBuilderAppend); +UNIMPLEMENTED_INTRINSIC(X86_64, StringBuilderLength); +UNIMPLEMENTED_INTRINSIC(X86_64, StringBuilderToString); // 1.8. 
UNIMPLEMENTED_INTRINSIC(X86_64, UnsafeGetAndAddInt) diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc index edecf17f33..2856c3ea11 100644 --- a/compiler/optimizing/load_store_elimination.cc +++ b/compiler/optimizing/load_store_elimination.cc @@ -37,8 +37,13 @@ class ReferenceInfo : public ArenaObject<kArenaAllocMisc> { : reference_(reference), position_(pos), is_singleton_(true), - is_singleton_and_non_escaping_(true) { - CalculateEscape(reference_, nullptr, &is_singleton_, &is_singleton_and_non_escaping_); + is_singleton_and_not_returned_(true), + is_singleton_and_not_deopt_visible_(true) { + CalculateEscape(reference_, + nullptr, + &is_singleton_, + &is_singleton_and_not_returned_, + &is_singleton_and_not_deopt_visible_); } HInstruction* GetReference() const { @@ -59,19 +64,17 @@ class ReferenceInfo : public ArenaObject<kArenaAllocMisc> { // Returns true if reference_ is a singleton and not returned to the caller or // used as an environment local of an HDeoptimize instruction. // The allocation and stores into reference_ may be eliminated for such cases. - bool IsSingletonAndNonEscaping() const { - return is_singleton_and_non_escaping_; + bool IsSingletonAndRemovable() const { + return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_; } private: HInstruction* const reference_; - const size_t position_; // position in HeapLocationCollector's ref_info_array_. - bool is_singleton_; // can only be referred to by a single name in the method. + const size_t position_; // position in HeapLocationCollector's ref_info_array_. - // reference_ is singleton and does not escape in the end either by - // returning to the caller, or being used as an environment local of an - // HDeoptimize instruction. 
- bool is_singleton_and_non_escaping_; + bool is_singleton_; // can only be referred to by a single name in the method, + bool is_singleton_and_not_returned_; // and not returned to caller, + bool is_singleton_and_not_deopt_visible_; // and not used as an environment local of HDeoptimize. DISALLOW_COPY_AND_ASSIGN(ReferenceInfo); }; @@ -623,7 +626,7 @@ class LSEVisitor : public HGraphVisitor { bool from_all_predecessors = true; ReferenceInfo* ref_info = heap_location_collector_.GetHeapLocation(i)->GetReferenceInfo(); HInstruction* singleton_ref = nullptr; - if (ref_info->IsSingletonAndNonEscaping()) { + if (ref_info->IsSingletonAndRemovable()) { // We do more analysis of liveness when merging heap values for such // cases since stores into such references may potentially be eliminated. singleton_ref = ref_info->GetReference(); @@ -796,7 +799,7 @@ class LSEVisitor : public HGraphVisitor { } else if (index != nullptr) { // For array element, don't eliminate stores since it can be easily aliased // with non-constant index. - } else if (ref_info->IsSingletonAndNonEscaping()) { + } else if (ref_info->IsSingletonAndRemovable()) { // Store into a field of a singleton that's not returned. The value cannot be // killed due to aliasing/invocation. It can be redundant since future loads can // directly get the value set by this instruction. The value can still be killed due to @@ -970,7 +973,7 @@ class LSEVisitor : public HGraphVisitor { // new_instance isn't used for field accesses. No need to process it. 
return; } - if (ref_info->IsSingletonAndNonEscaping() && + if (ref_info->IsSingletonAndRemovable() && !new_instance->IsFinalizable() && !new_instance->NeedsAccessCheck()) { singleton_new_instances_.push_back(new_instance); diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h index eebc49c991..0734fd18f0 100644 --- a/compiler/optimizing/nodes.h +++ b/compiler/optimizing/nodes.h @@ -2072,6 +2072,8 @@ class HInstruction : public ArenaObject<kArenaAllocInstruction> { #undef INSTRUCTION_TYPE_CHECK // Returns whether the instruction can be moved within the graph. + // TODO: this method is used by LICM and GVN with possibly different + // meanings? split and rename? virtual bool CanBeMoved() const { return false; } // Returns whether the two instructions are of the same kind. @@ -3789,7 +3791,7 @@ class HInvoke : public HInstruction { bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); } - bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); } + bool CanBeMoved() const OVERRIDE { return IsIntrinsic() && !DoesAnyWrite(); } bool InstructionDataEquals(const HInstruction* other) const OVERRIDE { return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_; @@ -4181,6 +4183,19 @@ class HInvokeVirtual FINAL : public HInvoke { kVirtual), vtable_index_(vtable_index) {} + bool CanBeNull() const OVERRIDE { + switch (GetIntrinsic()) { + case Intrinsics::kThreadCurrentThread: + case Intrinsics::kStringBufferAppend: + case Intrinsics::kStringBufferToString: + case Intrinsics::kStringBuilderAppend: + case Intrinsics::kStringBuilderToString: + return false; + default: + return HInvoke::CanBeNull(); + } + } + bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE { // TODO: Add implicit null checks in intrinsics. return (obj == InputAt(0)) && !GetLocations()->Intrinsified(); |