Diffstat:
 compiler/optimizing/nodes.cc | 118 ++++++++++++++++++++++++++++++++++-------
 1 file changed, 103 insertions(+), 15 deletions(-)
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index 926bc156cf..8de9700250 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -198,10 +198,38 @@ void HGraph::ComputeDominanceInformation() {
}
}
-void HGraph::TransformToSsa() {
- DCHECK(!reverse_post_order_.empty());
- SsaBuilder ssa_builder(this);
- ssa_builder.BuildSsa();
+BuildSsaResult HGraph::TryBuildingSsa(StackHandleScopeCollection* handles) {
+ BuildDominatorTree();
+
+ // The SSA builder requires all loops to be natural. Specifically, the dead phi
+ // elimination phase checks the consistency of the graph when doing a post-order
+ // visit for eliminating dead phis: a dead phi can only have loop header phi
+ // users remaining when being visited.
+ BuildSsaResult result = AnalyzeNaturalLoops();
+ if (result != kBuildSsaSuccess) {
+ return result;
+ }
+
+ // Precompute per-block try membership before entering the SSA builder,
+ // which needs the information to build catch block phis from values of
+ // locals at throwing instructions inside try blocks.
+ ComputeTryBlockInformation();
+
+ // Create the inexact Object reference type and store it in the HGraph.
+ ScopedObjectAccess soa(Thread::Current());
+ ClassLinker* linker = Runtime::Current()->GetClassLinker();
+ inexact_object_rti_ = ReferenceTypeInfo::Create(
+ handles->NewHandle(linker->GetClassRoot(ClassLinker::kJavaLangObject)),
+ /* is_exact */ false);
+
+ // Transforms the graph to SSA form.
+ result = SsaBuilder(this, handles).BuildSsa();
+ if (result != kBuildSsaSuccess) {
+ return result;
+ }
+
+ in_ssa_form_ = true;
+ return kBuildSsaSuccess;
}
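
As a sketch of how a caller might consume the new BuildSsaResult rather than a bare bool (the caller, and what it does on failure, are assumptions, not part of this change):

  BuildSsaResult result = graph->TryBuildingSsa(handles);
  switch (result) {
    case kBuildSsaSuccess:
      break;  // The graph is now in SSA form; optimizations may run.
    case kBuildSsaFailNonNaturalLoop:
    case kBuildSsaFailThrowCatchLoop:
      // Each failure mode can now be recorded separately before bailing
      // out of compiling this method.
      return nullptr;
  }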
HBasicBlock* HGraph::SplitEdge(HBasicBlock* block, HBasicBlock* successor) {
@@ -410,7 +438,7 @@ void HGraph::SimplifyCFG() {
}
}
-bool HGraph::AnalyzeNaturalLoops() const {
+BuildSsaResult HGraph::AnalyzeNaturalLoops() const {
// Order does not matter.
for (HReversePostOrderIterator it(*this); !it.Done(); it.Advance()) {
HBasicBlock* block = it.Current();
@@ -418,16 +446,16 @@ bool HGraph::AnalyzeNaturalLoops() const {
if (block->IsCatchBlock()) {
// TODO: Dealing with exceptional back edges could be tricky because
// they only approximate the real control flow. Bail out for now.
- return false;
+ return kBuildSsaFailThrowCatchLoop;
}
HLoopInformation* info = block->GetLoopInformation();
if (!info->Populate()) {
// Abort if the loop is not natural. We currently bail out in such cases.
- return false;
+ return kBuildSsaFailNonNaturalLoop;
}
}
}
- return true;
+ return kBuildSsaSuccess;
}
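
The BuildSsaResult enum itself lives in nodes.h and is not shown in this diff; reconstructed from the values referenced here, it presumably looks roughly like:

  // Sketch only; the real declaration may carry additional failure modes.
  enum BuildSsaResult {
    kBuildSsaFailNonNaturalLoop,
    kBuildSsaFailThrowCatchLoop,
    kBuildSsaSuccess,
  };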
void HGraph::InsertConstant(HConstant* constant) {
@@ -446,8 +474,13 @@ HNullConstant* HGraph::GetNullConstant(uint32_t dex_pc) {
// id and/or any invariants the graph is assuming when adding new instructions.
if ((cached_null_constant_ == nullptr) || (cached_null_constant_->GetBlock() == nullptr)) {
cached_null_constant_ = new (arena_) HNullConstant(dex_pc);
+ cached_null_constant_->SetReferenceTypeInfo(inexact_object_rti_);
InsertConstant(cached_null_constant_);
}
+ if (kIsDebugBuild) {
+ ScopedObjectAccess soa(Thread::Current());
+ DCHECK(cached_null_constant_->GetReferenceTypeInfo().IsValid());
+ }
return cached_null_constant_;
}
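
The debug-only ScopedObjectAccess block above is there presumably because ReferenceTypeInfo::IsValid() inspects the underlying type handle and is annotated to require the mutator lock, roughly:

  // Assumed shape of the accessor (in nodes.h, not part of this diff):
  bool IsValid() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return GetTypeHandle().GetReference() != nullptr;
  }

Gating the lock acquisition on kIsDebugBuild keeps release builds from paying for it.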
@@ -777,6 +810,10 @@ void HEnvironment::RemoveAsUserOfInput(size_t index) const {
user_record.GetInstruction()->RemoveEnvironmentUser(user_record.GetUseNode());
}
+HInstruction::InstructionKind HInstruction::GetKind() const {
+ return GetKindInternal();
+}
+
HInstruction* HInstruction::GetNextDisregardingMoves() const {
HInstruction* next = GetNext();
while (next != nullptr && next->IsParallelMove()) {
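
The new out-of-line GetKind() suggests a delegation pattern in which the public accessor is non-virtual and forwards to a virtual hook; a sketch of the assumed declarations in nodes.h (not part of this diff):

  class HInstruction {
   public:
    InstructionKind GetKind() const;  // non-virtual; defined in nodes.cc
   private:
    // Overridden by each concrete instruction via DECLARE_INSTRUCTION.
    virtual InstructionKind GetKindInternal() const = 0;
  };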
@@ -960,7 +997,7 @@ void H##name::Accept(HGraphVisitor* visitor) { \
visitor->Visit##name(this); \
}
-FOR_EACH_INSTRUCTION(DEFINE_ACCEPT)
+FOR_EACH_CONCRETE_INSTRUCTION(DEFINE_ACCEPT)
#undef DEFINE_ACCEPT
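
DEFINE_ACCEPT is an X-macro applied once per instruction name by the FOR_EACH_* list macro; switching to FOR_EACH_CONCRETE_INSTRUCTION stops generating Accept() bodies for abstract instruction classes. With a toy list M(Add) M(Sub), the expansion would be:

  void HAdd::Accept(HGraphVisitor* visitor) { visitor->VisitAdd(this); }
  void HSub::Accept(HGraphVisitor* visitor) { visitor->VisitSub(this); }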
@@ -2023,6 +2060,16 @@ void HGraph::TransformLoopHeaderForBCE(HBasicBlock* header) {
new_pre_header->SetTryCatchInformation(try_catch_info);
}
+static void CheckAgainstUpperBound(ReferenceTypeInfo rti, ReferenceTypeInfo upper_bound_rti)
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ if (rti.IsValid()) {
+ DCHECK(upper_bound_rti.IsSupertypeOf(rti))
+ << " upper_bound_rti: " << upper_bound_rti
+ << " rti: " << rti;
+ DCHECK(!upper_bound_rti.GetTypeHandle()->CannotBeAssignedFromOtherTypes() || rti.IsExact());
+ }
+}
+
void HInstruction::SetReferenceTypeInfo(ReferenceTypeInfo rti) {
if (kIsDebugBuild) {
DCHECK_EQ(GetType(), Primitive::kPrimNot);
@@ -2031,16 +2078,23 @@ void HInstruction::SetReferenceTypeInfo(ReferenceTypeInfo rti) {
if (IsBoundType()) {
// Having the test here spares us from making the method virtual just for
// the sake of a DCHECK.
- ReferenceTypeInfo upper_bound_rti = AsBoundType()->GetUpperBound();
- DCHECK(upper_bound_rti.IsSupertypeOf(rti))
- << " upper_bound_rti: " << upper_bound_rti
- << " rti: " << rti;
- DCHECK(!upper_bound_rti.GetTypeHandle()->CannotBeAssignedFromOtherTypes() || rti.IsExact());
+ CheckAgainstUpperBound(rti, AsBoundType()->GetUpperBound());
}
}
reference_type_info_ = rti;
}
+void HBoundType::SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null) {
+ if (kIsDebugBuild) {
+ ScopedObjectAccess soa(Thread::Current());
+ DCHECK(upper_bound.IsValid());
+ DCHECK(!upper_bound_.IsValid()) << "Upper bound should only be set once.";
+ CheckAgainstUpperBound(GetReferenceTypeInfo(), upper_bound);
+ }
+ upper_bound_ = upper_bound;
+ upper_can_be_null_ = can_be_null;
+}
+
ReferenceTypeInfo::ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
ReferenceTypeInfo::ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
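
The "set once" DCHECK in SetUpperBound() implies HBoundType is now constructed without an upper bound, with the bound attached later (e.g. by reference type propagation). A hypothetical call site under that assumption:

  // Narrow the type of `obj` after a type check; names are illustrative.
  HBoundType* bound_type = new (GetGraph()->GetArena()) HBoundType(obj);
  bound_type->SetUpperBound(class_rti, /* can_be_null */ false);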
@@ -2087,12 +2141,31 @@ bool HInstruction::HasAnyEnvironmentUseBefore(HInstruction* other) {
}
void HInvoke::SetIntrinsic(Intrinsics intrinsic,
- IntrinsicNeedsEnvironmentOrCache needs_env_or_cache) {
+ IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
+ IntrinsicSideEffects side_effects,
+ IntrinsicExceptions exceptions) {
intrinsic_ = intrinsic;
IntrinsicOptimizations opt(this);
+
+ // Adjust method's side effects from intrinsic table.
+ switch (side_effects) {
+ case kNoSideEffects: SetSideEffects(SideEffects::None()); break;
+ case kReadSideEffects: SetSideEffects(SideEffects::AllReads()); break;
+ case kWriteSideEffects: SetSideEffects(SideEffects::AllWrites()); break;
+ case kAllSideEffects: SetSideEffects(SideEffects::AllExceptGCDependency()); break;
+ }
+
if (needs_env_or_cache == kNoEnvironmentOrCache) {
opt.SetDoesNotNeedDexCache();
opt.SetDoesNotNeedEnvironment();
+ } else {
+ // If we need an environment, that means there will be a call, which can trigger GC.
+ SetSideEffects(GetSideEffects().Union(SideEffects::CanTriggerGC()));
+ }
+ // Adjust method's exception status from intrinsic table.
+ switch (exceptions) {
+ case kNoThrow: SetCanThrow(false); break;
+ case kCanThrow: SetCanThrow(true); break;
}
}
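
A hypothetical call site under the widened SetIntrinsic() signature; the enum values match this diff, while the chosen intrinsic is merely illustrative of a pure, non-throwing case:

  invoke->SetIntrinsic(Intrinsics::kFloatFloatToRawIntBits,
                       kNoEnvironmentOrCache,  // no call, so no GC dependency
                       kNoSideEffects,         // maps to SideEffects::None()
                       kNoThrow);              // maps to SetCanThrow(false)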
@@ -2220,4 +2293,19 @@ HInstruction* HGraph::InsertOppositeCondition(HInstruction* cond, HInstruction*
}
}
+std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs) {
+ os << "["
+ << " source=" << rhs.GetSource()
+ << " destination=" << rhs.GetDestination()
+ << " type=" << rhs.GetType()
+ << " instruction=";
+ if (rhs.GetInstruction() != nullptr) {
+ os << rhs.GetInstruction()->DebugName() << ' ' << rhs.GetInstruction()->GetId();
+ } else {
+ os << "null";
+ }
+ os << " ]";
+ return os;
+}
+
} // namespace art
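
Usage sketch for the new MoveOperands printer, e.g. when tracing parallel moves in the register allocator (the surrounding variables are assumptions):

  // Dump every move scheduled in an HParallelMove while debugging.
  for (size_t i = 0; i < parallel_move->NumMoves(); ++i) {
    LOG(INFO) << *parallel_move->MoveOperandsAt(i);
    // e.g. "[ source=... destination=... type=... instruction=null ]"
  }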