Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/builder.cc                      | 14
-rw-r--r--  compiler/optimizing/reference_type_propagation.cc   | 36
-rw-r--r--  compiler/optimizing/reference_type_propagation.h    |  4
3 files changed, 33 insertions, 21 deletions
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index f98029da03..f3f82417f7 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -723,10 +723,16 @@ bool HGraphBuilder::BuildInvoke(const Instruction& instruction,
}
}
- invoke = new (arena_) HInvokeStaticOrDirect(
- arena_, number_of_arguments, return_type, dex_pc, target_method.dex_method_index,
- is_recursive, string_init_offset, invoke_type, optimized_invoke_type,
- clinit_check_requirement);
+ invoke = new (arena_) HInvokeStaticOrDirect(arena_,
+ number_of_arguments,
+ return_type,
+ dex_pc,
+ target_method.dex_method_index,
+ is_recursive,
+ string_init_offset,
+ invoke_type,
+ optimized_invoke_type,
+ clinit_check_requirement);
}
size_t start_index = 0;
diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc
index 4f1f45769d..615e019b3f 100644
--- a/compiler/optimizing/reference_type_propagation.cc
+++ b/compiler/optimizing/reference_type_propagation.cc
@@ -27,7 +27,7 @@ void ReferenceTypePropagation::Run() {
// To properly propagate type info we need to visit in the dominator-based order.
// Reverse post order guarantees a node's dominators are visited first.
// We take advantage of this order in `VisitBasicBlock`.
- for (HReversePostOrderIterator it(*graph_); !it.Done(); it.Advance()) {
+ for (HReversePostOrderIterator it(*GetGraph()); !it.Done(); it.Advance()) {
VisitBasicBlock(it.Current());
}
ProcessWorklist();
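
As the comment in Run() notes, reverse post order guarantees that a block's dominators are visited before the block itself, which is what lets type info flow downward in a single sweep plus a worklist. A small standalone sketch of that ordering on an acyclic diamond CFG, using hypothetical graph structs rather than ART's HGraph:

#include <iostream>
#include <vector>

struct Block {
  int id;
  std::vector<int> successors;
};

void PostOrder(const std::vector<Block>& blocks, int current,
               std::vector<bool>* visited, std::vector<int>* order) {
  (*visited)[current] = true;
  for (int succ : blocks[current].successors) {
    if (!(*visited)[succ]) {
      PostOrder(blocks, succ, visited, order);
    }
  }
  // A block is emitted only after everything reachable from it.
  order->push_back(current);
}

int main() {
  // Diamond CFG: 0 -> {1, 2}, 1 -> 3, 2 -> 3.  Block 0 dominates 1, 2 and 3.
  std::vector<Block> blocks = {{0, {1, 2}}, {1, {3}}, {2, {3}}, {3, {}}};
  std::vector<bool> visited(blocks.size(), false);
  std::vector<int> order;
  PostOrder(blocks, /*current=*/0, &visited, &order);

  std::cout << "reverse post order:";
  for (auto it = order.rbegin(); it != order.rend(); ++it) {
    std::cout << ' ' << *it;  // prints 0 first, before every block it dominates
  }
  std::cout << '\n';
  return 0;
}

In this acyclic example the reversed order is 0 2 1 3, so each block appears after its dominator, which is the property the propagation relies on.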
@@ -35,23 +35,12 @@ void ReferenceTypePropagation::Run() {
void ReferenceTypePropagation::VisitBasicBlock(HBasicBlock* block) {
// TODO: handle other instructions that give type info
- // (Call/array accesses)
+ // (array accesses)
// Initialize exact types first for faster convergence.
for (HInstructionIterator it(block->GetInstructions()); !it.Done(); it.Advance()) {
HInstruction* instr = it.Current();
- // TODO: Make ReferenceTypePropagation a visitor or create a new one.
- if (instr->IsNewInstance()) {
- VisitNewInstance(instr->AsNewInstance());
- } else if (instr->IsLoadClass()) {
- VisitLoadClass(instr->AsLoadClass());
- } else if (instr->IsNewArray()) {
- VisitNewArray(instr->AsNewArray());
- } else if (instr->IsInstanceFieldGet()) {
- VisitInstanceFieldGet(instr->AsInstanceFieldGet());
- } else if (instr->IsStaticFieldGet()) {
- VisitStaticFieldGet(instr->AsStaticFieldGet());
- }
+ instr->Accept(this);
}
// Handle Phis.
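
The key change in this hunk is that the hand-written IsXyz()/AsXyz() dispatch is replaced by instr->Accept(this), which relies on visitor double dispatch: each instruction kind calls back into the matching Visit* overload, and a delegating visitor lets a single overload such as VisitInvoke cover a whole family of invoke instructions. A minimal standalone sketch of that mechanism, with hypothetical class names rather than ART's H* hierarchy:

#include <iostream>

class NewInstance;
class InvokeStaticOrDirect;

class Instruction {
 public:
  virtual ~Instruction() {}
  virtual void Accept(class Visitor* visitor) = 0;
};

class Visitor {
 public:
  virtual ~Visitor() {}
  virtual void VisitInstruction(Instruction*) {}
  // Family fallback: every concrete invoke kind funnels into VisitInvoke,
  // mirroring what a delegating graph visitor provides.
  virtual void VisitInvoke(Instruction* instr) { VisitInstruction(instr); }
  virtual void VisitNewInstance(NewInstance* instr);
  virtual void VisitInvokeStaticOrDirect(InvokeStaticOrDirect* instr);
};

class NewInstance : public Instruction {
 public:
  void Accept(Visitor* visitor) override { visitor->VisitNewInstance(this); }
};

class InvokeStaticOrDirect : public Instruction {
 public:
  void Accept(Visitor* visitor) override { visitor->VisitInvokeStaticOrDirect(this); }
};

void Visitor::VisitNewInstance(NewInstance* instr) { VisitInstruction(instr); }
void Visitor::VisitInvokeStaticOrDirect(InvokeStaticOrDirect* instr) { VisitInvoke(instr); }

// The pass overrides only the cases it cares about; Accept() selects the
// right overload without any explicit type-check chain.
class TypePropagation : public Visitor {
 public:
  void VisitNewInstance(NewInstance*) override {
    std::cout << "exact type from new-instance\n";
  }
  void VisitInvoke(Instruction*) override {
    std::cout << "type from the callee's return type\n";
  }
};

int main() {
  NewInstance new_instance;
  InvokeStaticOrDirect call;
  TypePropagation pass;
  Instruction* instructions[] = {&new_instance, &call};
  for (Instruction* instr : instructions) {
    instr->Accept(&pass);  // double dispatch replaces the if/else chain
  }
  return 0;
}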
@@ -96,7 +85,7 @@ void ReferenceTypePropagation::BoundTypeForIfNotNull(HBasicBlock* block) {
HInstruction* user = it.Current()->GetUser();
if (notNullBlock->Dominates(user->GetBlock())) {
if (bound_type == nullptr) {
- bound_type = new (graph_->GetArena()) HBoundType(obj, ReferenceTypeInfo::CreateTop(false));
+ bound_type = new (GetGraph()->GetArena()) HBoundType(obj, ReferenceTypeInfo::CreateTop(false));
notNullBlock->InsertInstructionBefore(bound_type, notNullBlock->GetFirstInstruction());
}
user->ReplaceInput(bound_type, it.Current()->GetIndex());
@@ -145,7 +134,7 @@ void ReferenceTypePropagation::BoundTypeForIfInstanceOf(HBasicBlock* block) {
ReferenceTypeInfo obj_rti = obj->GetReferenceTypeInfo();
ReferenceTypeInfo class_rti = load_class->GetLoadedClassRTI();
- bound_type = new (graph_->GetArena()) HBoundType(obj, class_rti);
+ bound_type = new (GetGraph()->GetArena()) HBoundType(obj, class_rti);
// Narrow the type as much as possible.
{
@@ -295,6 +284,21 @@ bool ReferenceTypePropagation::UpdateReferenceTypeInfo(HInstruction* instr) {
return !previous_rti.IsEqual(instr->GetReferenceTypeInfo());
}
+void ReferenceTypePropagation::VisitInvoke(HInvoke* instr) {
+ if (instr->GetType() != Primitive::kPrimNot) {
+ return;
+ }
+
+ ScopedObjectAccess soa(Thread::Current());
+ ClassLinker* cl = Runtime::Current()->GetClassLinker();
+ mirror::DexCache* dex_cache = cl->FindDexCache(instr->GetDexFile());
+ ArtMethod* method = dex_cache->GetResolvedMethod(
+ instr->GetDexMethodIndex(), Runtime::Current()->GetClassLinker()->GetImagePointerSize());
+ DCHECK(method != nullptr);
+ mirror::Class* klass = method->GetReturnType();
+ SetClassAsTypeInfo(instr, klass);
+}
+
void ReferenceTypePropagation::UpdateBoundType(HBoundType* instr) {
ReferenceTypeInfo new_rti = instr->InputAt(0)->GetReferenceTypeInfo();
// Be sure that we don't go over the bounded type.
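
The new VisitInvoke handler acts only on calls that return a reference (kPrimNot): it resolves the callee through the dex cache and uses the callee's declared return class as the instruction's reference type info. A simplified, self-contained model of that flow, with hypothetical stand-ins for the dex cache and mirror classes:

#include <cassert>
#include <cstdint>
#include <string>
#include <unordered_map>

struct Class {
  std::string descriptor;
};

struct Method {
  Class* declared_return_type;  // what ART resolves via the dex cache
};

struct ReferenceTypeInfo {
  Class* klass;
  bool is_exact;  // a call only guarantees "return type or a subclass"
};

struct Invoke {
  bool returns_reference;     // stands in for GetType() == Primitive::kPrimNot
  uint32_t dex_method_index;
  ReferenceTypeInfo rti;
};

// Stand-in for the dex cache: method index -> resolved method.
using DexCache = std::unordered_map<uint32_t, Method*>;

void VisitInvoke(Invoke* instr, const DexCache& dex_cache) {
  if (!instr->returns_reference) {
    return;  // primitive and void returns carry no reference type info
  }
  Method* method = dex_cache.at(instr->dex_method_index);
  // Use the declared return class as an upper bound on the value's type.
  instr->rti = ReferenceTypeInfo{method->declared_return_type, /*is_exact=*/false};
}

int main() {
  Class string_class{"Ljava/lang/String;"};
  Method to_string{&string_class};
  DexCache dex_cache{{42u, &to_string}};

  Invoke call{/*returns_reference=*/true, /*dex_method_index=*/42u, {}};
  VisitInvoke(&call, dex_cache);
  assert(call.rti.klass == &string_class);
  assert(!call.rti.is_exact);
  return 0;
}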
diff --git a/compiler/optimizing/reference_type_propagation.h b/compiler/optimizing/reference_type_propagation.h
index 74e425fb3e..a2677a85d9 100644
--- a/compiler/optimizing/reference_type_propagation.h
+++ b/compiler/optimizing/reference_type_propagation.h
@@ -28,10 +28,11 @@ namespace art {
/**
* Propagates reference types to instructions.
*/
-class ReferenceTypePropagation : public HOptimization {
+class ReferenceTypePropagation : public HOptimization, public HGraphDelegateVisitor {
public:
ReferenceTypePropagation(HGraph* graph, StackHandleScopeCollection* handles)
: HOptimization(graph, true, kReferenceTypePropagationPassName),
+ HGraphDelegateVisitor(graph),
handles_(handles),
worklist_(graph->GetArena(), kDefaultWorklistSize) {}
@@ -56,6 +57,7 @@ class ReferenceTypePropagation : public HOptimization {
void UpdateReferenceTypeInfo(HInstruction* instr, uint16_t type_idx, const DexFile& dex_file);
void VisitInstanceFieldGet(HInstanceFieldGet* instr);
void VisitStaticFieldGet(HStaticFieldGet* instr);
+ void VisitInvoke(HInvoke* instr);
void ProcessWorklist();
void AddToWorklist(HInstruction* instr);
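
With the header change, the pass is both an HOptimization (it exposes the Run() entry point) and an HGraphDelegateVisitor (it receives Visit* callbacks), so its constructor has to initialize both bases with the graph. A minimal sketch of that shape, using hypothetical base classes standing in for the ART ones:

#include <iostream>

struct Graph {};

// Stand-in for HOptimization: owns the pass entry point.
class Optimization {
 public:
  explicit Optimization(Graph* graph) : graph_(graph) {}
  virtual ~Optimization() {}
  virtual void Run() = 0;

 protected:
  Graph* graph_;
};

// Stand-in for HGraphDelegateVisitor: owns the Visit* callbacks.
class GraphDelegateVisitor {
 public:
  explicit GraphDelegateVisitor(Graph* graph) : visited_graph_(graph) {}
  virtual ~GraphDelegateVisitor() {}
  virtual void VisitInvoke() {}

 private:
  Graph* visited_graph_;
};

class TypePropagation : public Optimization, public GraphDelegateVisitor {
 public:
  // Both bases need the graph, so both appear in the initializer list,
  // in the order the bases are declared above.
  explicit TypePropagation(Graph* graph)
      : Optimization(graph), GraphDelegateVisitor(graph) {}

  void Run() override { VisitInvoke(); }  // the pass drives its own visitor callbacks
  void VisitInvoke() override { std::cout << "invoke visited\n"; }
};

int main() {
  Graph graph;
  TypePropagation pass(&graph);
  pass.Run();
  return 0;
}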