Diffstat (limited to 'compiler/optimizing/nodes.h')
-rw-r--r--  compiler/optimizing/nodes.h  175
1 file changed, 120 insertions(+), 55 deletions(-)
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 0f2c1cffee..4f894b07c7 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -35,6 +35,7 @@
#include "mirror/class.h"
#include "offsets.h"
#include "primitive.h"
+#include "utils/array_ref.h"
namespace art {
@@ -240,8 +241,9 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
// put deoptimization instructions, etc.
void TransformLoopHeaderForBCE(HBasicBlock* header);
- // Removes `block` from the graph.
- void DeleteDeadBlock(HBasicBlock* block);
+ // Removes `block` from the graph. Assumes `block` has been disconnected from
+ // other blocks and has no instructions or phis.
+ void DeleteDeadEmptyBlock(HBasicBlock* block);
// Splits the edge between `block` and `successor` while preserving the
// indices in the predecessor/successor lists. If there are multiple edges
@@ -350,8 +352,6 @@ class HGraph : public ArenaObject<kArenaAllocGraph> {
HCurrentMethod* GetCurrentMethod();
- HBasicBlock* FindCommonDominator(HBasicBlock* first, HBasicBlock* second) const;
-
const DexFile& GetDexFile() const {
return dex_file_;
}
@@ -661,6 +661,9 @@ class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
return successors_;
}
+ ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
+ ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;
+
bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
return ContainsElement(successors_, block, start_from);
}
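The two new accessors return ArrayRef views into the successor list, so callers can iterate each kind of edge without index bookkeeping. A minimal sketch of how a pass holding an HBasicBlock* block might use them (the loop bodies are illustrative only):

    // Hypothetical pass fragment: handle normal and exceptional edges separately.
    for (HBasicBlock* successor : block->GetNormalSuccessors()) {
      // ... ordinary control-flow edge ...
    }
    for (HBasicBlock* handler : block->GetExceptionalSuccessors()) {
      // ... edge into a catch block ...
    }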
@@ -811,12 +814,6 @@ class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
return GetPredecessorIndexOf(predecessor) == idx;
}
- // Returns the number of non-exceptional successors. SsaChecker ensures that
- // these are stored at the beginning of the successor list.
- size_t NumberOfNormalSuccessors() const {
- return EndsWithTryBoundary() ? 1 : GetSuccessors().size();
- }
-
// Create a new block between this block and its predecessors. The new block
// is added to the graph, all predecessor edges are relinked to it and an edge
// is created to `this`. Returns the new empty block. Reverse post order or
@@ -837,6 +834,15 @@ class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
// blocks are consistent (for example ending with a control flow instruction).
HBasicBlock* SplitAfter(HInstruction* cursor);
+ // Split catch block into two blocks after the original move-exception bytecode
+ // instruction, or at the beginning if one is not present. Returns the newly
+ // created latter block, or nullptr if such a block could not be created (the
+ // split-off part must be dead in that case). Note that this method only updates
+ // raw block information such as predecessors, successors, dominators, and the
+ // instruction list. It does not
+ // update the graph, reverse post order, loop information, nor make sure the
+ // blocks are consistent (for example ending with a control flow instruction).
+ HBasicBlock* SplitCatchBlockAfterMoveException();
+
// Merge `other` at the end of `this`. Successors and dominated blocks of
// `other` are changed to be successors and dominated blocks of `this`. Note
// that this method does not update the graph, reverse post order, loop
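A sketch of how a caller might use the new catch-block split helper; the variable names are assumptions, and fixing up reverse post order and loop information remains the caller's responsibility as stated above:

    // Illustrative only: split a catch block after its move-exception, if any.
    HBasicBlock* remainder = catch_block->SplitCatchBlockAfterMoveException();
    if (remainder == nullptr) {
      // The split could not be performed; the split-off part must be dead.
      return;
    }
    // The caller still needs to update reverse post order, loop information, etc.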
@@ -1430,7 +1436,7 @@ class SideEffects : public ValueObject {
return flags_ == (kAllChangeBits | kAllDependOnBits);
}
- // Returns true if this may read something written by other.
+ // Returns true if `this` may read something written by `other`.
bool MayDependOn(SideEffects other) const {
const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
return (other.flags_ & depends_on_flags);
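The check shifts this instruction's depend-on bits down into the change-bit positions and intersects them with other's change bits. A small illustrative use, with hypothetical instruction names, when deciding whether two instructions may be reordered:

    // Illustrative: `reader` must stay after `writer` if it may read what `writer` writes.
    if (reader->GetSideEffects().MayDependOn(writer->GetSideEffects())) {
      // Keep the original order; reordering could change observable behaviour.
    }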
@@ -1725,6 +1731,13 @@ class ReferenceTypeInfo : ValueObject {
return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
}
+ bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
+ DCHECK(IsValid());
+ DCHECK(rti.IsValid());
+ return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
+ GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
+ }
+
// Returns true if the type information provides the same amount of detail.
// Note that it does not mean that the instructions have the same actual type
// (because the type can be the result of a merge).
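IsStrictSupertypeOf() matches IsSupertypeOf() except that it excludes the case where both handles wrap the same class. An illustration, assuming two valid ReferenceTypeInfo values a and b:

    // Illustrative: if a and b wrap the same class, IsSupertypeOf() is true
    // but IsStrictSupertypeOf() is false.
    if (a.IsStrictSupertypeOf(b)) {
      // a's class is a proper ancestor of b's class.
    }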
@@ -2390,6 +2403,10 @@ class HTryBoundary : public HTemplateInstruction<0> {
// Returns the block's non-exceptional successor (index zero).
HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }
+ ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
+ return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
+ }
+
// Returns whether `handler` is among its exception handlers (non-zero index
// successors).
bool HasExceptionHandler(const HBasicBlock& handler) const {
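With GetExceptionHandlers() returning an ArrayRef, handler loops can be written as a plain range-based for, which is what makes the explicit HExceptionHandlerIterator removed below unnecessary. A minimal sketch, assuming a try_boundary pointer is in scope:

    // Illustrative replacement for the old HExceptionHandlerIterator loop.
    for (HBasicBlock* handler : try_boundary->GetExceptionHandlers()) {
      // ... process each exceptional successor (catch block) ...
    }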
@@ -2417,25 +2434,6 @@ class HTryBoundary : public HTemplateInstruction<0> {
DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
};
-// Iterator over exception handlers of a given HTryBoundary, i.e. over
-// exceptional successors of its basic block.
-class HExceptionHandlerIterator : public ValueObject {
- public:
- explicit HExceptionHandlerIterator(const HTryBoundary& try_boundary)
- : block_(*try_boundary.GetBlock()), index_(block_.NumberOfNormalSuccessors()) {}
-
- bool Done() const { return index_ == block_.GetSuccessors().size(); }
- HBasicBlock* Current() const { return block_.GetSuccessors()[index_]; }
- size_t CurrentSuccessorIndex() const { return index_; }
- void Advance() { ++index_; }
-
- private:
- const HBasicBlock& block_;
- size_t index_;
-
- DISALLOW_COPY_AND_ASSIGN(HExceptionHandlerIterator);
-};
-
// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize : public HTemplateInstruction<1> {
public:
@@ -2604,6 +2602,11 @@ class HBinaryOperation : public HExpression<2> {
VLOG(compiler) << DebugName() << " is not defined for the (long, int) case.";
return nullptr;
}
+ virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
+ HNullConstant* y ATTRIBUTE_UNUSED) const {
+ VLOG(compiler) << DebugName() << " is not defined for the (null, null) case.";
+ return nullptr;
+ }
// Returns an input that can legally be used as the right input and is
// constant, or null.
@@ -2694,6 +2697,10 @@ class HEqual : public HCondition {
return GetBlock()->GetGraph()->GetIntConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
+ HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
+ HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
+ return GetBlock()->GetGraph()->GetIntConstant(1);
+ }
DECLARE_INSTRUCTION(Equal);
@@ -2726,6 +2733,10 @@ class HNotEqual : public HCondition {
return GetBlock()->GetGraph()->GetIntConstant(
Compute(x->GetValue(), y->GetValue()), GetDexPc());
}
+ HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
+ HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
+ return GetBlock()->GetGraph()->GetIntConstant(0);
+ }
DECLARE_INSTRUCTION(NotEqual);
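These overloads let constant folding evaluate a comparison of two null constants directly: null == null folds to the int constant 1 and null != null folds to 0. A small sketch, assuming graph, equal and not_equal pointers are in scope:

    // Illustrative: fold comparisons of two null constants.
    HNullConstant* null_constant = graph->GetNullConstant();
    HConstant* eq  = equal->Evaluate(null_constant, null_constant);      // IntConstant 1
    HConstant* neq = not_equal->Evaluate(null_constant, null_constant);  // IntConstant 0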
@@ -3399,11 +3410,12 @@ class HInvokeStaticOrDirect : public HInvoke {
ClinitCheckRequirement clinit_check_requirement)
: HInvoke(arena,
number_of_arguments,
- // There is one extra argument for the HCurrentMethod node, and
- // potentially one other if the clinit check is explicit, and one other
- // if the method is a string factory.
- 1u + (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u)
- + (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u),
+ // There is potentially one extra argument for the HCurrentMethod node, and
+ // potentially one other if the clinit check is explicit, and potentially
+ // one other if the method is a string factory.
+ (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
+ (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u) +
+ (dispatch_info.method_load_kind == MethodLoadKind::kStringInit ? 1u : 0u),
return_type,
dex_pc,
method_index,
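Worked example of the new input-count arithmetic (the numbers are illustrative): a static invoke with two Dex arguments, MethodLoadKind::kDexCacheViaMethod and an explicit clinit check gets 2 + 1 (current method) + 1 (clinit check) + 0 (not a string factory) = 4 inputs, whereas the same invoke with MethodLoadKind::kDirectAddress gets 2 + 0 + 1 + 0 = 3, since no HCurrentMethod input is needed.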
@@ -3411,12 +3423,31 @@ class HInvokeStaticOrDirect : public HInvoke {
invoke_type_(invoke_type),
clinit_check_requirement_(clinit_check_requirement),
target_method_(target_method),
- dispatch_info_(dispatch_info) {}
+ dispatch_info_(dispatch_info) { }
void SetDispatchInfo(const DispatchInfo& dispatch_info) {
+ bool had_current_method_input = HasCurrentMethodInput();
+ bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);
+
+ // Using the current method is the default and once we find a better
+ // method load kind, we should not go back to using the current method.
+ DCHECK(had_current_method_input || !needs_current_method_input);
+
+ if (had_current_method_input && !needs_current_method_input) {
+ DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
+ RemoveInputAt(GetSpecialInputIndex());
+ }
dispatch_info_ = dispatch_info;
}
+ void AddSpecialInput(HInstruction* input) {
+ // We allow only one special input.
+ DCHECK(!IsStringInit() && !HasCurrentMethodInput());
+ DCHECK(InputCount() == GetSpecialInputIndex() ||
+ (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
+ InsertInputAt(GetSpecialInputIndex(), input);
+ }
+
bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
// We access the method via the dex cache so we can't do an implicit null check.
// TODO: for intrinsics we can generate implicit null checks.
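A sketch of how a sharpening pass might combine SetDispatchInfo() with AddSpecialInput(); the dispatch info and the base instruction are assumptions for illustration:

    // Illustrative: switch to a PC-relative dex cache load and attach the needed base.
    invoke->SetDispatchInfo(pc_relative_dispatch_info);  // may drop the current-method input
    if (invoke->HasPcRelativeDexCache()) {
      invoke->AddSpecialInput(pc_relative_base);  // hypothetical platform-specific base
    }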
@@ -3427,17 +3458,35 @@ class HInvokeStaticOrDirect : public HInvoke {
return return_type_ == Primitive::kPrimNot && !IsStringInit();
}
+ // Get the index of the special input, if any.
+ //
+ // If the invoke IsStringInit(), it initially has a HFakeString special argument
+ // which is removed by the instruction simplifier; if the invoke HasCurrentMethodInput(),
+ // the "special input" is the current method pointer; otherwise there may be one
+ // platform-specific special input, such as a PC-relative addressing base.
+ uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
+
InvokeType GetInvokeType() const { return invoke_type_; }
MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
- uint32_t GetCurrentMethodInputIndex() const { return GetNumberOfArguments(); }
bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
- bool HasPcRelDexCache() const {
+ bool HasPcRelativeDexCache() const {
return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
}
+ bool HasCurrentMethodInput() const {
+ // This function can be called only after the invoke has been fully initialized by the builder.
+ if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
+ DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
+ return true;
+ } else {
+ DCHECK(InputCount() == GetSpecialInputIndex() ||
+ !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
+ return false;
+ }
+ }
bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
MethodReference GetTargetMethod() const { return target_method_; }
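Taken together, these accessors place the special input (if present) right after the regular Dex arguments. A minimal sketch, assuming an invoke pointer is in scope:

    // Illustrative: inspect the special input slot.
    uint32_t special_index = invoke->GetSpecialInputIndex();  // == GetNumberOfArguments()
    if (invoke->HasCurrentMethodInput()) {
      HInstruction* current_method = invoke->InputAt(special_index);
      // current_method->IsCurrentMethod() holds here, per the DCHECK above.
    }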
@@ -3452,7 +3501,7 @@ class HInvokeStaticOrDirect : public HInvoke {
}
uint32_t GetDexCacheArrayOffset() const {
- DCHECK(HasPcRelDexCache());
+ DCHECK(HasPcRelativeDexCache());
return dispatch_info_.method_load_data;
}
@@ -3468,26 +3517,25 @@ class HInvokeStaticOrDirect : public HInvoke {
return GetInvokeType() == kStatic;
}
- // Remove the art::HLoadClass instruction set as last input by
- // art::PrepareForRegisterAllocation::VisitClinitCheck in lieu of
- // the initial art::HClinitCheck instruction (only relevant for
- // static calls with explicit clinit check).
- void RemoveLoadClassAsLastInput() {
+ // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
+ // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
+ // instruction; only relevant for static calls with explicit clinit check.
+ void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
DCHECK(IsStaticWithExplicitClinitCheck());
size_t last_input_index = InputCount() - 1;
HInstruction* last_input = InputAt(last_input_index);
DCHECK(last_input != nullptr);
- DCHECK(last_input->IsLoadClass()) << last_input->DebugName();
+ DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
RemoveAsUserOfInput(last_input_index);
inputs_.pop_back();
- clinit_check_requirement_ = ClinitCheckRequirement::kImplicit;
- DCHECK(IsStaticWithImplicitClinitCheck());
+ clinit_check_requirement_ = new_requirement;
+ DCHECK(!IsStaticWithExplicitClinitCheck());
}
bool IsStringFactoryFor(HFakeString* str) const {
if (!IsStringInit()) return false;
- // +1 for the current method.
- if (InputCount() == (number_of_arguments_ + 1)) return false;
+ DCHECK(!HasCurrentMethodInput());
+ if (InputCount() == (number_of_arguments_)) return false;
return InputAt(InputCount() - 1)->AsFakeString() == str;
}
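A sketch of a call site such as the one referenced above in PrepareForRegisterAllocation; the requirement passed in is illustrative:

    // Illustrative: drop the trailing HClinitCheck/HLoadClass input and downgrade the requirement.
    DCHECK(invoke->IsStaticWithExplicitClinitCheck());
    invoke->RemoveExplicitClinitCheck(
        HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
    DCHECK(!invoke->IsStaticWithExplicitClinitCheck());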
@@ -3502,7 +3550,7 @@ class HInvokeStaticOrDirect : public HInvoke {
}
// Is this a call to a static method whose declaring class has an
- // explicit intialization check in the graph?
+ // explicit initialization check in the graph?
bool IsStaticWithExplicitClinitCheck() const {
return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit);
}
@@ -3513,6 +3561,11 @@ class HInvokeStaticOrDirect : public HInvoke {
return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit);
}
+ // Does this method load kind need the current method as an input?
+ static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
+ return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
+ }
+
DECLARE_INSTRUCTION(InvokeStaticOrDirect);
protected:
@@ -3530,6 +3583,9 @@ class HInvokeStaticOrDirect : public HInvoke {
return input_record;
}
+ void InsertInputAt(size_t index, HInstruction* input);
+ void RemoveInputAt(size_t index);
+
private:
const InvokeType invoke_type_;
ClinitCheckRequirement clinit_check_requirement_;
@@ -3541,6 +3597,7 @@ class HInvokeStaticOrDirect : public HInvoke {
DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
+std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
class HInvokeVirtual : public HInvoke {
public:
@@ -3601,10 +3658,14 @@ class HNewInstance : public HExpression<1> {
uint32_t dex_pc,
uint16_t type_index,
const DexFile& dex_file,
+ bool can_throw,
+ bool finalizable,
QuickEntrypointEnum entrypoint)
: HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
type_index_(type_index),
dex_file_(dex_file),
+ can_throw_(can_throw),
+ finalizable_(finalizable),
entrypoint_(entrypoint) {
SetRawInputAt(0, current_method);
}
@@ -3614,11 +3675,13 @@ class HNewInstance : public HExpression<1> {
// Calls runtime so needs an environment.
bool NeedsEnvironment() const OVERRIDE { return true; }
- // It may throw when called on:
- // - interfaces
- // - abstract/innaccessible/unknown classes
- // TODO: optimize when possible.
- bool CanThrow() const OVERRIDE { return true; }
+
+ // It may throw when called on a type that is not instantiable/accessible.
+ // It can also throw OOME.
+ // TODO: distinguish between the two cases so we can for example allow allocation elimination.
+ bool CanThrow() const OVERRIDE { return can_throw_ || true; }
+
+ bool IsFinalizable() const { return finalizable_; }
bool CanBeNull() const OVERRIDE { return false; }
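An example of how an optimization might consult the new finalizable flag; the pass and the decision are illustrative, not part of this change:

    // Illustrative: a hypothetical allocation-elimination check.
    if (new_instance->IsFinalizable()) {
      // Finalizable objects have observable finalizers; keep the allocation.
      return;
    }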
@@ -3629,6 +3692,8 @@ class HNewInstance : public HExpression<1> {
private:
const uint16_t type_index_;
const DexFile& dex_file_;
+ const bool can_throw_;
+ const bool finalizable_;
const QuickEntrypointEnum entrypoint_;
DISALLOW_COPY_AND_ASSIGN(HNewInstance);